Exemple #1
0
def main():
    """Launch Infiniworld as a single player game."""
    log.setup('infiniworld_solo.log')
    logger = logging.getLogger()
    logger.debug("Starting...")
    # Wire the MVC components around one shared event manager.
    events = evtman.EventManager()
    game_loop = loop.GameLoopController(events)
    model = world.gen.GenerateWorld(events, 3, (128, 128))
    player_ctrl = player.PlayerController(events)
    with pygame_.Pygame():
        view = pygame_.PygameView(events,
                                  u"Infiniworld", (800, 480))
        controller = pygame_.PygameController(events)
        # Blocks here until a QuitEvent is posted.
        game_loop.run()
        logger.info("Stopping...")
    # Not strictly needed, but unregistering keeps PyDev/PyLint from
    # flagging the variables above as unused.
    for component in (game_loop, model, player_ctrl, view, controller):
        component.unregister()
    logging.shutdown()
Exemple #2
0
def boot():
    """Boot sequence: display, wifi (with retry), then clock sync."""
    gc.collect()
    log.setup()
    # config.setup()

    # Bring the display up first so boot progress can be shown on it.
    display.setup()
    log.success("display setup")

    display.text("wifi")
    wifi.setup()

    log.info("trying to connect to wifi")
    # Poll the connection up to 25 times, 200 ms apart.
    attempts = 25
    while attempts:
        if wifi.isconnected():
            log.success("connected to wifi {}".format(wifi.get_wifi_ip()))
            break
        machine.sleep(200)
        attempts -= 1
    else:
        log.error("could not connect to wifi")
        return

    # pylint: disable=no-member
    #asyncio.get_event_loop().run_until_complete(
    #    display.scroll_text("{}".format(wifi.get_wifi_ip())))
    machine.sleep(500)

    # Sync the RTC before declaring boot finished.
    display.text("time")
    clock.setup()
    log.success("time is synced")

    display.text("done")
Exemple #3
0
def main():
  """Entry point: read the config file, render the e-paper panel once, then
  re-render every minute via a blocking scheduler.

  Catches and logs FileNotFoundError (missing config), KeyboardInterrupt,
  and any unexpected exception; never re-raises.
  """
  log.setup()
  logger = logging.getLogger(__name__)
  logger.info("App starting")
  try:
    utils.check_python_version()
    # Use lazy %-style logging args consistently (no f-strings in log calls).
    logger.info('Reading config file "%s"', CONFIG_FILENAME)
    with open(CONFIG_FILENAME) as f:
      config_parser = configparser.ConfigParser()
      config_parser.read_file(f)
      logger.info('Config: %s', config_parser.items('general'))
      config = config_parser['general']

      fonts = utils.get_fonts(config)
      images = get_weather_images()

      logger.info('Import epd control library')
      (epd_so, panel_size) = utils.get_epd_data(config)

      logger.info("Initial refresh")
      # Render once immediately so the panel is not blank until the first tick.
      refresh.refresh(panel_size, fonts, images, config, epd_so, True)

      logger.info('Starting scheduler')
      scheduler = BlockingScheduler()
      # Re-render every minute; scheduler.start() blocks until interrupted.
      scheduler.add_job(lambda: main_loop(panel_size, fonts, images, config, epd_so), 'cron', minute='*/1')
      scheduler.start()

  except FileNotFoundError as e:
    # BUG FIX: previously mixed an f-string with a %s placeholder in the same
    # call; use lazy %-args only so the message formats correctly.
    logger.exception('Error opening file "%s": %s', CONFIG_FILENAME, str(e))

  except KeyboardInterrupt:
    logger.warning("KeyboardInterrupt error")

  except Exception as e:
    logger.exception('Unexpected error: %s', str(e))
Exemple #4
0
def main():
    """Parse command-line options, configure logging, and send all job files."""
    global logger
    options = parse_command_line()
    log.setup(options.job_dir)
    logger = log.logger()
    logger.info('=== New session ===')
    # Build the job from the given directory and push everything out.
    current_job = job.Job(options.job_dir, options.restart)
    current_job.send_all()
Exemple #5
0
    def __init__(self, config_dir='conf/'):
        """Initialize empty state, read configuration, and set up logging.

        config_dir: directory containing the configuration files.
        """
        self.config = None
        self.subsystems = None
        self.aws = None
        self.weather = None

        self.board = Board()
        self.read_config(config_dir)
        # BUG FIX: .get('logging', None) returned None when the key was
        # missing, so the chained .get(...) raised AttributeError. Fall back
        # to an empty dict so the chain safely yields None instead.
        self.logging_config = self.config.get('logging', {}).get('config_file', None)
        logging.setup(self.logging_config)
Exemple #6
0
    def __init__(self, name, path="config/"):
        """Set up logging and load the ``<name>.yaml`` config from ``path``.

        name: logger name and config file stem.
        path: directory containing the YAML config files.
        """
        # Prepare logger
        log.setup()
        self.logger = log.logging.getLogger(name)

        # Read config
        fname = name + '.yaml'
        self.config = Config(path=path + fname).load()
        # IMPROVED: pass lazy %-style arguments instead of pre-formatting with
        # `%`, so the format work is skipped when DEBUG logging is disabled.
        self.logger.debug("[%u] Config is loaded ", os.getpid())
        self.logger.debug("[%u] Config is %s", os.getpid(), self.config)
Exemple #7
0
def main(argv):
    """Build the Qt main window, apply the QSS stylesheet, and run the
    application event loop (never returns; exits via sys.exit).

    argv: command-line argument list forwarded to QApplication.
    """
    app = QApplication(argv)
    main_window = QMainWindow()
    ui = client.Ui_MainWindow()
    ui.setupUi(main_window)
    log.setup('ftp')
    # BUG FIX: open(...).read() leaked the file handle; use a context manager
    # so the stylesheet file is closed deterministically.
    with open('./ui/style.qss') as qss_file:
        qss_style = qss_file.read()
    main_window.setStyleSheet(qss_style)
    main_window.show()
    sys.exit(app.exec_())
Exemple #8
0
def main():
    """Start the Glance API WSGI server and block until it finishes.

    Exits with code 2 on worker-creation failure, 1 on runtime errors.
    """
    try:
        config.parse_args()
        log.setup('glance')
        api_server = wsgi.Server()
        # Serve the paste-configured app on the default API port.
        api_server.start(config.load_paste_app('glance-api'), default_port=9292)
        api_server.wait()
    except exception.WorkerCreationFailure as e:
        fail(2, e)
    except RuntimeError as e:
        fail(1, e)
def main():
    
    '''Entry point for script'''

    # Rain-gauge tick counters shared with the GPIO callback defined
    # elsewhere in this script; reset on every start.
    global precip_tick_count
    global precip_accu
 
    precip_tick_count = 0
    precip_accu       = 0


    #---------------------------------------------------------------------------
    # SET UP LOGGER
    #---------------------------------------------------------------------------
    logger = log.setup('root', '/home/pi/weather/logs/read_rain_gauge.log')

    logger.info('--- Read Rain Gauge Script Started ---')
    

    #---------------------------------------------------------------------------
    # LOAD DRIVERS
    #---------------------------------------------------------------------------
    # NOTE: `except ..., e` is Python 2 syntax — this script targets Python 2.
    try:
        pi = pigpio.pi()

    except Exception, e:
        logger.error('Failed to connect to PIGPIO ({error_v}). Exiting...'.format(
            error_v=e))
        sys.exit()
def erb():
    """Interactively estimate a reservoir distribution and dump it to disk.

    Prompts for network parameters, sweeps node counts over the requested
    range and writes the resulting distribution into the working directory.
    """
    working_dir = get_working_dir()
    log.setup(logging.DEBUG, path=working_dir)

    window_size = default_input('Window size', 3)
    n_nodes = default_input('N Nodes', 100)
    connectivity = default_input('Connectivity', 2)
    f = default_input('From', 0)
    t = default_input('To', n_nodes + 1)
    # BUG FIX: use floor division so the step stays an int — range() rejects
    # floats on Python 3 (and `//` is equivalent for ints on Python 2).
    s = default_input('Step', n_nodes // 10)
    r = range(f, t, s)

    distribution = estimate_reservoir_distribution(
        30, n_nodes, connectivity, r, window_size)

    name = '[NN:{}-WS:{}-K:{}]-distribution'.format(n_nodes, window_size, connectivity)
    dump(distribution, name, folder=working_dir)
def create_app(config_filename):
    """Application factory: build and configure the Flask app.

    config_filename: Python config file loaded via app.config.from_pyfile.
    Returns the configured Flask application.
    """
    app = f.Flask(__name__)
    app.config.from_pyfile(config_filename)

    # Logging must be enabled right after the app is created.
    if not os.path.exists('logs'):
        os.mkdir('logs')
    log.setup(app, log_file='logs/error.log')

    # Mount the kid-facing blueprints under their URL prefixes.
    for blueprint, prefix in ((kid_edu_bp.bp, '/kid/edu/'),
                              (kid_game_bp.bp, '/kid/game/')):
        app.register_blueprint(blueprint, url_prefix=prefix)

    @app.route('/healthz', methods=['GET'])
    def healthz():
        # Liveness-probe endpoint.
        return f.jsonify(status='ok')

    return app
Exemple #12
0
def erb():
    """Interactively estimate a reservoir distribution and dump it to disk.

    Prompts for network parameters, sweeps node counts over the requested
    range and writes the resulting distribution into the working directory.
    """
    working_dir = get_working_dir()
    log.setup(logging.DEBUG, path=working_dir)

    window_size = default_input('Window size', 3)
    n_nodes = default_input('N Nodes', 100)
    connectivity = default_input('Connectivity', 2)
    f = default_input('From', 0)
    t = default_input('To', n_nodes + 1)
    # BUG FIX: use floor division so the step stays an int — range() rejects
    # floats on Python 3 (and `//` is equivalent for ints on Python 2).
    s = default_input('Step', n_nodes // 10)
    r = range(f, t, s)

    distribution = estimate_reservoir_distribution(30, n_nodes, connectivity,
                                                   r, window_size)

    name = '[NN:{}-WS:{}-K:{}]-distribution'.format(n_nodes, window_size,
                                                    connectivity)
    dump(distribution, name, folder=working_dir)
Exemple #13
0
def main():
    """Entry point: parse CLI arguments, set up logging, and dispatch to the
    command class named on the command line (Python 2 — see except syntax)."""
    import log

    parser = argparse.ArgumentParser()
    parser.add_argument('-c', '--config', default=None)
    parser.add_argument('-l', '--logfile', default=log.DEFAULT_FILENAME)
    parser.add_argument('-v', '--verbose', action='store_true', default=False)
    parser.add_argument('command')

    args = parser.parse_args()
    log.setup(filename=args.logfile, debug_stdout=args.verbose)
    logger = logging.getLogger('main')

    try:
        # Import the command module dynamically and run it with the parsed
        # configuration; command errors exit with status 1.
        cls = command.import_command(args.command)
        cls(config.Config(args.config)).main()
    except command.CommandException, e:
        logger.error(str(e))
        sys.exit(1)
Exemple #14
0
def _start_things(reactor, settings):
    """
    Asynchronous, Twisted based, main code.

    Sets up all needed objects, some dependent on the `settings` configuration
    dict, and starts the player manager.

    Exits when the player manager terminates.
    """

    # Setup the logging system.
    log_level = settings.get('loglevel', 'warn')
    log_levels = settings.get('loglevels', {})
    log.setup(level=log_level, namespace_levels=log_levels)

    # Create a call wiring object and tell it what to do with `set_log_level` calls.
    wiring = wires.Wires()
    wiring.set_log_level.wire(log.set_level)

    # Create the input and player managers.
    input_manager = inputs.InputManager(reactor, wiring, settings)
    player_manager = player.PlayerManager(reactor, wiring, settings)

    # Both will be asynchronously started and stopped.
    startables = (input_manager, player_manager)

    # Before starting, ensure a clean stop.
    reactor.addSystemEventTrigger('before', 'shutdown', _stop_things,
                                  startables)

    # Start all things.
    # NOTE(review): this is a generator (uses `yield`) — presumably decorated
    # with Twisted's inlineCallbacks at the definition site; confirm upstream.
    for index, startable in enumerate(startables, start=1):
        try:
            yield startable.start()
        except Exception as e:
            # On failure logs should help diagnose.
            # The exit code encodes which startable failed (-1, -2, ...).
            msg = 'Failed starting: %s\n' % e
            sys.stderr.write(msg.encode('utf-8'))
            raise SystemExit(-index)

    # Don't exit unless the player manager is ever done.
    yield player_manager.done
Exemple #15
0
def test():
    """Manual smoke test of the FTP client against a fixed remote host."""
    logger = log.setup('ftp')
    logger.info("testing with 104.238.181.33:21")
    client = FTP("104.238.181.33", 21, "vtta", "***")
    # Exercise each primitive once: keep-alive, transfers, listing, quit.
    client.send("NOOP")
    client.recv(200)
    client.download("foo")
    client.upload("bar")
    client.list()
    client.send("QUIT")
    client.recv(221)
def main():
    
    '''Entry point for script'''

    # Precipitation counters shared with the pigpio tick callback defined
    # elsewhere in this script; reset on every start.
    global precip_tick_count
    global precip_accu

    precip_tick_count = 0
    precip_accu       = 0

    script_name = os.path.basename(sys.argv[0])
    folder_loc  = os.path.dirname(os.path.realpath(sys.argv[0]))
    # Data/log folders live in the parent of the scripts/ directory.
    folder_loc  = folder_loc.replace('scripts', '')


    #---------------------------------------------------------------------------
    # SET UP LOGGER AND WATCHDOG
    #---------------------------------------------------------------------------
    logger = log.setup('root', '{folder}/logs/{script}.log'.format(
                                                    folder= s.SYS_FOLDER,
                                                    script= script_name[:-3]))

    logger.info('')
    logger.info('--- Script {script} Started ---'.format(script= script_name))


    #---------------------------------------------------------------------------
    # SET UP WATCHDOG
    #---------------------------------------------------------------------------
    # Each watchdog code maps to one failure mode recorded in error.json.
    err_file    = '{fl}/data/error.json'.format(fl= folder_loc)
    wd_counter  = wd.ErrorCode(err_file, '0001')    # Script READ_RAIN_GAUGE stopped
    wd_err      = wd.ErrorCode(err_file, '0002')    # Script READ_RAIN_GAUGE error
    wd_acc_err  = wd.ErrorCode(err_file, '0004')    # Accumulate precipitation failed


    #---------------------------------------------------------------------------
    # CHECK SCRIPT IS NOT ALREADY RUNNING
    #---------------------------------------------------------------------------    
    if check_process.is_running(script_name):
        wd_err.set()
        sys.exit()


    #---------------------------------------------------------------------------
    # LOAD DRIVERS
    #---------------------------------------------------------------------------
    # NOTE: `except ..., e` is Python 2 syntax — this script targets Python 2.
    try:
        pi = pigpio.pi()

    except Exception, e:
        logger.error('Failed to connect to PIGPIO ({error_v}). Exiting...'.format(
            error_v=e))
        wd_err.set()
        sys.exit()
Exemple #17
0
    def __init__(self, filename, error_code):
        """Record the error code and attach a dedicated 'error' logger."""
        AllErrors.__init__(self, filename)
        self.err_code = error_code

        # Log file lives in <project>/logs, one level above scripts/.
        this_script = os.path.basename(sys.argv[0])
        base_folder = os.path.dirname(os.path.realpath(sys.argv[0]))
        base_folder = base_folder.replace('scripts', '')
        self.error_log = log.setup(
            'error',
            '{folder}/logs/error.log'.format(folder=base_folder,
                                             script=this_script[:-3]))
def main():
    
    '''Entry point for script'''

    script_name = os.path.basename(sys.argv[0])
    folder_loc  = os.path.dirname(os.path.realpath(sys.argv[0]))
    # Data/log folders live in the parent of the scripts/ directory.
    folder_loc  = folder_loc.replace('scripts', '')


    #---------------------------------------------------------------------------
    # Set up logger
    #---------------------------------------------------------------------------
    logger = log.setup('root', '{folder}/logs/{script}.log'.format(
                                                    folder= folder_loc,
                                                    script= script_name[:-3]))

    logger.info('')
    logger.info('--- Script {script} Started ---'.format(script= script_name)) 
    

    #---------------------------------------------------------------------------
    # SET UP WATCHDOG
    #---------------------------------------------------------------------------
    # Watchdog code '0003' flags this script's failure in error.json.
    err_file    = '{fl}/data/error.json'.format(fl= folder_loc)
    wd_err      = wd.ErrorCode(err_file, '0003')


    #---------------------------------------------------------------------------
    # CHECK SCRIPT IS NOT ALREADY RUNNING
    #---------------------------------------------------------------------------    
    if check_process.is_running(script_name):
        wd_err.set()
        sys.exit()
  

    #---------------------------------------------------------------------------
    # Load PIGPIO
    #---------------------------------------------------------------------------
    # NOTE: `except ..., e` is Python 2 syntax — this script targets Python 2.
    try:
        pi = pigpio.pi()

    except Exception, e:
        logger.error('Failed to connect to PIGPIO ({error_v}). Exiting...'.format(
            error_v=e), exc_info=True)
        wd_err.set()
        sys.exit()
Exemple #19
0
def main():
    '''Entry point for script'''

    #---------------------------------------------------------------------------
    # Set up logger
    #---------------------------------------------------------------------------
    logger = log.setup('root', '/home/pi/weather/logs/read_sensors.log')

    logger.info('--- Read Sensor Script Started ---')

    #---------------------------------------------------------------------------
    # Load PIGPIO
    #---------------------------------------------------------------------------
    # NOTE: `except ..., e` is Python 2 syntax — this script targets Python 2.
    try:
        pi = pigpio.pi()

    except Exception, e:
        logger.error(
            'Failed to connect to PIGPIO ({error_v}). Exiting...'.format(
                error_v=e))
        sys.exit()
Exemple #20
0
def main():
    '''Entry point for script'''

    this_script = os.path.basename(sys.argv[0])

    #---------------------------------------------------------------------------
    # Set up logger
    #---------------------------------------------------------------------------
    logger = log.setup('root', '{folder}/logs/{script}.log'.format(
        folder=s.SYS_FOLDER, script=this_script[:-3]))

    logger.info('')
    logger.info('--- Script {script} Started ---'.format(script=this_script))

    #---------------------------------------------------------------------------
    # SET UP RRD DATA AND TOOL
    #---------------------------------------------------------------------------
    rrd_path = '{fd1}{fd2}{fl}'.format(fd1=s.SYS_FOLDER,
                                       fd2=s.DATA_FOLDER,
                                       fl=s.RRDTOOL_RRD_FILE)
    rrd = rrd_tools.RrdFile(rrd_path)

    if not os.path.exists(s.RRDTOOL_RRD_FILE):
        # No round-robin database yet -> create a fresh one.
        rrd.create_file(s.SENSOR_SET,
                        s.RRDTOOL_RRA,
                        s.UPDATE_RATE,
                        s.RRDTOOL_HEARTBEAT,
                        int(time.time() + s.UPDATE_RATE))
        logger.info('RRD file not found. New file created')
    elif sorted(rrd.ds_list()) != sorted(list(s.SENSOR_SET.keys())):
        # Existing file's data sources disagree with the configured set.
        logger.error('Data sources in RRD file does not match set up.')
        sys.exit()
    else:
        logger.info('RRD file found and checked OK')
def main():
    
    '''Entry point for script'''


    #---------------------------------------------------------------------------
    # Set up logger
    #---------------------------------------------------------------------------
    logger = log.setup('root', '/home/pi/weather/logs/read_sensors.log')

    logger.info('--- Read Sensor Script Started ---')   
    

    #---------------------------------------------------------------------------
    # Load PIGPIO
    #---------------------------------------------------------------------------
    # NOTE: `except ..., e` is Python 2 syntax — this script targets Python 2.
    try:
        pi = pigpio.pi()

    except Exception, e:
        logger.error('Failed to connect to PIGPIO ({error_v}). Exiting...'.format(
            error_v=e))
        sys.exit()
Exemple #22
0
import requests
import json
import os
from os import path

import log

# Module-level logger shared by the importer functions below.
logger = log.setup('root', 'importer.log')
import os.path

# Service endpoints (local development defaults).
elastic_url = "http://127.0.0.1:9200"
store_url = "http://127.0.0.1:8081"

# Default headers for store requests; `authtoken` is the shared auth token.
headers = {'Content-type': 'application/json', "authtoken": "gbme"}


class DecimalEncoder(json.JSONEncoder):
    """JSON encoder with extra conversions.

    - decimal.Decimal -> float
    - bool -> 1 / 0 (note: json serializes bool natively, so default() is
      normally never reached for plain bools)
    - datetime -> self.totimestamp(o). NOTE(review): totimestamp is not
      defined (its implementation is commented out in the original source),
      so the datetime branch raises AttributeError until it is restored.
    """

    def default(self, o):
        # Imported locally: the module-level imports do not include these
        # names, so bring them into scope here to keep the class usable.
        import decimal
        from datetime import datetime
        if isinstance(o, decimal.Decimal):
            return float(o)
        # BUG FIX: was misspelled `isintance`, which raised NameError.
        if isinstance(o, bool):
            return 1 if o else 0
        if isinstance(o, datetime):
            return self.totimestamp(o)
        return super(DecimalEncoder, self).default(o)
Exemple #23
0
def main():
    """CLI entry point: parse options, configure logging, then run vmtp."""
    options = parse_opts_from_cli()
    log.setup('vmtp', debug=options.debug, logfile=options.logfile)
    run_vmtp(options)
Exemple #24
0
def main():
    """KloudBuster CLI entry point (Python 2): register options, load the
    configuration, run the scale test, and optionally save JSON results."""
    cli_opts = [
        cfg.StrOpt("config",
                   short="c",
                   default=None,
                   help="Override default values with a config file"),
        cfg.StrOpt("topology",
                   short="t",
                   default=None,
                   help="Topology files for compute hosts"),
        cfg.StrOpt("tenants-list",
                   short="l",
                   default=None,
                   help="Existing tenant and user lists for reusing"),
        cfg.StrOpt("tested-rc",
                   default=None,
                   help="Tested cloud openrc credentials file"),
        cfg.StrOpt("testing-rc",
                   default=None,
                   help="Testing cloud openrc credentials file"),
        cfg.StrOpt("tested-passwd",
                   default=None,
                   secret=True,
                   help="Tested cloud password"),
        cfg.StrOpt("testing-passwd",
                   default=None,
                   secret=True,
                   help="Testing cloud password"),
        cfg.StrOpt("json",
                   default=None,
                   help='store results in JSON format file'),
        cfg.BoolOpt("no-env",
                    default=False,
                    help="Do not read env variables"),
        cfg.BoolOpt("show-config",
                    default=False,
                    help="Show the default configuration")
    ]
    CONF.register_cli_opts(cli_opts)
    CONF.set_default("verbose", True)
    full_version = __version__ + ', VM image: ' + kb_vm_agent.get_image_name()
    CONF(sys.argv[1:], project="kloudbuster", version=full_version)

    if CONF.show_config:
        # Python 2 print statement: dumps the bundled default config and exits.
        print resource_string(__name__, "cfg.scale.yaml")
        sys.exit(0)

    logging.setup("kloudbuster")
    try:
        kb_config = KBConfig()
        kb_config.init_with_cli()
    except TypeError:
        LOG.error('Error parsing the configuration file')
        sys.exit(1)

    # The KloudBuster class is just a wrapper class that
    # leverages the tenant and user classes for resource creation and deletion.
    kloudbuster = KloudBuster(
        kb_config.cred_tested, kb_config.cred_testing,
        kb_config.server_cfg, kb_config.client_cfg,
        kb_config.topo_cfg, kb_config.tenants_list)
    if kloudbuster.check_and_upload_images():
        kloudbuster.run()

    if CONF.json:
        '''Save results in JSON format file.'''
        LOG.info('Saving results in json file: ' + CONF.json + "...")
        with open(CONF.json, 'w') as jfp:
            json.dump(kloudbuster.final_result, jfp, indent=4, sort_keys=True)
def main():
    '''Entry point for script'''

    #---------------------------------------------------------------------------
    # SET UP LOGGER
    #---------------------------------------------------------------------------
    logger = log.setup('root', '/home/pi/weather/logs/wstation.log')

    logger.info('--- Read Rain Gauge Script Started ---')

    #---------------------------------------------------------------------------
    # SET UP RRD DATA AND TOOL
    #---------------------------------------------------------------------------
    rrd = rrd_tools.RrdFile(s.RRDTOOL_RRD_FILE)

    if not os.path.exists(s.RRDTOOL_RRD_FILE):
        # No round-robin database yet -> create a fresh one.
        rrd.create_file(s.SENSOR_SET,
                        s.RRDTOOL_RRA,
                        s.UPDATE_RATE,
                        s.RRDTOOL_HEARTBEAT,
                        int(time.time() + s.UPDATE_RATE))
        logger.info('RRD file not found. New file created')
    elif sorted(rrd.ds_list()) != sorted(list(s.SENSOR_SET.keys())):
        logger.error('Data sources in RRD file does not match set up.')
        sys.exit()
    else:
        logger.info('RRD file found and checked OK')

    #---------------------------------------------------------------------------
    # SCRIPTS
    #---------------------------------------------------------------------------

    # Schedule periodic sensor reads via the user's crontab.
    sensor_cmd = 'python /home/pi/weather/read_sensors.py'
    try:
        cron = CronTab()
        cron_job = cron.new(command=sensor_cmd, comment='weather station job')
        if not cron.find_command(sensor_cmd):
            cron_job.minute.during(4, 59).every(s.UPDATE_RATE/60)
            cron.write()
            logger.info('CronTab file updated.')
            logger.debug(cron.render())
        else:
            logger.info('Command already in CronTab file')
    except ValueError:
        logger.error('CronTab file could not be updated. Exiting...')
        sys.exit()

    # Launch the rain gauge reader unless an instance is already running.
    gauge_cmd = '/home/pi/weather/read_rain_gauge.py'
    # A truthy result means the process WAS found running (the original code
    # used a misleadingly inverted variable name for this value).
    gauge_proc = check_process_is_running(gauge_cmd)
    if gauge_proc:
        logger.info('Script read_rain_gauge.py already runnning.')
        logger.info(gauge_proc)
    else:
        logger.info('Start Read Rain Gauge script')
        status = subprocess.Popen(['python', gauge_cmd])
        logger.info(status)

    logger.info('--- Wstation Script Finished ---')
Exemple #26
0
# -*- coding: latin-1 -*-
from bottle import route, post, delete, get, run, template, request, redirect, response, static_file
import requests
import json
from datetime import datetime, timedelta
import log
import uuid
import urllib

# Module-level logger for the bot.
logger = log.setup('root', 'harrybot.log')
# NOTE(review): project-local `secrets` module (holds global_config); on
# Python 3 this name would shadow the stdlib `secrets` — this file is Python 2.
import secrets

# HTTP port the bottle app listens on.
port = 8083

import time
from slackclient import SlackClient

token = secrets.global_config.slackkey  # found at https://api.slack.com/web#authentication

sc = SlackClient(token)
# Python 2 print statement: sanity-checks the token against Slack's api.test.
print sc.api_call("api.test")
#print sc.api_call("channels.info", channel="1234567890")
#print sc.api_call(
##    "chat.postMessage", channel="#bottest", text="Hello from Python! :tada:",
#    username='******', icon_emoji=':robot_face:'
#)

# -*- coding: latin-1 -*-
import re

regs = [
Exemple #27
0
def config_loggers(*args, **kwargs):
    """Re-apply our logging configuration; prevents celery from
    reconfiguring the logging system with its own handlers."""
    import log
    log.setup(force=True)
def rrd_export(rrd_file, data_sources, rra_list, output_xml_folder):
    
    '''
    Exports an XML file per RRA from an RRD file.

        rrd_file -      rrd file location and script_name
        data_sources -  a list of the data_sources in the rrd file
        rra_list -      a dictionary of the RRA set up in the RRD. Should be in the 
                        format:
                            XML filename: (Consolidation type, Resolution (minutes), 
                                                                Recording Period (days))
                        e.g.
                            {'wd_avg_1d.xml':  ('LAST',       5,      1.17), 
                             'wd_avg_2d.xml':  ('AVERAGE',   30,      2)}
        output_xml_folder - folder to place output XML files.
    '''

    # NOTE(review): the `rrd_file` parameter is never used — the RRD path is
    # rebuilt below from s.RRDTOOL_RRD_FILE; confirm whether that is intended.
    script_name = os.path.basename(sys.argv[0])
    folder_loc  = os.path.dirname(os.path.realpath(sys.argv[0]))
    folder_loc  = folder_loc.replace('scripts', '')


    #---------------------------------------------------------------------------
    # Set up logger
    #---------------------------------------------------------------------------
    logger = log.setup('root', '{folder}/logs/{script}.log'.format(
                                                    folder= folder_loc,
                                                    script= script_name[:-3]))

    logger.info('')
    logger.info('--- Script {script} Started ---'.format(script= script_name))


    #---------------------------------------------------------------------------
    # SET UP WATCHDOG
    #---------------------------------------------------------------------------
    # Watchdog code '0006' flags this script's failure in error.json.
    err_file    = '{fl}/data/error.json'.format(fl= folder_loc)
    wd_err      = wd.ErrorCode(err_file, '0006')


    #---------------------------------------------------------------------------
    # CHECK SCRIPT IS NOT ALREADY RUNNING
    #---------------------------------------------------------------------------
    if check_process.is_running(script_name):
        wd_err.set()
        sys.exit()


    #---------------------------------------------------------------------------
    # Check Rrd File
    #---------------------------------------------------------------------------
    rrd = rrd_tools.RrdFile('{fd1}/data/{fl}'.format(fd1= folder_loc,
                                                    fl= s.RRDTOOL_RRD_FILE))

    if not rrd.check_ds_list_match(list(s.SENSOR_SET.keys())):
        wd_err.set()
        sys.exit()


    #---------------------------------------------------------------------------
    # Export RRD to XML
    #---------------------------------------------------------------------------
    try:
        for xml_file in rra_list:
            rrd.export( start= 'now-{rec_period:.0f}h'.format(
                                        rec_period= rra_list[xml_file][2] * 24),
                        end= 'now',
                        cf= rra_list[xml_file][0],
                        step= rra_list[xml_file][1] * 60,
                        ds_list= data_sources,
                        output_file= '{fd1}{fl}'.format(fd1= output_xml_folder, 
                                                        fl= xml_file))
    except ValueError as value_error:
        # BUG FIX: previously logged the ValueError *class* instead of the
        # caught exception instance; bind it with `as` and log the instance.
        logger.warning('Failed to export RRD ({value_error})'.format(
            value_error=value_error))
        wd_err.set()


    logger.info('--- Script Finished ---')
Exemple #29
0
def main():
    """KloudBuster CLI entry point (Python 2): register options, handle the
    convenience/exit flags, run the scale test, then save HTML/JSON/CSV
    results as requested."""
    cli_opts = [
        cfg.StrOpt("config",
                   short="c",
                   default=None,
                   help="Override default values with a config file",
                   metavar="<config file>"),
        cfg.BoolOpt("storage",
                    default=False,
                    help="Running KloudBuster to test storage performance"),
        cfg.BoolOpt("multicast",
                    default=False,
                    help="Running KloudBuster to test multicast performance"),
        cfg.StrOpt("topology",
                   short="t",
                   default=None,
                   help="Topology file for compute hosts",
                   metavar="<topology file>"),
        cfg.StrOpt("tenants-list",
                   short="l",
                   default=None,
                   help="Existing tenant and user lists for reusing",
                   metavar="<tenants file>"),
        cfg.StrOpt("rc",
                   default=None,
                   help="Tested cloud openrc credentials file (same as --tested-rc)",
                   metavar="<rc file>"),
        cfg.StrOpt("tested-rc",
                   default=None,
                   help="Tested cloud openrc credentials file",
                   metavar="<rc file>"),
        cfg.StrOpt("testing-rc",
                   default=None,
                   help="Testing cloud openrc credentials file",
                   metavar="<rc file>"),
        cfg.StrOpt("passwd",
                   default=None,
                   secret=True,
                   help="Tested cloud password (same as --tested-pwd)",
                   metavar="<password>"),
        cfg.StrOpt("tested-passwd",
                   default=None,
                   secret=True,
                   help="Tested cloud password",
                   metavar="<password>"),
        cfg.StrOpt("testing-passwd",
                   default=None,
                   secret=True,
                   help="Testing cloud password",
                   metavar="<password>"),
        cfg.StrOpt("html",
                   default=None,
                   help='store results in HTML file',
                   metavar="<dest html file>"),
        cfg.StrOpt("label",
                   default=None,
                   help='label for the title in HTML file',
                   metavar="<title>"),
        cfg.BoolOpt("headless",
                    default=False,
                    help="do not show chart in the browser (default=False, only used if --html)"),
        cfg.StrOpt("json",
                   default=None,
                   help='store results in JSON format file',
                   metavar="<dest json file>"),
        cfg.StrOpt("csv",
                   default=None,
                   help='store results in CSV format, multicast only.',
                   metavar="<csv file>"),
        cfg.BoolOpt("no-env",
                    default=False,
                    help="Do not read env variables"),
        cfg.BoolOpt("show-config",
                    default=False,
                    help="Show the default configuration"),
        cfg.StrOpt("charts-from-json",
                   default=None,
                   help='create charts from json results and exit (requires --html)',
                   metavar="<source json file>"),
    ]
    CONF.register_cli_opts(cli_opts)
    CONF.set_default("verbose", True)
    full_version = __version__ + ', VM image: ' + kb_vm_agent.get_image_name()
    CONF(sys.argv[1:], project="kloudbuster", version=full_version)
    logging.setup("kloudbuster")

    # --rc / --passwd are aliases for the tested-cloud credentials.
    if CONF.rc and not CONF.tested_rc:
        CONF.tested_rc = CONF.rc

    if CONF.passwd and not CONF.tested_passwd:
        CONF.tested_passwd = CONF.passwd

    # Chart-only mode: regenerate HTML charts from saved results and exit.
    if CONF.charts_from_json:
        if not CONF.html:
            LOG.error('Destination html filename must be specified using --html.')
            sys.exit(1)
        with open(CONF.charts_from_json, 'r') as jfp:
            json_results = json.load(jfp)
        generate_charts(json_results, CONF.html, None)
        sys.exit(0)

    if CONF.show_config:
        # Python 2 print statement: dumps the bundled default config and exits.
        print resource_string(__name__, "cfg.scale.yaml")
        sys.exit(0)

    if CONF.multicast and CONF.storage:
        LOG.error('--multicast and --storage can not both be chosen.')
        sys.exit(1)

    try:
        kb_config = KBConfig()
        kb_config.init_with_cli()
    except TypeError:
        LOG.error('Error parsing the configuration file')
        sys.exit(1)

    # The KloudBuster class is just a wrapper class that
    # leverages the tenant and user classes for resource creation and deletion.
    kloudbuster = KloudBuster(
        kb_config.cred_tested, kb_config.cred_testing,
        kb_config.server_cfg, kb_config.client_cfg,
        kb_config.topo_cfg, kb_config.tenants_list,
        storage_mode=CONF.storage, multicast_mode=CONF.multicast)
    if kloudbuster.check_and_upload_images():
        kloudbuster.run()

    if CONF.json:
        '''Save results in JSON format file.'''
        LOG.info('Saving results in json file: ' + CONF.json + "...")
        with open(CONF.json, 'w') as jfp:
            json.dump(kloudbuster.final_result, jfp, indent=4, sort_keys=True)

    # CSV export applies to multicast runs only.
    if CONF.multicast and CONF.csv and 'kb_result' in kloudbuster.final_result:
        '''Save results in JSON format file.'''
        if len(kloudbuster.final_result['kb_result']) > 0:
            LOG.info('Saving results in csv file: ' + CONF.csv + "...")
            with open(CONF.csv, 'w') as jfp:
                jfp.write(KBRunner_Multicast.json_to_csv(kloudbuster.final_result['kb_result'][0]))

    if CONF.html:
        generate_charts(kloudbuster.final_result, CONF.html, kb_config.config_scale)
def main():

    '''
    Sync local RRD sensor data up to a ThingSpeak channel.

    Passed arguments:
        --syncerr     - disables reporting of sync errors
    '''

    script_name = os.path.basename(sys.argv[0])
    folder_loc  = os.path.dirname(os.path.realpath(sys.argv[0]))
    folder_loc  = folder_loc.replace('scripts', '')


    #---------------------------------------------------------------------------
    # Set up logger
    #---------------------------------------------------------------------------
    logger = log.setup('root', '{folder}/logs/{script}.log'.format(
                                                    folder= folder_loc,
                                                    script= script_name[:-3]))  # strip '.py'

    logger.info('')
    logger.info('--- Script {script} Started ---'.format(script= script_name))


    #---------------------------------------------------------------------------
    # CHECK SCRIPT IS NOT ALREADY RUNNING
    #---------------------------------------------------------------------------
    if check_process.is_running(script_name):
        logger.error('Script already running.')
        sys.exit()

    try:
        #-------------------------------------------------------------------
        # Check and action passed arguments
        #-------------------------------------------------------------------
        sync_err = False
        if len(sys.argv) > 1:
            if '--syncerr' in sys.argv:
                logger.info('User requested NO ERROR feedback.')
                sync_err = True


        #-------------------------------------------------------------------
        # Get data from config file
        #-------------------------------------------------------------------
        with open('{fl}/data/config.json'.format(fl= s.SYS_FOLDER), 'r') as f:
            config = json.load(f)

        ts_host_addr  = config['thingspeak']['THINGSPEAK_HOST_ADDR']
        ts_channel_id = config['thingspeak']['THINGSPEAK_CHANNEL_ID']
        ts_api_key    = config['thingspeak']['THINGSPEAK_API_KEY']


        #-------------------------------------------------------------------
        # Sync data
        #-------------------------------------------------------------------
        sync(   ts_host_addr,
                ts_api_key,
                ts_channel_id,
                list(s.SENSOR_SET.keys()),
                s.UPDATE_RATE,
                '{fd1}{fd2}{fl}'.format(fd1= s.SYS_FOLDER,
                                        fd2= s.DATA_FOLDER,
                                        fl= s.RRDTOOL_RRD_FILE),
                sync_err,
                '{fl}/data/error.json'.format(fl= folder_loc))

    # Bug fix: 'except Exception, e' is Python-2-only syntax; the 'as' form
    # below works on Python 2.6+ and Python 3.
    except Exception as e:
        logger.error('Update failed ({error_v}). Exiting...'.format(
            error_v=e), exc_info=True)
        sys.exit()
def main():
    '''
    Grabs data from weather underground and rrd file to generate prediction
    for the water depth
    '''

    script_name = os.path.basename(sys.argv[0])
    folder_loc = os.path.dirname(os.path.realpath(sys.argv[0]))
    folder_loc = folder_loc.replace('scripts', '')

    #---------------------------------------------------------------------------
    # SET UP LOGGER
    #---------------------------------------------------------------------------
    logger = log.setup(
        'root', '{folder}/logs/{script}.log'.format(folder=folder_loc,
                                                    script=script_name[:-3]))
    logger.info('')
    logger.info('--- Script {script} Started ---'.format(script=script_name))

    #---------------------------------------------------------------------------
    # CHECK SCRIPT IS NOT ALREADY RUNNING
    #---------------------------------------------------------------------------
    if check_process.is_running(script_name):
        # Bug fix: the original also logged the undefined name
        # 'other_script_found', which raised a NameError on this path.
        logger.error('Script already runnning. Exiting...')
        sys.exit()

    #---------------------------------------------------------------------------
    # CHECK RRD FILE
    #---------------------------------------------------------------------------
    rrd = rrd_tools.RrdFile('{fd1}/data/{fl}'.format(fd1=folder_loc,
                                                     fl=s.RRDTOOL_RRD_FILE))

    # Abort if the RRD data sources don't match the configured sensor set.
    if not rrd.check_ds_list_match(list(s.SENSOR_SET.keys())):
        logger.error('Data sources in RRD file does not match set up.')
        logger.error('Exiting...')
        sys.exit()

    #-------------------------------------------------------------------
    # GET DATA FROM CONFIG FILE
    #-------------------------------------------------------------------
    try:
        with open('{fl}/data/config.json'.format(fl=folder_loc), 'r') as f:
            config = json.load(f)

        location = [
            config['irrigation']['COORD_NORTH'],
            config['irrigation']['COORD_SOUTH']
        ]
        alarm_enable = config['irrigation']['ALARM_ENABLE']
        alarm_level = config['irrigation']['ALARM_LEVEL']
        kc = config['irrigation']['CROP_FACTOR_KC']
        days = config['irrigation']['RECOMMENDED_WATERING_DAYS']
        root_depth = config['irrigation']['ROOT_DEPTH']
        soil_type = config['irrigation']['SOIL_TYPE']
        irrig_full = config['irrigation']['IRRIG_FULL']
        irrig_partial = config['irrigation']['IRRIG_PARTIAL']
        maker_ch_addr = config['maker_channel']['MAKER_CH_ADDR']
        maker_ch_key = config['maker_channel']['MAKER_CH_KEY']

    # Bug fix: 'except Exception, e' is Python-2-only; 'as' works on 2.6+/3.x.
    except Exception as e:
        logger.error(
            'Unable to load config data ({error_v}). Exiting...'.format(
                error_v=e),
            exc_info=True)
        sys.exit()
Exemple #32
0
# end DjDataService


def parse_args():
    """Parse CLI options for the DJ data service.

    Returns:
        argparse.Namespace with 'host' (str) and 'port' (int).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-H', '--host', type=str, required=False,
                        default=VARS['DJDATASERVICE']['host'],
                        help='Listening host or ip address, default: %s' % (
                            VARS['DJDATASERVICE']['host']))
    # Bug fix: was type=str, but HTTPServer requires a numeric port and the
    # help string already formats the default with %d.
    parser.add_argument('-p', '--port', type=int, required=False,
                        default=VARS['DJDATASERVICE']['port'],
                        help='Listening port, default: %d' % (
                            VARS['DJDATASERVICE']['port']))
    return parser.parse_args()


# Script entry point: parse options, set up logging, then serve
# DjDataService over plain HTTP until interrupted.
if __name__ == '__main__':
    args = parse_args()
    log.setup(VARS, "dj_data_service")
    try:
        # NOTE(review): HTTPServer needs a numeric port -- confirm args.port
        # arrives as an int here (it is parsed with type=str).
        server = BaseHTTPServer.HTTPServer((args.host, args.port),
                                           DjDataService)
        logging.debug('Started server on %s:%d' % (args.host, args.port))

        # Wait forever for incoming http requests
        server.serve_forever()

    except KeyboardInterrupt:
        # Ctrl-C: release the listening socket before exiting.
        logging.debug('^C received, shutting down the server')
        server.socket.close()
Exemple #33
0
def main():
    """CLI entry point for KloudBuster.

    Registers the command-line options, loads configuration, runs the
    scale test, and writes results as JSON/CSV/HTML as requested.
    """
    cli_opts = [
        cfg.StrOpt("config",
                   short="c",
                   default=None,
                   help="Override default values with a config file",
                   metavar="<config file>"),
        cfg.BoolOpt("storage",
                    default=False,
                    help="Running KloudBuster to test storage performance"),
        cfg.BoolOpt("multicast",
                    default=False,
                    help="Running KloudBuster to test multicast performance"),
        cfg.StrOpt("topology",
                   short="t",
                   default=None,
                   help="Topology file for compute hosts",
                   metavar="<topology file>"),
        cfg.StrOpt("tenants-list",
                   short="l",
                   default=None,
                   help="Existing tenant and user lists for reusing",
                   metavar="<tenants file>"),
        cfg.StrOpt(
            "rc",
            default=None,
            help="Tested cloud openrc credentials file (same as --tested-rc)",
            metavar="<rc file>"),
        cfg.StrOpt("tested-rc",
                   default=None,
                   help="Tested cloud openrc credentials file",
                   metavar="<rc file>"),
        cfg.StrOpt("testing-rc",
                   default=None,
                   help="Testing cloud openrc credentials file",
                   metavar="<rc file>"),
        cfg.StrOpt("passwd",
                   default=None,
                   secret=True,
                   help="Tested cloud password (same as --tested-pwd)",
                   metavar="<password>"),
        cfg.StrOpt("tested-passwd",
                   default=None,
                   secret=True,
                   help="Tested cloud password",
                   metavar="<password>"),
        cfg.StrOpt("testing-passwd",
                   default=None,
                   secret=True,
                   help="Testing cloud password",
                   metavar="<password>"),
        cfg.StrOpt("html",
                   default=None,
                   help='store results in HTML file',
                   metavar="<dest html file>"),
        cfg.StrOpt("label",
                   default=None,
                   help='label for the title in HTML file',
                   metavar="<title>"),
        cfg.BoolOpt(
            "headless",
            default=False,
            help=
            "do not show chart in the browser (default=False, only used if --html)"
        ),
        cfg.StrOpt("json",
                   default=None,
                   help='store results in JSON format file',
                   metavar="<dest json file>"),
        cfg.StrOpt("csv",
                   default=None,
                   help='store results in CSV format, multicast only.',
                   metavar="<csv file>"),
        cfg.BoolOpt("no-env", default=False, help="Do not read env variables"),
        cfg.BoolOpt("show-config",
                    default=False,
                    help="Show the default configuration"),
        cfg.StrOpt(
            "charts-from-json",
            default=None,
            help='create charts from json results and exit (requires --html)',
            metavar="<source json file>"),
    ]
    CONF.register_cli_opts(cli_opts)
    CONF(sys.argv[1:], project="kloudbuster", version=__version__)
    logging.setup("kloudbuster")

    # --rc / --passwd are shorthand aliases for the "tested" cloud options.
    if CONF.rc and not CONF.tested_rc:
        CONF.tested_rc = CONF.rc

    if CONF.passwd and not CONF.tested_passwd:
        CONF.tested_passwd = CONF.passwd

    # Chart-only mode: build HTML charts from a previous JSON run and exit.
    if CONF.charts_from_json:
        if not CONF.html:
            LOG.error(
                'Destination html filename must be specified using --html.')
            sys.exit(1)
        with open(CONF.charts_from_json, 'r') as jfp:
            json_results = json.load(jfp)
        generate_charts(json_results, CONF.html, None)
        sys.exit(0)

    if CONF.show_config:
        # print() with a single argument behaves identically on Python 2
        # and 3 (the original used the Python-2-only statement form).
        print(resource_string(__name__, "cfg.scale.yaml"))
        sys.exit(0)

    if CONF.multicast and CONF.storage:
        LOG.error('--multicast and --storage can not both be chosen.')
        sys.exit(1)

    try:
        kb_config = KBConfig()
        kb_config.init_with_cli()
    except TypeError:
        LOG.exception('Error parsing the configuration file')
        sys.exit(1)

    # The KloudBuster class is just a wrapper class that
    # leverages the tenant and user classes for resource creation/deletion.
    kloudbuster = KloudBuster(kb_config.cred_tested,
                              kb_config.cred_testing,
                              kb_config.server_cfg,
                              kb_config.client_cfg,
                              kb_config.topo_cfg,
                              kb_config.tenants_list,
                              storage_mode=CONF.storage,
                              multicast_mode=CONF.multicast)
    if kloudbuster.check_and_upload_images():
        kloudbuster.run()

    # Save results in a JSON file.
    if CONF.json:
        LOG.info('Saving results in json file: ' + CONF.json + "...")
        with open(CONF.json, 'w') as jfp:
            json.dump(kloudbuster.final_result, jfp, indent=4, sort_keys=True)

    # Save results in a CSV file (multicast mode only).  The original had a
    # copy-pasted "JSON" bare-string comment here.
    if CONF.multicast and CONF.csv and 'kb_result' in kloudbuster.final_result:
        if kloudbuster.final_result['kb_result']:
            LOG.info('Saving results in csv file: ' + CONF.csv + "...")
            with open(CONF.csv, 'w') as jfp:
                jfp.write(
                    KBRunner_Multicast.json_to_csv(
                        kloudbuster.final_result['kb_result'][0]))

    if CONF.html:
        generate_charts(kloudbuster.final_result, CONF.html,
                        kb_config.config_scale)
Exemple #34
0
    def running_config_POST(self, arg):
        """Create a new KloudBuster session from a JSON configuration payload.

        Expected payload shape:
        {
         'credentials': {'tested-rc': '<STRING>', 'tested-passwd': '<STRING>',
                         'testing-rc': '<STRING>', 'testing-passwd': '<STRING>'},
         'kb_cfg': {<USER_OVERRIDED_CONFIGS>},
         'topo_cfg': {<TOPOLOGY_CONFIGS>}
         'tenants_cfg': {<TENANT_AND_USER_LISTS_FOR_REUSING>}
        }

        Returns the new session id on success, or an error body with
        HTTP status 400 (bad payload) / 403 (session already exists).
        """
        try:
            user_config = json.loads(arg)

            # Parsing credentials from application input
            cred_config = user_config["credentials"]
            cred_tested = Credentials(openrc_contents=cred_config["tested-rc"], pwd=cred_config["tested-passwd"])
            if "testing-rc" in cred_config and cred_config["testing-rc"] != cred_config["tested-rc"]:
                cred_testing = Credentials(openrc_contents=cred_config["testing-rc"], pwd=cred_config["testing-passwd"])
            else:
                # Use the same openrc file for both cases
                cred_testing = cred_tested

            # Session id derives from the credentials, so the same
            # credentials always map to the same session.
            session_id = hashlib.md5(str(cred_config)).hexdigest()
            kb_config = KBConfig()
            if KBSessionManager.has(session_id):
                response.status = 403
                response.text = u"Session is already existed."
                return response.text

            # Parsing server and client configs from application input
            # Save the public key into a temporary file
            # NOTE(review): the presence check uses key "public_key" but the
            # value is read from "public_key_file" -- confirm which key the
            # client actually sends.
            if "public_key" in user_config["kb_cfg"]:
                pubkey_filename = "/tmp/kb_public_key.pub"
                # 'with' guarantees the file is closed even on write errors
                # (the original used open/write/close).
                with open(pubkey_filename, "w") as f:
                    f.write(user_config["kb_cfg"]["public_key_file"])
                kb_config.config_scale["public_key_file"] = pubkey_filename

            if "prompt_before_run" in user_config["kb_cfg"]:
                kb_config.config_scale["prompt_before_run"] = False

            if user_config["kb_cfg"]:
                alt_config = Configuration.from_string(user_config["kb_cfg"]).configure()
                kb_config.config_scale = kb_config.config_scale.merge(alt_config)

            # Parsing topology configs from application input
            if "topo_cfg" in user_config:
                topo_cfg = Configuration.from_string(user_config["topo_cfg"]).configure()
            else:
                topo_cfg = None

            # Parsing tenants configs from application input
            if "tenants_list" in user_config:
                tenants_list = Configuration.from_string(user_config["tenants_list"]).configure()
            else:
                tenants_list = None
        except Exception:
            response.status = 400
            # Bug fix: format_exc must be *called*; the original interpolated
            # the function object instead of the traceback text.
            response.text = u"Error while parsing configurations: \n%s" % (traceback.format_exc())
            return response.text

        logging.setup("kloudbuster", logfile="/tmp/kb_log_%s" % session_id)
        kb_config.init_with_rest_api(
            cred_tested=cred_tested, cred_testing=cred_testing, topo_cfg=topo_cfg, tenants_list=tenants_list
        )

        kb_session = KBSession()
        kb_session.kb_config = kb_config
        KBSessionManager.add(session_id, kb_session)

        return str(session_id)
def create_reservoir():
    """Interactively build an RBN reservoir node.

    Prompts (via default_input, in this order) for connectivity, node
    count and input connectivity, then constructs the reservoir.
    """
    k = default_input('connectivity', 2)
    size = default_input('n_nodes', 100)
    in_conn = default_input('input_connectivity', 50)
    reservoir = rbn_node.RBNNode(connectivity=k,
                                 output_dim=size,
                                 input_connectivity=in_conn)
    return reservoir


# Script entry point: set up logging in the pickle working directory,
# then either load an existing test dataset from disk or generate a
# fresh train/test split and optionally pickle the test part.
if __name__ == '__main__':
    # Set pickle working dir
    working_dir = get_working_dir()

    log.setup(logging.DEBUG, path=working_dir)
    log_git_info()

    # Create datasets
    use_existing_dataset = user_confirms('Use existing dataset in folder?')
    if use_existing_dataset:
        # Reuse the first '*-dataset' pickle found in the working dir.
        test_dataset, _ = glob_load(working_dir + '*-dataset')[0]
        dataset_description = '[dataset_from_folder]'
    else:
        # Last generated dataset is held out as the test set.
        datasets, dataset_description = create_dataset()
        training_dataset, test_dataset = datasets[:-1], datasets[-1]

    if not use_existing_dataset and not user_denies('Pickle test dataset?'):
        dump(test_dataset, dataset_description + '-dataset',
             folder=working_dir)
Exemple #36
0
# Thin logging facade: initialise the project's root logger once at import
# time, then hand out named child loggers through get_logger().
from log import setup
from config import map as configmap

logroot = setup(configmap)


def get_logger(name):
    """Return a child logger of the configured root for *name*."""
    return logroot.get_logger(name)
Exemple #37
0
from bottle import route, post, delete, get, run, template, request, redirect, response, static_file
import requests
import json
from datetime import datetime, timedelta
import log
import uuid
import urllib
import config as CONFIG
logger = log.setup('root', 'sxsw.log')
from email.utils import parseaddr
from passlib.apps import custom_app_context as pwd_context
import os
import sqlite3
import secrets

#token = secrets.global_config.slackkey      # found at https://api.slack.com/web#authentication

#sc = SlackClient(token)
#print sc.api_call("api.test")


# Module-level state: auth directory, SQLite connection and the URL pieces
# assembled from the project CONFIG object.
authdir = "/vagrant/auth/"
# NOTE(review): a module-level sqlite3 connection is shared by all handlers;
# sqlite3 connections are not thread-safe by default -- confirm the server
# runs single-threaded.
conn = sqlite3.connect('/vagrant/sxsw2.db')
logger.debug(CONFIG)
logger.debug(CONFIG.global_config.domain)
domain = CONFIG.global_config.domain
app_root = CONFIG.global_config.app_root
static_root = CONFIG.global_config.static_root
protocol = CONFIG.global_config.protocol
# e.g. "<protocol><domain>/<app_root>" and "<protocol><domain>/<static_root>/"
base_url = protocol + domain + "/" + app_root
homepage = protocol + domain + "/" + static_root + "/"
Exemple #38
0
# encoding: utf-8

import logging
import os

from flask import Flask

import log
from ping import bp as ping_bp

# Configure logging for the current environment (defaults to "dev"),
# then build the Flask application.
log.setup(os.environ.get("ENV", "dev"))
logger = logging.getLogger(__name__)
app = Flask(__name__)


@app.route("/")
def hello_world():
    """Trivial liveness endpoint at the site root."""
    return "hello, world"


# register blueprints
app.register_blueprint(ping_bp, url_prefix='/ping')

if __name__ == "__main__":
    # Development server only; debug=True must not be used in production.
    app.run(host="127.0.0.1", port=8080, debug=True)
Exemple #39
0
#!/usr/bin/python2.7

import sys
import os, os.path
import argparse
import log
import paramiko
import socket
import time
import signal
from ssh_helpers import *

# Module-level setup: logging to /var/log, the default set of special
# filesystems to bind-mount into a chroot, and the run flag that the
# SIGINT handler flips to request shutdown.
log.setup('noderoot', '/var/log')
logger = log.get(__name__)
default_mounts = ['var/tmp', 'run', 'dev', 'proc', 'sys', 'dev/pts']
do_run = True


def can_run():
    """Return True while the main loop is allowed to keep running."""
    # Reading a module-level name needs no 'global' declaration; the
    # original's 'global do_run' was only required for assignment.
    return do_run


def signal_handler(signal, frame):
    """SIGINT handler: first Ctrl-C requests shutdown, second forces exit.

    NOTE(review): presumably sets do_run = False after the visible lines
    (this snippet appears truncated) -- confirm against the full source.
    """
    global do_run
    logger.info('Ctrl-C caught')

    # Ctrl-C pressed more than once: give up on a graceful stop.
    if do_run == False:
        sys.exit(1)
Exemple #40
0
        self.gui.messageFocus(message.number)

    def info(self, label, text):
        """Queue an informational message (wrapped in Info) for display."""
        self.addMessage(Info(label, text))

    def setVmid(self, vmid):
        """Record the current VM id and push it plus the core's rip to the GUI."""
        self.vmid = vmid
        self.gui.setVmid(self.vmid, self.core.regs.rip)

    def setVmexit(self, reason):
        """Display the VM-exit reason name; the low 16 bits index the table."""
        self.gui.setVmexit(ExitReason.e[reason & 0xffff]['name'])


# Debug client main: build the client from the default or CLI-supplied
# config, run it, and always persist minibuf histories on exit.
try:
    if len(sys.argv) < 2:
        debugClient = DebugClient(Config('config/debug_client.config'))
    else:
        debugClient = DebugClient(Config(sys.argv[1]))
    log.setup(debugClient.config['LOG_PREFIX'])
    log.log('------ STARTUP ------')
    ServerStateMinibuf.restoreHistories()
    debugClient.run()
# Bug fix: 'except BadReply, msg' is Python-2-only syntax; the 'as' form
# works on Python 2.6+ and Python 3.
except BadReply as msg:
    print("%s\n" % (msg))
    log.log(msg, "ERROR")
finally:
    # Runs on success, error and Ctrl-C alike.
    ServerStateMinibuf.saveHistories()
    log.log('------ GOODBYE ------')
    log.logClose()
Exemple #41
0
import cfg
CONF = cfg.CONF
import log as logging
LOG = logging.getLogger(__name__)

# Bug fix: removed leftover debugging residue 'import pdb;pdb.set_trace()',
# which unconditionally halted the module in the debugger at import time.

# Load config, initialise logging, and emit a startup log line.
CONF('weibo.conf')
logging.setup('weibo')
LOG.info('fafadfafafffafa')
Exemple #42
0
# python default imports
import requests, json

# project related imports
import location
# set up the module-wide logger used by all helpers below
import log
log_handle=log.setup()

# this script uses an API powered by https://newsapi.org/
def get_key():
    """Return the newsapi.org API key.

    NOTE(review): currently returns an empty placeholder string -- a real
    key must be filled in (or loaded from config) for requests to succeed.
    """
    log_handle.info("getting News API")
    return ''

# this is the default base url to help build the query to the news api    
def get_base_url():
    """Return the base URL for querying US top headlines from newsapi.org."""
    log_handle.info(" getting base url for news")
    base = "https://newsapi.org/v2/top-headlines?country=us&"
    return base
def build_url_call(base_url=None, key=None):
    """Assemble the complete news-API request URL.

    Any argument left empty/None falls back to the module default
    (get_base_url / get_key).
    """
    log_handle.info("Building url for news API")
    # 'or' mirrors the original truthiness checks: any falsy value
    # (None or "") triggers the default.
    base_url = base_url or get_base_url()
    key = key or get_key()
    return base_url + "apiKey=" + key

#gets the news data
def get_data(complete_url=None):
    if not complete_url:
Exemple #43
0
from bottle import route, post, delete, get, run, template, request, redirect, response, static_file
import requests
import json
from datetime import datetime, timedelta
import log
import uuid
import urllib
logger = log.setup('root', 'sxsw_slack.log')
# NOTE(review): 'secrets' here is a project-local module holding the Slack
# key, not the Python 3 stdlib 'secrets' module.
import secrets

port = 8082 

import time
from slackclient import SlackClient

token = secrets.global_config.slackkey  # found at https://api.slack.com/web#authentication

# Sanity-check the Slack connection at import time (Python 2 print).
sc = SlackClient(token)
print sc.api_call("api.test")
#print sc.api_call("channels.info", channel="1234567890")
#print sc.api_call(
##    "chat.postMessage", channel="#bottest", text="Hello from Python! :tada:",
#    username='******', icon_emoji=':robot_face:'
#)

#if sc.rtm_connect():
#    while True:
#        response =  sc.rtm_read()
#        if len(response):
#            for item in response:
#                print item
Exemple #44
0
    plt.savefig(raw_input('Name: '), bbox_inches='tight')
    #plt.show()

    #plt.matshow(test_input, cmap=plt.cm.gray)
    #plt.title('Reservoir input')

    #input_connections = np.zeros((1, rbn.n_nodes))
    #input_connections[0, rbn.input_connections] = 1

    #plt.matshow(input_connections, cmap=plt.cm.gray)
    #plt.title('Input connections')

    #plt.show()

# Script entry point: visualize the RBN state, then stop.
if __name__ == '__main__':
    log.setup(logging.DEBUG)

    visualize_rbn_state()

    import sys
    sys.exit()

    # NOTE(review): everything below is unreachable (after sys.exit()) --
    # apparently kept as scratch code for building example reservoirs.
    from rbn import rbn_node

    rbn_reservoir_ordered = rbn_node.RBNNode(
            connectivity=2,
            should_perturb=False,
            output_dim=30,
            input_connectivity=15)
    #rbn_reservoir_critical = rbn_node.RBNNode(
    #        connectivity=2,
Exemple #45
0
#coding=utf-8
import httplib
import simplejson
import json
import uuid
import random
import socket
import struct
#import ast
import os
from time import sleep
import paramiko
import log

# Module-wide logger and the REST endpoint the helpers below talk to.
logger = log.setup()
REST_SERVER = '10.128.121.12'
REST_SERVER_PORT = 7070


class RestException(Exception):
    """Raised for errors while talking to the REST server."""
    pass


class RestRequest(object):
    """Helper for issuing requests against the configured REST server."""
    def __init__(self, host=None, port=None):
        # NOTE(review): the host/port parameters are ignored; the hard-coded
        # module constants are always used -- confirm whether that is intended.
        self.host = REST_SERVER
        self.port = REST_SERVER_PORT
        # Endpoint the server calls back with asynchronous results.
        self.callbackuri = 'http://%s:%s/debug/result' % (REST_SERVER,
                                                          REST_SERVER_PORT)
        self.headers = self._build_header()
Exemple #46
0
# -*- coding: latin-1 -*-
from bottle import route, post, delete, get, run, template, request, redirect, response, static_file
import requests
import json
from datetime import datetime, timedelta
import log
import uuid
import urllib
logger = log.setup('root', 'harrybot.log')
# NOTE(review): 'secrets' is a project-local module holding the Slack key,
# not the Python 3 stdlib 'secrets' module.
import secrets

port = 8083 

import time
from slackclient import SlackClient

token = secrets.global_config.slackkey      # found at https://api.slack.com/web#authentication

# Sanity-check the Slack connection at import time (Python 2 print).
sc = SlackClient(token)
print sc.api_call("api.test")
#print sc.api_call("channels.info", channel="1234567890")
#print sc.api_call(
##    "chat.postMessage", channel="#bottest", text="Hello from Python! :tada:",
#    username='******', icon_emoji=':robot_face:'
#)

 
# -*- coding: latin-1 -*-
import re 

regs = [
Exemple #47
0
def main():

    '''
    Entry point for the script.

    System arguments:
        --clear     - clears all errors
    '''

    script_name = os.path.basename(sys.argv[0])
    folder_loc  = os.path.dirname(os.path.realpath(sys.argv[0]))
    folder_loc  = folder_loc.replace('scripts', '')


    #---------------------------------------------------------------------------
    # SET UP LOGGER
    #---------------------------------------------------------------------------
    logger = log.setup('root', '{folder}/logs/{script}.log'.format(
                                                    folder= folder_loc,
                                                    script= script_name[:-3]))
    logger.info('')
    logger.info('--- Script {script} Started ---'.format(script= script_name))


    #---------------------------------------------------------------------------
    # CHECK SCRIPT IS NOT ALREADY RUNNING
    #---------------------------------------------------------------------------
    if check_process.is_running(script_name):
        sys.exit()


    #---------------------------------------------------------------------------
    # CHECK FOR ERRORS AND NOTIFY
    #---------------------------------------------------------------------------
    try:

        error_file = '{fl}/data/error.json'.format(fl= folder_loc)

        # Check and action passed arguments
        if len(sys.argv) > 1:
            if '--clear' in sys.argv:
                logger.info('User requested CLEAR ERROR command.')
                wd = AllErrors(error_file)
                wd.clear()
        else:
            # load configuration data
            with open('{fl}/data/config.json'.format(fl= folder_loc), 'r') as f:
                config = json.load(f)

            # check connection to router and reset if down
            if wlan_down_check(config['network']['ROUTER_IP']):
                sys.exit()

            # action errors: error code '0001' is the rain-counter watchdog
            wd_rain_counter = ErrorCode(error_file, '0001')

            if not wd_rain_counter.check_counter():
                wd_rain_counter.set()

            wd_rain_counter.notify_via_maker_ch(config['maker_channel']['MAKER_CH_ADDR'],
                                                config['maker_channel']['MAKER_CH_KEY'])

    # Bug fix: 'except Exception, e' is Python-2-only syntax; the 'as' form
    # works on Python 2.6+ and Python 3.
    except Exception as e:
        logger.error('Script error ({error_v}). Exiting...'.format(
            error_v=e), exc_info=True)
        print('Script error ({error_v}). Exiting...'.format(error_v=e))
        sys.exit()
Exemple #48
0
from lxml import html
import requests
import json
import os
from datetime import datetime
import time
event_baseurl = "http://schedule.sxsw.com/2016/events/"
import log
logger = log.setup('root','scraper.log')
import subprocess

eventids = {}


def clean_text(t):
    """Return *t* with every newline character removed."""
    return "".join(t.split("\n"))



def get_presenters(event):
    """Collect presenter dicts (name/title/company) from an event page tree.

    NOTE(review): the XPath starts with '//', which lxml evaluates against
    the whole document, not relative to *event* -- confirm this is intended
    (use './/...' for a subtree-relative search).
    """
    presenters = []
    for presenter in event.xpath("//div[@class='presenter-area']/div/hgroup"):
        t = {}
        # Each field may be absent; get_first falls back to "".
        t["name"] = get_first(presenter.xpath("h4[@class='pres_name']/text()"))
        t["title"] = get_first(presenter.xpath("*[@class='pres_title']/text()"))
        t["company"] = get_first(presenter.xpath("*[@class='pres_company']/text()"))
        presenters.append(t)
    return presenters

def get_first(ar):
    """Return the first element of *ar* UTF-8 encoded, or an encoded empty
    string when *ar* is empty."""
    # Idiom: rely on sequence truthiness instead of len(ar) > 0.
    first = ar[0] if ar else ""
    return first.encode('utf-8')
Exemple #49
0
        m.vmid = 0
        frame = self.network.createFrame(m.pack())
        self.network.socket.send(frame)
        # Waiting for data
        while True:
            self.network.receive()
            message = self.debugClient.message
            # Test type
            if not isinstance(message, MessageMemoryWrite):
                raise BadReply(message.messageType)
            length = message.length
            log.log("Receiving 0x%x bytes" % (length))
            # Send Commit message
            m = MessageCommit()
            m.vmid = 0
            m.ok = 1
            frame = self.network.createFrame(m.pack() + pack('B', m.ok))
            self.network.socket.send(frame)
            if length == 0:
                log.log("Uploading ended")
                break


# Debug client main: run the upload test end-to-end against the default
# debug-client configuration, with start/stop markers in the log.
log.setup("test_")
log.log('------ STARTUP ------')
test = Test(Config('config/debug_client.config'))
test.createComponents()
test.run()
log.log('------ GOODBYE ------')
Exemple #50
0
import logging
import log

# Initialise the project's logging configuration at import time.
log.setup()
Exemple #51
0
from bottle import route, post, delete, get, run, template, request, redirect, response, static_file
import requests
import json

import log

logger = log.setup("root", "sxsw.log")

import sqlite3

conn = sqlite3.connect("/vagrant/sxsw.db")

port = 8081

logger.debug(conn)


def create_tables():
    """Create the sxsw_user_events join table (userid, eventid) if possible.

    The UNIQUE constraint prevents duplicate user/event pairs.  Failures
    (e.g. the table already exists) are logged, not raised.
    """
    try:
        # Bug fix: the original used an undefined cursor name 'c', and the
        # clause 'except e:' referenced an undefined name, so any failure
        # raised NameError instead of being logged.
        cur = conn.cursor()
        cur.execute(
            "CREATE TABLE sxsw_user_events (userid text, eventid text,CONSTRAINT name_unique UNIQUE (userid,eventid))"
        )
        conn.commit()
    except Exception as e:
        logger.exception(e)


def dict_factory(cursor, row):
    d = {}
    for idx, col in enumerate(cursor.description):
        d[col[0]] = row[idx]
Exemple #52
0
def main():
    """Command-line entry point for nfvbench.

    Loads and merges configuration (built-in defaults, platform config
    plugin, CLI overrides), then either starts the REST server or runs a
    single benchmark pass under a run lock.  When a fluent logger is
    configured, a run summary is always sent before exiting.

    NOTE(review): this is Python 2 code (print statements, iteritems).
    """
    global fluent_logger
    run_summary_required = False
    try:
        log.setup()
        # load default config file
        config, default_cfg = load_default_config()
        # create factory for platform specific classes
        try:
            factory_module = importlib.import_module(config['factory_module'])
            factory = getattr(factory_module, config['factory_class'])()
        except AttributeError:
            raise Exception(
                "Requested factory module '{m}' or class '{c}' was not found.".
                format(m=config['factory_module'], c=config['factory_class']))
        # create config plugin for this platform
        config_plugin = factory.get_config_plugin_class()(config)
        config = config_plugin.get_config()

        opts, unknown_opts = _parse_opts_from_cli()
        log.set_level(debug=opts.debug)

        # --version: print and exit immediately
        if opts.version:
            print pbr.version.VersionInfo('nfvbench').version_string_with_vcs()
            sys.exit(0)

        # --summary: re-print the summary of a previous run from its JSON file
        if opts.summary:
            with open(opts.summary) as json_data:
                result = json.load(json_data)
                if opts.user_label:
                    result['config']['user_label'] = opts.user_label
                print NFVBenchSummarizer(result, fluent_logger)
            sys.exit(0)

        # show default config in text/yaml format
        if opts.show_default_config:
            print default_cfg
            sys.exit(0)

        config.name = ''
        if opts.config:
            # do not check extra_specs in flavor as it can contain any key/value pairs
            whitelist_keys = ['extra_specs']
            # override default config options with start config at path parsed from CLI
            # check if it is an inline yaml/json config or a file name
            if os.path.isfile(opts.config):
                LOG.info('Loading configuration file: %s', opts.config)
                config = config_load(opts.config, config, whitelist_keys)
                config.name = os.path.basename(opts.config)
            else:
                LOG.info('Loading configuration string: %s', opts.config)
                config = config_loads(opts.config, config, whitelist_keys)

        # setup the fluent logger as soon as possible right after the config plugin is called,
        # if there is any logging or result tag is set then initialize the fluent logger
        for fluentd in config.fluentd:
            if fluentd.logging_tag or fluentd.result_tag:
                fluent_logger = FluentLogHandler(config.fluentd)
                LOG.addHandler(fluent_logger)
                break

        # traffic profile override options
        override_custom_traffic(config, opts.frame_sizes, opts.unidir)

        # copy over cli options that are used in config
        config.generator_profile = opts.generator_profile
        if opts.sriov:
            config.sriov = True
        if opts.log_file:
            config.log_file = opts.log_file
        if opts.service_chain:
            config.service_chain = opts.service_chain
        if opts.service_chain_count:
            config.service_chain_count = opts.service_chain_count
        if opts.no_vswitch_access:
            config.no_vswitch_access = opts.no_vswitch_access
        if opts.hypervisor:
            # can be any of 'comp1', 'nova:', 'nova:comp1'
            config.compute_nodes = opts.hypervisor
        if opts.vxlan:
            config.vxlan = True
        if opts.restart:
            config.restart = True
        # port to port loopback (direct or through switch)
        if opts.l2_loopback:
            config.l2_loopback = True
            if config.service_chain != ChainType.EXT:
                LOG.info('Changing service chain type to EXT')
                config.service_chain = ChainType.EXT
            if not config.no_arp:
                LOG.info('Disabling ARP')
                config.no_arp = True
            config.vlans = [int(opts.l2_loopback), int(opts.l2_loopback)]
            LOG.info('Running L2 loopback: using EXT chain/no ARP')

        if opts.use_sriov_middle_net:
            if (not config.sriov) or (config.service_chain != ChainType.PVVP):
                raise Exception(
                    "--use-sriov-middle-net is only valid for PVVP with SRIOV")
            config.use_sriov_middle_net = True

        if config.sriov and config.service_chain != ChainType.EXT:
            # if sriov is requested (does not apply to ext chains)
            # make sure the physnet names are specified
            check_physnet("left", config.internal_networks.left)
            check_physnet("right", config.internal_networks.right)
            if config.service_chain == ChainType.PVVP and config.use_sriov_middle_net:
                check_physnet("middle", config.internal_networks.middle)

        # show running config in json format
        if opts.show_config:
            print json.dumps(config, sort_keys=True, indent=4)
            sys.exit(0)

        # update the config in the config plugin as it might have changed
        # in a copy of the dict (config plugin still holds the original dict)
        config_plugin.set_config(config)

        if opts.status or opts.cleanup or opts.force_cleanup:
            status_cleanup(config, opts.cleanup, opts.force_cleanup)

        # add file log if requested
        if config.log_file:
            log.add_file_logger(config.log_file)

        openstack_spec = config_plugin.get_openstack_spec() if config.openrc_file \
            else None

        nfvbench_instance = NFVBench(config, openstack_spec, config_plugin,
                                     factory)

        if opts.server:
            # REST server mode: serve benchmark requests over HTTP
            server = WebServer(nfvbench_instance, fluent_logger)
            try:
                port = int(opts.port)
            except ValueError:
                server.run(host=opts.host)
            else:
                server.run(host=opts.host, port=port)
            # server.run() should never return
        else:
            # single-run mode: serialize runs with a lock, execute one pass
            with utils.RunLock():
                run_summary_required = True
                if unknown_opts:
                    err_msg = 'Unknown options: ' + ' '.join(unknown_opts)
                    LOG.error(err_msg)
                    raise Exception(err_msg)

                # remove unfilled values
                opts = {
                    k: v
                    for k, v in vars(opts).iteritems() if v is not None
                }
                # get CLI args
                params = ' '.join(str(e) for e in sys.argv[1:])
                result = nfvbench_instance.run(opts, params)
                if 'error_message' in result:
                    raise Exception(result['error_message'])

                if 'result' in result and result['status']:
                    nfvbench_instance.save(result['result'])
                    nfvbench_instance.prepare_summary(result['result'])
    except Exception as exc:
        run_summary_required = True
        LOG.error({
            'status': NFVBench.STATUS_ERROR,
            'error_message': traceback.format_exc()
        })
        print str(exc)
    finally:
        if fluent_logger:
            # only send a summary record if there was an actual nfvbench run or
            # if an error/exception was logged.
            fluent_logger.send_run_summary(run_summary_required)
Exemple #53
0
 # Demo configuration for the `log` module:
 #  - 'logging' maps each level name to its destination(s): SCREEN and/or a file
 #  - 'loggers' declares named loggers with their verbosity levels
 config = {
     'logging': {
         'debug': 'SCREEN',
         'info': 'SCREEN',
         'access': 'SCREEN',
         'warn': 'SCREEN,warn.log',
         'error': 'SCREEN,error.log',
         'enabled.default': '1',
     },
     'loggers': {
         'ACCESS': 1,
         'HTTPDaemon': '2',
     }
 }
 
 # Build the logging facility from the config and fetch a named logger.
 l = log.setup(config)
 
 http = l.get_logger("HTTPDaemon")
 
 
 # Exercise every level on the HTTPDaemon logger.
 http.debug('testing')
 http.access('HTTP', 'yoho')
 http.debug('testing')
 http.warn('what')
 http.debug('testing')
 http.error('err RAWR')
 http.debug('testing',stack=True)
 http.access('FTP', 'yoho')
 try:
     raise Exception()
 except:
Exemple #54
0
import requests
import json
import os
from os import path

import log

# Importer-specific logger writing to importer.log (local `log` module).
logger = log.setup('root', 'importer.log')
import os.path

# Service endpoints: local Elasticsearch node and the event-store app.
elastic_url = "http://127.0.0.1:9200"
store_url = "http://127.0.0.1:8081"

# All requests carry JSON bodies; "authtoken" is the store's auth header.
headers = {'Content-type': 'application/json', "authtoken": "gbme"}


class DecimalEncoder(json.JSONEncoder):
    """JSON encoder handling Decimal, bool-subclass and datetime values.

    - Decimal  -> float
    - bool     -> 1 / 0 (only reached if a bool escapes the standard
                  encoder, e.g. a bool subclass)
    - datetime -> timestamp via self.totimestamp()
    """

    #    def totimestamp(self, dt, epoch=datetime(1970,1,1)):
    #        td = dt - epoch
    #        # return td.total_seconds()
    #        return int(((td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) / 1e3)+(CONFIG.global_config.timezone_offset*3600000))


    def default(self, o):
        if isinstance(o, decimal.Decimal):
            return float(o)
        # Fixed: original had a typo `isintance` which raised NameError
        # whenever this branch was reached.
        if isinstance(o, bool):
            if o: return 1
            return 0
        if isinstance(o, datetime):
            # NOTE(review): totimestamp is commented out above — this branch
            # raises AttributeError until it is restored; confirm intent.
            return self.totimestamp(o)
        # Delegate to the base class so unsupported types raise TypeError
        # instead of silently serializing as null (original fell through
        # and returned None).
        return super(DecimalEncoder, self).default(o)
Exemple #55
0
 def __init__(self, path="config/config.yaml"):
     # Path to the YAML configuration file this object will load.
     self.path = path
     # Initialise project-wide logging (local `log` module), then grab a
     # module-named logger for this instance's diagnostics.
     log.setup()
     self.logger = log.logging.getLogger(__name__)
Exemple #56
0
from bottle import route, post, delete, get, run, template, request, redirect, response, static_file
import requests
import json
from datetime import datetime, timedelta
import log
import uuid
import urllib
import config as CONFIG

logger = log.setup('root', 'sxsw.log')
from email.utils import parseaddr
from passlib.apps import custom_app_context as pwd_context
import os
import sqlite3
import secrets

#token = secrets.global_config.slackkey      # found at https://api.slack.com/web#authentication

#sc = SlackClient(token)
#print sc.api_call("api.test")

# Directory used to store per-user auth artifacts.
authdir = "/vagrant/auth/"
# Single shared SQLite connection for the app.
# NOTE(review): sqlite3 connections are not thread-safe across threads by
# default — confirm the bottle server runs single-threaded before sharing.
conn = sqlite3.connect('/vagrant/sxsw2.db')
logger.debug(CONFIG)
logger.debug(CONFIG.global_config.domain)
# Derive the app's absolute URLs from the global configuration.
domain = CONFIG.global_config.domain
app_root = CONFIG.global_config.app_root
static_root = CONFIG.global_config.static_root
protocol = CONFIG.global_config.protocol
base_url = protocol + domain + "/" + app_root
homepage = protocol + domain + "/" + static_root + "/"
Exemple #57
0
# Command-line interface definition.
# NOTE(review): `argParser` (and the positional `path` argument it parses)
# are created above this excerpt.
argParser.add_argument("-d", "--dvm", help = "The path to DVM.")
argParser.add_argument("-o", "--output", help = "The location of the output file")
argParser.add_argument("-b", "--backEnd", default = 'DVM', help = "The backEnd to use.")
argParser.add_argument("-f", "--frontEnd", type = str, help = "The frontEnd to use.")
argParser.add_argument("-ll", "--logLevel", type = int, default = 30, help = "Specify the log level")

argParser.add_argument("--dot", action = "store_true", help = "Generate a dot graph of the program")
argParser.add_argument("--dry_run", action = "store_true", help = "Don't compile the program but abort after parsing the input file.")

args = argParser.parse_args()

# ------------- #
# Program Setup #
# ------------- #

log.setup(args.logLevel)

# Split the input path; the extension selects which front end to use.
fileName, fileExtension = os.path.splitext(args.path)

frontEnd.setUp(fileExtension, args.frontEnd)
frontEnd.fromFile(args.path)

# Optionally emit a dot graph of the intermediate representation.
if args.dot:
	IGR.dot(skipCompound = False)

# --dry_run: stop after parsing, before any compilation.
if args.dry_run:
	sys.exit(0)

backEnd.setUp(fileName, args.backEnd, args.output)
backEnd.toFile()