Example #1
def start_cli(self, args=None, histfile=config.HISTORY):
    self.args = args or None
    self.histfile = histfile
    #log.to_file(level=5, filename='logs/debug_log.txt')
    self.log, self.log_hdl_term = log.to_term(level=5)
    self.log = logging.getLogger(self.name)
    opt = optparse.OptionParser(usage='%prog [options] [<arg>] [...]')
    opt.add_option('-v', '--verbosity',
                   dest='verb', metavar="LEVEL", default=4,
                   help='From 1 (quiet) to 5 (debug). Default: 4')
    self.opts, self.args = opt.parse_args()
    self.log_level = int(self.opts.verb)
    log.set_level(self.log_level)
    self.start(args=self.args)
Example #2
def test_logger():
    log1 = Logger("test1")
    assert log1.level == LOG_LEVELS['info']
    log1.info("test1: printing on info")
    log1.debug("test1: should not print")
    set_level(log1.logger, "debug")
    assert log1.level == LOG_LEVELS['debug']
    log1.debug("test1: should print on debug")

    Logger.LOG_LEVEL = "warning"
    log2 = Logger("test2")
    assert id(log1) == id(log2)
    assert log2.level == LOG_LEVELS['warning']
    log2.warning("test2: should print on warning")
    assert log1.level == LOG_LEVELS['warning']
    log1.info("test1: does not print anymore!")

    Logger.LOG_LEVEL = "asdasd"
    log3 = Logger("test3")
    assert log3.level == LOG_LEVELS['info']
    assert id(log1) == id(log2) == id(log3)
    log3.info("log3: should print on info")
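
The assertions above imply a singleton-style Logger wrapper: every constructor call returns the same object, the class-wide LOG_LEVEL is re-applied on each call, and unknown level names fall back to 'info'. A minimal sketch that would satisfy the test (all names below are inferred from the test itself, not taken from the actual module) might look like this:

import logging

# Assumed mapping of level names to stdlib levels.
LOG_LEVELS = {
    'debug': logging.DEBUG,
    'info': logging.INFO,
    'warning': logging.WARNING,
    'error': logging.ERROR,
}


def set_level(logger, level_name):
    """Re-level an underlying logging.Logger by name."""
    logger.setLevel(LOG_LEVELS[level_name])


class Logger(object):
    """Singleton wrapper: each Logger(...) call returns the same instance
    and re-applies the class-wide LOG_LEVEL; unknown names fall back to info."""
    LOG_LEVEL = "info"
    _instance = None

    def __new__(cls, name):
        if cls._instance is None:
            cls._instance = super(Logger, cls).__new__(cls)
            cls._instance.logger = logging.getLogger("test-logger")
        level = cls.LOG_LEVEL if cls.LOG_LEVEL in LOG_LEVELS else "info"
        cls._instance.logger.setLevel(LOG_LEVELS[level])
        return cls._instance

    @property
    def level(self):
        return self.logger.level

    def debug(self, msg, *args):
        self.logger.debug(msg, *args)

    def info(self, msg, *args):
        self.logger.info(msg, *args)

    def warning(self, msg, *args):
        self.logger.warning(msg, *args)
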
Example #3
File: main.py Project: jj4jj/sdv
def main():
    log.init_logger(config.LOG_FILE)
    log.set_level(config.LOG_LEVEL)

    sub = SubscribeManager(config.REDIS_MQ_HOST, config.REDIS_MQ_PORT, config.REDIS_MQ_DB)
    dbc = DBChartAgent(config.DB_HOST, config.DB_PORT, config.DB_USER, config.DB_PASSWD, config.DB_NAME)
    wss = WebSocketServer(config.WSS_URI, DvClinetDispatcher(sub, dbc), host=config.WSS_HOST, port=config.WSS_PORT)

    def test_push_online():
        dbc.createKey("online")
        pub = Publisher(config.REDIS_MQ_HOST, config.REDIS_MQ_PORT, config.REDIS_MQ_DB)
        import random
        while True:
            add = random.randint(-100, 100)
            dbc.incKey("online","2003", add)
            pub.publish('online', {'server':'2003', "op":"inc", "value":add, "time": datasets.current_ms()})
            time.sleep(1)

    tester = Thread(target=test_push_online)
    tester.setDaemon(True)
    wss.setDaemon(True)
    wss.start()
    tester.start()

    # use a mutable container so the signal handler can update a flag that the
    # main loop actually sees (rebinding a plain name would only create a local)
    exit_main = {'stop': False}
    def stop(signum, frame):
        log.info("stop the io thread ...")
        exit_main['stop'] = True
        #wss.stop()
        #tester.stop()
    signal.signal(signal.SIGQUIT, stop)

    while not exit_main['stop']:
        sub.poll()

    wss.join()
    tester.join()
Example #4
def main():
    global fluent_logger
    run_summary_required = False
    try:
        log.setup()
        # load default config file
        config, default_cfg = load_default_config()
        # create factory for platform specific classes
        try:
            factory_module = importlib.import_module(config['factory_module'])
            factory = getattr(factory_module, config['factory_class'])()
        except AttributeError:
            raise Exception(
                "Requested factory module '{m}' or class '{c}' was not found.".
                format(m=config['factory_module'], c=config['factory_class']))
        # create config plugin for this platform
        config_plugin = factory.get_config_plugin_class()(config)
        config = config_plugin.get_config()

        opts, unknown_opts = _parse_opts_from_cli()
        log.set_level(debug=opts.debug)

        if opts.version:
            print pbr.version.VersionInfo('nfvbench').version_string_with_vcs()
            sys.exit(0)

        if opts.summary:
            with open(opts.summary) as json_data:
                result = json.load(json_data)
                if opts.user_label:
                    result['config']['user_label'] = opts.user_label
                print NFVBenchSummarizer(result, fluent_logger)
            sys.exit(0)

        # show default config in text/yaml format
        if opts.show_default_config:
            print default_cfg
            sys.exit(0)

        config.name = ''
        if opts.config:
            # do not check extra_specs in flavor as it can contain any key/value pairs
            whitelist_keys = ['extra_specs']
            # override default config options with start config at path parsed from CLI
            # check if it is an inline yaml/json config or a file name
            if os.path.isfile(opts.config):
                LOG.info('Loading configuration file: %s', opts.config)
                config = config_load(opts.config, config, whitelist_keys)
                config.name = os.path.basename(opts.config)
            else:
                LOG.info('Loading configuration string: %s', opts.config)
                config = config_loads(opts.config, config, whitelist_keys)

        # setup the fluent logger as soon as possible right after the config plugin is called,
        # if there is any logging or result tag is set then initialize the fluent logger
        for fluentd in config.fluentd:
            if fluentd.logging_tag or fluentd.result_tag:
                fluent_logger = FluentLogHandler(config.fluentd)
                LOG.addHandler(fluent_logger)
                break

        # traffic profile override options
        override_custom_traffic(config, opts.frame_sizes, opts.unidir)

        # copy over cli options that are used in config
        config.generator_profile = opts.generator_profile
        if opts.sriov:
            config.sriov = True
        if opts.log_file:
            config.log_file = opts.log_file
        if opts.service_chain:
            config.service_chain = opts.service_chain
        if opts.service_chain_count:
            config.service_chain_count = opts.service_chain_count
        if opts.no_vswitch_access:
            config.no_vswitch_access = opts.no_vswitch_access
        if opts.hypervisor:
            # can be any of 'comp1', 'nova:', 'nova:comp1'
            config.compute_nodes = opts.hypervisor
        if opts.vxlan:
            config.vxlan = True
        if opts.restart:
            config.restart = True
        # port to port loopback (direct or through switch)
        if opts.l2_loopback:
            config.l2_loopback = True
            if config.service_chain != ChainType.EXT:
                LOG.info('Changing service chain type to EXT')
                config.service_chain = ChainType.EXT
            if not config.no_arp:
                LOG.info('Disabling ARP')
                config.no_arp = True
            config.vlans = [int(opts.l2_loopback), int(opts.l2_loopback)]
            LOG.info('Running L2 loopback: using EXT chain/no ARP')

        if opts.use_sriov_middle_net:
            if (not config.sriov) or (config.service_chain != ChainType.PVVP):
                raise Exception(
                    "--use-sriov-middle-net is only valid for PVVP with SRIOV")
            config.use_sriov_middle_net = True

        if config.sriov and config.service_chain != ChainType.EXT:
            # if sriov is requested (does not apply to ext chains)
            # make sure the physnet names are specified
            check_physnet("left", config.internal_networks.left)
            check_physnet("right", config.internal_networks.right)
            if config.service_chain == ChainType.PVVP and config.use_sriov_middle_net:
                check_physnet("middle", config.internal_networks.middle)

        # show running config in json format
        if opts.show_config:
            print json.dumps(config, sort_keys=True, indent=4)
            sys.exit(0)

        # update the config in the config plugin as it might have changed
        # in a copy of the dict (config plugin still holds the original dict)
        config_plugin.set_config(config)

        if opts.status or opts.cleanup or opts.force_cleanup:
            status_cleanup(config, opts.cleanup, opts.force_cleanup)

        # add file log if requested
        if config.log_file:
            log.add_file_logger(config.log_file)

        openstack_spec = config_plugin.get_openstack_spec() if config.openrc_file \
            else None

        nfvbench_instance = NFVBench(config, openstack_spec, config_plugin,
                                     factory)

        if opts.server:
            server = WebServer(nfvbench_instance, fluent_logger)
            try:
                port = int(opts.port)
            except ValueError:
                server.run(host=opts.host)
            else:
                server.run(host=opts.host, port=port)
            # server.run() should never return
        else:
            with utils.RunLock():
                run_summary_required = True
                if unknown_opts:
                    err_msg = 'Unknown options: ' + ' '.join(unknown_opts)
                    LOG.error(err_msg)
                    raise Exception(err_msg)

                # remove unfilled values
                opts = {
                    k: v
                    for k, v in vars(opts).iteritems() if v is not None
                }
                # get CLI args
                params = ' '.join(str(e) for e in sys.argv[1:])
                result = nfvbench_instance.run(opts, params)
                if 'error_message' in result:
                    raise Exception(result['error_message'])

                if 'result' in result and result['status']:
                    nfvbench_instance.save(result['result'])
                    nfvbench_instance.prepare_summary(result['result'])
    except Exception as exc:
        run_summary_required = True
        LOG.error({
            'status': NFVBench.STATUS_ERROR,
            'error_message': traceback.format_exc()
        })
        print str(exc)
    finally:
        if fluent_logger:
            # only send a summary record if there was an actual nfvbench run or
            # if an error/exception was logged.
            fluent_logger.send_run_summary(run_summary_required)
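
Example 4 switches verbosity with log.set_level(debug=opts.debug). As a rough sketch of what such a boolean toggle typically does on top of the stdlib (an assumption about the helper's behaviour, not nfvbench's actual implementation):

import logging

def set_level(debug=False):
    # assumed behaviour: the flag picks between two fixed stdlib levels
    logging.getLogger().setLevel(logging.DEBUG if debug else logging.INFO)
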
Example #5
def cli_setup(self):
    self.opt = optparse.OptionParser(usage='%prog [options] [<arg>] [...]')
    self.cli_options_common()
    self.opts, self.args = self.opt.parse_args()
    log.set_level(int(self.opts.verb), self.log_hdl_term)
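
Examples 1 and 5 appear to share a small log helper in which to_term() returns a (logger, handler) pair and set_level() takes the 1 (quiet) to 5 (debug) verbosity scale, optionally re-levelling a specific handler. A plausible sketch in terms of the stdlib (the mapping and signatures are assumptions, not the project's actual module):

import logging
import sys

# Assumed mapping from the 1..5 verbosity scale to stdlib levels.
_VERBOSITY = {
    1: logging.CRITICAL,
    2: logging.ERROR,
    3: logging.WARNING,
    4: logging.INFO,
    5: logging.DEBUG,
}


def to_term(level=4, name=None):
    """Attach a stream handler to a logger and return (logger, handler)."""
    logger = logging.getLogger(name)
    handler = logging.StreamHandler(sys.stderr)
    handler.setFormatter(logging.Formatter('%(levelname)s %(name)s: %(message)s'))
    logger.addHandler(handler)
    logger.setLevel(_VERBOSITY[level])
    return logger, handler


def set_level(level, handler=None):
    """Apply a 1-5 verbosity to the root logger and, optionally, one handler."""
    logging.getLogger().setLevel(_VERBOSITY[level])
    if handler is not None:
        handler.setLevel(_VERBOSITY[level])
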
Example #6
#
# author: Cosmin Basca
#
# Copyright 2010 University of Zurich
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#        http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
__author__ = 'basca'

from raptorutil import *
from util import *
from rdfparse import *
from gcityhash import *
from tools import *
import log

log.set_level('critical', name='py4j')
log.set_level('critical', name='sh')
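
Silencing chatty third-party loggers by name, as done here for py4j and sh, maps directly onto the stdlib; assuming the 'critical' string corresponds to logging.CRITICAL, the equivalent calls are:

import logging

logging.getLogger('py4j').setLevel(logging.CRITICAL)
logging.getLogger('sh').setLevel(logging.CRITICAL)
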
Example #7
async def log(client, message, **kwargs):
    set_level(kwargs.get('level'), kwargs.get('what', None))
    await client.send_message(message.channel, 'Done')
Example #8
import cgitb; cgitb.enable()
import log
log.set_level('SILENT')
log.set_templog("web.log")

import json
from webphp import php_get as get
from lib.files import safe_filename
from credentials import GIT_THEMING_PATH
import os.path
import os
import tempfile



def handle_query():
    """
    Handle any and all web submits.
    """
    action = get("action")
    log.debug("responding to: %s", action)

    ## queries for each object type available
    if action == "protostory":
        data = (get("storyentry") or "").strip()
        rows = data.split("\n")
        sid = None
        path = None
        results = []
        retval = {}
        overwrite = False
Example #9
#import cgitb; cgitb.enable()
import log
log.set_level('SILENT')
log.set_templog("web.log")

import webobject
import json
from webphp import php_get as get
from webquerylib import (
    cached_special_query, )

OBJECT_TYPES = {
    "story": webobject.Story,
    "theme": webobject.Theme,
    "storytheme": webobject.StoryTheme,
    "event": webobject.TSTPEvent,
    "proposedevents": webobject.TSTPEvent,
}


def handle_submit(obj_type):
    """
    Do the db-update part of the query.
    """
    if issubclass(obj_type, webobject.TSTPConnection):
        update = get("data")
        attrs = [x[0] for x in update]
        vals = [x[1] for x in update]
        cat1 = get("type1")
        name1 = get("filter1")
        cat2 = get("type2")
Example #10
def main():
    args = sys.argv
    fromidx = args.index("util.db") + 1
    args = args[fromidx:]
    nargs = len(args)

    if args[0] == "clear":
        if len(args) < 2 or args[1] != "nowarn":
            if raw_input("Clear Database? (yes/no) ") != "yes":
                return
        dbdefine.create_tables(True, subset="web_.*$")

    elif args[0] == "import":
        os.chdir(args[1])
        root = subprocess.check_output(["git", "rev-parse",
                                        "--show-toplevel"]).strip()
        timestamp = datetime.datetime.utcfromtimestamp(
            int(
                subprocess.check_output([
                    "git", "log", "-1", "--format=%at"
                ]).strip())).strftime("%Y-%m-%d %H:%M:%S (UTC)")
        themes = defaultdict(list)
        stories = defaultdict(list)
        storythemes = defaultdict(list)

        for path in lib.files.walk(".", ".*\\.(st|th)\\.txt$"):
            log.info("READING: %s", path)

            if path.endswith(".th.txt"):
                objs = list(lib.dataparse.read_themes_from_txt(path, False))
                themes[path].extend(objs)
                log.info(".. found %d well defined themes", len(objs))

            if path.endswith(".st.txt"):
                objs1 = list(lib.dataparse.read_stories_from_txt(path, False))
                stories[path].extend(objs1)
                if objs1:
                    log.info(".. found %d well defined stories", len(objs1))
                objs2 = list(
                    lib.dataparse.read_storythemes_from_txt(path, False))
                storythemes[path].extend(objs2)
                if objs2:
                    log.info(".. found %d themes in stories", len(objs2))
                if not objs1 and not objs2:
                    log.info(".. found nothing to take")

        # add/update meta info
        for thingdict in (themes, stories):
            for path in thingdict:
                if path.startswith("."):
                    relpath = path
                else:
                    relpath = lib.files.abspath2relpath(root, path)
                relpath = relpath.replace("\\", "/")  # replace() returns a new string; keep the result
                tmeta = {
                    "source": relpath,
                    "timestamp": timestamp,
                }
                for obj in thingdict[path]:
                    try:
                        meta = json.loads(obj.meta)
                        meta.update(tmeta)
                    except (AttributeError, ValueError):
                        meta = tmeta
                    obj.meta = json.dumps(meta)

        rthemes = defaultdict(list)
        rstories = defaultdict(list)
        rstorythemes = defaultdict(list)
        rmatch = [(themes, rthemes), (stories, rstories),
                  (storythemes, rstorythemes)]
        rorder = [("theme", rthemes), ("story", rstories),
                  ("storytheme", rstorythemes)]
        events = []
        undefined = []

        # prepare reverse lookup dictionaries
        for dd1, dd2 in rmatch:
            for path, ll in dd1.iteritems():
                for obj in ll:
                    if hasattr(obj, "name"):
                        dd2[obj.name].append((path, obj))
                    else:
                        dd2[(obj.name1, obj.name2)].append((path, obj))

        # note any parent-less themes
        for key, ll in rthemes.iteritems():
            for path, theme in ll:
                parents = theme.list_parents()
                if not parents:
                    log.info('Top Level Theme "%s" in %s', key, path)
                for parent in parents:
                    if parent not in rthemes:
                        log.info('Undefined parent theme "%s" for "%s" in %s',
                                 parent, key, path)
                        undefined.append(("parent theme", parent, key, path))

        # drop any themes with only undefined parents
        changed = True
        while changed:
            changed = False
            for key in rthemes.keys():
                ll = rthemes[key]
                for path, theme in ll:
                    parents = theme.list_parents()
                    if parents and all(p not in rthemes for p in parents):
                        log.info(
                            'Dropping theme with undefined parents: "%s": "%s"',
                            key, parents)
                        changed = True
                        del rthemes[key]
                        break

        # drop story-themes for which either story or theme is not defined
        for theme in sorted(set(x[1] for x in rstorythemes.keys())):
            if theme not in rthemes:
                log.warn('Found undefined theme: "%s"...', theme)
        for story in sorted(set(x[0] for x in rstorythemes.keys())):
            if story not in rstories:
                log.warn('Found undefined story: "%s"...', story)
        for story, theme in rstorythemes.keys():
            drop = False
            if theme not in rthemes:
                drop = True
            if story not in rstories:
                drop = True
            if drop:
                spec = [path for path, _ in rstorythemes[(story, theme)]]
                log.warn('Skipping "%s" found in: %s', (story, theme), spec)
                del rstorythemes[(story, theme)]

        # check for multiple definitions
        for thing, dd in rorder:
            for key, ll in dd.iteritems():
                if len(ll) > 1:
                    spec = sorted(set([path for path, _ in ll]))
                    log.warn('Multiple definitions of %s "%s": %s.', thing,
                             key, spec)

        # create and commit events
        for thing, dd in rorder:
            for key, ll in dd.iteritems():
                obj = ll[0][1]
                events.extend(obj.make_edit_events(None))

        log.info('Committing %d edit events...', len(events))
        log.set_level('WARN')
        TSTPEvent.commit_many(events)
        log.set_level('INFO')
        log.info('done!')
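
The commit step above raises the threshold to WARN while the bulk of edit events is written and then lowers it back by hand. A small context manager gives the same effect but restores the previous level even if commit_many() raises; this sketch uses the stdlib directly rather than the project's string-based log module:

import logging
from contextlib import contextmanager

@contextmanager
def log_level(level, logger=None):
    """Temporarily change a logger's level, restoring it on exit."""
    logger = logger or logging.getLogger()
    previous = logger.level
    logger.setLevel(level)
    try:
        yield logger
    finally:
        logger.setLevel(previous)

# usage:
# with log_level(logging.WARNING):
#     TSTPEvent.commit_many(events)
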
Example #11
def _init():
	global __step_depth_max

	log.set_level( log.CRITICAL )
	__step_depth_max = _options[ 'recursive_depth' ]
Example #12

@bottle.get('/api/v1/towns')
def index_town():
    data = []
    ids = bottle.request.query.getlist('ids[]')
    for idx in ids:
        data.append(h3.get_town_info(PROCESS, int(idx)))
    return render_json(data)


@bottle.get('/api/v1/towns/:num')
def show_town(num):
    return render_json(h3.get_town_info(PROCESS, int(num)))


@bottle.put('/api/v1/towns/:num')
def update_town(num):
    num = int(num)
    data = bottle.request.json['data']
    for item in data:
        h3.set_town_info(PROCESS, num, item['offset'], item['value'],
                         item['size'])
    return render_json(h3.get_town_info(PROCESS, num))


if __name__ == '__main__':
    print('start with debug:', DEBUG)
    log.set_level(DEBUG)
    bottle.run(host='0.0.0.0', port='9090', debug=True, reloader=DEBUG)
Example #13
File: db.py Project: odinlake/tstp
def main():
    args = sys.argv
    fromidx = args.index("util.db") + 1
    args = args[fromidx:]
    nargs = len(args)

    if args[0] == "clear":
        if len(args) < 2 or args[1] != "nowarn":
            if raw_input("Clear Database? (yes/no) ") != "yes":
                return
        dbdefine.create_tables(True)

    elif args[0] == "import":
        themes = defaultdict(list)
        stories = defaultdict(list)
        storythemes = defaultdict(list)

        for path in lib.files.walk(args[1], ".*\.(st|th)\.txt$", 0):
            log.info("READING: %s", path)

            if path.endswith(".th.txt"):
                objs = list(lib.dataparse.read_themes_from_txt(path, False))
                themes[path].extend(objs)
                log.info(".. found %d well defined themes", len(objs))

            if path.endswith(".st.txt"):
                objs1 = list(lib.dataparse.read_stories_from_txt(path, False))
                stories[path].extend(objs1)
                if objs1:
                    log.info(".. found %d well defined stories", len(objs1))
                objs2 = list(lib.dataparse.read_storythemes_from_txt(path, False))
                storythemes[path].extend(objs2)
                if objs2:
                    log.info(".. found %d themes in stories", len(objs2))
                if not objs1 and not objs2:
                    log.info(".. found nothing to take")

        rthemes = defaultdict(list)
        rstories = defaultdict(list)
        rstorythemes = defaultdict(list)
        rmatch = [ (themes, rthemes), (stories, rstories), (storythemes, rstorythemes) ]
        rorder = [ ("theme", rthemes), ("story", rstories), ("storytheme", rstorythemes) ]
        events = []

        undefined = []

        for dd1, dd2 in rmatch:
            for path, ll in dd1.iteritems():
                for obj in ll:
                    if hasattr(obj, "name"):
                        dd2[obj.name].append((path, obj))
                    else:
                        dd2[(obj.name1, obj.name2)].append((path, obj))

        # note any parent-less themes
        for key, ll in rthemes.iteritems():
            for path, theme in ll:
                parents = filter(None, [ x.strip() for x in theme.parents.split(",") ])
                if not parents:
                    log.info('Top Level Theme "%s" in %s', key, path)
                elif key == "fictional gadget":
                    print theme, "::", theme.parents
                for parent in parents:
                    if parent not in rthemes:
                        log.info('Undefined parent theme "%s" for "%s" in %s', parent, key, path)
                        undefined.append(("parent theme", parent, key, path))

        # drop any themes with only undefined parents
        changed = True
        while changed:
            changed = False
            for key in rthemes.keys():
                ll = rthemes[key]
                for path, theme in ll:
                    parents = filter(None, [ x.strip() for x in theme.parents.split(",") ])
                    if parents and all(p not in rthemes for p in parents):
                        log.info('Dropping theme with undefined parents: "%s": "%s"', key, parents)
                        changed = True
                        del rthemes[key]
                        break

        # drop story-themes for which either story or theme is not defined
        for theme in sorted(set(x[1] for x in rstorythemes.keys())):
            if theme not in rthemes:
                log.warn('Found undefined theme: "%s"...', theme)
        for story in sorted(set(x[0] for x in rstorythemes.keys())):
            if story not in rstories:
                log.warn('Found undefined story: "%s"...', story)
        for story, theme in rstorythemes.keys():
            drop = False
            if theme not in rthemes:
                drop = True
            if story not in rstories:
                drop = True
            if drop:
                spec = [ path for path, _ in rstorythemes[(story, theme)] ]
                log.warn('Skipping "%s" found in: %s', (story, theme), spec)
                del rstorythemes[(story, theme)]

        # check for multiple definitions
        for thing, dd in rorder:
            for key, ll in dd.iteritems():
                if len(ll) > 1:
                    spec = [ path for path, _ in ll ]
                    log.warn('Multiple definitions of %s "%s": %s.', thing, key, spec)

        # create and commit events
        for thing, dd in rorder:
            for key, ll in dd.iteritems():
                obj = ll[0][1]
                events.extend(obj.make_edit_events(None))

        log.info('Committing %d edit events...', len(events))
        log.set_level('WARN')
        TSTPEvent.commit_many(events)
        log.set_level('INFO')
        log.info('done!')
Example #14
def cli(verbose):
    if verbose:
        set_level(logging.INFO)
Example #15
def _init():
    global __step_depth_max

    log.set_level(log.CRITICAL)
    __step_depth_max = _options['recursive_depth']