Example #1
    def __init__(self, argv=None):
        """
        Parse command line options, read config and initialize members.

        :param list argv: command line parameters
        """
        # parse program options (retrieve log level and config file name):
        args = docopt(self.usage, version=self.name + ' ' + self.version)
        default_opts = self.option_defaults
        program_opts = self.program_options(args)
        # initialize logging configuration:
        log_level = program_opts.get('log_level', default_opts['log_level'])
        if log_level <= logging.DEBUG:
            fmt = _('%(levelname)s [%(asctime)s] %(name)s: %(message)s')
        else:
            fmt = _('%(message)s')
        logging.basicConfig(level=log_level, format=fmt)
        # parse config options
        config_file = OptionalValue('--config')(args)
        config = udiskie.config.Config.from_file(config_file)
        options = {}
        options.update(default_opts)
        options.update(config.program_options)
        options.update(program_opts)
        # initialize instance variables
        self.config = config
        self.options = options
        self._init(config, options)
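The three update() calls above give command-line options precedence over the config file, which in turn overrides the built-in defaults, because each later update() overwrites keys set earlier. A minimal sketch of that layering (the option names here are illustrative, not taken from udiskie):

default_opts = {'log_level': 30, 'notify': True}   # built-in defaults (hypothetical)
config_opts = {'log_level': 20}                     # read from a config file
program_opts = {'notify': False}                    # parsed from the command line

options = {}
options.update(default_opts)   # lowest precedence
options.update(config_opts)
options.update(program_opts)   # highest precedence
# options == {'log_level': 20, 'notify': False}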
Example #2
def main():
    
    #print( 'Number of arguments: {0}'.format(len(sys.argv)) )
    #print( 'Argument List: {0}'.format(str(sys.argv)) )
    
    start = 1
    if len(sys.argv) > 1:
        start = int(sys.argv[1])
    
    end = start + 1
    if len(sys.argv) > 2:
        end = int(sys.argv[2])
    
    logging.getLogger('').handlers = []
    logging.basicConfig(stream=sys.stdout, level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')

    log("Started mainline")
    
    trainingFileRaw = "data/train.csv"
    trainingFileNpy = "data/train.npy"   
    dataset = load(trainingFileRaw, trainingFileNpy)
    m, n = dataset.shape

    log("Full data set: rows: {0}, features: {1}".format(m,n))    

    predictions = execute(dataset, range(start, end))
    
    log("Completed mainline")
Example #3
def configure_logging():
    logging.basicConfig(
        filename='mv_gp_log_{:%Y%m%d_%H%M%S}.txt'.format(datetime.now()),
        level=logging.DEBUG,
        format='%(asctime)s: %(levelname)7s: [%(name)s]: %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S'
    )
Example #4
def parse_args():
    """Parse arguments and set up logging verbosity.

    :returns: options and arguments as a tuple.
    """
    parser = optparse.OptionParser(__doc__)
    parser.add_option("-f", "--file", dest="filename",
        help="setting file", metavar="FILE")
    parser.add_option("-o", "--output", dest="output",
        help="output file", metavar="FILE")
    parser.add_option("-n", "--dryrun", dest="dryrun",
        help="dry run", default=False, action="store_true")
    parser.add_option("-v", "--verbose", dest="verbose",
        default=False, action="store_true", help="verbose mode")
    parser.add_option("-q", "--quiet", dest="quiet",
        default=False, action="store_true", help="quiet mode")

    opts, args = parser.parse_args()

    if opts.verbose:
        logging.basicConfig(level=logging.DEBUG)
    elif not opts.quiet:
        logging.basicConfig(level=logging.INFO)

    return opts, args
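One caveat worth noting for parse_args() above: logging.basicConfig() only configures the root logger if it has no handlers yet, so only the first effective call in a process takes hold. A small sketch of that behaviour (force= requires Python 3.8+):

import logging

logging.basicConfig(level=logging.INFO)                # configures the root logger
logging.basicConfig(level=logging.DEBUG)               # no-op: a handler already exists
logging.basicConfig(level=logging.DEBUG, force=True)   # replaces existing handlers (3.8+)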
Example #5
def script_to_py3(script):
    """Convert a script to Python3 syntax if required."""
    if sys.version_info[0] < 3:
        return script

    import tempfile
    f = tempfile.NamedTemporaryFile(suffix=".py", delete=False)
    f.write(script.encode())
    f.flush()
    filename = f.name
    f.close()

    # 2to3 is way too chatty
    import logging
    logging.basicConfig(filename=os.devnull)

    from lib2to3.main import main
    if main("lib2to3.fixes", ['--no-diffs', '-w', '-n', filename]):
        raise Exception('py3 conversion failed')

    f2 = open(filename)
    try:
        return f2.read()
    finally:
        f2.close()
        os.remove(filename)
Example #6
def main():
    prog = 'devcron.py'
    usage = 'usage: %prog [options] crontab'
    description = 'A development cron daemon. See README.md for more info.'

    op = optparse.OptionParser(prog=prog, usage=usage, description=description)
    op.add_option('-v', '--verbose', dest='verbose', action='store_true',
                  help='verbose logging.')

    (options, args) = op.parse_args()

    if len(args) != 1:
        op.print_help()
        sys.exit(1)

    log_level = logging.WARN
    if options.verbose:
        log_level = logging.DEBUG

    logging.basicConfig(level=log_level)

    crontab_data = open(args[0]).read()
    crontab_data = fold_crontab_lines(crontab_data)
    crontab_data = edit_crontab_data(crontab_data)
    logging.debug("Edited crontab looks like:\n%s\n" % crontab_data)
    events = parse_crontab(crontab_data)
    logging.debug("Parsed crontab as:\n%s\n" %
                  '\n'.join([str(e) for e in events]))
    cron = Cron(events)
    cron.run()
Example #7
def main():

    # Get all the arguments
    args = argument_parser()

    # Check the verbosity level
    if args.verbose:
        logging.basicConfig(level=logging.DEBUG, format='%(funcName)s:%(levelname)s:%(message)s')
    else:
        logging.basicConfig(level=logging.INFO, format='%(funcName)s:%(levelname)s:%(message)s')

    method = args.method
    action = 'encode' if args.encode else 'decode'
    data = args.encode if action=='encode' else args.decode

    logging.debug("{} this {} string using {}".format(action, data, method))

    if method == 'base64':
        base64_encode_decode(action, data)

    if method == 'caeser':
        key = args.key
        if 0 <= key <= 26:
            caeser_cypher(key, action, data)
        else:
            logging.error("Key should be in the range 0-26")
Example #8
    def log(self, arguments, level="info", format_with='%(asctime)s %(message)s'):

        self.logger = logging.getLogger(self.logger_name)

        if not os.path.isfile(self.logfile):
            # create the log file without discarding the stored path
            open(self.logfile, 'w').close()

        logging.basicConfig(filename=self.logfile, format=format_with)

        if isinstance(arguments, list):
            arguments = "  ".join(arguments)

        if level == "warning":
            self.logger.setLevel(logging.WARNING)
            self.logger.warning(arguments)

        elif level == "error":
            self.logger.setLevel(logging.ERROR)
            self.logger.error(arguments)

        elif level == "exception":
            self.logger.setLevel(logging.CRITICAL)
            self.logger.exception(arguments)

        else:
            self.logger.setLevel(logging.INFO)
            self.logger.info(arguments)
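Because of the same first-call-wins behaviour of basicConfig(), only the first invocation of this log() method actually attaches the file handler, and the filename/format passed on later calls are ignored. A sketch of a more conventional one-time setup with an explicit FileHandler (the helper name is illustrative, not part of the original class):

import logging

def make_file_logger(name, logfile, fmt='%(asctime)s %(message)s'):
    """Attach a FileHandler to a named logger exactly once."""
    logger = logging.getLogger(name)
    if not logger.handlers:                       # avoid stacking duplicate handlers
        handler = logging.FileHandler(logfile)    # creates the file if it is missing
        handler.setFormatter(logging.Formatter(fmt))
        logger.addHandler(handler)
        logger.setLevel(logging.INFO)
    return logger

Callers can then use logger.info(...), logger.warning(...) and so on without re-running the setup.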
Example #9
def main():
  logging.basicConfig(level=logging.INFO, format='%(levelname)s: %(message)s')
  logging.getLogger().addFilter(ColoredLoggingFilter())

  if len(sys.argv) < 2:
    ShowHelpAndExit()

  # Move to the Mozc root source directory only once since os.chdir
  # affects functions in os.path and that causes troublesome errors.
  os.chdir(MOZC_ROOT)

  command = sys.argv[1]
  args = sys.argv[2:]

  if command == 'gyp':
    (cmd_opts, cmd_args) = ParseGypOptions(args)
    GypMain(cmd_opts, cmd_args)
  elif command == 'build':
    (cmd_opts, cmd_args) = ParseBuildOptions(args)
    BuildMain(cmd_opts, cmd_args)
  elif command == 'runtests':
    (cmd_opts, cmd_args) = ParseRunTestsOptions(args)
    RunTestsMain(cmd_opts, cmd_args)
  elif command == 'clean':
    (cmd_opts, cmd_args) = ParseCleanOptions(args)
    CleanMain(cmd_opts, cmd_args)
  else:
    logging.error('Unknown command: %s', command)
    ShowHelpAndExit()
Example #10
def run(port, hostname, interface, verbosity):
    if verbosity == 0: # -q
        level = logging.CRITICAL
    elif verbosity == 1: # default
        level = logging.WARNING
    elif verbosity == 2: # -v
        level = logging.INFO
    else: # -v -v
        level = logging.DEBUG
    logging.basicConfig(level=level)

    if hostname:
        # normalize to a bare host before rebuilding the URL
        if hostname.startswith('http://'):
            hostname = hostname[len('http://'):]
        sep = hostname.find('/')
        if sep != -1:
            hostname = hostname[:sep]
        if port != 80 and ':' not in hostname:
            hostname = "http://%s:%d/" % (hostname, port)
        else:
            hostname = "http://%s/" % hostname
        logging.info('Hostname set to %s' % hostname)

    pong = PongGame()

    reactor.listenTCP(port, HttpFactory(hostname, pong), interface=interface)
    reactor.run()
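The -q/-v ladder above maps a verbosity count to a logging level; the same mapping can be written as a table lookup, clamped so extra -v flags stay at DEBUG (an equivalent sketch, not the original code):

import logging

LEVELS = [logging.CRITICAL, logging.WARNING, logging.INFO, logging.DEBUG]

def level_for(verbosity):
    # 0 = -q, 1 = default, 2 = -v, 3 or more = -v -v
    return LEVELS[min(verbosity, len(LEVELS) - 1)]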
Example #11
    def __init__(self, wsUri, certificate, hashName, keystore=None, truststore=None, verbose=False):
        self.url = wsUri

        self.rt = rfc3161.RemoteTimestamper(self.url, certificate, hashname=hashName, keystore=keystore,
                                            truststore=truststore)
        if verbose:
            logging.basicConfig(level=logging.DEBUG)
Example #12
def configureBasicLogger(logDir, logName=""):
    # start logger:
    fileLogPath = "sim_" + strftime("%H-%M", gmtime()) + ".log" if len(logName) == 0 else logName
    fileLogPath = os.path.join(logDir, fileLogPath)
    if not os.path.exists(logDir):
        os.makedirs(logDir)
    #     flags = os.O_CREAT | os.O_EXCL | os.O_WRONLY
    #     os.open(fileLogPath, flags)
    #     os.close(fileLogPath)
    # set up logging to file - see previous section for more details
    logging.basicConfig(level=logging.INFO,
                        format="%(asctime)s [%(processName)-12.12s] [%(levelname)-5.5s]  %(message)s",
                        datefmt='%m-%d %H:%M:%S',
                        filename=fileLogPath,
                        filemode='w')
    # define a Handler which writes INFO messages or higher to the sys.stderr
    console = logging.StreamHandler()
    console.setLevel(logging.INFO)
    # set a format which is simpler for console use
    formatter = logging.Formatter('%(asctime)s [%(processName)-12.12s] [%(levelname)-5.5s] %(message)s',
                                  datefmt='%m-%d %H:%M:%S')
    # tell the handler to use this format
    console.setFormatter(formatter)
    # add the handler to the root logger
    logging.getLogger().addHandler(console)
Example #13
def pytest_report_header(config):
    """Write the initial header."""
    lines = ["\n*** Integration tests for abipy + abinit + pymatgen ***\n"]
    app = lines.append

    app("Assuming the environment is properly configured:")
    app("In particular, we assume that the abinit executable is in $PATH and can be executed.")
    app("Change manager.yml according to your platform.")
    app("Number of manager configurations: %d" % len(_manager_confs))

    if config.option.verbose > 0:
        for i, s in enumerate(_manager_confs):
            app(80 * "=")
            app("TaskManager #%d" % i)
            app(s)
            app(80 * "=")

    app("")

    # Initialize logging
    # loglevel is bound to the string value obtained from the command line argument.
    # Convert to upper case to allow the user to specify --loglevel=DEBUG or --loglevel=debug
    import logging
    numeric_level = getattr(logging, config.option.loglevel.upper(), None)
    if not isinstance(numeric_level, int):
        raise ValueError('Invalid log level: %s' % config.option.loglevel)
    logging.basicConfig(level=numeric_level)

    return lines
Example #14
def main():
    global LAST_UPDATE_ID
    telegram_token = os.environ.get("TELEGRAM_TOKEN")

    logging.basicConfig(format="%(asctime)s - %(name)s - %(levelname)s - %(message)s")
    logger = logging.getLogger("Maslahat.uz")
    logger.setLevel(logging.DEBUG)

    # logger.debug("Initalizing bot ...")
    try:
        bot = telegram.Bot(telegram_token)
        # logger.debug("Connected to Telegram API")
    except telegram.error.TelegramError:
        pass
        # logger.warning("Cannot connect to Telegram server!")

    redis_url = os.environ.get("REDIS_URL")
    redis_conn = redis.from_url(redis_url)
    # logger.debug("Connected to Redis")

    # logger.debug("Receiving updates ...")
    try:
        LAST_UPDATE_ID = bot.getUpdates()[-1].update_id
        # logger.debug("Updates received")
    except IndexError:
        # logger.warning("No update received")
        LAST_UPDATE_ID = None

    # logger.debug("Starting heartbeat ...")
    heart_beat(logger, stat)
    # logger.debug("Waiting for updates ...")
    while True:
        bot_worker(redis_conn, bot, logger)
        check_facebook(redis_conn, bot, logger)
        check_announcements(redis_conn, bot, logger)
Example #15
def run(NUM_BLOCKS, STEPS_PER_BLOCK, BLOCKS_PER_DUMP, sim_params):

    print('Setting up logging')
    logging.basicConfig(filename='sim.log', level=logging.DEBUG)
    logging.info('NUM_BLOCKS: {}'.format(NUM_BLOCKS))
    logging.info('STEPS_PER_BLOCK: {}'.format(STEPS_PER_BLOCK))
    logging.info('BLOCKS_PER_DUMP: {}'.format(BLOCKS_PER_DUMP))

    print('Instantiating sampler')
    h = sim_params['h']
    r0 = sim_params['r0']
    dr = sim_params['dr']
    outname = sim_params['outname']
    sampler = mcsampler.Sampler(h, r0, dr, np.random.randint(2**32-1))

    # Setup h5 file
    h5 = h5py.File(outname, 'w')
    h5coords = h5.create_dataset('coords', shape=(NUM_BLOCKS,), compression=9, scaleoffset=2,
            dtype=np.float32, chunks=(BLOCKS_PER_DUMP,))

    # Initial coords
    x = r0

    totblocks = NUM_BLOCKS // BLOCKS_PER_DUMP
    temp_coords = np.zeros((BLOCKS_PER_DUMP,), dtype=coord_dtype)

    print('Starting Simulation')
    for dki, dk in enumerate(range(totblocks)):
        t1 = time.time()
        sampler.step(x, temp_coords, BLOCKS_PER_DUMP*STEPS_PER_BLOCK, STEPS_PER_BLOCK)

        h5coords[dki*BLOCKS_PER_DUMP:(dki+1)*BLOCKS_PER_DUMP] = temp_coords[:]
        logging.info('Completed {} of {} steps: {} s'.format(dk,totblocks-1, time.time() - t1))

    h5.close()
Example #16
def main():
    logging.basicConfig(level=logging.DEBUG)

    file_name = os.path.join(TARGET_DIR, 'stop_server.bat')
    if os.access(file_name, os.F_OK):
        logging.info('Trying to stop possibly running server...')
        subprocess.call(file_name, stderr=subprocess.PIPE, shell=True)

    if os.access(TARGET_DIR, os.F_OK):
        shutil.rmtree(TARGET_DIR)
    makedirs(TARGET_DIR, exist_ok=True)

    if IS_WINDOWS:
        deploy_wnmp()
        deploy_dokuwiki('nginx/www')
    else:
        deploy_dokuwiki()

    for pattern in [
            'example.nginx.conf',
            'readme.txt',]:
        for path in glob.glob(os.path.join(TARGET_DIR,
                os.path.normpath(pattern))):
            if os.path.isdir(path):
                shutil.rmtree(path)
            else:
                os.unlink(path)
Example #17
def get_parameters():
	global host
	global port
	global thr
	global item
	optp = OptionParser(add_help_option=False,epilog="Hammers")
	optp.add_option("-q","--quiet", help="set logging to ERROR",action="store_const", dest="loglevel",const=logging.ERROR, default=logging.INFO)
	optp.add_option("-s","--server", dest="host",help="attack to server ip -s ip")
	optp.add_option("-p","--port",type="int",dest="port",help="-p 80 default 80")
	optp.add_option("-t","--turbo",type="int",dest="turbo",help="default 135 -t 135")
	optp.add_option("-h","--help",dest="help",action='store_true',help="help you")
	opts, args = optp.parse_args()
	logging.basicConfig(level=opts.loglevel,format='%(levelname)-8s %(message)s')
	if opts.help:
		usage()
	if opts.host is not None:
		host = opts.host
	else:
		usage()
	if opts.port is None:
		port = 80
	else:
		port = opts.port
	if opts.turbo is None:
		thr = 135
	else:
		thr = opts.turbo
Example #18
    def test_config(self, job_list_path):
        # See comment at top of file about zuul imports
        import zuul.scheduler
        import zuul.launcher.gearman
        import zuul.trigger.gerrit

        logging.basicConfig(level=logging.DEBUG)
        self.sched = zuul.scheduler.Scheduler()
        self.sched.registerReporter(None, 'gerrit')
        self.sched.registerReporter(None, 'smtp')
        self.sched.registerTrigger(None, 'gerrit')
        self.sched.registerTrigger(None, 'timer')
        layout = self.sched.testConfig(self.config.get('zuul',
                                                       'layout_config'))
        if not job_list_path:
            return False

        failure = False
        path = os.path.expanduser(job_list_path)
        if not os.path.exists(path):
            raise Exception("Unable to find job list: %s" % path)
        jobs = set()
        for line in open(path):
            v = line.strip()
            if v:
                jobs.add(v)
        for job in sorted(layout.jobs):
            if job not in jobs:
                print("Job %s not defined" % job)
                failure = True
        return failure
Example #19
def main(args):
    django.setup()
    logger = setup_error_handler()
    parser = argparse.ArgumentParser()
    parse_lock = None

    def list_logging_levels():
        """Give a summary of all available logging levels."""
        return sorted(list(VERBOSITY_LEVELS.keys()),
                      key=lambda x: VERBOSITY_LEVELS[x])

    parser.add_argument('--verbosity', choices=list_logging_levels(),
                        help='logging level', default='info')

    args = vars(parser.parse_args())

    logging.basicConfig(level=VERBOSITY_LEVELS[args['verbosity']])

    mail = message_from_file(sys.stdin)
    try:
        parse_lock = lock()
        return parse_mail(mail)
    except:
        if logger:
            logger.exception('Error when parsing incoming email', extra={
                'mail': mail.as_string(),
            })
        raise
    finally:
        release(parse_lock)
Example #20
def init_logger(location, config):
    """ Initialize the logger with settings from config. """

    class NullHandler(logging.Handler):
        def emit(self, record):
            pass

    if get_conf(config, 'Logging.enabled', False) == False:
        handler = NullHandler()
        logging.getLogger("dagobah").addHandler(handler)
        return

    if get_conf(config, 'Logging.logfile', 'default') == 'default':
        path = os.path.join(location, 'dagobah.log')
    else:
        path = config['Logging']['logfile']

    level_string = get_conf(config, 'Logging.loglevel', 'info').upper()
    numeric_level = getattr(logging, level_string, None)

    logging.basicConfig(filename=path, level=numeric_level)

    root = logging.getLogger()
    stdout_logger = logging.StreamHandler(sys.stdout)
    stdout_logger.setLevel(logging.INFO)
    root.addHandler(stdout_logger)

    print('Logging output to %s' % path)
    logging.info('Logger initialized at level %s' % level_string)
Example #21
def main():
  SRC_DEFAULT = '[emoji]/build/compressed_pngs'
  PREFIX_DEFAULT = 'android_'

  parser = argparse.ArgumentParser()
  parser.add_argument(
      '-s', '--src_dir', help='source images (default \'%s\')' % SRC_DEFAULT,
      default=SRC_DEFAULT, metavar='dir')
  parser.add_argument(
      '-d', '--dst_dir', help='destination directory', metavar='dir',
      required=True)
  parser.add_argument(
      '-p', '--prefix', help='prefix for thumbnail (default \'%s\')' %
      PREFIX_DEFAULT, default=PREFIX_DEFAULT, metavar='str')
  parser.add_argument(
      '-c', '--crop', help='crop images (will automatically crop if '
      'src dir is the default)', action='store_true')
  parser.add_argument(
      '-v', '--verbose', help='write log output', metavar='level',
      choices='warning info debug'.split(), const='info',
      nargs='?')
  args = parser.parse_args()

  if args.verbose is not None:
    logging.basicConfig(level=getattr(logging, args.verbose.upper()))

  crop = args.crop or (args.src_dir == SRC_DEFAULT)
  create_thumbnails_and_aliases(
      args.src_dir, args.dst_dir, crop, args.prefix)
Example #22
def Main():
  code = 0
  parser = BuildOptions()
  (options, args) = parser.parse_args()
  ValidateOptions(options)
  test_suite = TestSuite(options.tests,
                         options.strict_only,
                         options.non_strict_only,
                         options.unmarked_default,
                         options.print_handle)
  test_suite.Validate()
  if options.loglevel == 'debug':
    logging.basicConfig(level=logging.DEBUG)
  elif options.loglevel == 'info':
    logging.basicConfig(level=logging.INFO)
  elif options.loglevel == 'warning':
    logging.basicConfig(level=logging.WARNING)
  elif options.loglevel == 'error':
    logging.basicConfig(level=logging.ERROR)
  elif options.loglevel == 'critical':
    logging.basicConfig(level=logging.CRITICAL)
  if options.cat:
    test_suite.Print(args)
  elif options.list_includes:
    test_suite.ListIncludes(args)
  else:
    code = test_suite.Run(options.command, args,
                          options.summary or options.full_summary,
                          options.full_summary,
                          options.logname,
                          options.junitname)
  return code
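The if/elif chain over options.loglevel in Main() can be collapsed into the getattr() lookup that Examples #13 and #28 use; a minimal equivalent sketch:

import logging

def configure_from_name(loglevel):
    # 'debug', 'info', 'warning', 'error', 'critical' -> numeric constant
    numeric_level = getattr(logging, loglevel.upper(), None)
    if isinstance(numeric_level, int):
        logging.basicConfig(level=numeric_level)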
Example #23
def deploy():
    assert isinstance(application.options, OptionsCore), 'Invalid application options %s' % application.options
    if not application.options.start: return
    try:
        if not os.path.isfile(application.options.configurationPath):
            print('The configuration file "%s" doesn\'t exist, create one by running the application '
                  'with the "-dump" option' % application.options.configurationPath, file=sys.stderr)
            sys.exit(1)
        with open(application.options.configurationPath, 'r') as f: config = load(f)

        assembly = application.assembly = ioc.open(aop.modulesIn('__setup__.**'), config=config)
        assert isinstance(assembly, Assembly), 'Invalid assembly %s' % assembly
        
        import logging
        logging.basicConfig(format=format())
        for name in warning_for(): logging.getLogger(name).setLevel(logging.WARN)
        for name in info_for(): logging.getLogger(name).setLevel(logging.INFO)
        for name in debug_for(): logging.getLogger(name).setLevel(logging.DEBUG)
        
        try: assembly.processStart()
        finally: ioc.deactivate()
    except SystemExit: raise
    except (SetupError, ConfigError):
        print('-' * 150, file=sys.stderr)
        print('A setup or configuration error occurred while deploying, try to rebuild the application properties by '
              'running the application with the "configure components" options', file=sys.stderr)
        traceback.print_exc(file=sys.stderr)
        print('-' * 150, file=sys.stderr)
    except:
        print('-' * 150, file=sys.stderr)
        print('A problem occurred while deploying', file=sys.stderr)
        traceback.print_exc(file=sys.stderr)
        print('-' * 150, file=sys.stderr)
Example #24
def main():
    args = parse_args()

    state = getattr(experiments.nmt, args.proto)()
    if args.state:
        if args.state.endswith(".py"):
            state.update(eval(open(args.state).read()))
        else:
            with open(args.state) as src:
                state.update(cPickle.load(src))
    for change in args.changes:
        state.update(eval("dict({})".format(change)))

    logging.basicConfig(level=getattr(logging, state['level']), format="%(asctime)s: %(name)s: %(levelname)s: %(message)s")
    logger.debug("State:\n{}".format(pprint.pformat(state)))

    rng = numpy.random.RandomState(state['seed'])
    enc_dec = RNNEncoderDecoder(state, rng, skip_init=args.skip_init, compute_alignment=True)
    enc_dec.build()
    lm_model = enc_dec.create_lm_model()

    logger.debug("Load data")
    train_data = get_batch_iterator(state)
    logger.debug("Compile trainer")
    algo = eval(state['algo'])(lm_model, state, train_data)
    logger.debug("Run training")
    main = MainLoop(train_data, None, None, lm_model, algo, state, None,
            reset=state['reset'],
            hooks=[RandomSamplePrinter(state, lm_model, train_data)]
                if state['hookFreq'] >= 0
                else None)
    if state['reload']:
        main.load()
    if state['loopIters'] > 0:
        main.main()
Example #25
def main():
	parser = argparse.ArgumentParser(description='inb4404')
	parser.add_argument('thread', nargs=1, help='url of the thread')
	args = parser.parse_args()

	logging.basicConfig(level=logging.INFO, format='[%(asctime)s] %(message)s', datefmt='%I:%M:%S %p')

	workpath = os.path.dirname(os.path.realpath(__file__))
	board = ''.join(args.thread).split('/')[3]
	thread = ''.join(args.thread).split('/')[5].split('#')[0]

	directory = os.path.join(workpath, 'downloads', board, thread)
	if not os.path.exists(directory):
		os.makedirs(directory)

	os.chdir(directory)

	while len(args.thread):
		for t in args.thread:
			try:
				for link, img in re.findall('(\/\/i.4cdn.org/\w+\/(\d+\.(?:jpg|png|gif|webm)))', load(t)):
					if not os.path.exists(img):
						log.info(img)
						data = load('https:' + link)
						with open(img, 'wb') as f:
							f.write(data)
			except urllib2.HTTPError, err:
				log.info('%s 404\'d', t)
				args.thread.remove(t)
				continue
			except (urllib2.URLError, httplib.BadStatusLine, httplib.IncompleteRead):
				log.warning('something went wrong')
Example #26
def main():
    """
    Application entry point
    """
    logging.basicConfig(level=logging.DEBUG)
    # create the application and the main window
    app = QtGui.QApplication(sys.argv)
    window = QtGui.QMainWindow()
    
    # setup ui
    ui = example_ui.Ui_MainWindow()
    ui.setupUi(window)
    window.setWindowTitle("QDarkStyle example")

    # tabify dock widgets to show bug #6
    window.tabifyDockWidget(ui.dockWidget1, ui.dockWidget2)

    # setup stylesheet
    app.setStyleSheet(qdarkstyle.load_stylesheet(pyside=True))

    # auto quit after 2s when testing on travis-ci
    if "--travis" in sys.argv:
        QtCore.QTimer.singleShot(2000, app.exit)

    # run
    window.show()
    app.exec_()
Example #27
def main():

    parser = argparse.ArgumentParser(description='Deploy interface.')
    parser.add_argument('--version', action='version', version=APP + " " + VERSION)
    parser.add_argument('--logging', dest='log_level', action='store',
                        default='DEBUG', choices=['DEBUG', 'INFO'],
                        help='Minimum level of logging message to show. Default (DEBUG)')

    subparsers = parser.add_subparsers(dest='cmd')

    parser_a = subparsers.add_parser('install',
                                     help='Run install')
    parser_a.set_defaults(func=install)

    parser_a = subparsers.add_parser('uninstall',
                                     help='Run uninstall')
    parser_a.set_defaults(func=uninstall)

    if len(sys.argv) == 1:
        error_msg = "ERROR: No arguments supplied!"
        print(error_msg, file=sys.stderr)
        parser.print_help()
        sys.exit(1)

    args = parser.parse_args()

    logging.basicConfig(level=args.log_level,
                        format=LOGGING_FORMAT)

    args.func(args)
Example #28
def run(job_ini, concurrent_tasks=None,
        loglevel='info', hc=None, exports=''):
    """
    Run a calculation. Optionally, set the number of concurrent_tasks
    (0 to disable the parallelization).
    """
    logging.basicConfig(level=getattr(logging, loglevel.upper()))
    job_inis = job_ini.split(',')
    assert len(job_inis) in (1, 2), job_inis
    monitor = performance.Monitor('total', measuremem=True)

    if len(job_inis) == 1:  # run hazard or risk
        oqparam = readinput.get_oqparam(job_inis[0], hc_id=hc)
        if hc and hc < 0:  # interpret negative calculation ids
            calc_ids = datastore.get_calc_ids()
            try:
                hc = calc_ids[hc]
            except IndexError:
                raise SystemExit('There are %d old calculations, cannot '
                                 'retrieve the %s' % (len(calc_ids), hc))
        calc = base.calculators(oqparam, monitor)
        monitor.monitor_dir = calc.datastore.calc_dir
        with monitor:
            calc.run(concurrent_tasks=concurrent_tasks, exports=exports,
                     hazard_calculation_id=hc)
    else:  # run hazard + risk
        calc = run2(
            job_inis[0], job_inis[1], concurrent_tasks, exports, monitor)

    logging.info('Total time spent: %s s', monitor.duration)
    logging.info('Memory allocated: %s', general.humansize(monitor.mem))
    monitor.flush()
    print('See the output with hdfview %s/output.hdf5' %
          calc.datastore.calc_dir)
    return calc
Example #29
def main():
    args_file = sys.argv[1]
    args_data = open(args_file).read()
    arguments = shlex.split(args_data)
    worker = 'all'
    action = WorkItemList.display

    for arg in arguments:
        # ignore any arguments without an equals in it
        if '=' in arg:
            (key, value) = arg.split('=')
            if key == 'worker':
                worker = workers[value]
            if key == 'action':
                if value == 'create':
                    action = WorkItemList.create
                elif value == 'teardown':
                    action = WorkItemList.teardown
                elif value == 'display':
                    action = WorkItemList.display

    logging.basicConfig(level=logging.ERROR)

    action(worker)
    print(json.dumps({
        'success': True,
        'args': args_data
    }))
Example #30
    def test_cmdenv(self):
        import logging
        logging.basicConfig()
        # make sure previous environment is preserved
        os.environ['SOMETHING'] = 'foofoofoo'
        old_env = os.environ.copy()

        mr_job = MRTestCmdenv(['--runner', 'inline', '--cmdenv=FOO=bar'])
        mr_job.sandbox(stdin=BytesIO(b'foo\n'))

        results = []

        with mr_job.make_runner() as runner:
            assert isinstance(runner, InlineMRJobRunner)
            runner.run()

            for line in runner.stream_output():
                key, value = mr_job.parse_output_line(line)
                results.append((key, value))

        self.assertEqual(sorted(results),
                         [('FOO', 'bar'), ('SOMETHING', 'foofoofoo')])

        # make sure we revert back
        self.assertEqual(old_env, os.environ)
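The bare logging.basicConfig() call at the top of this test just guarantees the root logger has a handler before the job runs. Roughly what that no-argument call sets up, per the documented defaults, is sketched below:

import logging
import sys

# If the root logger has no handlers yet, basicConfig() attaches a stderr
# StreamHandler using the default '%(levelname)s:%(name)s:%(message)s' format;
# the root logger's level stays at its default of WARNING.
handler = logging.StreamHandler(sys.stderr)
handler.setFormatter(logging.Formatter(logging.BASIC_FORMAT))
logging.getLogger().addHandler(handler)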