Example #1
def main():
    parser = optparse.OptionParser()

    parser.add_option("-n", "--no-log",
        action="store_true", dest="nolog",
        help="Log to stdout instead of to a log file", default=False)

    parser.add_option("-m", "--max",
        action="store", dest="max", type="int",
        help="Maximum number of videos to process", default=1)

    parser.add_option("-d", "--dryrun",
        action="store_true", dest="dryrun",
        help="Don't start new zencoder jobs or upload to gcs",
        default=False)

    options, args = parser.parse_args()

    util.setup_logging(options.nolog)

    # Make sure only one youtube-export converter is running at a time.
    with filelock.FileLock("export.lock", timeout=2):
        (success, error_ids) = YouTubeExporter.convert_missing_downloads(
            options.max, options.dryrun)

    if error_ids:
        msg = ('Skipped %d youtube-ids due to errors:\n%s\n'
               % (len(error_ids), '\n'.join(sorted(error_ids))))
        logger.warning(msg)
        # Make this part of the stdout output as well, so it gets passed
        # from cron to our email.
        print msg
    return (success, len(error_ids))
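Most of the examples in this listing call a project-specific setup_logging helper rather than configuring the logging module inline. As a point of reference, a minimal sketch of such a helper, matching the util.setup_logging(options.nolog) call in Example #1 (the default log file name and format string are assumptions):

def setup_logging(nolog=False, logfile='export.log'):
    # Hypothetical helper: log to stdout when nolog is True, otherwise to a file.
    import logging
    handler = logging.StreamHandler() if nolog else logging.FileHandler(logfile)
    handler.setFormatter(
        logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
    root = logging.getLogger()
    root.addHandler(handler)
    root.setLevel(logging.INFO)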
Example #2
def job_daemon():
    setup_logging()
    logging.info('Daemon running...')

    while True:
        queue, data = redis.blpop(['jobs', 'index-jobs'])

        if queue == 'jobs':
            query_hash = data
            logging.info('New task: {}:{}'.format(queue, query_hash))

            yara = redis.get('query:' + query_hash)
            job_id = 'job:' + query_hash

            try:
                execute_job(job_id, query_hash, yara)
            except Exception as e:
                logging.exception('Failed to execute job.')
                redis.hmset(job_id, {
                    'status': 'failed',
                    'error': str(e),
                })
        elif queue == 'index-jobs':
            path = data
            db.index(path)
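The daemon above blocks on redis.blpop across the 'jobs' and 'index-jobs' lists and expects the rule text to be stored under query:<hash>. A hedged sketch of the matching producer side using redis-py (key names follow the example; the hashing scheme and connection settings are assumptions):

import hashlib
import redis

def submit_job(yara_rule, host='localhost', port=6379):
    # Hypothetical producer: store the rule under query:<hash> and enqueue
    # the hash on the 'jobs' list that the daemon's blpop call consumes.
    r = redis.Redis(host=host, port=port, decode_responses=True)
    query_hash = hashlib.sha256(yara_rule.encode()).hexdigest()
    r.set('query:' + query_hash, yara_rule)
    r.rpush('jobs', query_hash)
    return query_hash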
Example #3
def make_app(stores_path='/tmp',
             cache_enabled=True,
             cache_type='memory',
             cache_size=10000,
             cache_host='localhost',
             cache_port=6379,
             cache_ttl=86400,
             gc_interval=86400):
    global app
    global cache

    # monkey patch bottle to increase BaseRequest.MEMFILE_MAX
    BaseRequest.MEMFILE_MAX = 1024000

    setup_logging()
    app.config['gitstores_path'] = stores_path
    app.config['gc_interval'] = gc_interval
    cache_backend = None
    if cache_type == 'memory':
        cache_backend = LocalCache(cache_size)
    elif cache_type == 'redis':
        try:
            import redis
            cache_backend = RedisCache(redis.Redis(cache_host, cache_port),
                                       cache_ttl)
        except ImportError:
            pass
    cache = QueryCache(backend=cache_backend, enabled=cache_enabled)
    if gc_interval > 0:
        t = threading.Thread(target=run_gc)
        t.setDaemon(True)
        t.start()
    return app
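make_app configures the module-level bottle application and returns it, so a caller can hand it straight to a WSGI server. A hedged usage sketch (the store path, host, and port are arbitrary):

import bottle

app = make_app(stores_path='/var/lib/stores',
               cache_type='redis',
               cache_host='localhost',
               cache_port=6379)
bottle.run(app=app, host='0.0.0.0', port=8080)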
Example #5
def main():
    """main"""
    global logger
    parser = get_argument_parser()
    add_logging_argument(parser)
    add_redis_argparse_argument(parser)
    args = parser.parse_args()
    setup_logging(args)
    logger = logging.getLogger("Report")

    if all((args.scope, args.uri, args.run)):
        # run the sorting center process
        redis = get_redis_server_from_options(args)
        for _ in report_events(
            report_trouble_events(
                uri=args.uri,
                scope=args.scope,
                redis=redis,
                wait_for_events=args.wait_for_events,
            )
        ):
            pass

    else:
        parser.print_help()
        return 1
Example #7
File: main.py Project: 112RG/Gitta
def main():
    util.setup_logging()
    logging.info('Starting Gitta')
    github = Github('test', sys.argv[1])

    for repo in github.getRepos():
        github.mirrorRepo(repo)
Example #9
def main():

    parser = argparse.ArgumentParser()
    arguments = [
        ("preprocess", preprocess_dataset,
         "Preprocess samples - cleaning/filtering of invalid data."),
        ("split", split_dataset,
         "Split dataset in separate folds for training/validation/testing."),
        ("pretrain", prepare_embeddings,
         "Precompute input representations from unlabeled/training data."),
        ("prepare_input", prepare_input,
         "Convert raw inputs to numpy compatible data types."),
        ("train", train, "Train currently selected model."),
        ("test", test, "Run available model on evaluation data.")
        # ("analyse", analyse_dataset),                 # WIP
        # ("extract_embeddings", extract_embeddings),   # WIP
    ]

    for arg, _, description in arguments:
        parser.add_argument('--{}'.format(arg),
                            action='store_true',
                            help=description)

    params = parser.parse_args()
    args = parse_config("config.json")

    setup_logging(args)
    set_random_seed(args)

    for arg, fun, _ in arguments:
        if hasattr(params, arg) and getattr(params, arg):
            logging.info("Performing {} operation..".format(arg))
            fun(args)
Example #10
def main():
    """main"""
    parser = get_argument_parser()
    add_logging_argument(parser)
    add_redis_argparse_argument(parser)
    args = parser.parse_args()
    setup_logging(args)

    if all((args.purge_scope, args.uri, args.scope)):
        purge_scope(uri=args.uri, scope=args.scope)

    if args.purge_redis and args.redis_server:
        purge_redis(get_redis_server_from_options(args))

    if args.import_file:
        # import events from file
        if args.import_file == "-":
            input_file = sys.stdin
        else:
            input_file = open(args.import_file, "r")

        import_events(uri=args.uri, scope=args.scope, input_file=input_file)
        return 0
    else:
        parser.print_help()
        return 1
Example #11
def main(argv=None) -> int:
    """read command line arguments, configure application and run command
    specified on command line"""
    args = parse_arguments(argv or sys.argv[1:])
    setup_logging(args)

    show_system_info()

    app = QtWidgets.QApplication(sys.argv)

    #with open(os.path.join(APP_DIR, STYLESHEET)) as f:
    #    app.setStyleSheet(f.read())

    ex = track_ui()
    ex.show()

    for s in (signal.SIGABRT, signal.SIGINT, signal.SIGSEGV, signal.SIGTERM):
        signal.signal(s, lambda signal, frame: sigint_handler(signal, ex))

    # catch the interpreter every now and then to be able to catch
    # signals
    timer = QtCore.QTimer()
    timer.start(200)
    timer.timeout.connect(lambda: None)

    sys.exit(app.exec_())
Example #12
def job_daemon():
    setup_logging()
    logging.info('Daemon running...')

    for extractor in config.METADATA_EXTRACTORS:
        extractor.set_redis(redis)

    while True:
        queue, data = redis.blpop(['queue-search', 'queue-index', 'queue-metadata', 'queue-yara'])

        if queue == 'queue-search':
            job_hash = data
            logging.info('New task: {}:{}'.format(queue, job_hash))

            try:
                execute_search(job_hash)
            except Exception as e:
                logging.exception('Failed to execute job.')
                redis.hmset('job:' + job_hash, {
                    'status': 'failed',
                    'error': str(e),
                })
        elif queue == 'queue-yara':
            job_hash, file_path = data.split(':', 1)
            try:
                execute_yara(job_hash, file_path)
            except Exception as e:
                logging.exception('Failed to execute yara match.')
                redis.hmset('job:' + job_hash, {
                    'status': 'failed',
                    'error': str(e),
                })
        elif queue == 'queue-metadata':
            job_hash, file_path = data.split(':', 1)
            execute_metadata(job_hash, file_path)
Example #13
def main():
    """main"""
    parser = get_argument_parser()
    add_logging_argument(parser)
    args = parser.parse_args()
    setup_logging(args)

    if args.test_simulator:
        # read the scope/stream from uri
        simulator = Simulator(
            simulated_run_time=args.simulated_run_time,
            intake_run_time=args.intake_run_time,
            package_count=args.package_count,
            simulated_start_time=int(time.time()),
            lost_package_count=args.lost_package_count,
            delayed_package_count=args.delayed_package_count,
        )
        for event in simulator.event_source():
            if args.json_output:
                sys.stdout.write("%s\n" % json.dumps(event))
            else:
                print("%r" % event)

    else:
        parser.print_help()
Example #14
    def setUpClass(cls):
        manifest_path = '{}/{}/resources/AndroidManifest.xml'.format(
            config.root_dir, 'tests')
        copy_manifest_path = '{}/{}/resources/AndroidManifest.xml_copy'.format(
            config.root_dir, 'tests')

        # shutil.copyfile(copy_manifest_path, manifest_path)

    def setUp(self):
        util.setup_logging()
Example #15
    def __init__(self):
        signal.signal(signal.SIGINT, self.exit_gracefully)
        signal.signal(signal.SIGTERM, self.exit_gracefully)

        util.setup_logging()
        logging.info("Starting up worker %s at %s" %
                     (self.worker_id, socket.gethostname()))
        time.sleep(3)
Example #16
def job_daemon() -> None:
    setup_logging()
    logging.info("Daemon running...")
    yara_queues = ["queue-yara-high", "queue-yara-medium", "queue-yara-low"]

    for extractor in config.METADATA_EXTRACTORS:
        logging.info("Plugin loaded: %s", extractor.__class__.__name__)
        extractor.set_redis(redis)

    logging.info("Daemon loaded, entering the main loop...")

    while True:
        queue, data = redis.blpop([
            "queue-search", "queue-index", "queue-metadata", "queue-commands"
        ] + yara_queues)

        if queue == "queue-search":
            job_hash = data
            logging.info("New task: {}:{}".format(queue, job_hash))

            try:
                execute_search(job_hash)
            except Exception as e:
                logging.exception("Failed to execute job.")
                redis.hmset("job:" + job_hash, {
                    "status": "failed",
                    "error": str(e)
                })

        elif queue in yara_queues:
            job_hash, file_path = data.split(":", 1)
            try:
                execute_yara(job_hash, file_path)
            except Exception as e:
                logging.exception("Failed to execute yara match.")
                redis.hmset("job:" + job_hash, {
                    "status": "failed",
                    "error": str(e)
                })

        elif queue == "queue-metadata":
            # LEGACY QUEUE
            # Exists mostly because there is no sane way to do migrations
            # for job queues in redis.
            # This is currently a "drain only" queue that will pick up any
            # pending tasks during mquery update, and will stay idle forever.
            job_hash, file_path = data.split(":", 1)
            update_metadata(job_hash, file_path, [])

        elif queue == "queue-commands":
            logging.info("Running a command: %s", data)
            resp = db.execute_command(data)
            logging.info(resp)

        if redis.set("gc-lock", "locked", ex=60, nx=True):
            collect_expired_jobs()
Example #17
def main():
    opts = parse_cli()
    setup_logging(opts.quiet, opts.network_log)
    api = ErgoNodeApi(opts.server)

    # check api key provided if --send is enabled
    if not opts.api_key:
        logging.error('Option --api-key is required')
        sys.exit(1)

    address_id = read_data_file('address.id', 1)[0]
    box_id = read_data_file('box.id', 1)[0]
    token_id = read_data_file('token.id', 1)[0]

    print('Address: %s' % address_id)
    print('Box ID: %s' % box_id)
    print('Token ID: %s' % token_id)

    tx = {
        'requests': [{
            'address': address_id,
            'value': 100000,
            'assets': [{
                'tokenId': token_id,
                'amount': 1
            }]
        }],
        'fee':
        1e6,
        'inputsRaw': []
    }
    pprint(tx)

    signed_tx = api.request(
        '/wallet/transaction/generate',
        data=json.dumps(tx),
        api_key=opts.api_key,
    )
    print('[/wallet/transaction/generate]')
    pprint(signed_tx)

    res_box_id = find_box_id(signed_tx, token_id)
    print('Second Box ID: %s' % res_box_id)

    with open('box.id', 'a') as out:
        out.write('%s\n' % res_box_id)

    res = api.request(
        '/wallet/transaction/send',
        data=json.dumps(tx),
        api_key=opts.api_key,
    )
    print('[/wallet/transaction/send]')
    pprint(res)
Example #18
def serve(baker):
    global BAKER
    BAKER = baker
    BAKER.started = emit_started
    BAKER.updated = emit_updated
    BAKER.finished = emit_finished
    util.setup_logging()
    host = "0.0.0.0"
    port = 5000
    logging.info("forge initializing on http//%s:%s" % (host, port))
    setup()
    socketio.run(app, host=host, port=port)
Example #19
def main():
    opts = parse_cli()
    setup_logging(opts.quiet, opts.network_log)
    api = ErgoNodeApi(opts.server)

    # check api key provided if --send is enabled
    if not opts.api_key:
        logging.error('Option --api-key is required')
        sys.exit(1)

    with open('box.id') as inp:
        lines = inp.read().splitlines()

    if len(lines) == 1:
        logging.debug('One box ID in box.id')
        pass
    elif len(lines) != 2:
        logging.error('File box.id contains %d lines (expected 2)' %
                      (len(lines)))
        sys.exit(1)
    else:
        logging.debug('Two box IDs in box.id')
        id1, id2 = lines
        if not is_box_spent(api, id1, opts.api_key):
            logging.debug('First box ID is not spent')
            if opts.stop:
                with open('box.id', 'w') as out:
                    out.write('%s\n' % id1)  # remove 2nd line
                logging.debug('Removed second box ID')
        else:
            logging.debug('! First box ID is spent')
            if not is_box_spent(api, id2, opts.api_key):
                logging.debug('Second box ID is not spent')
                res = api.request(
                    '/wallet/boxes/unspent'
                    '?minConfirmations=30&minInclusionHeight=0',
                    api_key=opts.api_key,
                )
                #print('Looking for %s' % id1)
                #for item in res:
                #    print('*', item['box']['boxId'])
                found = any(x['box']['boxId'] == id1 for x in res)
                if found:
                    with open('box.id', 'w') as out:
                        out.write('%s\n' % id2)  # remove 1st line
                    logging.debug('Removed first box ID')
                else:
                    logging.error('Not enough confirmations yet')
                    sys.exit(1)
            else:
                logging.error('Both 1st and 2nd box IDs are spent')
                sys.exit(1)
Example #20
def main():
    try:
        # Setup logging
        util.setup_logging()
        logger = logging.getLogger(__name__)

        # Connect to Reddit
        auth = json.load(open('auth.json'))
        reddit = praw.Reddit(username=auth['reddit']['username'],
                             password=auth['reddit']['password'],
                             client_id=auth['reddit']['app_id'],
                             client_secret=auth['reddit']['secret'],
                             user_agent=auth['reddit']['user_agent'])
        logger.info("Connected to Reddit as: " + str(reddit.user.me()))

        # SSH tunnel to database
        tunnel = ds.create_ssh_tunnel(auth['database']['host'],
                                      int(auth['database']['port']),
                                      auth['database']['ssh_user'],
                                      auth['database']['ssh_pw'])

        # Connect to datastore
        db = ds.connect_datastore('127.0.0.1', tunnel.local_bind_port,
                                  auth['database']['name'],
                                  auth['database']['user'],
                                  auth['database']['password'])

        # Check once for new comics before streaming new data forever
        xkcd_updater.run(db)

        # Run bot
        reference_scanner.run(reddit, db)

    except Exception as err:
        logger.error('Caught exception: %s', str(err), exc_info=True)

        msg = MIMEText(
            f'Forgive me creator, for I have been slain. My death was caused by:\n\n{err}'
        )
        msg['Subject'] = 'xkcd_stats_bot has died'
        msg['From'] = '*****@*****.**'
        msg['To'] = '*****@*****.**'
        s = smtplib.SMTP('localhost')
        s.send_message(msg)
        s.quit()

        os.kill(os.getpid(), 9)

    tunnel.stop()
Example #21
    def __init__(self, phone_number):
        self.connected = False
        self.phone_number = phone_number

        setup_logging(phone_number)

        self.logger = logging.getLogger(__name__)

        environment = get_env('env')
        rollbar_key = get_env('rollbar_key')

        self.yowsup_env = get_env('yowsup_env', False, 's40')

        # initialize rollbar for exception reporting
        rollbar.init(rollbar_key, environment)
Example #23
def list_imports():
    import optparse
    parser = optparse.OptionParser(__doc__.strip())

    parser.add_option('-I', '--ignore', dest='ignores', action='append',
                      default=def_ignores,
                      help="Add the given directory name to the list to be ignored.")

    parser.add_option('-u', '--unified', action='store_true',
                      help="Just output the unique set of dependencies found, "
                      "in no particular order, without the filenames.  The default "
                      "is to output all imports, in order of appearance, along with "
                      "the filename and line number.")

    parser.add_option('-v', '--verbose', action='count', default=0,
                      help="Output input lines as well.")

    opts, args = parser.parse_args()
    setup_logging(opts.verbose)

    if not args:
        logging.warning("Searching for files from root directory.")
        args = ['.']

    info = logging.info

    if opts.unified:
        all_symnames = set()
        for fn in iter_pyfiles(args, opts.ignores):
            all_symnames.update(x[0] for x in
                                find_imports(fn, opts.verbose, opts.ignores))
        for symname in sorted(all_symnames):
            print symname
    else:
        for fn in iter_pyfiles(args, opts.ignores):
            if opts.verbose:
                lines = list(open(fn, 'rU'))
            for symname, lineno, islocal in find_imports(fn,
                                                         opts.verbose,
                                                         opts.ignores):
                print '%s:%d: %s' % (fn, lineno, symname)
                if opts.verbose:
                    for no in xrange(lineno-1, len(lines)):
                        l = lines[no].rstrip()
                        print '   %s' % l
                        if l[-1] != '\\':
                            break
                    print
Example #24
def generate(write_loc=None, logger=None):
    if logger is None:
        logger = util.setup_logging('console')
    settings = util.get_settings()
    if write_loc is None:
        write_loc = os.path.join(settings.generated_root, 'conserver.cf')

    logger.info("generate: started")
    try:
        logger.debug("generate: pulling data")
        db = None
        try:
            db = transactional.DBSession(transactional.DB_URI)
        except Exception, ex:
            raise
        results = transactional.get_console_hnodes(db)
        conserves = {}
        for result in results:
            console_props = {}
            console_props['id'] = result['id']
            console_props['fqdn'] = result['handle']
            props = transactional.get_console_props(db, result['id'])
            console_props['host'] = props['host']
            console_props['include'] = props['include']
            console_props['baud'] = props['baud']
            conserves[result['id']] = console_props

        hnodes_with_consoles = {}
        results = transactional.get_hnodes_with_consoles(db)
        for result in results:
            hnodes_with_consoles[result['handle']] = \
                    {'conserver': conserves[result['id']], 
                     'port': result['console_port']}
Example #25
def main() -> None:
    """Spawns a new agent process. Use argv if you want to use a different
    group_id (it's `default` by default)
    """
    setup_logging()
    if len(sys.argv) > 1:
        agent_group_id = sys.argv[1]
    else:
        agent_group_id = "default"

    logging.info("Agent [%s] running...", agent_group_id)

    db = Database(config.REDIS_HOST, config.REDIS_PORT)
    agent = Agent(agent_group_id, config.BACKEND, db)

    agent.main_loop()
Example #26
def main(argv=None) -> int:
    """read command line arguments, configure application and run command
    specified on command line"""
    args = parse_arguments(argv or sys.argv[1:])
    setup_logging(args)

    show_system_info()

    args = sys.argv[1:]

    if args == []:
        print('no command provided')
        return
    elif args == ['quit']:
        request = {'type': 'quit'}

    elif args == ['version']:
        request = {'type': 'version'}

    elif args == ['apps']:
        request = {'type': 'apps'}

    elif args == ['current']:
        request = {'type': 'current'}

    elif args == ['rules']:
        request = {'type': 'rules'}

    elif args == ['save']:
        request = {'type': 'save'}

    elif args == ['help']:
        print(['quit', 'version', 'apps', 'current', 'rules'])
        sys.exit()

    else:
        raise Exception('command not handled: %s' % args)

    try:
        result = send_request(request)
        handle_result(result)
    except zmq.ZMQError as e:
        log.error(e)
        return
    except KeyboardInterrupt:
        log.info("got keyboard interrupt - exit")
Example #27
 def __init__(self, work_dir):
     self.work_dir = work_dir
     self.logfile = os.path.join(self.work_dir, 'voldemort.log')
     util.setup_logging(self.logfile, logging.DEBUG)
     log.info('Voldemort working at %s' %self.work_dir)
     self.config = config.load_config(self.work_dir)
     template.setup_template_dirs(self.config.layout_dirs)
     template.setup_filters()
     # ignore the following directories
     self.ignored_items = [ 
                             self.config.posts_dir,
                             self.config.site_dir,
                             self.logfile,
                             os.path.join(self.work_dir, '.git'),
                             os.path.join(self.work_dir, '.DS_Store')
                          ] + self.config.layout_dirs
     log.debug('The following list of directories/files will be ignored: %s'
                         %', '.join(self.ignored_items))
Example #28
def activate(generated_root=None, logger=None):
    if logger is None:
        logger = util.setup_logging('console')
    conf_file = os.path.join(generated_root, 'conserver.cf')
    shutil.copy(conf_file, '/etc/conserver.cf')
    try:
        util.check_call(['/etc/init.d/conserver', 'restart'])
    except (RuntimeError, SystemExit):
        raise
Example #29
def job_daemon() -> None:
    setup_logging()
    logging.info("Daemon running...")
    yara_queues = ["queue-yara-high", "queue-yara-medium", "queue-yara-low"]

    for extractor in config.METADATA_EXTRACTORS:
        extractor.set_redis(redis)

    while True:
        queue, data = redis.blpop(
            ["queue-search", "queue-index", "queue-metadata", "queue-commands"]
            + yara_queues
        )

        if queue == "queue-search":
            job_hash = data
            logging.info("New task: {}:{}".format(queue, job_hash))

            try:
                execute_search(job_hash)
            except Exception as e:
                logging.exception("Failed to execute job.")
                redis.hmset(
                    "job:" + job_hash, {"status": "failed", "error": str(e)}
                )

        elif queue in yara_queues:
            job_hash, file_path = data.split(":", 1)
            try:
                execute_yara(job_hash, file_path)
            except Exception as e:
                logging.exception("Failed to execute yara match.")
                redis.hmset(
                    "job:" + job_hash, {"status": "failed", "error": str(e)}
                )

        elif queue == "queue-metadata":
            job_hash, file_path = data.split(":", 1)
            execute_metadata(job_hash, file_path)

        elif queue == "queue-commands":
            logging.info("Running a command: %s", data)
            resp = db.execute_command(data)
            logging.info(resp)
Example #30
def initialize_ui_server(config_global, secret_key=None):
    config = config_global["ui_server"]

    # connect to the database
    conn = sqlite3.connect(config['db_file'], isolation_level=None)
    conn.execute(
        "create table if not exists states (id integer primary key autoincrement, state text not null, state_hash blob not null)"
    )
    conn.execute("create index if not exists hash_idx on states(state_hash)")
    conn.execute(
        "create table if not exists names (id integer primary key autoincrement, name text not null, stateid integer not null)"
    )
    conn.execute("create unique index if not exists name_idx on names (name)")

    config["static_path"] = os.path.join(
        os.path.join(*os.path.split(__file__)[:-1]), 'static')
    config["db_connection"] = conn
    config["static_url_prefix"] = os.path.join(config["url_path_prefix"],
                                               "static") + "/"
    config["secret_key"] = secret_key
    config['autoescape'] = None

    # init the application instance
    application = tornado.web.Application([
        (r"/", IndexHandler),
        (r"/token", TokenHandler),
        (r"/shorten", ShortenHandler),
        (r"/expand/(.*)", ExpandHandler),
        (r"/redirect/(.*)", RedirectHandler),
        (r"/named/(.*)", NameHandler),
        (r"/dashboards", DashboardListHandler),
        (r"/static/(.*)", tornado.web.StaticFileHandler, {
            "path": config['static_path']
        }),
    ], **config)

    # start the main server
    http_server = tornado.httpserver.HTTPServer(application)
    http_server.listen(config["port"])

    # setup logging
    util.setup_logging(config_global)

    log.info('Firefly UI server started on port %d' % config["port"])
Example #31
 def __init__(self, work_dir, conf):
     self.work_dir = work_dir
     self.config = conf
     self.logfile = os.path.join(self.work_dir, 'voldemort.log')
     self.tag_template = os.path.join(self.work_dir, 'tag.html')
     util.setup_logging(self.logfile, logging.DEBUG)
     log.info('Voldemort working at %s' % self.work_dir)
     template.setup_template_dirs(self.config.layout_dirs)
     template.setup_filters()
     # ignore the following directories
     self.ignored_items = [
         self.config.posts_dir, self.config.site_dir, self.logfile,
         self.tag_template,
         os.path.join(self.work_dir, '.git'),
         os.path.join(self.work_dir, '.DS_Store')
     ] + self.config.layout_dirs
     log.debug(
         'The following list of directories/files will be ignored: %s' %
         ', '.join(self.ignored_items))
Example #32
def main():
    util.setup_logging()

    util.logger.info('Running s3.list_missing_converted_formats()')
    today_yt = s3.list_missing_converted_formats()

    # Because we will always be missing formats for new youtube videos
    # that are still in the process of being converted, we only
    # complain for videos that have been missing a converted format for
    # at least 2 days.
    yesterday_fname = '/tmp/last_missing_converted.json'
    if not os.path.exists(yesterday_fname):
        util.logger.warn("Missing %s, will not report any missing converted "
                         "formats today." % yesterday_fname)
        yesterday_yt = {}
    else:
        with open(yesterday_fname) as f:
            yesterday_yt = json.load(f)

    # This limits the output to keys/values in both yesterday *and* today.
    yt = {
        k: set(yesterday_yt[k]) & set(today_yt[k])
        for k in set(yesterday_yt) & set(today_yt)
    }

    yt = sorted("%s: %s" % (y, sorted(yt[y])) for y in yt if yt[y])
    if yt:
        util.logger.error(
            "MISSING CONVERTED FORMATS:\n%s\n\n"
            "To see where these videos are used, run\n"
            "   curl 'http://www.khanacademy.org/api/internal/videos/localized/all?format=pretty' | less"
            % "\n".join(yt))

    # Now write today's output out for tomorrow.  We only do this update
    # once a day, where we round 'day' to 20 hours.  We need to convert
    # our set to a list before we can emit it.
    json_yt = {k: sorted(v) for (k, v) in today_yt.iteritems()}
    if (not os.path.exists(yesterday_fname)
            or os.path.getmtime(yesterday_fname) + 20 * 60 * 60 < time.time()):
        util.logger.info('Saving converted-formats output for use tomorrow')
        with open(yesterday_fname, 'w') as f:
            json.dump(json_yt, f, indent=4, sort_keys=True)
Example #33
def main():
    """main"""
    parser = get_argument_parser()
    add_logging_argument(parser)
    add_redis_argparse_argument(parser)
    args = parser.parse_args()
    setup_logging(args)

    handled_params = False
    if all((args.purge_scope, args.uri, args.scope)):
        purge_scope(uri=args.uri, scope=args.scope)
        handled_params = True
        
    if args.purge_redis and args.redis_server:
        purge_redis(get_redis_server_from_options(args))
        handled_params = True

    if not handled_params:
        parser.print_help()
        return 1
Example #34
def main():
    demos = {
        'simple': simple,
        'append': append,
        'shared_handle': shared_handle,
    }

    if len(sys.argv) != 2 or sys.argv[1] not in demos:
        print(f'Usage: {sys.argv[0]} demo_name')
        print('Demo names:', ', '.join(demos.keys()))
        print('(read demo.py for details)')
        sys.exit(1)

    demo = demos[sys.argv[1]]

    setup_logging()

    temp_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'tmp')
    shutil.rmtree(temp_dir)
    os.mkdir(temp_dir)
    socket_path = temp_dir + '/server.sock'

    with ExitStack() as exit:
        # Start server
        server = SyncServer(socket_path)
        server_thread = Thread(name='Server', target=server.run)
        server_thread.start()
        exit.callback(server_thread.join)
        exit.callback(server.socket_server.shutdown)

        # Start client
        client = SyncClient(socket_path)
        client.start()
        exit.callback(client.stop)

        # Create objects
        mount = Mount('tmp', temp_dir)
        fs = FS(client, mount)

        # Run demo
        demo(fs)
Example #35
def main():
    """main"""
    parser = get_argument_parser()
    add_logging_argument(parser)
    args = parser.parse_args()
    setup_logging(args)

    if all((args.uri, args.scope, args.table_name, args.key_name)):
        # read or write to a kvt
        value = read_or_write_kvt(
            uri=args.uri,
            scope=args.scope,
            table_name=args.table_name,
            key_name=args.key_name,
            value=args.value,
        )
        print("%r" % value)
        return 0
    else:
        parser.print_help()
        return 1
Example #36
def main():
    log = util.setup_logging('convert', volume=11)
    log.info("Converting some files for you")

    flacs=[]
    for i in sys.argv[1:]:
        with open(i) as f:
            flacs.extend([x.strip() for x in f.readlines()])
    jobs = []
    for flac in flacs:
        jobs.append(FlacToMP3Job(flac))
    mp.ThreadPool(jobs).run_jobs()
Example #37
def job_daemon() -> None:
    setup_logging()
    logging.info("Daemon running...")

    for extractor in config.METADATA_EXTRACTORS:
        logging.info("Plugin loaded: %s", extractor.__class__.__name__)
        extractor.set_redis(redis)

    logging.info("Daemon loaded, entering the main loop...")

    while True:
        if try_to_do_task():
            continue

        if try_to_do_search():
            continue

        if redis.set("gc-lock", "locked", ex=60, nx=True):
            collect_expired_jobs()

        time.sleep(5)
Example #38
    def __init__(self):
        # Make app
        self.app = wx.App(False) 

        # Generate view    
        subs_list = list_subjects()
        self.view = View(None, subs=subs_list)
        if False:#not config.TESTING_MODE:
            sys.stdout=self.view.redir_out
            sys.stderr=self.view.redir_err
        setup_logging(outwin=self.view.redir_out,errwin=self.view.redir_err)

        self.tcpip = TCPIP(config.scanimage_tcpip_address)

        # Button bindings
        self.view.start_button.Bind(wx.EVT_BUTTON, self.evt_onoff)
        self.view.prepare_button.Bind(wx.EVT_BUTTON, self.evt_prepare)
        self.view.pause_button.Bind(wx.EVT_BUTTON, self.evt_pause)
        self.view.tcpip_button.Bind(wx.EVT_BUTTON, self.evt_tcpip)
        self.view.Bind(wx.EVT_CLOSE, self.evt_close)
        self.view.lighton_but.Bind(wx.EVT_BUTTON, lambda evt, temp=1: self.set_light(evt, temp))
        self.view.lightoff_but.Bind(wx.EVT_BUTTON, lambda evt, temp=0: self.set_light(evt, temp))
        self.view.puff_but.Bind(wx.EVT_BUTTON, self.evt_puff)
        self.view.deliver_but.Bind(wx.EVT_BUTTON, self.evt_deliver)
        self.view.roi_but.Bind(wx.EVT_BUTTON, self.evt_roi)
        self.view.add_sub_button.Bind(wx.EVT_BUTTON, self.evt_addsub)
        self.view.resetcam_button.Bind(wx.EVT_BUTTON, self.evt_resetcam)
        self.view.usrinput_box.Bind(wx.EVT_TEXT_ENTER, self.update_usrinput)
        self.view.slider.Bind(wx.EVT_COMMAND_SCROLL_THUMBTRACK, self.on_slide)
        self.view.ax_interactive.figure.canvas.mpl_connect('button_press_event', self.evt_interactive_click)

        # Runtime
        self.selection_pts = []
        self.selecting = False
        self.update_state(self.STATE_NULL)
        self.n_updates = 0

        # Run
        #self.view.Show()
        self.app.MainLoop()
Example #39
def main():
    util.setup_logging()

    util.logger.info('Running gcs.list_missing_converted_formats()')
    today_yt = gcs.list_missing_converted_formats()

    # Because we will always be missing formats for new youtube videos
    # that are still in the process of being converted, we only
    # complain for videos that have been missing a converted format for
    # at least 2 days.
    yesterday_fname = '/tmp/last_missing_converted.json'
    if not os.path.exists(yesterday_fname):
        util.logger.warn("Missing %s, will not report any missing converted "
                         "formats today." % yesterday_fname)
        yesterday_yt = {}
    else:
        with open(yesterday_fname) as f:
            yesterday_yt = json.load(f)

    # This limits the output to keys/values in both yesterday *and* today.
    yt = {k: set(yesterday_yt[k]) & set(today_yt[k])
          for k in set(yesterday_yt) & set(today_yt)}

    yt = sorted("%s: %s" % (y, sorted(yt[y])) for y in yt if yt[y])
    if yt:
        util.logger.error("MISSING CONVERTED FORMATS:\n%s\n\n"
                          "To see where these videos are used, run\n"
                          "   curl 'http://www.khanacademy.org/api/internal/videos/localized/all?format=pretty' | less"
                          % "\n".join(yt))

    # Now write today's output out for tomorrow.  We only do this update
    # once a day, where we round 'day' to 20 hours.  We need to convert
    # our set to a list before we can emit it.
    json_yt = {k: sorted(v) for (k, v) in today_yt.iteritems()}
    if (not os.path.exists(yesterday_fname) or
            os.path.getmtime(yesterday_fname) + 20 * 60 * 60 < time.time()):
        util.logger.info('Saving converted-formats output for use tomorrow')
        with open(yesterday_fname, 'w') as f:
            json.dump(json_yt, f, indent=4, sort_keys=True)
Example #40
def main():
    """main"""
    global logger
    parser = get_argument_parser()
    add_logging_argument(parser)
    add_redis_argparse_argument(parser)
    args = parser.parse_args()
    setup_logging(args)
    if args.sorting_center_code:
        logger = logging.getLogger("Sort Center %s" % args.sorting_center_code)
    else:
        logger = logging.getLogger()

    if all((args.sorting_center_code, args.scope, args.uri, args.run)):
        # run the sorting center process
        redis = get_redis_server_from_options(args)
        return process_sorting_center_events(
            uri=args.uri,
            scope=args.scope,
            sorting_center_code=args.sorting_center_code,
            redis=redis,
            maximum_event_count=args.maximum_event_count,
            wait_for_events=args.wait_for_events,
            mark_event_index_frequency=args.mark_event_index_frequency,
            report_lost_packages=args.report_lost_packages,
        )
    elif all((args.sorting_center_code, args.scope, args.uri, args.package_id)):
        # test retrieving events for a single package
        for event in extract_sorting_center_events_by_package_id(
            uri=args.uri,
            scope=args.scope,
            sorting_center_code=args.sorting_center_code,
            package_id=args.package_id,
        ):
            print("%r" % event)

    else:
        parser.print_help()
        return 1
Example #41
def main():
    # logger = logging.getLogger()
    util.setup_logging()
    # logger = logging.getLogger(__name__)
    # send some messages
    # logger.debug('debug message')
    # logger.info('info message')
    logging.debug('debug message')
    logging.info('info message')
    logging.warning('warn message')
    logging.error('error message')
    logging.critical('critical message')

    # inner fnc
    inner_fnc()

    # run func1
    lib1.func1()

    # run func1
    my_module.foo()
    bar = my_module.Bar()
    bar.bar()
Example #42
def initialize_data_server(config_global, secret_key=None):
    config = config_global["data_server"]

    # connect to the database to store annotation in
    # I kind of hate having the schema for this DB here, but I'm going to leave it to retain parity with ui_server.py
    db_conn = sqlite3.connect(config['db_file'], isolation_level=None)
    db_conn.execute("""
        create table if not exists annotations (
            id integer primary key autoincrement,
            type integer not null,
            description text not null,
            time float not null
        )""")
    db_conn.execute("create index if not exists time on annotations(time)")

    config['db'] = db_conn
    config["secret_key"] = secret_key

    # init the application instance
    application = tornado.web.Application([
        (r"/data", DataHandler),
        (r"/legend", GraphLegendHandler),
        (r"/title", GraphTitleHandler),
        (r"/ping", PingHandler),
        (r"/annotations", AnnotationsHandler),
        (r"/add_annotation", AddAnnotationHandler),
        (r"/sources", SourcesHandler)], **config)

    # start the main server
    http_server = tornado.httpserver.HTTPServer(application)
    http_server.bind(config["port"])
    http_server.start(0)

    # setup logging
    util.setup_logging(config_global)

    log.info('Firefly data server started on port %d' % config["port"])
Example #44
def main():
    # http://docs.python.org/2/library/argparse.html
    global logger
    parser = argparse.ArgumentParser(description='Create a server certificate using the cacerts db.')
    parser.add_argument('--loglevel', help='Specify the default logging level (optional).', choices=['debug', 'info', 'warning', 'error', 'DEBUG', 'INFO', 'WARNING', 'ERROR'], default='info')
    parser.add_argument('--logfile', help='Specify logfile name.', default='/tmp/create_servercert.log')
    parser.add_argument('--cacerts_dir', help='alternate cacerts config dir.', default='../cacerts')
    parser.add_argument('--domain', help='The domain name.', default='forj.io')
    parser.add_argument('--site', help='The name of the site.', default='')
    parser.add_argument('--password', help='Specify a password (optional).', default='changeme')
    parser.add_argument('--subject', help='Specify the certificate subject info.', default='/C=US/ST=California/L=Roseville/O=HP/OU=PDE')
    parser.add_argument('--altnames', help='Specify alternative names like "/CN=server1/CN=server2"', default='')
    args = parser.parse_args()

    util.setup_logging(args.logfile, args.loglevel)
    cacerts_dir = os.path.abspath(args.cacerts_dir)
    ca2013_dir = os.path.abspath(os.path.join(cacerts_dir, "ca2013"))
    site_name = args.site + "." + args.domain
    subject = args.subject + "/CN="+site_name

    util.validate_directory(cacerts_dir)
    util.validate_directory(ca2013_dir)
    util.validate_directory(ca2013_dir+"/private")
    util.validate_directory(ca2013_dir+"/certs")
    util.validate_directory(ca2013_dir+"/crl")
    util.validate_directory(ca2013_dir+"/newcerts")

    util.validate_file(cacerts_dir+"/openssl.cnf")
    util.validate_file(cacerts_dir+"/serial")
    util.validate_file(ca2013_dir+"/serial")

    # Creating root cert
    # Running at cacerts_dir
    copyfile("/dev/null", cacerts_dir+"/index.txt")
    print "(1)"
    util.openssl_cmd("genrsa -passout pass:"******" -des3 -out private/cakey.key 4096", "", cacerts_dir, "")
    copyfile(cacerts_dir+"/private/cakey.key", cacerts_dir+"/private/cakey.pem")
    print "(2)"
    util.openssl_cmd("req -passin pass:"******" -subj " + subject + " -new -x509 -nodes -sha1 -days 1825 -key private/cakey.key -out cacert.pem -config ./openssl.cnf", "", cacerts_dir, "")

    # Creating intermediate cert
    # Running at cacerts_dir/ca2013
    copyfile("/dev/null", ca2013_dir+"/index.txt")
    copyfile(cacerts_dir+"/openssl.cnf", ca2013_dir+"/openssl.cnf")
    print "(3)"
    util.openssl_cmd("genrsa -passout pass:"******" -des3 -out private/cakey.pem 4096", "", ca2013_dir, "")
    print "(4)"
    util.openssl_cmd("req -passin pass:"******" -subj " + subject + " -new -sha1 -key private/cakey.pem -out ca2013.csr -config ./openssl.cnf", "", ca2013_dir, "")
    print "(5)"
    util.openssl_cmd("ca -batch -extensions v3_ca -days 365 -out cacert.pem -in ca2013.csr -config openssl.cnf -key "+args.password+" -keyfile ../private/cakey.key -cert ../cacert.pem", "", ca2013_dir, "")
    copyfile(ca2013_dir+"/cacert.pem", ca2013_dir+"/chain.crt")
    file2 = open(cacerts_dir+"/cacert.pem", "rb")
    with open(ca2013_dir+"/chain.crt", "a") as myfile:
        myfile.write(file2.read())

    # Root and Intermediate certificates
    copyfile(cacerts_dir+"/cacert.pem", cacerts_dir+"/root.cer")
    copyfile(ca2013_dir+"/cacert.pem", cacerts_dir+"/intermediate.cer")

    # Permissions
    os.chmod(cacerts_dir+"/cacert.pem", 0755)
    os.chmod(cacerts_dir+"/intermediate.cer", 0755)
    os.chmod(cacerts_dir+"/root.cer", 0755)
    os.chmod(cacerts_dir+"/private/cakey.pem", 0400)
    os.chmod(cacerts_dir+"/ca2013/private/cakey.pem", 0755)
    os.chmod(cacerts_dir+"/private/cakey.key", 0755)
    os.chmod(cacerts_dir+"/ca2013/ca2013.csr", 0755)
    os.chmod(cacerts_dir+"/ca2013/cacert.pem", 0755)
    os.chmod(cacerts_dir+"/ca2013/chain.crt", 0755)
    os.chmod(cacerts_dir+"/index.txt", 0765)
    os.chmod(cacerts_dir+"/ca2013/index.txt", 0765)

    # TODO: create a recursive chown def
    uid = getpwnam('puppet').pw_uid
    gid = getpwnam('puppet').pw_gid
    os.chown(cacerts_dir+"/cacert.pem", uid, gid)
    os.chown(cacerts_dir+"/intermediate.cer", uid, gid)
    os.chown(cacerts_dir+"/root.cer", uid, gid)
    os.chown(cacerts_dir+"/private/cakey.pem", uid, gid)
    os.chown(cacerts_dir+"/ca2013/private/cakey.pem", uid, gid)
    os.chown(cacerts_dir+"/private/cakey.key", uid, gid)
    os.chown(cacerts_dir+"/ca2013/ca2013.csr", uid, gid)
    os.chown(cacerts_dir+"/ca2013/cacert.pem", uid, gid)
    os.chown(cacerts_dir+"/ca2013/chain.crt", uid, gid)
    os.chown(cacerts_dir+"/index.txt", uid, gid)
    os.chown(cacerts_dir+"/ca2013/index.txt", uid, gid)
Example #45
 def __init__(self):
     super(LoggingNetwork, self).__init__()
     self._logger = setup_logging(name='network')
Example #46
import fnmatch
import logging
import os
import re
import shutil
import signal
import subprocess
import sys
import time
import tempfile

import pydoop.hdfs as phdfs

from util import chdir, get_exec, mk_hdfs_temp_dir, setup_logging

logger = setup_logging()

GlobalConf = {
        'job_manager_mem'  : 10000,
        #'task_manager_mem' : 160000,
        #'slots'            : 16,
        'task_manager_mem' : 80000,
        'slots'            : 8,
        'props_filename'   : 'bclconverter.properties',
        'flinkpar'         : 1,
        'jnum'             : 2,
        'tasksPerNode'     : 16,
        'session_wait'     : 45,
        'seqal_nthreads'   : 8,
        'seqal_yarn_cores' : 2,
        'reference_archive': 'hs37d5.fasta.tar',
Example #47
def gendeps():
    import optparse

    parser = optparse.OptionParser(__doc__.strip())

    parser.add_option(
        "-i",
        "--internal",
        "--internal-only",
        default=0,
        action="count",
        help="Filter out dependencies that are outside of the "
        "roots of the input files. If internal is used twice, we "
        "filter down further the dependencies to the set of "
        "files that were processed only, not just to the files "
        "that live in the same roots.",
    )

    parser.add_option(
        "-e",
        "--external",
        "--external-only",
        action="store_true",
        help="Filter out dependencies to modules within the "
        "roots of the input files. This can be used to find out "
        "what external modules a package depends on, for example. "
        "Note that it does not make sense to use --internal and "
        "--external at the same time, as --internal will reject "
        "all the dependencies --external allows would output.",
    )

    parser.add_option(
        "-I",
        "--ignore",
        dest="ignores",
        action="append",
        default=def_ignores,
        help="Add the given directory name to the list to be ignored.",
    )

    parser.add_option("-v", "--verbose", action="count", default=0, help="Output more debugging information")
    parser.add_option("-q", "--quiet", action="count", default=0, help="Output less debugging information")

    parser.add_option(
        "-f",
        "--follow",
        "-r",
        "--recursive",
        action="store_true",
        help="Follow the modules depended upon and trace their dependencies. "
        "WARNING: This can be slow.  Use --internal to limit the scope.",
    )

    parser.add_option(
        "--print-roots",
        action="store_true",
        help="Only print the package roots corresponding to the input files."
        "This is mostly used for testing and troubleshooting.",
    )

    parser.add_option(
        "-d",
        "--disable-pragmas",
        action="store_false",
        dest="do_pragmas",
        default=True,
        help="Disable processing of pragma directives as strings after imports.",
    )

    parser.add_option(
        "-u", "--ignore-unused", action="store_true", help="Automatically ignore unused imports. (See sfood-checker.)"
    )

    opts, args = parser.parse_args()
    opts.verbose -= opts.quiet
    setup_logging(opts.verbose)

    if not args:
        logging.warning("Searching for files from current directory.")
        args = ["."]

    info = logging.info

    if opts.internal and opts.external:
        parser.error("Using --internal and --external at the same time does not make sense.")

    if opts.print_roots:
        inroots = find_roots(args, opts.ignores)
        for dn in sorted(inroots):
            print dn
        return

    info("")
    info("Input paths:")
    for arg in args:
        fn = realpath(arg)
        info("  %s" % fn)
        if not exists(fn):
            parser.error("Filename '%s' does not exist." % fn)

    # Get the list of package roots for our input files and prepend them to the
    # module search path to insure localized imports.
    inroots = find_roots(args, opts.ignores)
    if (opts.internal or opts.external) and not inroots:
        parser.error(
            "No package roots found from the given files or directories. "
            "Using --internal with these roots will generate no dependencies."
        )
    info("")
    info("Roots of the input files:")
    for root in inroots:
        info("  %s" % root)

    info("")
    info("Using the following import path to search for modules:")
    sys.path = inroots + sys.path
    for dn in sys.path:
        info("  %s" % dn)
    inroots = frozenset(inroots)

    # Find all the dependencies.
    info("")
    info("Processing files:")
    info("")
    allfiles = defaultdict(set)
    allerrors = []
    processed_files = set()

    fiter = iter_pyfiles(args, opts.ignores, False)
    while 1:
        newfiles = set()
        for fn in fiter:
            if fn in processed_files:
                continue  # Make sure we process each file only once.

            info("  %s" % fn)
            processed_files.add(fn)

            if is_python(fn):
                files, errors = find_dependencies(fn, opts.verbose, opts.do_pragmas, opts.ignore_unused)
                allerrors.extend(errors)
            else:
                # If the file is not a source file, we don't know how to get the
                # dependencies of that (without importing, which we want to
                # avoid).
                files = []

            # When packages are the source of dependencies, remove the __init__
            # file.  This is important because the targets also do not include the
            # __init__ (i.e. when "from <package> import <subpackage>" is seen).
            if basename(fn) == "__init__.py":
                fn = dirname(fn)

            # Make sure all the files at least appear in the output, even if it has
            # no dependency.
            from_ = relfile(fn, opts.ignores)
            if from_ is None:
                continue
            infrom = from_[0] in inroots
            if opts.internal and not infrom:
                continue
            if not opts.external:
                allfiles[from_].add((None, None))

            # Add the dependencies.
            for dfn in files:
                xfn = dfn
                if basename(xfn) == "__init__.py":
                    xfn = dirname(xfn)

                to_ = relfile(xfn, opts.ignores)
                into = to_[0] in inroots
                if (opts.internal and not into) or (opts.external and into):
                    continue
                allfiles[from_].add(to_)
                newfiles.add(dfn)

        if not (opts.follow and newfiles):
            break
        else:
            fiter = iter(sorted(newfiles))

    # If --internal is given twice, further filter the dependencies down to
    # only the files that were actually processed, not merely the files that
    # live in the same roots.
    if opts.internal >= 2:
        filtfiles = type(allfiles)()
        for from_, tolist in allfiles.iteritems():
            filtfiles[from_] = set(x for x in tolist if x in allfiles or x == (None, None))
        allfiles = filtfiles

    info("")
    info("SUMMARY")
    info("=======")

    # Output a list of the symbols that could not be imported as modules.
    reports = [
        ("Modules that were ignored because not used:", ERROR_UNUSED, logging.info),
        ("Modules that could not be imported:", ERROR_IMPORT, logging.warning),
    ]
    if opts.verbose >= 2:
        reports.append(("Symbols that could not be imported as modules:", ERROR_SYMBOL, logging.debug))

    for msg, errtype, efun in reports:
        names = set(name for (err, name) in allerrors if err is errtype)
        if names:
            efun("")
            efun(msg)
            for name in sorted(names):
                efun("  %s" % name)

    # Output the list of roots found.
    info("")
    info("Found roots:")

    found_roots = set()
    for key, files in allfiles.iteritems():
        found_roots.add(key[0])
        found_roots.update(map(itemgetter(0), files))
    if None in found_roots:
        found_roots.remove(None)
    for root in sorted(found_roots):
        info("  %s" % root)

    # Output the dependencies.
    info("")
    output_depends(allfiles)
Example #48
0
            },
            "ui_server": {
                "port": options.uiserver_port,
                "data_servers": config.get("ui_server", {}).get("data_servers", []),
                "db_file": options.uiserver_db_file,
                "url_path_prefix": options.url_path_prefix
            }
        }

    config["testing"] = options.testing
    config["data_server"]["data_sources_by_key"] = {}

    # Set up logging in a configurable manner.
    if options.loggingconf:
        config["loggingconf"] = options.loggingconf
    util.setup_logging(config)

    if options.config_file is None:
        config["config_file"] = "firefly.yaml"
    else:
        config["config_file"] = options.config_file

    if config["testing"]:
        config['data_server']['data_sources'].append('data_sources.test_data.TestData')

        # if we're testing and not behind a reverse proxy (apache), make the base
        # url / instead of /firefly to compensate for apache url rewriting
        config['ui_server']['url_path_prefix'] = '/'

        # Turn on automatic code reloading
        config["data_server"]['debug'] = True
Example #49
0
import os
import sys
import time

import rollbar

from client import Client
from util import setup_logging  # assumed import path for setup_logging


# print sys.argv
if len(sys.argv) >= 3:
	phone_number = sys.argv[1]
	timeout = int(sys.argv[2])
	debug = False
	if len(sys.argv) > 3:
		debug = sys.argv[3] == "True"

	env = os.environ['ENV']
	logger = setup_logging(phone_number, env)
	logger.info("Ready to go with %s - Debug %s" %(phone_number, debug))

	rollbar.init(os.environ['ROLLBAR_KEY'], env)

	client = Client(phone_number, logger)

	if client.account.setup:
		if not debug:
			client.connect()

		poll = True
		start = time.time()
		while (poll):
			now = time.time()
			runtime = int(now - start)			
Example #50
0
#!/usr/bin/env python

import logging
import sys
import numpy as np
import os
import tensorflow as tf
from matplotlib import pyplot as plt
from os.path import join as pj
from util import setup_logging

from conv_model import ConvModel
from env import current as env


setup_logging(logging.getLogger())
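# Configure the root logger up front so the module-level code below is logged.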


data_source = []
for f in sorted(os.listdir(env.dataset())):
    if f.endswith(".wav"):
        data_source.append(env.dataset(f))



cm = ConvModel(
	batch_size = 30000,
	filter_len = 150,
	filters_num = 100,
	target_sr = 3000,
	gamma = 1e-03,
Example #51
0
#!/usr/bin/python
import logging

import sys
sys.path.append('lib')

from stock import Stock
from util import setup_logging

setup_logging()

stock = Stock()

report = stock.build_report()

for stn in report.stns():
    logging.info("Station: %s", stn.name)
    for loc in stn.locs():
        logging.info("  Location: %s", loc.name)
        for item in loc.items():
            output = []
            output.append("    %s: %d" % (item.name, item.have))
            if item.target > 0:
                output.append("/%d" % item.target)
                output.append(" (%.1f%%)" % item.percent)
            logging.info("".join(output))
Example #52
0
import os
import sys

import rollbar

from util import error_message, setup_logging

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

from models import Account
from client import Client

if len(sys.argv) >= 3:

	timeout = int(sys.argv[1])
	debug = sys.argv[2] == "True"

	env = os.environ['ENV']
	
	logger = setup_logging("SERVICE", env)
	rollbar.init(os.environ['ROLLBAR_KEY'], env)

	try:
		logger.info("Starting service. Timeout: %s - Debug: %s" % (timeout, debug))

		url = os.environ['SQLALCHEMY_DATABASE_URI']
		_db = create_engine(url, echo=False, pool_size=15, pool_timeout=600, pool_recycle=600)
		_session = sessionmaker(bind=_db)
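		# sessionmaker() returns a session factory bound to the engine;
		# calling _session() below opens a fresh database session for the query.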

		accounts = _session().query(Account).filter_by(setup=True).all()
		logger.info("Going to load %s accounts" %len(accounts))

		for account in accounts:
			# create a logger for the client
			client_logger = setup_logging(account.phone_number, env)
Example #53
0
def main():
    # http://docs.python.org/2/library/argparse.html
    global logger
    parser = argparse.ArgumentParser(description='Create a server certificate using the cacerts db.')
    parser.add_argument('--loglevel', help='Specify the default logging level (optional).', choices=['debug', 'info', 'warning', 'error', 'DEBUG', 'INFO', 'WARNING', 'ERROR'], default='info')
    parser.add_argument('--logfile', help='Specify logfile name.', default='/tmp/create_servercert.log')
    parser.add_argument('--cacerts_dir', help='alternate cacerts config dir.', default='../cacerts')
    parser.add_argument('--domain', help='The domain name.', default='forj.io')
    parser.add_argument('--site', help='The name of the site.', default='')
    parser.add_argument('--password', help='Specify a password (optional).', default='changeme')
    parser.add_argument('--subject', help='Specify the certificate subject info.', default='/C=US/ST=California/L=Roseville/O=HP/OU=PDE')
    parser.add_argument('--altnames', help='Specify alternative names like "/CN=server1/CN=server2"', default='')
    args = parser.parse_args()

    util.setup_logging(args.logfile, args.loglevel)
    util.banner_start()
    util.logger.debug("parsed arguments")
    util.logger.info("got folder " + args.cacerts_dir)
    cacerts_dir = os.path.abspath(args.cacerts_dir)

    util.validate_directory(cacerts_dir)
    cainter_dir = os.path.abspath(os.path.join(cacerts_dir, "ca2013"))
    util.validate_directory(cainter_dir)
    cakey_pem = os.path.abspath(os.path.join(cacerts_dir, "private/cakey.pem"))
    util.validate_file(cakey_pem)

    if not args.site:
        util.logger.error("found cakey_pem")
        sys.exit(1)

    source_dir = cainter_dir
    destin_dir = os.path.join(cainter_dir, 'certs')

    # http://docs.python.org/2/library/subprocess.html#replacing-older-functions-with-the-subprocess-module
    util.openssl_cmd("test", args.site, cainter_dir, 'version')

    # pushd /cacerts/ca2013
    #
    # [ -f ~/.rnd ] && sudo rm -f ~/.rnd
    # openssl genrsa -passout pass:xxxxxxxx -des3 -out $_SITE.key 2048 -config ./openssl.cnf
    # openssl req -passin pass:xxxxxxxx -new -key $_SITE.key -out $_SITE.csr -subj "/C=US/ST=California/L=Roseville/O=HP/OU=PDE/CN=$_SITE.forj.io" -config ./openssl.cnf
    # openssl ca -passin pass:xxxxxxxx -batch -config openssl.cnf -policy policy_anything -out $_SITE.crt -infiles $_SITE.csr
    subject = args.subject + "/CN=" + args.site + "." + args.domain + args.altnames
    util.openssl_cmd("genrsa", args.site + '.' + args.domain, cainter_dir, "-passout pass:"******" -des3 2048 -config ./openssl.cnf")
    util.openssl_cmd("req", args.site + '.' + args.domain, cainter_dir, "-passin pass:"******" -new -subj " + subject + " -config ./openssl.cnf")
    # -keyfile and -cert makes the linkage to intermediate certificate
    util.openssl_cmd("ca", args.site + '.' + args.domain, cainter_dir, "-passin pass:"******" -batch -config ./openssl.cnf -policy policy_anything -keyfile ./private/cakey.pem -cert ./cacert.pem")

    # cd cainter_dir
    # mv $_SITE.key $_SITE.csr $_SITE.crt certs
    extensions = ['.key', '.csr', '.crt']

    for ext in extensions:
        util.logger.debug("relocating " + args.site + ext)
        os.rename(os.path.join(source_dir, args.site + '.' + args.domain + ext),
                  os.path.join(destin_dir, args.site + '.' + args.domain + ext))

    # this is an ssl cert, remove the ssl password on the key....
    #  openssl rsa -passin pass:xxxxxxxx -in $_SITE.key -out $_FQDN.key
    key_in = os.path.join(destin_dir, args.site + '.' + args.domain + '.key')
    key_out = os.path.join(destin_dir, args.site + '.' + args.domain + '.key2')
    util.openssl_cmd("rsa", args.site, cainter_dir, "-passin pass:"******" -in " + key_in + " -out " + key_out)
    util.logger.debug("unlink : " + key_in)
    os.unlink(key_in)
    util.logger.debug("rename : " + key_out + " -> " + key_in)
    os.rename(key_out, key_in)
Example #54
0
import logging
import my_module
import util
import os

util.setup_logging(
    default_path=os.path.join(
        os.path.dirname(__file__), '..', 'logging.json')
)


def main():
    logging.info("ok")
    logging.error("uh oh")
    c = my_module.ClassA('test')
    c.my_method()


if __name__ == "__main__":
    # execute only if run as a script
    main()