Example No. 1
import logging
import sys

from mpi4py import MPI

# getoptions, setuplogger, Config, and processjobs are assumed to be
# provided elsewhere in the surrounding project.


def main():
    """Main entry point for the script."""

    comm = MPI.COMM_WORLD

    opts = getoptions(True)

    opts['threads'] = comm.Get_size()

    logout = "mpiOutput-{}.log".format(comm.Get_rank())

    # For MPI jobs, do something sane with logging.
    setuplogger(logging.ERROR, logout, opts['log'])

    config = Config()

    if comm.Get_size() < 2:
        logging.error("Must run MPI job with at least 2 processes")
        sys.exit(1)

    myhost = MPI.Get_processor_name()
    logging.info("Nodename: %s", myhost)

    processjobs(config, opts, comm.Get_rank(), comm)

    logging.info("Rank: %s FINISHED", comm.Get_rank())
Example No. 2
import logging
import time
from multiprocessing import Pool

# getoptions, setuplogger, Config, PcpArchiveProcessor, PcpArchiveFinder,
# LoadFileIndexUpdater, and index_resource_multiprocessing are assumed to
# be provided elsewhere in the surrounding project.


def runindexing():
    """Main script entry point."""
    opts = getoptions()
    keep_csv = opts["keep_csv"]

    setuplogger(opts['log'], opts['debugfile'], filelevel=logging.INFO)

    config = Config(opts['config'])

    logging.info("archive indexer starting")

    pool = None
    if opts['num_threads'] > 1:
        logging.debug("Using %s processes", opts['num_threads'])
        pool = Pool(opts['num_threads'])

    for resourcename, resource in config.resourceconfigs():

        if opts['resource'] in (None, resourcename,
                                str(resource['resource_id'])):
            if not resource.get('pcp_log_dir'):
                continue

            acache = PcpArchiveProcessor(resource)
            afind = PcpArchiveFinder(opts['mindate'], opts['maxdate'],
                                     opts['all'])
            if pool is not None:
                index_resource_multiprocessing(config, resource, acache, afind,
                                               pool, keep_csv)
            else:
                fast_index_allowed = bool(resource.get("fast_index", False))
                with LoadFileIndexUpdater(config, resource, keep_csv) as index:
                    for archivefile, fast_index, hostname in afind.find(
                            resource['pcp_log_dir']):
                        start_time = time.time()
                        data = acache.processarchive(
                            archivefile, fast_index and fast_index_allowed,
                            hostname)
                        parse_end = time.time()
                        if data is not None:
                            index.insert(*data)
                        db_end = time.time()
                        logging.debug(
                            "processed archive %s (fileio %ss, db insert %ss)",
                            archivefile, parse_end - start_time,
                            db_end - parse_end)

    logging.info("archive indexer complete")
    if pool is not None:
        pool.close()
        pool.join()
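The pool handling above follows the standard multiprocessing.Pool lifecycle: create the pool once up front, hand work to it while iterating, then close and join it at the end. A self-contained sketch of that same pattern (the square function is purely illustrative):

from multiprocessing import Pool

def square(x):
    return x * x

if __name__ == "__main__":
    # Create once, distribute work, then shut down cleanly,
    # mirroring the close()/join() calls in the example above.
    pool = Pool(4)
    try:
        print(pool.map(square, range(10)))
    finally:
        pool.close()
        pool.join()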
Example No. 3
import multiprocessing as mp

# getoptions, setuplogger, Config, and processjobs are assumed to be
# provided elsewhere in the surrounding project.


def main():
    """Main entry point for the script."""
    opts = getoptions(False)

    setuplogger(opts['log'])

    config = Config()

    threads = opts['threads']

    process_pool = mp.Pool(threads) if threads > 1 else None
    processjobs(config, opts, process_pool)

    if process_pool is not None:
        # wait for all processes to finish
        process_pool.close()
        process_pool.join()
Example No. 4
import logging

# getoptions, setuplogger, Config, PcpArchiveProcessor, and
# PcpArchiveFinder are assumed to be provided elsewhere in the
# surrounding project.


def runindexing():
    """Main script entry point."""
    opts = getoptions()

    setuplogger(opts['log'], opts['debugfile'], logging.DEBUG)

    config = Config(opts['config'])

    logging.info("archive indexer starting")

    for resourcename, resource in config.resourceconfigs():

        if opts['resource'] in (None, resourcename, str(resource['resource_id'])):

            acache = PcpArchiveProcessor(config, resource)
            afind = PcpArchiveFinder(opts['mindate'], opts['maxdate'])

            for archivefile in afind.find(resource['pcp_log_dir']):
                acache.processarchive(archivefile)

            acache.close()

    logging.info("archive indexer complete")
Example No. 5
from multiprocessing import Process

# getoptions, setuplogger, Config, and processjobs are assumed to be
# provided elsewhere in the surrounding project.


def main():
    """Main entry point for the script."""
    opts = getoptions(False)

    setuplogger(opts['log'])

    config = Config()

    threads = opts['threads']

    if threads <= 1:
        processjobs(config, opts, None)
        return

    proclist = []
    for procid in range(threads):
        p = Process(target=processjobs, args=(config, opts, procid))
        p.start()
        proclist.append(p)

    # Wait for every worker to finish.
    for proc in proclist:
        proc.join()
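Example No. 5 spawns one Process per worker instead of using a Pool, passing each worker its procid so that processjobs can partition the work (how the partitioning happens is internal to processjobs). A runnable sketch of the same spawn-and-join pattern with a stand-in worker function:

from multiprocessing import Process

def worker(procid, total):
    # Stand-in for processjobs(config, opts, procid): each process
    # receives its own id and the total worker count.
    print("worker {} of {} running".format(procid, total))

if __name__ == "__main__":
    nworkers = 4
    procs = [Process(target=worker, args=(i, nworkers))
             for i in range(nworkers)]
    for p in procs:
        p.start()
    # Join every worker so the parent does not exit early.
    for p in procs:
        p.join()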