Example #1
import pydoop.test_support as pts  # pydoop's word count test helpers


def check_results(data_in, data_out, logger):
    # Re-run the word count locally and compare it against the job output.
    local_wc = pts.LocalWordCount(data_in)
    logger.info("checking results")
    with open(data_out, 'r') as f:
        res = local_wc.check(f.read())
    if res.startswith("ERROR"):  # FIXME: change local_wc to raise an exception
        logger.error(res)
        raise RuntimeError(res)
    logger.info(res)
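The FIXME above points at LocalWordCount.check returning a status string that starts with "OK" or "ERROR" instead of raising. Until that changes, the string-to-exception conversion can be centralized in a small wrapper; this is a sketch under that assumption, not part of pydoop:

def check_or_raise(local_wc, output):
    # Sketch: assumes check() returns a string starting with "OK" or "ERROR".
    res = local_wc.check(output)
    if res.startswith("ERROR"):
        raise RuntimeError(res)
    return res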
Example #2
import logging

import pydoop.test_support as pts


def main(argv):
    logger = logging.getLogger("main")
    logger.setLevel(logging.INFO)
    input_dir = argv[1]
    output_dir = argv[2]
    logger.info("checking results")
    lwc = pts.LocalWordCount(input_dir)
    measured_res = get_res(output_dir)  # get_res: module helper (not shown)
    expected_res = lwc.expected_output
    logger.info(check(measured_res, expected_res))  # check: module helper (not shown)
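get_res and check are module helpers that are not shown here. Judging from the other examples, get_res presumably gathers the job's output files; a plausible minimal version built on pydoop's hadut.collect_output (an assumption, not the original helper):

import pydoop.hadut as hadut

def get_res(output_dir):
    # Hypothetical: concatenate the part-* files produced by the job.
    return hadut.collect_output(output_dir)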
Example #3
import logging

import pydoop.hadut as hadut
import pydoop.test_support as pts


def main():
    parser = make_parser()  # make_parser: module helper (not shown), sketched below
    opt, _ = parser.parse_args()
    logger = logging.getLogger("main")
    logger.setLevel(logging.INFO)
    logger.info("running word count")
    wc_output = run_wc(opt)  # run_wc / run_filter: module helpers (not shown)
    logger.info("running filter")
    filter_output = run_filter(opt, wc_output)
    logger.info("checking results")
    res = hadut.collect_output(filter_output)
    local_wc = pts.LocalWordCount(opt.input, min_occurrence=opt.threshold)
    logger.info(local_wc.check(res))
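make_parser is not shown; since its parse_args() result is unpacked into an (options, args) pair, it evidently builds an optparse parser. A hypothetical sketch with the two options the code reads (opt.input and opt.threshold; names and default invented for illustration):

import optparse

def make_parser():
    # Hypothetical: option names inferred from opt.input / opt.threshold above.
    parser = optparse.OptionParser()
    parser.add_option("--input", metavar="DIR", help="word count input directory")
    parser.add_option("--threshold", type="int", default=10, metavar="N",
                      help="minimum occurrence count kept by the filter")
    return parser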
Example #4
import logging

import pydoop.hadut as hadut
import pydoop.test_support as pts

# make_parser, update_conf, PREFIX, CONF and HADOOP_CONF_DIR are defined
# elsewhere in the module.


def main(argv):
    parser = make_parser()
    args = parser.parse_args(argv)
    update_conf(args)
    logger = logging.getLogger("main")
    logger.setLevel(logging.INFO)
    runner = hadut.PipesRunner(prefix=PREFIX, logger=logger)
    with open(args.pipes_exe) as f:
        pipes_code = pts.add_sys_path(f.read())
    runner.set_input(args.local_input, put=True)  # put=True: upload the local input to HDFS
    runner.set_exe(pipes_code)
    runner.run(properties=CONF, hadoop_conf_dir=HADOOP_CONF_DIR, logger=logger)
    res = runner.collect_output()
    runner.clean()
    local_wc = pts.LocalWordCount(args.local_input)
    logger.info(local_wc.check(res))
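One caveat in the sequence above: if run() or collect_output() raises, runner.clean() is never reached and the temporary HDFS directories created under PREFIX are left behind. A try/finally variant of the same calls (a sketch, not a change to the PipesRunner API) makes the cleanup unconditional:

runner = hadut.PipesRunner(prefix=PREFIX, logger=logger)
try:
    runner.set_input(args.local_input, put=True)
    runner.set_exe(pipes_code)
    runner.run(properties=CONF, hadoop_conf_dir=HADOOP_CONF_DIR, logger=logger)
    res = runner.collect_output()
finally:
    runner.clean()  # remove the temporary HDFS work area even on failure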
Example #5
import pydoop.hadut as hadut
import pydoop.test_support as pts


def check_wordcount(mr_out_dir, stop_words=None):
    output = hadut.collect_output(mr_out_dir)
    local_wc = pts.LocalWordCount(DEFAULT_INPUT_DIR, stop_words=stop_words)  # DEFAULT_INPUT_DIR: module constant
    res = local_wc.check(output)
    return res.startswith("OK")  # FIXME: change local_wc to raise an exception
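Since it returns a boolean rather than raising, check_wordcount drops straight into a test assertion. A hypothetical usage (output directory and stop words invented for illustration):

assert check_wordcount("wc_mr_output", stop_words=["the", "a", "and"])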
Example #6
import logging
import os

import pydoop.hadut as hadut
import pydoop.hdfs as hdfs
import pydoop.test_support as pts

# PREFIX, CONF, HADOOP_CONF_DIR, DATASET_DIR, DEFAULT_SCRIPT and the helpers
# (make_parser, update_conf, create_dataset, Timer) are defined elsewhere in
# the module; a sketch of Timer follows the example.


def main(argv):

    logger = logging.getLogger("main")
    logger.setLevel(logging.DEBUG)

    with Timer() as total_time:

        parser = make_parser()
        args = parser.parse_args(argv)
        if args.dataset:
            print(args.dataset)
            create_dataset(logger, args.dataset)

        if args.script:
            piped_code_file = args.script
        else:
            piped_code_file = DEFAULT_SCRIPT

        if not os.path.exists(piped_code_file):
            raise IOError("script {0} not found".format(piped_code_file))

        with open(piped_code_file) as f:
            pipes_code = pts.add_sys_path(f.read())

        dataset = [d for d in os.listdir("dataset") if d.endswith("MB")]
        dataset.sort(key=lambda name: int(name.replace("MB", "")))  # sort by size

        logger.info(" Uploading dataset: { %s }", ', '.join(dataset))
        if not hadut.path_exists(DATASET_DIR):
            logger.info("  creating dataset folder")
            hdfs.mkdir(DATASET_DIR)

        for data_filename in dataset:
            source_path = os.path.join("dataset", data_filename)  # local copy
            dest_path = os.path.join(DATASET_DIR, data_filename)  # HDFS target

            if not hadut.path_exists(dest_path):
                logger.info(" -> uploading %s...", source_path)
                hdfs.put(source_path, dest_path)

        update_conf(args)

        results = dict()
        for data_input in dataset:

            with Timer() as t:
                runner = hadut.PipesRunner(prefix=PREFIX, logger=logger)
                logger.info("Running the script %s with data input %s..",
                            piped_code_file, data_input)
                data_input_path = os.path.join(DATASET_DIR, data_input)
                runner.set_input(data_input_path, put=False)
                runner.set_exe(pipes_code)
                runner.run(properties=CONF,
                           hadoop_conf_dir=HADOOP_CONF_DIR,
                           logger=logger)
                res = runner.collect_output()
                print(data_input_path)
                local_wc = pts.LocalWordCount(data_input_path)
                logger.info(local_wc.check(res))
                # print res
                # runner.clean()
            results[data_input] = (t.secs, t.msecs)

    print "\n\n RESULTs"
    print "=" * (len(piped_code_file) + 15)
    print " *  script: {0}".format(piped_code_file)
    print " *  mappers: {0}".format(CONF["mapred.map.tasks"])
    print " *  reducers: {0}".format(CONF["mapred.reduce.tasks"])
    print " *  dataset: [{0}]".format(",".join(dataset))
    print " *  times (input -> secs):"
    for data_input in dataset:
        print "    - {0} -> {1} secs.".format(data_input,
                                              results[data_input][0])
    print "\n => Total execution time: {0}".format(total_time.secs)
    print "=" * (len(piped_code_file) + 15)
    print "\n"