Example No. 1
def check_vfile(vfile_path: Path2, includes: str,
                args: argparse.Namespace) -> None:
    html_path = vfile_path.with_suffix(".html")
    src_path = Path2(unescape_filename(vfile_path.stem))
    if args.skip_incomplete:
        if not html_path.exists():
            print(f"Skipping {src_path}")
            return
    else:
        assert html_path.exists(), \
            f"Couldn't find HTML file for {src_path}. "\
            f"Are you sure the report is complete?"

    src_f = src_path.with_suffix("")
    src_ext = src_path.suffix
    new_filename_path = Path2(str(src_f) + "_solution" + src_ext)
    vfile_path.copyfile(args.prelude / new_filename_path)

    result = subprocess.run(["coqc"] + includes.split() +
                            [str(new_filename_path)],
                            cwd=args.prelude,
                            capture_output=True,
                            encoding='utf8')
    assert result.returncode == 0, \
        f"Returned a non-zero exit code {result.returncode}!\n"\
        f"{result.stderr}"
    print(f"Checked {src_path}")
    if args.print_stdout:
        print(f"Output:\n{result.stdout}", end="")
Example No. 2
    def set_src_filepath(self, src_dir_path):
        """
        Set one filepath to backup this file.
        Called for every file in the source directory.

        :argument src_dir_path: filesystem_walk.DirEntryPath() instance
        """
        log.debug("set_src_filepath() with: '%s'", src_dir_path)
        self.abs_src_filepath = src_dir_path.resolved_path
        log.debug(" * abs_src_filepath: %s" % self.abs_src_filepath)

        if self.abs_src_filepath is None:
            log.info("Can't resolve source path: %s", src_dir_path)
            return

        self.sub_filepath = self.abs_src_filepath.relative_to(self.abs_src_root)
        log.debug(" * sub_filepath: %s" % self.sub_filepath)

        self.sub_path = self.sub_filepath.parent
        log.debug(" * sub_path: %s" % self.sub_path)

        self.filename = self.sub_filepath.name
        log.debug(" * filename: %s" % self.filename)

        self.abs_dst_path = Path2(self.abs_dst_root, self.sub_path)
        log.debug(" * abs_dst_path: %s" % self.abs_dst_path)

        self.abs_dst_filepath = Path2(self.abs_dst_root, self.sub_filepath)
        log.debug(" * abs_dst_filepath: %s" % self.abs_dst_filepath)

        self.abs_dst_hash_filepath = Path2(
            "%s%s%s" % (self.abs_dst_filepath, os.extsep, phlb_config.hash_name)
        )
        log.debug(" * abs_dst_hash_filepath: %s" % self.abs_dst_hash_filepath)
Example No. 3
def downloading_block(sentinel_link,
                      destinationpath,
                      sentinel_chk_sum_link=None):
    from pathlib_revised import Path2
    from urllib.request import urlopen
    from urllib.error import HTTPError, URLError
    import os
    import sys

    PATH_MAX_limit = 260

    old_work_dir = os.getcwd()
    os.chdir(os.path.dirname(destinationpath))
    if len(destinationpath) > PATH_MAX_limit:
        LEN_cnd = True
        destinationfile_fin = os.path.basename(destinationpath)
        destinationfile = 'tmp'
        path_tmp = Path2(os.getcwd() + '//tmp')
        path_fin = Path2(os.getcwd() + "//" + destinationfile_fin)
    else:
        LEN_cnd = False
        destinationfile = os.path.basename(destinationpath)
        path_fin = Path2('error')

    if sentinel_chk_sum_link:
        while True:
            try:
                downloadfile_chk = urlopen(sentinel_chk_sum_link)
                check_sum = downloadfile_chk.read()
                downloadfile_chk.close()
                break
            except HTTPError as error:
                print("ERROR:", error.read())
            except URLError as error:
                print("ERROR:", error.reason)
Example No. 4
def recreateFile(fileId, domain, relativePath, root, sourceDir, logger, a_time,
                 m_time):
    '''Source file created from taking first two characters of fileID,
       using that as subfolder of source directory, and finding full name of file'''
    subFolder = fileId[0:2]
    sourceFile = sourceDir + "\\" + subFolder + "\\" + fileId
    '''Gets rid of folder slashes and replaces with backslashes, offending characters with underscores'''
    sanitizedRelPath = relativePath.replace("/", "\\")
    sanitizedRelPath = re.sub('[<>:"|?*]', '_', sanitizedRelPath)
    destFile = root + "\\" + domain + "\\" + sanitizedRelPath

    if not os.path.exists(os.path.dirname(destFile)):
        try:
            os.makedirs(os.path.dirname(destFile))
        except OSError as exc:  # Guard against race condition
            if exc.errno != errno.EEXIST:
                raise
    '''Tries to copy all the files to their recreated directory'''
    try:
        logger.debug("Trying to copy " + sourceFile + " to " + destFile)
        Path2(sourceFile).copyfile(Path2(destFile))
        logger.debug("Successfully copied " + sourceFile + " to " + destFile)
        try:
            os.utime(destFile, (a_time, m_time))
        except:
            pass  # silently fail
    except Exception as ex:
        logger.exception("Could not complete copy " + sourceFile + " to " +
                         destFile + " Exception was: " + str(ex))
Example No. 5
def add_all_backups():
    abs_dst_root = Path2(phlb_config.backup_path)
    backup_names = scandir_limited(abs_dst_root.path, limit=1)
    for dir_entry in backup_names:
        backup_name_path = Path2(dir_entry.path)
        print(("_"*79))
        print(("'%s' (path: %s)" % (backup_name_path.stem, backup_name_path.path)))
        add_backup_name(backup_name_path)
Example No. 6
def createFolder(folderPath):
    if not os.path.exists(folderPath):
        try:
            fixedFolderPath = Path2(folderPath)

            Path2(fixedFolderPath).makedirs()
        except Exception as ex:
            logging.exception("Could not make root directory: " + folderPath +
                              "\nError was: " + str(ex))
Example No. 7
    def test_already_extended(self):
        existing_path = Path2("~").expanduser()
        extended_path = existing_path.extended_path
        self.assertTrue(extended_path.startswith("\\\\?\\"))

        # An already extended path must not get the \\?\ prefix added a second time:
        extended_path2 = Path2(extended_path).extended_path
        self.assertEqual(extended_path2, "\\\\?\\%s" % existing_path)
        self.assertEqual(extended_path2.count("\\\\?\\"), 1)
Example No. 8
def main() -> None:
    util.use_cuda = False
    parser = \
        argparse.ArgumentParser(
            description="A module for exploring deep Q learning "
            "with proverbot9001")

    parser.add_argument("scrape_file")

    parser.add_argument("out_weights", type=Path2)
    parser.add_argument("environment_files", type=Path2, nargs="+")
    parser.add_argument("--proof", default=None)

    parser.add_argument("--prelude", default=".", type=Path2)

    parser.add_argument("--predictor-weights",
                        default=Path2("data/polyarg-weights.dat"),
                        type=Path2)
    parser.add_argument("--start-from", default=None, type=Path2)
    parser.add_argument("--num-predictions", default=16, type=int)

    parser.add_argument("--buffer-size", default=256, type=int)
    parser.add_argument("--batch-size", default=32, type=int)

    parser.add_argument("--num-episodes", default=256, type=int)
    parser.add_argument("--episode-length", default=16, type=int)

    parser.add_argument("--learning-rate", default=0.0001, type=float)
    parser.add_argument("--batch-step", default=50, type=int)
    parser.add_argument("--gamma", default=0.8, type=float)

    parser.add_argument("--pretrain-epochs", default=10, type=int)
    parser.add_argument("--no-pretrain", action='store_false',
                        dest='pretrain')

    parser.add_argument("--progress", "-P", action='store_true')
    parser.add_argument("--verbose", "-v", action='count', default=0)
    parser.add_argument("--log-anomalies", type=Path2, default=None)
    parser.add_argument("--log-outgoing-messages", type=Path2, default=None)

    parser.add_argument("--hardfail", action="store_true")

    parser.add_argument("--ghosts", action='store_true')
    parser.add_argument("--graphs-dir", default=Path2("graphs"), type=Path2)

    parser.add_argument("--success-repetitions", default=10, type=int)

    args = parser.parse_args()

    try:
        os.makedirs(str(args.graphs_dir))
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise

    reinforce(args)
Example No. 9
def test_rename(deep_path):
    old_file = Path2(deep_path, "old_file.txt")
    old_file.touch()

    new_file = Path2(deep_path, "new_file.txt")
    assert new_file.is_file() is False

    old_file.rename(new_file)
    assert old_file.is_file() is False
    assert new_file.is_file() is True
Example No. 10
def test_copyfile(deep_path):
    old_file = Path2(deep_path, "old_file.txt")
    with old_file.open("w") as f:
        f.write("unittests!")

    new_file = Path2(deep_path, "new_file.txt")
    assert new_file.is_file() is False
    old_file.copyfile(new_file)
    assert old_file.is_file() is True
    assert new_file.is_file() is True
    with new_file.open("r") as f:
        assert f.read() == "unittests!"
Example No. 11
    def __init__(self, dir_entry, onerror=print_func):
        """
        :param dir_entry: os.DirEntry() instance
        """
        self.dir_entry = dir_entry  # os.DirEntry() instance
        self.path = dir_entry.path  # str or bytes of the path, from: os.DirEntry().path

        self.is_symlink = dir_entry.is_symlink()
        self.is_file = dir_entry.is_file(follow_symlinks=False)
        self.is_dir = dir_entry.is_dir(follow_symlinks=False)
        self.stat = dir_entry.stat(follow_symlinks=False)

        self.path_instance = Path2(self.path)
        try:
            self.resolved_path = self.path_instance.resolve()
        except (PermissionError, FileNotFoundError) as err:
            onerror("Resolve %r error: %s" % (self.path, err))
            self.resolved_path = None
            self.resolve_error = err
        else:
            self.resolve_error = None

        if self.resolved_path is None:
            # e.g.: broken symlink under linux
            self.different_path = True
        else:
            # e.g.: a junction under windows
            # https://www.python-forum.de/viewtopic.php?f=1&t=37725&p=290429#p290428 (de)
            self.different_path = self.path_instance.path != self.resolved_path.path
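
A short sketch (not from the original sources) of feeding Path2.scandir() entries into DirEntryPath and inspecting the result:

from pathlib_revised import Path2

for dir_entry in Path2(".").scandir():
    dir_entry_path = DirEntryPath(dir_entry)
    if dir_entry_path.resolve_error is not None:
        print("unresolvable: %s (%s)" % (dir_entry_path.path, dir_entry_path.resolve_error))
    elif dir_entry_path.different_path:
        print("%s -> %s" % (dir_entry_path.path, dir_entry_path.resolved_path.path))
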
Example No. 12
def file_chunks(filepath: Path2, chunk_size: int):
    with filepath.open(mode='r') as f:
        while True:
            chunk = list(itertools.islice(f, chunk_size))
            if len(chunk) == 0:
                return
            yield chunk
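
A usage sketch for the generator above; the scrape file path is hypothetical, and each chunk is a list of at most chunk_size lines:

from pathlib_revised import Path2

for chunk in file_chunks(Path2("data/scrape.txt"), 1024):
    print("read %i lines" % len(chunk))
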
Example No. 13
def test_link(deep_path):
    old_file = Path2(deep_path, "old_file.txt")
    with old_file.open("w") as f:
        f.write("unittests!")

    new_file = Path2(deep_path, "new_file.txt")
    assert new_file.is_file() is False
    old_file.link(new_file)
    assert old_file.is_file() is True
    assert new_file.is_file() is True
    with new_file.open("r+") as f:
        assert f.read() == "unittests!"
        f.seek(0)
        f.write("new content!")
    with old_file.open("r") as f:
        assert f.read() == "new content!"
Example No. 14
    def test_home(self):
        self.assertEqual(
            Path2("~/foo").expanduser().path, os.path.expanduser("~\\foo"))

        self.assertEqual(
            Path2("~/foo").expanduser().extended_path,
            "\\\\?\\%s" % os.path.expanduser("~\\foo"))

        existing_path = Path2("~").expanduser()
        ref_path = os.path.expanduser("~")
        self.assertEqual(str(existing_path), "%s" % ref_path)
        self.assertEqual(existing_path.extended_path, "\\\\?\\%s" % ref_path)
        self.assertTrue(existing_path.is_dir())
        self.assertTrue(existing_path.exists())

        self.assertEqual(str(existing_path), str(existing_path.resolve()))
Example No. 15
def deep_path(tmp_path):
    os.chdir(tmp_path)

    deep_path = Path2(tmp_path, "A" * 255, "B" * 255)
    deep_path.makedirs()

    return deep_path
Example No. 16
def main(arg_list: List[str], bar_idx: int) -> None:
    sys.setrecursionlimit(4500)
    global predictor

    args, parser = parse_arguments(arg_list)
    predictor = get_predictor(parser, args)
    base = Path2(os.path.dirname(os.path.abspath(__file__)) + "/..")
    coqargs = ["sertop"]

    try:
        with open(args.prelude + "/_CoqProject", 'r') as includesfile:
            includes = includesfile.read()
    except FileNotFoundError:
        eprint("Didn't find a _CoqProject file in prelude dir")
        includes = ""
    if not args.output_dir.exists():
        args.output_dir.makedirs()

    context_filter = args.context_filter or dict(
        predictor.getOptions())["context_filter"]
    for filename in [details_css, details_javascript]:
        destpath = args.output_dir / filename
        if not destpath.exists():
            srcpath = base.parent / 'reports' / filename
            srcpath.copyfile(destpath)

    search_file(args, coqargs, includes, predictor, bar_idx)
Example No. 17
    def backup(self):
        # make temp file available in destination via link ;)
        temp_log_path = Path2(settings.LOG_FILEPATH)
        assert temp_log_path.is_file(), \
            "%s doesn't exist?!?" % settings.LOG_FILEPATH
        try:
            temp_log_path.link(self.path_helper.log_filepath)  # call os.link()
        except OSError as err:
            # e.g.:
            # temp is on a other drive than the destination
            log.error("Can't link log file: %s" % err)
            copy_log = True
        else:
            copy_log = False

        try:
            self._backup()
        finally:
            if copy_log:
                log.warning(
                    "copy log file from '%s' to '%s'" %
                    (settings.LOG_FILEPATH, self.path_helper.log_filepath))
                temp_log_path.copyfile(
                    self.path_helper.log_filepath)  # call shutil.copyfile()

        self.backup_run.completed = True
        self.backup_run.save()
Example No. 18
def tokenize_goals(data : StrictEmbeddedDataset, args : Namespace) \
    -> Tuple[Tokenizer, List[Sentence]]:
    if args.load_tokens and Path2(args.load_tokens).exists():
        print("Loading tokens from {}".format(args.load_tokens))
        with open(args.load_tokens, 'rb') as f:
            tokenizer = pickle.load(f)
            assert isinstance(tokenizer, Tokenizer)
    else:
        start = time.time()
        print("Picking tokens...", end="")
        sys.stdout.flush()
        subset : Sequence[EmbeddedSample]
        if args.num_relevance_samples > len(data):
            subset = data
        else:
            subset = random.sample(data, args.num_relevance_samples)
        tokenizer = make_keyword_tokenizer_relevance(
            [(goal, next_tactic) for
             prev_tactics, hypotheses, goal, next_tactic in subset],
            tokenizers[args.tokenizer], args.num_keywords, TOKEN_START, args.num_threads)
        print("{}s".format(time.time() - start))
    if args.save_tokens:
        print("Saving tokens to {}".format(args.save_tokens))
        with open(args.save_tokens, 'wb') as f:
            pickle.dump(tokenizer, f)
    if args.print_keywords:
        print("Keywords are {}".format(tokenizer.listTokens()))
    start = time.time()
    print("Tokenizing...", end="")
    sys.stdout.flush()
    tokenized_data = tokenize_data(tokenizer, data, args.num_threads)
    print("{:.2f}s".format(time.time() - start))
    return tokenizer, [goal for prev_tactics, hypotheses, goal, tactic in tokenized_data]
Example No. 19
def scandir_walk(top, skip_dirs=(), on_skip=None):
    """
    Just walk the filesystem tree top-down with os.scandir() and don't follow symlinks.
    :param top: path to scan
    :param skip_dirs: List of dir names to skip
        e.g.: "__pycache__", "temp", "tmp"
    :param on_skip: function that will be called if 'skip_dirs' match.
        e.g.:
        def on_skip(entry, pattern):
            log.error("Skip pattern %r hit: %s" % (pattern, entry.path))
    :return: yields os.DirEntry() instances
    """
    # We may not have read permission for top, in which case we can't
    # get a list of the files the directory contains.  os.walk
    # always suppressed the exception then, rather than blow up for a
    # minor reason when (say) a thousand readable directories are still
    # left to visit.  That logic is copied here.
    try:
        scandir_it = Path2(top).scandir()
    except PermissionError as err:
        log.error("scandir error: %s" % err)
        return

    for entry in scandir_it:
        if entry.is_dir(follow_symlinks=False):
            if entry.name in skip_dirs:
                on_skip(entry, entry.name)
            else:
                yield from scandir_walk(entry.path, skip_dirs, on_skip)
        else:
            yield entry
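
A sketch of driving scandir_walk(), reusing the on_skip signature given in the docstring; the top directory and skip list are made up:

def on_skip(entry, pattern):
    log.error("Skip pattern %r hit: %s" % (pattern, entry.path))

file_count = sum(1 for _ in scandir_walk("/var/backups",
                                         skip_dirs=("__pycache__", "tmp"),
                                         on_skip=on_skip))
print("found %i files" % file_count)
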
Example No. 20
def main():
    parser = \
      argparse.ArgumentParser(
          description="A module for drawing and re-drawing reinforcement "
          "learning graphs")

    parser.add_argument("predictor_weights")
    parser.add_argument("estimator_weights")
    parser.add_argument("graph_json")
    parser.add_argument("--max-term-length", default=512, type=int)

    args = parser.parse_args()

    predictor = predict_tactic.loadPredictorByFile(args.predictor_weights)
    q_estimator_name, *saved = torch.load(str(args.estimator_weights))
    if q_estimator_name == "features evaluator":
        q_estimator = features_q_estimator.FeaturesQEstimator(0, 0, 0)
    elif q_estimator_name == "polyarg evaluator":
        q_estimator = polyarg_q_estimator.PolyargQEstimator(
            0, 0, 0,
            cast(features_polyarg_predictor.FeaturesPolyargPredictor,
                 predictor))
    else:
        assert False, "Unknown estimator type: %s" % q_estimator_name

    graph = ReinforceGraph.load(args.graph_json)
    assignApproximateQScores(graph, args.max_term_length, predictor,
                             q_estimator)
    graph.draw(Path2(args.graph_json).stem)
Example No. 21
def test_open_file(deep_path):
    file_path = Path2(deep_path, "file.txt")
    with file_path.open("w") as f:
        f.write("unittests!")

    assert file_path.is_file() is True
    with file_path.open("r") as f:
        assert f.read() == "unittests!"
Example No. 22
    def test_symlink(self, tmp_path):
        os.chdir(tmp_path)

        src_file = Path2("source_file.txt")
        src_file.touch()

        dst_file = Path2("destination.txt")
        dst_file.symlink_to(src_file)

        scan_result = list(Path2(".").scandir())
        scan_result.sort(key=lambda x: x.path)
        self.assertEqual([f.path for f in scan_result],
                         ['./destination.txt', './source_file.txt'])

        for dir_entry in scan_result:
            dir_entry_path = DirEntryPath(dir_entry)
            info = dir_entry_path.pformat()
            # print(info)
            if dir_entry_path.path == "./source_file.txt":
                self.assertFalse(dir_entry_path.is_symlink)
                self.assertTrue(dir_entry_path.is_file)
            elif dir_entry_path.path == "./destination.txt":
                self.assertTrue(dir_entry_path.is_symlink)
                self.assertFalse(dir_entry_path.is_file)
            else:
                self.fail()

            self.assertEqual(dir_entry_path.resolved_path,
                             Path2(self.temp_root_path, "source_file.txt"))

        # Create a broken symlink, by deleting the source file:
        src_file.unlink()

        scan_result = list(Path2(".").scandir())
        self.assertEqual([f.path for f in scan_result], ['./destination.txt'])

        dir_entry_path = DirEntryPath(scan_result[0])
        info = dir_entry_path.pformat()
        print(info)
        self.assertEqual(dir_entry_path.path, "./destination.txt")
        self.assertEqual(dir_entry_path.resolved_path,
                         None)  # <- broken, so it can't be resolved
        self.assertIsInstance(dir_entry_path.resolve_error,
                              FileNotFoundError)  # <- the error instance
        self.assertTrue(dir_entry_path.is_symlink)
        self.assertFalse(dir_entry_path.is_file)
Example No. 23
def save_checkpoints(predictor_name : str,
                     metadata : MetadataType, arg_values : Namespace,
                     checkpoints_stream : Iterable[StateType]):
    for epoch, predictor_state in enumerate(checkpoints_stream, start=1):
        epoch_filename = Path2(str(arg_values.save_file.with_suffix("")) + "-{}.dat".format(epoch))
        with epoch_filename.open(mode='wb') as f:
            print("=> Saving checkpoint at epoch {}".format(epoch))
            torch.save((predictor_name, (arg_values, metadata, predictor_state)), f)
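
For symmetry, a sketch of loading one of the checkpoints written above back in; the file name is hypothetical, and the tuple layout matches the torch.save() call:

import torch
from pathlib_revised import Path2

with Path2("polyarg-weights-3.dat").open(mode='rb') as f:
    predictor_name, (arg_values, metadata, predictor_state) = torch.load(f)
print("loaded a %r checkpoint" % predictor_name)
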
Example No. 24
def recreateFile(fileId, domain, relativePath, root, sourceDir):
    '''Source file created from taking first two characters of fileID,
       using that as subfolder of source directory, and finding full name of file'''
    subFolder = fileId[0:2]
    sourceFile = sourceDir + "\\" + subFolder + "\\" + fileId
    '''Gets rid of folder slashes and replaces with backslashes, offending characters with underscores'''
    sanitizedRelPath = relativePath.replace("/", "\\")
    sanitizedRelPath = re.sub('[<>:"/\\|?*]', '_', sanitizedRelPath)
    destFile = root + "\\" + domain + "\\" + sanitizedRelPath
    '''Tries to copy all the files to their recreated directory'''
    try:
        logging.debug("Trying to copy " + sourceFile + " to " + destFile)
        Path2(sourceFile).copyfile(Path2(destFile))
        logging.debug("Successfully copied " + sourceFile + " to " + destFile)
    except Exception as ex:
        logging.exception("Could not complete copy " + sourceFile + " to " +
                          destFile + " Exception was: " + str(ex))
Example No. 25
def test_chmod(deep_path):
    file_path = Path2(deep_path, "file.txt")
    file_path.touch()
    file_path.chmod(0o777)
    if not IS_NT:
        assert file_path.stat().st_mode == 33279  # 0o100777: regular file, mode 0o777
    file_path.chmod(0o666)
    if not IS_NT:
        assert file_path.stat().st_mode == 33206  # 0o100666: regular file, mode 0o666
Example No. 26
    def test_extended_path_hack(self):
        abs_path = Path2("c:/foo/bar/")
        self.assertEqual(str(abs_path), "c:\\foo\\bar")
        self.assertEqual(abs_path.path, "c:\\foo\\bar")
        self.assertEqual(abs_path.extended_path, "\\\\?\\c:\\foo\\bar")

        rel_path = Path2("../foo/bar/")
        self.assertEqual(str(rel_path), "..\\foo\\bar")
        self.assertEqual(rel_path.extended_path, "..\\foo\\bar")

        with self.assertRaises(FileNotFoundError) as err:
            abs_path.resolve()
        self.assertEqual(err.exception.filename, "\\\\?\\c:\\foo\\bar")
        # self.assertEqual(err.exception.filename, "c:\\foo\\bar")

        path = Path2("~").expanduser()
        path = path.resolve()
        self.assertNotIn("\\\\?\\", str(path))
Example No. 27
    def iter(self, dir_entries):
        """
        :param dir_entries: list of os.DirEntry() instances
        """
        filter = self.filter
        for entry in dir_entries:
            path = filter(Path2(entry.path))
            if path is not False:
                yield path
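
The method expects self.filter to be a callable that takes a Path2 and returns either the (possibly rewritten) path or False to drop the entry. A minimal sketch of such a filter, assuming a suffix check is all that is wanted:

def only_text_files(path):
    """Keep only *.txt entries; everything else is filtered out."""
    if path.suffix == ".txt":
        return path
    return False
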
Example No. 28
    def test_file_not_found(self):
        file_path = Path2(self.first_run_path, "sub dir A", "dir_A_file_B.txt")
        file_path.unlink()

        result = self.invoke_cli("verify", self.first_run_path)
        print(result.output)
        self.assertIn("5 File entry exist in database.", result.output)
        self.assertIn("ERROR", result.output)
        self.assertIn("Verify done.", result.output)
        self.assertIn("File not found: %s" % file_path.path, result.output)
Example No. 29
def test_rglob(deep_path):
    file_path = Path2(deep_path, "a test file.txt")
    file_path.touch()

    files = tuple(deep_path.rglob("*"))
    assert len(files) == 1

    f = files[0]
    assert f.is_file() is True
    assert f.extended_path == file_path.extended_path
Example No. 30
def main(arg_list: List[str]) -> None:
    parser = argparse.ArgumentParser(
        description=
        "Produce an index file from attempting to complete proofs using Proverbot9001."
    )
    parser.add_argument("-j",
                        "--threads",
                        dest="num_threads",
                        default=16,
                        type=int)
    parser.add_argument("--output",
                        "-o",
                        help="output data folder name",
                        default="search-report",
                        type=Path2)
    parser.add_argument('--weightsfile', default=None)
    parser.add_argument('--predictor',
                        choices=list(static_predictors.keys()),
                        default=None)
    parser.add_argument('filenames', nargs="+", help="proof file name (*.v)")
    args, unknown_args = parser.parse_known_args(arg_list)
    commit, date = get_metadata()
    base = Path2(os.path.dirname(os.path.abspath(__file__)) + "/..")

    if not args.output.exists():
        args.output.makedirs()

    with multiprocessing.pool.ThreadPool(args.num_threads) as pool:
        pool.starmap(
            functools.partial(run_search, unknown_args, args.output,
                              args.predictor, args.weightsfile),
            enumerate(args.filenames))
    file_args: Optional[argparse.Namespace] = None
    file_results: List[ReportStats] = []
    for filename in args.filenames:
        csv_args, result = read_stats_from_csv(args.output, filename)
        csv_args.debug = False
        csv_args.filename = ""
        if not file_args:
            file_args = csv_args
        else:
            assert file_args == csv_args, \
                f"File {filename} has different args than the others! "\
                f"Others args are {csv_args}, file args are {file_args}"
        file_results.append(result)
    assert file_args

    tqdm.write("Writing summary with {} file outputs.".format(
        len(file_results)))
    predictorOptions = get_predictor(parser, args).getOptions()
    write_summary(
        args, predictorOptions + [("report type", "search"),
                                  ("search width", file_args.search_width),
                                  ("search depth", file_args.search_depth)],
        commit, date, file_results)
Example No. 31
    def test_instances(self):
        self.assertIsInstance(Path2(), PosixPath2)
        self.assertIsInstance(Path2("."), PosixPath2)
        self.assertIsInstance(Path2.home(), PosixPath2)
        self.assertIsInstance(Path2.home().resolve(), PosixPath2)
Example No. 32
    def test_instances(self):
        self.assertIsInstance(Path2(), WindowsPath2)
        self.assertIsInstance(Path2("."), WindowsPath2)
        self.assertIsInstance(Path2(".").resolve(), WindowsPath2)
        self.assertIsInstance(Path2.home(), WindowsPath2)