Example #1
    def test_malformed_query(self) -> None:
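        # A glob containing an unterminated "[" should be rejected with an EdenError.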
        with self.assertRaises(EdenError) as ctx:
            self.client.globFiles(
                GlobParams(mountPoint=self.mount_path_bytes, globs=["adir["])
            )
        self.assertIn("unterminated bracket sequence", str(ctx.exception))
        self.assertEqual(EdenErrorType.POSIX_ERROR, ctx.exception.errorType)

        with self.assertRaises(EdenError) as ctx:
            self.client.globFiles(GlobParams(self.mount_path_bytes, ["adir["], True))
        self.assertIn("unterminated bracket sequence", str(ctx.exception))
        self.assertEqual(EdenErrorType.POSIX_ERROR, ctx.exception.errorType)
Example #2
    def assert_glob(
        self,
        globs: List[str],
        expected_matches: List[bytes],
        include_dotfiles: bool = False,
        msg: Optional[str] = None,
        commits: Optional[List[bytes]] = None,
        prefetching: bool = False,
        expected_commits: Optional[List[bytes]] = None,
        search_root: Optional[bytes] = None,
        list_only_files: bool = False,
    ) -> None:
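        # Build the GlobParams request, run it, and compare the sorted matches
        # (and, when requested, the origin hashes) against expectations.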
        params = GlobParams(
            mountPoint=self.mount_path_bytes,
            globs=globs,
            includeDotfiles=include_dotfiles,
            prefetchFiles=prefetching,
            revisions=commits,
            searchRoot=search_root,
            listOnlyFiles=list_only_files,
        )
        result = self.client.globFiles(params)
        self.assertEqual(expected_matches, sorted(result.matchingFiles), msg=msg)
        self.assertFalse(result.dtypes)

        if expected_commits:
            self.assertCountEqual(
                expected_commits, self.client.globFiles(params).originHashes, msg=msg
            )
Example #3
    def run(self, args: argparse.Namespace) -> int:
        instance, checkout, rel_path = require_checkout(args, args.repo)
        if args.repo and rel_path != Path("."):
            print(f"{args.repo} is not the root of an eden repo")
            return 1

        if args.pattern_file is not None:
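            # Patterns read from args.pattern_file are appended to the positional PATTERN list.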
            with open(args.pattern_file) as f:
                args.PATTERN += [pat.strip() for pat in f.readlines()]

        with instance.get_thrift_client_legacy() as client:
            result = client.globFiles(
                GlobParams(
                    mountPoint=bytes(checkout.path),
                    globs=args.PATTERN,
                    includeDotfiles=False,
                    prefetchFiles=not args.no_prefetch,
                    suppressFileList=args.silent,
                    prefetchMetadata=False,
                )
            )
            if not args.silent:
                for name in result.matchingFiles:
                    print(os.fsdecode(name))
        return 0
Example #4
    def run(self, args: argparse.Namespace) -> int:
        instance = get_eden_instance(args)

        if args.repo:
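            # args.repo, when given, must point at the root of an Eden checkout.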
            repo_root = self._repo_root(args.repo)
            if not repo_root:
                print(f"{args.repo} does not appear to be an eden repo")
                return 1
            if repo_root != os.path.realpath(args.repo):
                print(f"{args.repo} is not the root of an eden repo")
                return 1
        else:
            repo_root = self._repo_root(os.getcwd())
            if not repo_root:
                print("current directory does not appear to be an eden repo")
                return 1

        if args.pattern_file is not None:
            with open(args.pattern_file) as f:
                args.PATTERN += [pat.strip() for pat in f.readlines()]

        with instance.get_thrift_client() as client:
            result = client.globFiles(
                GlobParams(
                    mountPoint=os.fsencode(repo_root),
                    globs=args.PATTERN,
                    includeDotfiles=False,
                    prefetchFiles=not args.no_prefetch,
                    suppressFileList=args.silent,
                ))
            if not args.silent:
                for name in result.matchingFiles:
                    print(os.fsdecode(name))

        return 0
Example #5
    def assert_glob(
        self,
        globs: List[str],
        expected_matches: List[str],
        include_dotfiles: bool = False,
        msg: Optional[str] = None,
        commits: Optional[List[bytes]] = None,
        prefetching: bool = False,
        expected_commits: Optional[List[bytes]] = None,
    ) -> None:
        params = GlobParams(
            mountPoint=self.mount_path_bytes,
            globs=globs,
            includeDotfiles=include_dotfiles,
            prefetchFiles=prefetching,
            revisions=commits,
        )
        result = self.client.globFiles(params)
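        # globFiles returns byte paths; decode them before comparing against the expected strings.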
        path_results = (path.decode("utf-8", errors="surrogateescape")
                        for path in result.matchingFiles)
        self.assertEqual(expected_matches, sorted(path_results), msg=msg)
        self.assertFalse(result.dtypes)

        if expected_commits:
            self.assertCountEqual(expected_commits,
                                  self.client.globFiles(params).originHashes,
                                  msg=msg)
Example #6
    def test_malformed_query(self) -> None:
        with self.assertRaises(EdenError) as ctx:
            self.client.glob(self.mount_path_bytes, ["adir["])
        self.assertIn("unterminated bracket sequence", str(ctx.exception))

        with self.assertRaises(EdenError) as ctx:
            self.client.globFiles(
                GlobParams(self.mount_path_bytes, ["adir["], True))
        self.assertIn("unterminated bracket sequence", str(ctx.exception))
Example #7
    def test_globs_may_not_include_dotdot(self):
        with self.assertRaises(EdenError) as ctx:
            self.client.globFiles(
                GlobParams(self.mount_path_bytes, ["java/../java/com/**/*.java"])
            )
        self.assertEqual(
            "Invalid glob (PathComponent must not be ..): java/../java/com/**/*.java",
            str(ctx.exception),
        )
        self.assertEqual(EdenErrorType.ARGUMENT_ERROR, ctx.exception.errorType)
Example #8
    def run(self, args: argparse.Namespace) -> int:
        checkout_and_patterns = _find_checkout_and_patterns(args)

        with checkout_and_patterns.instance.get_telemetry_logger().new_sample(
                "prefetch") as telemetry_sample:
            telemetry_sample.add_string(
                "checkout", checkout_and_patterns.checkout.path.name)
            telemetry_sample.add_bool("skip_prefetch", args.no_prefetch)
            telemetry_sample.add_bool("background", args.background)
            telemetry_sample.add_bool("prefetch_metadata",
                                      args.prefetch_metadata)
            if args.pattern_file:
                telemetry_sample.add_string("pattern_file", args.pattern_file)
            if args.PATTERN:
                telemetry_sample.add_normvector("patterns", args.PATTERN)

            with checkout_and_patterns.instance.get_thrift_client_legacy(
            ) as client:
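                # Run the glob (prefetching unless args.no_prefetch is set); in
                # background or silent mode no file list is reported.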
                result = client.globFiles(
                    GlobParams(
                        mountPoint=bytes(checkout_and_patterns.checkout.path),
                        globs=checkout_and_patterns.patterns,
                        includeDotfiles=args.include_dot_files,
                        prefetchFiles=not args.no_prefetch,
                        suppressFileList=args.silent,
                        prefetchMetadata=args.prefetch_metadata,
                        background=args.background,
                        listOnlyFiles=args.list_only_files,
                    ))
                if args.background or args.silent:
                    return 0

                telemetry_sample.add_int("files_fetched",
                                         len(result.matchingFiles))

                if not args.silent:
                    if checkout_and_patterns.patterns and not result.matchingFiles:
                        _eprintln(
                            f"No files were matched by the pattern{'s' if len(checkout_and_patterns.patterns) else ''} specified.\n"
                            "See `eden prefetch -h` for docs on pattern matching.",
                        )
                    _println("\n".join(
                        os.fsdecode(name) for name in result.matchingFiles))

        return 0
Example #9
    def run(self, args: argparse.Namespace) -> int:
        checkout_and_patterns = _find_checkout_and_patterns(args)

        with checkout_and_patterns.instance.get_thrift_client_legacy(
        ) as client:
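            # searchRoot limits the glob to the subdirectory given by rel_path;
            # matches are listed but not prefetched.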
            result = client.globFiles(
                GlobParams(
                    mountPoint=bytes(checkout_and_patterns.checkout.path),
                    globs=checkout_and_patterns.patterns,
                    includeDotfiles=args.include_dot_files,
                    prefetchFiles=False,
                    suppressFileList=False,
                    searchRoot=os.fsencode(checkout_and_patterns.rel_path),
                    listOnlyFiles=args.list_only_files,
                ))
            for name in result.matchingFiles:
                _println(os.fsdecode(name))
        return 0
Example #10
    def run(self, args: argparse.Namespace) -> int:
        checkout_and_patterns = find_checkout_and_patterns(args)

        with checkout_and_patterns.instance.get_thrift_client_legacy() as client:
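            # Glob the requested patterns, prefetching matches unless args.no_prefetch is set.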
            result = client.globFiles(
                GlobParams(
                    mountPoint=bytes(checkout_and_patterns.checkout.path),
                    globs=checkout_and_patterns.patterns,
                    includeDotfiles=False,
                    prefetchFiles=not args.no_prefetch,
                    suppressFileList=args.silent,
                    prefetchMetadata=False,
                )
            )
            if not args.silent:
                for name in result.matchingFiles:
                    print(os.fsdecode(name))
        return 0
Example #11
    def assert_glob_with_dtypes(
        self,
        globs: List[str],
        expected_matches: List[Tuple[bytes, str]],
        include_dotfiles: bool = False,
        msg: Optional[str] = None,
    ) -> None:
        params = GlobParams(
            self.mount_path_bytes,
            globs,
            includeDotfiles=include_dotfiles,
            wantDtype=True,
        )
        result = self.client.globFiles(params)
        actual_results = zip(
            result.matchingFiles,
            (_dtype_to_str(dtype) for dtype in result.dtypes),
        )
        self.assertEqual(expected_matches, sorted(actual_results), msg=msg)
Example #12
    def assert_glob(
        self,
        globs: List[str],
        expected_matches: List[bytes],
        include_dotfiles: bool = False,
        msg: Optional[str] = None,
    ) -> None:
        params = GlobParams(self.mount_path_bytes, globs, include_dotfiles)
        self.assertCountEqual(expected_matches,
                              self.client.globFiles(params).matchingFiles,
                              msg=msg)

        # Also verify behavior of legacy Thrift API.
        if include_dotfiles:
            self.assertCountEqual(
                expected_matches,
                self.client.glob(self.mount_path_bytes, globs),
                msg=msg,
            )
Example #13
def make_prefetch_request(
    checkout: EdenCheckout,
    instance: EdenInstance,
    all_profile_contents: Set[str],
    enable_prefetch: bool,
    silent: bool,
    revisions: Optional[List[str]],
    predict_revisions: bool,
    background: bool,
    predictive: bool,
    predictive_num_dirs: int,
) -> Optional[Glob]:
    if predict_revisions:
        # The arc and hg commands need to be run inside the mount, so we need
        # to change the working path if it is not within the mount.
        current_path = Path.cwd()
        in_checkout = False
        try:
            # this will throw if current_path is not relative to the
            # checkout path
            checkout.get_relative_path(current_path)
            in_checkout = True
        except Exception:
            os.chdir(checkout.path)

        bookmark_to_prefetch_command = ["arc", "stable", "best", "--verbose", "error"]
        bookmarks_result = subprocess.run(
            bookmark_to_prefetch_command,
            stdin=subprocess.DEVNULL,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            env=get_environment_suitable_for_subprocess(),
        )

        if bookmarks_result.returncode:
            raise Exception(
                "Unable to predict commits to prefetch, error finding bookmark"
                f" to prefetch: {bookmarks_result.stderr}"
            )

        bookmark_to_prefetch = bookmarks_result.stdout.decode().strip("\n")

        commit_from_bookmark_command = [
            "hg",
            "log",
            "-r",
            bookmark_to_prefetch,
            "-T",
            "{node}",
        ]
        commits_result = subprocess.run(
            commit_from_bookmark_command,
            stdin=subprocess.DEVNULL,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            env=get_environment_suitable_for_subprocess(),
        )

        if commits_result.returncode:
            raise Exception(
                "Unable to predict commits to prefetch, error converting"
                f" bookmark to commit: {commits_result.stderr}"
            )

        # if we changed the working path, let's change it back to what it was
        # before
        if not in_checkout:
            os.chdir(current_path)

        raw_commits = commits_result.stdout.decode()
        # arc stable only gives us one commit, so for now this is a single
        # commit, but we might use multiple in the future.
        revisions = [re.sub("\n$", "", raw_commits)]

        if not silent:
            print(f"Prefetching for revisions: {revisions}")

    byte_revisions = None
    if revisions is not None:
        byte_revisions = [bytes.fromhex(revision) for revision in revisions]

    with instance.get_thrift_client_legacy() as client:
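        # Predictive mode asks EdenFS to choose the top directories to fetch;
        # otherwise glob the aggregated profile contents.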
        if predictive:
            predictiveParams = PredictiveFetch()
            if predictive_num_dirs > 0:
                predictiveParams.numTopDirectories = predictive_num_dirs
            return client.predictiveGlobFiles(
                GlobParams(
                    mountPoint=bytes(checkout.path),
                    includeDotfiles=False,
                    prefetchFiles=enable_prefetch,
                    suppressFileList=silent,
                    revisions=byte_revisions,
                    background=background,
                    predictiveGlob=predictiveParams,
                )
            )
        else:
            return client.globFiles(
                GlobParams(
                    mountPoint=bytes(checkout.path),
                    globs=list(all_profile_contents),
                    includeDotfiles=False,
                    prefetchFiles=enable_prefetch,
                    suppressFileList=silent,
                    revisions=byte_revisions,
                    background=background,
                )
            )