Example #1
File: cloc.py  Project: akraymund/pants
    def console_output(self, targets):
        if not self.get_options().transitive:
            targets = self.context.target_roots

        input_snapshots = tuple(
            target.sources_snapshot(scheduler=self.context._scheduler)
            for target in targets)
        input_files = set(f.path for snapshot in input_snapshots
                          for f in snapshot.files)

        # TODO: Work out a nice library-like utility for writing an argfile, as this will be common.
        with temporary_dir() as tmpdir:
            list_file = os.path.join(tmpdir, 'input_files_list')
            with open(list_file, 'w') as list_file_out:
                for input_file in sorted(input_files):
                    list_file_out.write(input_file)
                    list_file_out.write('\n')
            list_file_snapshot = self.context._scheduler.capture_snapshots(
                (PathGlobsAndRoot(
                    PathGlobs(('input_files_list', )),
                    str(tmpdir),
                ), ))[0]

        cloc_path, cloc_snapshot = ClocBinary.global_instance().hackily_snapshot(
            self.context)

        directory_digest = self.context._scheduler.merge_directories(
            tuple(s.directory_digest for s in input_snapshots + (
                cloc_snapshot,
                list_file_snapshot,
            )))

        cmd = (
            '/usr/bin/perl',
            cloc_path,
            '--skip-uniqueness',
            '--ignored=ignored',
            '--list-file=input_files_list',
            '--report-file=report',
        )

        # The cloc script reaches into $PATH to look up perl. Let's assume it's in /usr/bin.
        req = ExecuteProcessRequest(cmd, (), directory_digest,
                                    ('ignored', 'report'), (), 15 * 60, 'cloc')
        exec_result = self.context.execute_process_synchronously(
            req, 'cloc', (WorkUnitLabel.TOOL, ))

        files_content_tuple = self.context._scheduler.product_request(
            FilesContent,
            [exec_result.output_directory_digest])[0].dependencies

        files_content = {fc.path: fc.content for fc in files_content_tuple}
        for line in files_content['report'].split('\n'):
            yield line

        if self.get_options().ignored:
            yield 'Ignored the following files:'
            for line in files_content['ignored'].split('\n'):
                yield line
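All of these variants share the same underlying invocation: write the input paths to an argfile, then run the cloc Perl script with --list-file pointing at it, --report-file and --ignored redirecting output into files, and --skip-uniqueness disabling cloc's duplicate-file check. Stripped of the Pants engine machinery, the pattern can be sketched with the standard library alone; the cloc script path and the perl location below are assumptions, as they are in the tasks above.

# A minimal standalone sketch of the same cloc invocation, assuming a local
# cloc.pl script and perl at /usr/bin/perl (both assumptions).
import os
import subprocess
import tempfile

def run_cloc_standalone(input_files, cloc_script="cloc.pl"):
    with tempfile.TemporaryDirectory() as tmpdir:
        list_file = os.path.join(tmpdir, "input_files_list")
        report_file = os.path.join(tmpdir, "report")
        ignored_file = os.path.join(tmpdir, "ignored")
        # One source path per line, exactly like the argfile written by the tasks above.
        with open(list_file, "w") as f:
            f.write("\n".join(sorted(set(input_files))) + "\n")
        subprocess.run(
            [
                "/usr/bin/perl", cloc_script,
                "--skip-uniqueness",
                f"--ignored={ignored_file}",
                f"--list-file={list_file}",
                f"--report-file={report_file}",
            ],
            check=True,  # Raise on a non-zero exit, mirroring the TaskError checks below.
        )
        with open(report_file) as f:
            return f.read()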
Example #2
 def _get_cloc_script(self):
     return ClocBinary.global_instance().select(self.context)
Example #3
async def run_cloc(
    console: Console,
    options: CountLinesOfCodeOptions,
    cloc_binary: ClocBinary,
    sources_snapshots: SourcesSnapshots,
) -> CountLinesOfCode:
    """Runs the cloc Perl script."""
    all_file_names = sorted(
        set(
            itertools.chain.from_iterable(
                sources_snapshot.snapshot.files
                for sources_snapshot in sources_snapshots)))
    file_content = "\n".join(all_file_names).encode()

    if not file_content:
        return CountLinesOfCode(exit_code=0)

    input_files_filename = "input_files.txt"
    input_file_digest = await Get[Digest](
        InputFilesContent([FileContent(path=input_files_filename, content=file_content)])
    )
    downloaded_cloc_binary = await Get[DownloadedExternalTool](
        ExternalToolRequest, cloc_binary.get_request(Platform.current))
    digest = await Get[Digest](MergeDigests((
        input_file_digest,
        downloaded_cloc_binary.digest,
        *(sources_snapshot.snapshot.digest
          for sources_snapshot in sources_snapshots),
    )))

    report_filename = "report.txt"
    ignore_filename = "ignored.txt"

    cmd = (
        "/usr/bin/perl",
        downloaded_cloc_binary.exe,
        "--skip-uniqueness",  # Skip the file uniqueness check.
        f"--ignored={ignore_filename}",  # Write the names and reasons of ignored files to this file.
        f"--report-file={report_filename}",  # Write the output to this file rather than stdout.
        f"--list-file={input_files_filename}",  # Read an exhaustive list of files to process from this file.
    )
    req = Process(
        argv=cmd,
        input_digest=digest,
        output_files=(report_filename, ignore_filename),
        description=f"Count lines of code for {pluralize(len(all_file_names), 'file')}",
    )

    exec_result = await Get[ProcessResult](Process, req)
    files_content = await Get[FilesContent](Digest, exec_result.output_digest)

    file_outputs = {fc.path: fc.content.decode() for fc in files_content}

    for line in file_outputs[report_filename].splitlines():
        console.print_stdout(line)

    if options.values.ignored:
        console.print_stderr("\nIgnored the following files:")
        for line in file_outputs[ignore_filename].splitlines():
            console.print_stderr(line)

    return CountLinesOfCode(exit_code=0)
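Example #3 is the v2-engine rewrite of the same task: the argfile, the downloaded cloc binary, and the source snapshots are merged into a single digest and executed as an awaited Process instead of through direct scheduler calls. The pluralize helper used in the process description comes from Pants' string utilities; a hypothetical stand-in looks like this.

# Hypothetical stand-in for the pluralize helper used in the description above;
# the real implementation lives in Pants' utility code.
def pluralize(count: int, item_type: str) -> str:
    return f"{count} {item_type}" if count == 1 else f"{count} {item_type}s"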
Example #4
    def console_output(self, targets):
        if not self.get_options().transitive:
            targets = self.context.target_roots

        # TODO: Work out a nice library-like utility for writing an argfile, as this will be common.
        with temporary_dir() as tmpdir:
            list_file = os.path.join(tmpdir, 'input_files_list')
            input_files = set()
            with open(list_file, 'w') as list_file_out:
                for target in targets:
                    for source in target.sources_relative_to_buildroot():
                        input_files.add(source)
                        list_file_out.write(source)
                        list_file_out.write('\n')
            list_file_snapshot = self.context._scheduler.capture_snapshots(
                (PathGlobsAndRoot(
                    PathGlobs(('input_files_list', )),
                    str(tmpdir),
                ), ))[0]

        cloc_path, cloc_snapshot = ClocBinary.global_instance().hackily_snapshot(
            self.context)

        # TODO: This should use an input file snapshot which should be provided on the Target object,
        # rather than hackily re-snapshotting each of the input files.
        # See https://github.com/pantsbuild/pants/issues/5762
        input_pathglobs = PathGlobs(tuple(input_files))
        input_snapshot = self.context._scheduler.product_request(
            Snapshot, [input_pathglobs])[0]

        directory_digest = self.context._scheduler.merge_directories((
            cloc_snapshot.directory_digest,
            input_snapshot.directory_digest,
            list_file_snapshot.directory_digest,
        ))

        cmd = (
            '/usr/bin/perl',
            cloc_path,
            '--skip-uniqueness',
            '--ignored=ignored',
            '--list-file=input_files_list',
            '--report-file=report',
        )

        # The cloc script reaches into $PATH to look up perl. Let's assume it's in /usr/bin.
        req = ExecuteProcessRequest(cmd, (), directory_digest,
                                    ('ignored', 'report'), (), 15 * 60, 'cloc')
        exec_result = self.context.execute_process_synchronously(
            req, 'cloc', (WorkUnitLabel.TOOL, ))

        # TODO: Remove this check when https://github.com/pantsbuild/pants/issues/5719 is resolved.
        if exec_result.exit_code != 0:
            raise TaskError('{} ... exited non-zero ({}).'.format(
                ' '.join(cmd), exec_result.exit_code))

        files_content_tuple = self.context._scheduler.product_request(
            FilesContent,
            [exec_result.output_directory_digest])[0].dependencies

        files_content = {fc.path: fc.content for fc in files_content_tuple}
        for line in files_content['report'].split('\n'):
            yield line

        if self.get_options().ignored:
            yield 'Ignored the following files:'
            for line in files_content['ignored'].split('\n'):
                yield line
Example #5
 def _get_cloc_script(self):
   return ClocBinary.global_instance().select(self.context)
Example #6
    def console_output(self, targets):
        input_snapshots = tuple(
            target.sources_snapshot(scheduler=self.context._scheduler)
            for target in targets)
        input_files = {
            f
            for snapshot in input_snapshots for f in snapshot.files
        }

        # TODO: Work out a nice library-like utility for writing an argfile, as this will be common.
        with temporary_dir() as tmpdir:
            list_file = os.path.join(tmpdir, "input_files_list")
            with open(list_file, "w") as list_file_out:
                for input_file in sorted(input_files):
                    list_file_out.write(input_file)
                    list_file_out.write("\n")
            list_file_snapshot = self.context._scheduler.capture_snapshots(
                (PathGlobsAndRoot(
                    PathGlobs(("input_files_list", )),
                    tmpdir,
                ), ))[0]

        cloc_path, cloc_snapshot = ClocBinary.global_instance().hackily_snapshot(
            self.context)

        directory_digest = self.context._scheduler.merge_directories(
            tuple(s.directory_digest for s in input_snapshots + (
                cloc_snapshot,
                list_file_snapshot,
            )))

        cmd = (
            "/usr/bin/perl",
            cloc_path,
            "--skip-uniqueness",
            "--ignored=ignored",
            "--list-file=input_files_list",
            "--report-file=report",
        )

        # The cloc script reaches into $PATH to look up perl. Let's assume it's in /usr/bin.
        req = Process(
            argv=cmd,
            input_files=directory_digest,
            output_files=("ignored", "report"),
            description="cloc",
        )
        exec_result = self.context.execute_process_synchronously_or_raise(
            req, "cloc", (WorkUnitLabel.TOOL, ))

        files_content_tuple = self.context._scheduler.product_request(
            FilesContent,
            [exec_result.output_directory_digest])[0].dependencies

        files_content = {
            fc.path: fc.content.decode()
            for fc in files_content_tuple
        }
        for line in files_content["report"].split("\n"):
            yield line

        if self.get_options().ignored:
            yield "Ignored the following files:"
            for line in files_content["ignored"].split("\n"):
                yield line
Example #7
  def console_output(self, targets):
    if not self.get_options().transitive:
      targets = self.context.target_roots

    input_snapshots = tuple(
      target.sources_snapshot(scheduler=self.context._scheduler) for target in targets
    )
    input_files = {f for snapshot in input_snapshots for f in snapshot.files}

    # TODO: Work out a nice library-like utility for writing an argfile, as this will be common.
    with temporary_dir() as tmpdir:
      list_file = os.path.join(tmpdir, 'input_files_list')
      with open(list_file, 'w') as list_file_out:
        for input_file in sorted(input_files):
          list_file_out.write(input_file)
          list_file_out.write('\n')
      list_file_snapshot = self.context._scheduler.capture_snapshots((
        PathGlobsAndRoot(
          PathGlobs(('input_files_list',)),
          text_type(tmpdir),
        ),
      ))[0]

    cloc_path, cloc_snapshot = ClocBinary.global_instance().hackily_snapshot(self.context)

    directory_digest = self.context._scheduler.merge_directories(
      tuple(s.directory_digest for s in input_snapshots + (
        cloc_snapshot,
        list_file_snapshot,
      )))

    cmd = (
      '/usr/bin/perl',
      cloc_path,
      '--skip-uniqueness',
      '--ignored=ignored',
      '--list-file=input_files_list',
      '--report-file=report',
    )

    # The cloc script reaches into $PATH to look up perl. Let's assume it's in /usr/bin.
    req = ExecuteProcessRequest(
      argv=cmd,
      input_files=directory_digest,
      output_files=('ignored', 'report'),
      description='cloc',
    )
    exec_result = self.context.execute_process_synchronously_without_raising(req, 'cloc', (WorkUnitLabel.TOOL,))

    files_content_tuple = self.context._scheduler.product_request(
      FilesContent,
      [exec_result.output_directory_digest]
    )[0].dependencies

    files_content = {fc.path: fc.content.decode('utf-8') for fc in files_content_tuple}
    for line in files_content['report'].split('\n'):
      yield line

    if self.get_options().ignored:
      yield 'Ignored the following files:'
      for line in files_content['ignored'].split('\n'):
        yield line
Example #8
File: cloc.py  Project: baroquebobcat/pants
  def console_output(self, targets):
    if not self.get_options().transitive:
      targets = self.context.target_roots

    # TODO: Work out a nice library-like utility for writing an argfile, as this will be common.
    with temporary_dir() as tmpdir:
      list_file = os.path.join(tmpdir, 'input_files_list')
      input_files = set()
      with open(list_file, 'w') as list_file_out:
        for target in targets:
          for source in target.sources_relative_to_buildroot():
            input_files.add(source)
            list_file_out.write(source)
            list_file_out.write('\n')
      list_file_snapshot = self.context._scheduler.capture_snapshots((
        PathGlobsAndRoot(
          PathGlobs(('input_files_list',)),
          str(tmpdir),
        ),
      ))[0]

    cloc_path, cloc_snapshot = ClocBinary.global_instance().hackily_snapshot(self.context)

    # TODO: This should use an input file snapshot which should be provided on the Target object,
    # rather than hackily re-snapshotting each of the input files.
    # See https://github.com/pantsbuild/pants/issues/5762
    input_pathglobs = PathGlobs(tuple(input_files))
    input_snapshot = self.context._scheduler.product_request(Snapshot, [input_pathglobs])[0]

    directory_digest = self.context._scheduler.merge_directories((
      cloc_snapshot.directory_digest,
      input_snapshot.directory_digest,
      list_file_snapshot.directory_digest,
    ))

    cmd = (
      '/usr/bin/perl',
      cloc_path,
      '--skip-uniqueness',
      '--ignored=ignored',
      '--list-file=input_files_list',
      '--report-file=report',
    )

    # The cloc script reaches into $PATH to look up perl. Let's assume it's in /usr/bin.
    req = ExecuteProcessRequest(cmd, (), directory_digest, ('ignored', 'report'), 15 * 60, 'cloc')
    exec_result = self.context.execute_process_synchronously(req, 'cloc', (WorkUnitLabel.TOOL,))

    # TODO: Remove this check when https://github.com/pantsbuild/pants/issues/5719 is resolved.
    if exec_result.exit_code != 0:
      raise TaskError('{} ... exited non-zero ({}).'.format(' '.join(cmd), exec_result.exit_code))

    files_content_tuple = self.context._scheduler.product_request(
      FilesContent,
      [exec_result.output_directory_digest]
    )[0].dependencies

    files_content = {fc.path: fc.content for fc in files_content_tuple}
    for line in files_content['report'].split('\n'):
      yield line

    if self.get_options().ignored:
      yield 'Ignored the following files:'
      for line in files_content['ignored'].split('\n'):
        yield line
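Every variant ends the same way: split the captured report file into lines and yield or print them verbatim. If the counts themselves are needed rather than the raw text, cloc's default plain-text report can be parsed with a short helper; this sketch assumes the usual layout of language, files, blank, comment, and code columns.

# Hedged sketch: parse cloc's default plain-text report into per-language counts,
# assuming the usual "Language  files  blank  comment  code" column layout.
def parse_cloc_report(report_text):
    counts = {}
    for line in report_text.splitlines():
        parts = line.split()
        # Data rows end with four integer columns: files, blank, comment, code.
        # Header and separator lines are skipped; the trailing SUM: row is kept
        # under the key "SUM:".
        if len(parts) >= 5 and all(p.isdigit() for p in parts[-4:]):
            language = " ".join(parts[:-4])
            files, blank, comment, code = (int(p) for p in parts[-4:])
            counts[language] = {
                "files": files, "blank": blank, "comment": comment, "code": code,
            }
    return counts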