def __init__(self, options, run_tracker, target_roots, requested_goals=None, target_base=None,
             build_graph=None, build_file_parser=None, address_mapper=None,
             console_outstream=None, scm=None, workspace=None, spec_excludes=None,
             invalidation_report=None):
  """Creates a context for a pants run, resolving defaults for any omitted collaborators.

  :API: public
  """
  # `spec_excludes` is deprecated; emit the deprecation warning only when a value is supplied.
  deprecated_conditional(lambda: spec_excludes is not None, '0.0.75',
                         'Use address_mapper#build_ignore_patterns instead.')
  self._options = options
  self.build_graph = build_graph
  self.build_file_parser = build_file_parser
  self.address_mapper = address_mapper
  self.run_tracker = run_tracker
  self._log = self.Log(run_tracker)
  # Fall back to the Target base class when no narrower target base is given.
  self._target_base = target_base or Target
  self._products = Products()
  self._buildroot = get_buildroot()
  self._source_roots = SourceRootConfig.global_instance().get_source_roots()
  # Lock file guarding against concurrent pants runs in the same buildroot.
  self._lock = OwnerPrintingPIDLockFile(os.path.join(self._buildroot, '.pants.run'))
  self._java_sysprops = None  # Computed lazily.
  self.requested_goals = requested_goals or []
  self._console_outstream = console_outstream or sys.stdout
  # Default to the globally discovered SCM; only build a workspace when an SCM exists.
  self._scm = scm or get_scm()
  self._workspace = workspace or (ScmWorkspace(self._scm) if self._scm else None)
  self._spec_excludes = spec_excludes
  self._replace_targets(target_roots)
  self._invalidation_report = invalidation_report
def change_calculator(self, build_graph, address_mapper, scm=None, workspace=None,
                      exclude_target_regexp=None):
  """Constructs and returns a BuildGraphChangeCalculator.

  :param BuildGraph build_graph: A BuildGraph instance.
  :param AddressMapper address_mapper: A AddressMapper instance.
  :param Scm scm: The SCM instance. Defaults to discovery.
  :param ScmWorkspace: The SCM workspace instance.
  :param string exclude_target_regexp: The exclude target regexp.
  """
  active_scm = scm or get_scm()
  if active_scm is None:
    raise TaskError('A `changed` goal or `--changed` option was specified, '
                    'but no SCM is available to satisfy the request.')
  active_workspace = workspace if workspace else ScmWorkspace(active_scm)
  request = self._changed_request
  return BuildGraphChangeCalculator(
    active_scm,
    active_workspace,
    address_mapper,
    build_graph,
    request.include_dependees,
    fast=request.fast,
    changes_since=request.changes_since,
    diffspec=request.diffspec,
    exclude_target_regexp=exclude_target_regexp)
def _get_project_tree(self, build_file_rev, pants_ignore):
  """Creates the project tree for build files for use in a given pants run."""
  # A build_file_rev pins BUILD file reads to an SCM revision; otherwise read the filesystem.
  if not build_file_rev:
    return FileSystemProjectTree(self._root_dir, pants_ignore)
  return ScmProjectTree(self._root_dir, get_scm(), build_file_rev, pants_ignore)
def change_calculator(cls, options, address_mapper, build_graph, scm=None, workspace=None,
                      spec_excludes=None):
  """Builds a ChangeCalculator from task options, discovering the SCM when not supplied."""
  effective_scm = scm or get_scm()
  if effective_scm is None:
    raise TaskError('No SCM available.')
  effective_workspace = workspace or ScmWorkspace(effective_scm)
  return ChangeCalculator(effective_scm,
                          effective_workspace,
                          address_mapper,
                          build_graph,
                          options.include_dependees,
                          fast=options.fast,
                          changes_since=options.changes_since,
                          diffspec=options.diffspec,
                          # NB: exclude_target_regexp is a global scope option registered
                          # elsewhere
                          exclude_target_regexp=options.exclude_target_regexp,
                          spec_excludes=spec_excludes)
def __init__(self, options, run_tracker, target_roots, requested_goals=None, target_base=None,
             build_graph=None, build_file_parser=None, address_mapper=None,
             console_outstream=None, scm=None, workspace=None, spec_excludes=None,
             invalidation_report=None):
  """Creates a context for a pants run, resolving defaults for any omitted collaborators."""
  self._options = options
  self.build_graph = build_graph
  self.build_file_parser = build_file_parser
  self.address_mapper = address_mapper
  self.run_tracker = run_tracker
  self._log = self.Log(run_tracker)
  # Fall back to the Target base class when no narrower target base is given.
  self._target_base = target_base or Target
  self._products = Products()
  self._buildroot = get_buildroot()
  # Lock file guarding against concurrent pants runs in the same buildroot.
  self._lock = OwnerPrintingPIDLockFile(
    os.path.join(self._buildroot, '.pants.run'))
  self._java_sysprops = None  # Computed lazily.
  self.requested_goals = requested_goals or []
  self._console_outstream = console_outstream or sys.stdout
  # Default to the globally discovered SCM; only build a workspace when an SCM exists.
  self._scm = scm or get_scm()
  self._workspace = workspace or (ScmWorkspace(self._scm) if self._scm else None)
  self._spec_excludes = spec_excludes
  self._replace_targets(target_roots)
  # Synthetic targets are grouped by key; defaultdict avoids key-presence checks on insert.
  self._synthetic_targets = defaultdict(list)
  self._invalidation_report = invalidation_report
def new_session(self):
  """Creates a fresh scheduler session wrapped in a LegacyGraphSession.

  A change calculator is attached only when an SCM can be discovered.
  """
  session = self.scheduler.new_session()
  scm = get_scm()
  if scm:
    change_calculator = EngineChangeCalculator(session, self.symbol_table, scm)
  else:
    change_calculator = None
  return LegacyGraphSession(session, self.symbol_table, change_calculator)
def change_calculator(self, build_graph, address_mapper, scm=None, workspace=None,
                      exclude_target_regexp=None):
  """Constructs and returns a BuildGraphChangeCalculator.

  :param BuildGraph build_graph: A BuildGraph instance.
  :param AddressMapper address_mapper: A AddressMapper instance.
  :param Scm scm: The SCM instance. Defaults to discovery.
  :param ScmWorkspace: The SCM workspace instance.
  :param string exclude_target_regexp: The exclude target regexp.
  """
  resolved_scm = scm or get_scm()
  if resolved_scm is None:
    raise TaskError(
      'A `changed` goal or `--changed` option was specified, '
      'but no SCM is available to satisfy the request.')
  resolved_workspace = workspace or ScmWorkspace(resolved_scm)
  changed = self._changed_request
  return BuildGraphChangeCalculator(resolved_scm,
                                    resolved_workspace,
                                    address_mapper,
                                    build_graph,
                                    changed.include_dependees,
                                    fast=changed.fast,
                                    changes_since=changed.changes_since,
                                    diffspec=changed.diffspec,
                                    exclude_target_regexp=exclude_target_regexp)
def change_calculator(cls, options, address_mapper, build_graph, scm=None, workspace=None,
                      spec_excludes=None):
  """Builds a ChangeCalculator from task options, discovering the SCM when not supplied."""
  # `spec_excludes` is deprecated; warn only when a value is actually passed.
  deprecated_conditional(
    lambda: spec_excludes is not None, '0.0.75',
    'Use address_mapper#build_ignore_patterns instead.')
  resolved_scm = scm or get_scm()
  if resolved_scm is None:
    raise TaskError('No SCM available.')
  resolved_workspace = workspace or ScmWorkspace(resolved_scm)
  return ChangeCalculator(
    resolved_scm,
    resolved_workspace,
    address_mapper,
    build_graph,
    options.include_dependees,
    fast=options.fast,
    changes_since=options.changes_since,
    diffspec=options.diffspec,
    # NB: exclude_target_regexp is a global scope option registered elsewhere
    exclude_target_regexp=options.exclude_target_regexp,
    spec_excludes=spec_excludes)
def __init__(self, options, run_tracker, target_roots, requested_goals=None, target_base=None,
             build_graph=None, build_file_parser=None, address_mapper=None,
             console_outstream=None, scm=None, workspace=None, invalidation_report=None):
  """Creates a context for a pants run, resolving defaults for any omitted collaborators."""
  self._options = options
  self.build_graph = build_graph
  self.build_file_parser = build_file_parser
  self.address_mapper = address_mapper
  self.run_tracker = run_tracker
  self._log = self.Log(run_tracker)
  # Fall back to the Target base class when no narrower target base is given.
  self._target_base = target_base or Target
  self._products = Products()
  self._buildroot = get_buildroot()
  self._source_roots = SourceRootConfig.global_instance().get_source_roots()
  # Inter-process lock file guarding concurrent pants runs against the shared workdir.
  self._lock = OwnerPrintingInterProcessFileLock(
    os.path.join(self._buildroot, '.pants.workdir.file_lock'))
  self._java_sysprops = None  # Computed lazily.
  self.requested_goals = requested_goals or []
  self._console_outstream = console_outstream or sys.stdout
  # Default to the globally discovered SCM; only build a workspace when an SCM exists.
  self._scm = scm or get_scm()
  self._workspace = workspace or (ScmWorkspace(self._scm) if self._scm else None)
  self._replace_targets(target_roots)
  self._invalidation_report = invalidation_report
def get_project_tree(options):
  """Creates the project tree for build files for use in a given pants run."""
  ignore_patterns = options.pants_ignore or []
  root = get_buildroot()
  rev = options.build_file_rev
  # A pinned build_file_rev reads BUILD files from SCM history; otherwise read the filesystem.
  if rev:
    return ScmProjectTree(root, get_scm(), rev, ignore_patterns)
  return FileSystemProjectTree(root, ignore_patterns)
def __init__(self, config, options, run_tracker, target_roots, requested_goals=None, lock=None,
             log=None, target_base=None, build_graph=None, build_file_parser=None,
             address_mapper=None, console_outstream=None, scm=None, workspace=None):
  """Creates a context for a pants run, resolving defaults for any omitted collaborators."""
  self._config = config
  self._options = options
  self.build_graph = build_graph
  self.build_file_parser = build_file_parser
  self.address_mapper = address_mapper
  self.run_tracker = run_tracker
  # An explicit lock may be injected; otherwise run unlocked.
  self._lock = lock or Lock.unlocked()
  self._log = log or Context.Log(run_tracker)
  # Fall back to the Target base class when no narrower target base is given.
  self._target_base = target_base or Target
  self._products = Products()
  self._buildroot = get_buildroot()
  self._java_sysprops = None  # Computed lazily.
  self.requested_goals = requested_goals or []
  self._console_outstream = console_outstream or sys.stdout
  # Default to the globally discovered SCM; only build a workspace when an SCM exists.
  self._scm = scm or get_scm()
  self._workspace = workspace or (ScmWorkspace(self._scm) if self._scm else None)
  self.replace_targets(target_roots)
def __init__(
    self,
    options,
    run_tracker,
    target_roots,
    requested_goals=None,
    target_base=None,
    build_graph=None,
    build_file_parser=None,
    address_mapper=None,
    console_outstream=None,
    scm=None,
    workspace=None,
    spec_excludes=None,
):
    """Creates a context for a pants run, resolving defaults for any omitted collaborators."""
    self._options = options
    self.build_graph = build_graph
    self.build_file_parser = build_file_parser
    self.address_mapper = address_mapper
    self.run_tracker = run_tracker
    self._log = self.Log(run_tracker)
    # Fall back to the Target base class when no narrower target base is given.
    self._target_base = target_base or Target
    self._products = Products()
    self._buildroot = get_buildroot()
    # Lock file guarding against concurrent pants runs in the same buildroot.
    self._lock = OwnerPrintingPIDLockFile(os.path.join(self._buildroot, ".pants.run"))
    self._java_sysprops = None  # Computed lazily.
    self.requested_goals = requested_goals or []
    self._console_outstream = console_outstream or sys.stdout
    # Default to the globally discovered SCM; only build a workspace when an SCM exists.
    self._scm = scm or get_scm()
    self._workspace = workspace or (ScmWorkspace(self._scm) if self._scm else None)
    self._spec_excludes = spec_excludes
    self._replace_targets(target_roots)
    # Synthetic targets are grouped by key; defaultdict avoids key-presence checks on insert.
    self._synthetic_targets = defaultdict(list)
def __init__(self, options, run_tracker, target_roots, requested_goals=None, target_base=None, build_graph=None, build_file_parser=None, build_configuration=None, address_mapper=None, console_outstream=None, scm=None, workspace=None, invalidation_report=None, scheduler=None): self._options = options # We register a callback that will cause build graph edits to invalidate our caches, and we hold # a handle directly to the callback function to ensure that it is not GC'd until the context is. self.build_graph = build_graph self._clear_target_cache_handle = self._clear_target_cache self._targets_cache = dict() self.build_graph.add_invalidation_callback(self._clear_target_cache_handle) self._build_file_parser = build_file_parser self.build_configuration = build_configuration self.address_mapper = address_mapper self.run_tracker = run_tracker self._log = run_tracker.logger self._target_base = target_base or Target self._products = Products() self._buildroot = get_buildroot() self._source_roots = SourceRootConfig.global_instance().get_source_roots() self._lock = OwnerPrintingInterProcessFileLock(os.path.join(self._buildroot, '.pants.workdir.file_lock')) self._java_sysprops = None # Computed lazily. self.requested_goals = requested_goals or [] self._console_outstream = console_outstream or sys.stdout.buffer self._scm = scm or get_scm() self._workspace = workspace or (ScmWorkspace(self._scm) if self._scm else None) self._replace_targets(target_roots) self._invalidation_report = invalidation_report self._scheduler = scheduler
def __init__(
    self,
    options,
    run_tracker,
    target_roots,
    requested_goals=None,
    target_base=None,
    build_graph=None,
    build_file_parser=None,
    address_mapper=None,
    console_outstream=None,
    scm=None,
    workspace=None,
    invalidation_report=None,
):
    """Creates a context for a pants run, resolving defaults for any omitted collaborators."""
    self._options = options
    self.build_graph = build_graph
    self.build_file_parser = build_file_parser
    self.address_mapper = address_mapper
    self.run_tracker = run_tracker
    self._log = self.Log(run_tracker)
    # Fall back to the Target base class when no narrower target base is given.
    self._target_base = target_base or Target
    self._products = Products()
    self._buildroot = get_buildroot()
    self._source_roots = SourceRootConfig.global_instance().get_source_roots()
    # Inter-process lock file guarding concurrent pants runs against the shared workdir.
    self._lock = OwnerPrintingInterProcessFileLock(
        os.path.join(self._buildroot, ".pants.workdir.file_lock"))
    self._java_sysprops = None  # Computed lazily.
    self.requested_goals = requested_goals or []
    self._console_outstream = console_outstream or sys.stdout
    # Default to the globally discovered SCM; only build a workspace when an SCM exists.
    self._scm = scm or get_scm()
    self._workspace = workspace or (ScmWorkspace(self._scm) if self._scm else None)
    self._replace_targets(target_roots)
    self._invalidation_report = invalidation_report
def create(cls, options, session, symbol_table, build_root=None):
  """Computes the target roots for a run from specs, `--changed-*`, or `--owner-of`.

  :param Options options: An `Options` instance to use.
  :param session: The Scheduler session
  :param symbol_table: The symbol table
  :param string build_root: The build root.
  """
  # Determine the literal target roots.
  spec_roots = cls.parse_specs(options.target_specs, build_root)

  # Determine `Changed` arguments directly from options to support pre-`Subsystem`
  # initialization paths.
  changed_options = options.for_scope('changed')
  changed_request = ChangedRequest.from_options(changed_options)

  # Determine the `--owner-of=` arguments provided from the global options
  owned_files = options.for_global_scope().owner_of

  logger.debug('spec_roots are: %s', spec_roots)
  logger.debug('changed_request is: %s', changed_request)
  logger.debug('owned_files are: %s', owned_files)

  scm = get_scm()
  change_calculator = ChangeCalculator(session, symbol_table, scm) if scm else None
  owner_calculator = OwnerCalculator(session, symbol_table) if owned_files else None

  # Exactly one target selection method may be used per run.
  targets_specified = sum(1 for item
                          in (changed_request.is_actionable(), owned_files, spec_roots)
                          if item)

  if targets_specified > 1:
    # We've been provided a more than one of: a change request, an owner request, or spec roots.
    raise InvalidSpecConstraint(
      'Multiple target selection methods provided. Please use only one of '
      '--changed-*, --owner-of, or target specs')

  if change_calculator and changed_request.is_actionable():
    # We've been provided no spec roots (e.g. `./pants list`) AND a changed request. Compute
    # alternate target roots.
    changed_addresses = change_calculator.changed_target_addresses(changed_request)
    logger.debug('changed addresses: %s', changed_addresses)
    return TargetRoots(tuple(SingleAddress(a.spec_path, a.target_name)
                             for a in changed_addresses))

  if owner_calculator and owned_files:
    # We've been provided no spec roots (e.g. `./pants list`) AND a owner request. Compute
    # alternate target roots.
    owner_addresses = owner_calculator.owner_target_addresses(owned_files)
    logger.debug('owner addresses: %s', owner_addresses)
    return TargetRoots(tuple(SingleAddress(a.spec_path, a.target_name)
                             for a in owner_addresses))

  return TargetRoots(spec_roots)
def _get_buildfile_type(self, build_file_rev):
  """Selects the BuildFile type for use in a given pants run."""
  if not build_file_rev:
    return FilesystemBuildFile
  # Pin SCM-backed BUILD file reads to the requested revision before handing out the type.
  ScmBuildFile.set_rev(build_file_rev)
  ScmBuildFile.set_scm(get_scm())
  return ScmBuildFile
def add_scm_info(self):
  """Adds SCM-related info."""
  scm = get_scm()
  if scm:
    revision = scm.commit_id
    # Detached HEADs have no branch name; fall back to the commit id.
    branch = scm.branch_name or revision
    self.add_infos(('revision', revision), ('branch', branch))
def __init__(self, context, scm=None, outstream=sys.stdout):
  """Creates a WhatChanged task that uses an Scm to determine changed files.

  context: The pants execution context.
  scm: The scm to use, taken from the globally configured scm if None.
  outstream: The stream to write changed files or targets to.
  """
  workspace = ScmWorkspace(scm or get_scm())
  super(ScmWhatChanged, self).__init__(context, workspace, outstream)
def __init__(self, scm):
  """Wraps an Scm for change queries.

  :param scm: The Scm to use; falls back to global discovery when None.
  :raises WorkspaceError: If no SCM could be resolved.
  """
  super(ScmWorkspace, self).__init__()
  self._scm = scm or get_scm()
  if self._scm is None:
    raise self.WorkspaceError('Cannot figure out what changed without a configured '
                              'source-control system.')
def setup_legacy_graph(pants_ignore_patterns, workdir, build_root=None, native=None,
                       symbol_table_cls=None, build_ignore_patterns=None,
                       exclude_target_regexps=None, subproject_roots=None):
  """Construct and return the components necessary for LegacyBuildGraph construction.

  :param list pants_ignore_patterns: A list of path ignore patterns for FileSystemProjectTree,
                                     usually taken from the '--pants-ignore' global option.
  :param str workdir: The pants workdir.
  :param str build_root: A path to be used as the build root. If None, then default is used.
  :param Native native: An instance of the native-engine subsystem.
  :param SymbolTable symbol_table_cls: A SymbolTable class to use for build file parsing, or None
                                       to use the default.
  :param list build_ignore_patterns: A list of paths ignore patterns used when searching for BUILD
                                     files, usually taken from the '--build-ignore' global option.
  :param list exclude_target_regexps: A list of regular expressions for excluding targets.
  :param list subproject_roots: Paths that correspond with embedded build roots under the current
                                build root.
  :returns: A tuple of (scheduler, engine, symbol_table_cls, build_graph_cls).
  """
  build_root = build_root or get_buildroot()
  scm = get_scm()
  symbol_table_cls = symbol_table_cls or LegacySymbolTable

  project_tree = FileSystemProjectTree(build_root, pants_ignore_patterns)

  # Register "literal" subjects required for these tasks.
  # TODO: Replace with `Subsystems`.
  address_mapper = AddressMapper(symbol_table_cls=symbol_table_cls,
                                 parser_cls=LegacyPythonCallbacksParser,
                                 build_ignore_patterns=build_ignore_patterns,
                                 exclude_target_regexps=exclude_target_regexps,
                                 subproject_roots=subproject_roots)

  # Load the native backend.
  native = native or Native.Factory.global_instance().create()

  # Create a Scheduler containing graph and filesystem tasks, with no installed goals. The
  # LegacyBuildGraph will explicitly request the products it needs.
  tasks = (create_legacy_graph_tasks(symbol_table_cls) +
           create_fs_rules() +
           create_graph_rules(address_mapper, symbol_table_cls))

  # TODO: Do not use the cache yet, as it incurs a high overhead.
  scheduler = LocalScheduler(workdir, dict(), tasks, project_tree, native)
  engine = LocalSerialEngine(scheduler, use_cache=False)
  # Change calculation is only possible when an SCM is configured.
  change_calculator = EngineChangeCalculator(scheduler, engine, symbol_table_cls,
                                             scm) if scm else None

  return LegacyGraphHelper(scheduler, engine, symbol_table_cls, change_calculator)
def setup_legacy_graph(pants_ignore_patterns, workdir, build_root=None, native=None,
                       symbol_table_cls=None, build_ignore_patterns=None,
                       exclude_target_regexps=None, subproject_roots=None,
                       include_trace_on_error=True):
  """Construct and return the components necessary for LegacyBuildGraph construction.

  :param list pants_ignore_patterns: A list of path ignore patterns for FileSystemProjectTree,
                                     usually taken from the '--pants-ignore' global option.
  :param str workdir: The pants workdir.
  :param str build_root: A path to be used as the build root. If None, then default is used.
  :param Native native: An instance of the native-engine subsystem.
  :param SymbolTable symbol_table_cls: A SymbolTable class to use for build file parsing, or None
                                       to use the default.
  :param list build_ignore_patterns: A list of paths ignore patterns used when searching for BUILD
                                     files, usually taken from the '--build-ignore' global option.
  :param list exclude_target_regexps: A list of regular expressions for excluding targets.
  :param list subproject_roots: Paths that correspond with embedded build roots under the current
                                build root.
  :param bool include_trace_on_error: If True, when an error occurs, the error message will
                                      include the graph trace.
  :returns: A tuple of (scheduler, engine, symbol_table_cls, build_graph_cls).
  """
  build_root = build_root or get_buildroot()
  scm = get_scm()
  symbol_table_cls = symbol_table_cls or LegacySymbolTable

  project_tree = FileSystemProjectTree(build_root, pants_ignore_patterns)

  # Register "literal" subjects required for these tasks.
  # TODO: Replace with `Subsystems`.
  address_mapper = AddressMapper(symbol_table_cls=symbol_table_cls,
                                 parser_cls=LegacyPythonCallbacksParser,
                                 build_ignore_patterns=build_ignore_patterns,
                                 exclude_target_regexps=exclude_target_regexps,
                                 subproject_roots=subproject_roots)

  # Load the native backend.
  native = native or Native.create()

  # Create a Scheduler containing graph and filesystem tasks, with no installed goals. The
  # LegacyBuildGraph will explicitly request the products it needs.
  tasks = (
    create_legacy_graph_tasks(symbol_table_cls) +
    create_fs_rules() +
    create_graph_rules(address_mapper, symbol_table_cls)
  )

  # TODO: Do not use the cache yet, as it incurs a high overhead.
  scheduler = LocalScheduler(workdir,
                             dict(),
                             tasks,
                             project_tree,
                             native,
                             include_trace_on_error=include_trace_on_error)
  # Change calculation is only possible when an SCM is configured.
  change_calculator = EngineChangeCalculator(scheduler, symbol_table_cls, scm) if scm else None

  return LegacyGraphHelper(scheduler, symbol_table_cls, change_calculator)
def create(
    cls,
    options_bootstrapper: OptionsBootstrapper,
    options: Options,
    session: SchedulerSession,
    build_root: Optional[str] = None,
) -> Specs:
    """Computes the `Specs` for a run from CLI specs and/or the `--changed-*` options.

    :raises InvalidSpecConstraint: If both explicit specs and `--changed-*` options were given,
                                   or if `--changed-*` was used without a recognized SCM.
    """
    specs = cls.parse_specs(raw_specs=options.specs, build_root=build_root)
    changed_options = ChangedOptions.from_options(options.for_scope("changed"))

    logger.debug("specs are: %s", specs)
    logger.debug("changed_options are: %s", changed_options)

    # Explicit specs and `--changed-*` are mutually exclusive selection methods.
    if specs.provided and changed_options.provided:
        changed_name = "--changed-since" if changed_options.since else "--changed-diffspec"
        if specs.filesystem_specs and specs.address_specs:
            specs_description = "target and file arguments"
        elif specs.filesystem_specs:
            specs_description = "file arguments"
        else:
            specs_description = "target arguments"
        raise InvalidSpecConstraint(
            f"You used `{changed_name}` at the same time as using {specs_description}. Please "
            "use only one.")

    if not changed_options.provided:
        return specs

    scm = get_scm()
    if not scm:
        raise InvalidSpecConstraint(
            "The `--changed-*` options are not available without a recognized SCM (usually "
            "Git).")
    changed_request = ChangedRequest(
        sources=tuple(changed_options.changed_files(scm=scm)),
        dependees=changed_options.dependees,
    )
    (changed_addresses,) = session.product_request(
        ChangedAddresses, [Params(changed_request, options_bootstrapper)])
    logger.debug("changed addresses: %s", changed_addresses)

    # Split the changed addresses into address vs. filesystem specs.
    address_specs = []
    filesystem_specs = []
    for address in cast(ChangedAddresses, changed_addresses):
        if not address.is_base_target:
            # TODO: Should adjust Specs parsing to support parsing the disambiguated file
            # Address, which would bypass-rediscovering owners.
            filesystem_specs.append(FilesystemLiteralSpec(address.filename))
        else:
            address_specs.append(SingleAddress(address.spec_path, address.target_name))

    return Specs(
        AddressSpecs(address_specs, filter_by_global_options=True),
        FilesystemSpecs(filesystem_specs),
    )
def add_scm_info(self):
  """Adds SCM-related info."""
  scm = get_scm()
  revision, branch = 'none', 'none'
  if scm:
    revision = scm.commit_id
    # Detached HEADs have no branch name; fall back to the commit id.
    branch = scm.branch_name or revision
  self.add_infos(('revision', revision), ('branch', branch))
def __init__(self, scm):
  """Wraps an Scm for change queries, failing fast when no SCM can be resolved."""
  super(ScmWorkspace, self).__init__()
  self._scm = scm if scm else get_scm()
  if self._scm is None:
    raise self.WorkspaceError(
      'Cannot figure out what changed without a configured '
      'source-control system.')
def calculate_specs(
    options_bootstrapper: OptionsBootstrapper,
    options: Options,
    session: SchedulerSession,
    *,
    build_root: Optional[str] = None,
) -> Specs:
    """Determine the specs for a given Pants run."""
    build_root = build_root or get_buildroot()
    specs = SpecsParser(build_root).parse_specs(options.specs)
    changed_options = ChangedOptions.from_options(options.for_scope("changed"))

    logger.debug("specs are: %s", specs)
    logger.debug("changed_options are: %s", changed_options)

    # Explicit specs and `--changed-*` are mutually exclusive selection methods.
    if specs.provided and changed_options.provided:
        changed_name = "--changed-since" if changed_options.since else "--changed-diffspec"
        if specs.filesystem_specs and specs.address_specs:
            specs_description = "target and file arguments"
        elif specs.filesystem_specs:
            specs_description = "file arguments"
        else:
            specs_description = "target arguments"
        raise InvalidSpecConstraint(
            f"You used `{changed_name}` at the same time as using {specs_description}. Please "
            "use only one.")

    if not changed_options.provided:
        return specs

    scm = get_scm()
    if not scm:
        raise InvalidSpecConstraint(
            "The `--changed-*` options are not available without a recognized SCM (usually "
            "Git).")
    changed_request = ChangedRequest(
        sources=tuple(changed_options.changed_files(scm=scm)),
        dependees=changed_options.dependees,
    )
    (changed_addresses,) = session.product_request(
        ChangedAddresses, [Params(changed_request, options_bootstrapper)])
    logger.debug("changed addresses: %s", changed_addresses)

    address_specs = []
    for address in cast(ChangedAddresses, changed_addresses):
        address_input = AddressInput.parse(address.spec)
        address_specs.append(
            AddressLiteralSpec(
                path_component=address_input.path_component,
                # NB: AddressInput.target_component may be None, but AddressLiteralSpec expects a
                # string.
                target_component=address_input.target_component or address.target_name,
            ))
    return Specs(AddressSpecs(address_specs, filter_by_global_options=True),
                 FilesystemSpecs([]))
def create(cls, options, session, symbol_table, build_root=None, exclude_patterns=None,
           tags=None):
  """Computes the target roots for a run from specs, `--changed-*`, or `--owner-of`.

  :param Options options: An `Options` instance to use.
  :param session: The Scheduler session
  :param symbol_table: The symbol table
  :param string build_root: The build root.
  :param exclude_patterns: Patterns forwarded to spec parsing and any resulting Specs.
  :param tags: Tags forwarded to spec parsing and any resulting Specs.
  """
  # Determine the literal target roots.
  spec_roots = cls.parse_specs(
    target_specs=options.target_specs,
    build_root=build_root,
    exclude_patterns=exclude_patterns,
    tags=tags)

  # Determine `Changed` arguments directly from options to support pre-`Subsystem`
  # initialization paths.
  changed_options = options.for_scope('changed')
  changed_request = ChangedRequest.from_options(changed_options)

  # Determine the `--owner-of=` arguments provided from the global options
  owned_files = options.for_global_scope().owner_of

  logger.debug('spec_roots are: %s', spec_roots)
  logger.debug('changed_request is: %s', changed_request)
  logger.debug('owned_files are: %s', owned_files)

  scm = get_scm()
  change_calculator = ChangeCalculator(scheduler=session, symbol_table=symbol_table,
                                       scm=scm) if scm else None
  owner_calculator = OwnerCalculator(scheduler=session,
                                     symbol_table=symbol_table) if owned_files else None

  # Exactly one target selection method may be used per run.
  targets_specified = sum(1 for item
                          in (changed_request.is_actionable(), owned_files, spec_roots)
                          if item)

  if targets_specified > 1:
    # We've been provided a more than one of: a change request, an owner request, or spec roots.
    raise InvalidSpecConstraint(
      'Multiple target selection methods provided. Please use only one of '
      '--changed-*, --owner-of, or target specs'
    )

  if change_calculator and changed_request.is_actionable():
    # We've been provided no spec roots (e.g. `./pants list`) AND a changed request. Compute
    # alternate target roots.
    changed_addresses = change_calculator.changed_target_addresses(changed_request)
    logger.debug('changed addresses: %s', changed_addresses)
    dependencies = tuple(SingleAddress(a.spec_path, a.target_name) for a in changed_addresses)
    return TargetRoots([Specs(dependencies=dependencies,
                              exclude_patterns=exclude_patterns,
                              tags=tags)])

  if owner_calculator and owned_files:
    # We've been provided no spec roots (e.g. `./pants list`) AND a owner request. Compute
    # alternate target roots.
    owner_addresses = owner_calculator.owner_target_addresses(owned_files)
    logger.debug('owner addresses: %s', owner_addresses)
    dependencies = tuple(SingleAddress(a.spec_path, a.target_name) for a in owner_addresses)
    return TargetRoots([Specs(dependencies=dependencies,
                              exclude_patterns=exclude_patterns,
                              tags=tags)])

  return TargetRoots(spec_roots)
def __init__(self, scheduler, symbol_table, scm):
  """Creates a change calculator backed by the engine's source mapper.

  :param scheduler: The `Scheduler` instance to use for computing file to target mappings.
  :param symbol_table: The symbol table.
  :param scm: The `Scm` instance to use for change determination.
  """
  super(EngineChangeCalculator, self).__init__(scm or get_scm())
  self._scheduler = scheduler
  self._symbol_table = symbol_table
  self._mapper = EngineSourceMapper(scheduler)
def add_scm_info(self):
  """Adds SCM-related info and returns a dict composed of just this added info."""
  scm = get_scm()
  revision = tag = branch = 'none'
  if scm:
    revision = scm.commit_id
    tag = scm.tag_name or 'none'
    # Detached HEADs have no branch name; fall back to the commit id.
    branch = scm.branch_name or revision
  return self.add_infos(('revision', revision), ('tag', tag), ('branch', branch))
def create(
    cls,
    options: Options,
    session: SchedulerSession,
    build_root: Optional[str] = None,
    exclude_patterns: Optional[Iterable[str]] = None,
    tags: Optional[Iterable[str]] = None,
) -> Specs:
    """Computes the `Specs` for a run from CLI specs and/or the `--changed-*` options.

    :raises InvalidSpecConstraint: If both explicit specs and `--changed-*` options were given,
                                   or if `--changed-*` was used without a recognized SCM.
    """
    specs = cls.parse_specs(
        raw_specs=options.specs,
        build_root=build_root,
        exclude_patterns=exclude_patterns,
        tags=tags,
    )
    changed_options = ChangedOptions.from_options(options.for_scope("changed"))

    logger.debug("specs are: %s", specs)
    logger.debug("changed_options are: %s", changed_options)

    if changed_options.is_actionable() and specs.provided_specs.dependencies:
        # We've been provided both a change request and specs.
        raise InvalidSpecConstraint(
            "Multiple target selection methods provided. Please use only one of "
            "`--changed-*`, address specs, or filesystem specs.")

    if changed_options.is_actionable():
        scm = get_scm()
        if not scm:
            raise InvalidSpecConstraint(
                "The `--changed-*` options are not available without a recognized SCM (usually Git)."
            )
        changed_request = ChangedRequest(
            sources=tuple(changed_options.changed_files(scm=scm)),
            include_dependees=changed_options.include_dependees,
        )
        (changed_addresses,) = session.product_request(ChangedAddresses, [changed_request])
        logger.debug("changed addresses: %s", changed_addresses.addresses)
        dependencies = tuple(
            SingleAddress(a.spec_path, a.target_name) for a in changed_addresses.addresses)
        return Specs(
            address_specs=AddressSpecs(
                dependencies=dependencies,
                exclude_patterns=exclude_patterns,
                tags=tags,
            ),
            filesystem_specs=FilesystemSpecs([]),
        )

    return specs
def __init__(self, context, workdir, scm=None, outstream=sys.stdout):
  """Creates a WhatChanged task that uses an Scm to determine changed files.

  context: The pants execution context.
  workdir: The directory to work in.
  scm: The scm to use, taken from the globally configured scm if None.
  outstream: The stream to write changed files or targets to.
  """
  effective_scm = scm or get_scm()
  super(ScmWhatChanged, self).__init__(context, workdir, ScmWorkspace(effective_scm), outstream)
def generate_project(self):
    """Renders the IntelliJ workspace/modules/name files and returns the project workdir."""
    outdir = os.path.abspath(self.intellij_output_dir)
    if not os.path.exists(outdir):
        os.makedirs(outdir)

    scm = get_scm()
    configured_project = TemplateData(
        root_dir=get_buildroot(),
        outdir=outdir,
        # get_scm() may return None (no SCM configured); the template receives None then.
        git_root=scm.worktree if scm else None,
        java=TemplateData(
            encoding=self.java_encoding,
            jdk=self.java_jdk,
            language_level="JDK_1_{}".format(self.java_language_level),
        ),
        debug_port=self.get_options().debug_port,
    )
    # NOTE(review): assumes at least one spec was provided; abs_target_specs[0] below would
    # raise IndexError on an empty spec list — confirm callers guarantee this.
    abs_target_specs = [
        os.path.join(get_buildroot(), spec) for spec in self.context.options.specs
    ]
    configured_workspace = TemplateData(
        targets=json.dumps(abs_target_specs),
        project_path=os.path.join(get_buildroot(), abs_target_specs[0].split(":")[0]),
        idea_plugin_version=IDEA_PLUGIN_VERSION,
        incremental_import=self.get_options().incremental_import,
        dep_as_jar=self.get_options().dep_as_jar,
    )

    # Generate (without merging in any extra components).
    safe_mkdir(os.path.abspath(self.intellij_output_dir))
    safe_mkdir(os.path.abspath(self.intellij_idea_dir))

    def gen_file(template_file_name, **mustache_kwargs):
        # Renders the named mustache template to a temp file and returns its path.
        return self._generate_to_tempfile(
            Generator(
                pkgutil.get_data(__name__, template_file_name).decode(), **mustache_kwargs))

    idea_ws = gen_file(self.idea_workspace_template, workspace=configured_workspace)
    idea_modules = gen_file(self.idea_modules_template, project=configured_project)
    idea_dotname = self._write_to_tempfile(self.project_name)

    # Move the rendered temp files into their final locations.
    shutil.move(idea_ws, self.idea_workspace_filename)
    shutil.move(idea_modules, self.idea_modules_filename)
    shutil.move(idea_dotname, self.idea_name_filename)

    return self.gen_project_workdir
def __init__(self, scheduler, symbol_table, scm, workspace=None, changes_since=None,
             diffspec=None):
  """Creates a change calculator backed by the engine's source mapper.

  :param scheduler: The `Scheduler` instance to use for computing file to target mappings.
  :param symbol_table: The symbol table.
  :param scm: The `Scm` instance to use for change determination.
  :param workspace: An optional `ScmWorkspace`; defaults to one wrapping the resolved SCM.
  :param changes_since: Optional revision to compute changes since.
  :param diffspec: Optional diff specification for change determination.
  """
  self._scm = scm or get_scm()
  self._scheduler = scheduler
  self._symbol_table = symbol_table
  self._mapper = EngineSourceMapper(self._scheduler)
  # Build the default workspace from the *resolved* SCM (self._scm), not the raw `scm`
  # argument: when `scm` is None the get_scm() fallback should back the workspace too,
  # matching how every other constructor in this codebase wires workspace to SCM.
  self._workspace = workspace or ScmWorkspace(self._scm)
  self._changes_since = changes_since
  self._diffspec = diffspec
def generate_project(self):
  """Renders the IntelliJ project/workspace files and returns the project filename."""
  outdir = os.path.abspath(self.intellij_output_dir)
  if not os.path.exists(outdir):
    os.makedirs(outdir)

  scm = get_scm()
  configured_project = TemplateData(
    root_dir=get_buildroot(),
    outdir=outdir,
    # get_scm() can return None when pants runs outside any SCM worktree; guard the
    # attribute access so project generation still works there (the newer variant of this
    # method in this codebase already does this).
    git_root=scm.worktree if scm else None,
    java=TemplateData(encoding=self.java_encoding,
                      jdk=self.java_jdk,
                      language_level='JDK_1_{}'.format(self.java_language_level)),
    debug_port=self.get_options().debug_port,
  )

  if not self.context.options.target_specs:
    raise TaskError("No targets specified.")

  abs_target_specs = [
    os.path.join(get_buildroot(), spec) for spec in self.context.options.target_specs
  ]
  configured_workspace = TemplateData(
    targets=json.dumps(abs_target_specs),
    project_path=os.path.join(get_buildroot(), abs_target_specs[0].split(':')[0]),
    idea_plugin_version=IDEA_PLUGIN_VERSION,
    incremental_import=self.get_options().incremental_import,
  )

  # Generate (without merging in any extra components).
  safe_mkdir(os.path.abspath(self.intellij_output_dir))

  def gen_file(template_file_name, **mustache_kwargs):
    # Renders the named mustache template to a temp file and returns its path.
    return self._generate_to_tempfile(
      Generator(pkgutil.get_data(__name__, template_file_name).decode('utf-8'),
                **mustache_kwargs))

  ipr = gen_file(self.project_template, project=configured_project)
  iws = gen_file(self.workspace_template, workspace=configured_workspace)

  self._outstream.write(self.gen_project_workdir.encode('utf-8'))

  # Move the rendered temp files into their final locations.
  shutil.move(ipr, self.project_filename)
  shutil.move(iws, self.workspace_filename)
  return self.project_filename
def change_calculator(cls, options, address_mapper, build_graph, scm=None, workspace=None):
    """Builds a ChangeCalculator from the given options.

    The SCM and workspace are discovered when not supplied explicitly.

    :raises TaskError: If no SCM instance could be resolved.
    """
    active_scm = scm or get_scm()
    if active_scm is None:
        raise TaskError('No SCM available.')
    active_workspace = workspace or ScmWorkspace(active_scm)

    change_kwargs = dict(
        fast=options.fast,
        changes_since=options.changes_since,
        diffspec=options.diffspec,
        include_dependees=options.include_dependees,
        # NB: exclude_target_regexp is a global scope option registered
        # elsewhere
        exclude_target_regexp=options.exclude_target_regexp,
    )
    return ChangeCalculator(active_scm, active_workspace, address_mapper, build_graph,
                            **change_kwargs)
def setup_legacy_graph(pants_ignore_patterns, build_root=None, symbol_table_cls=None,
                       build_ignore_patterns=None, exclude_target_regexps=None):
    """Construct and return the components necessary for LegacyBuildGraph construction.

    :param list pants_ignore_patterns: A list of path ignore patterns for FileSystemProjectTree,
                                       usually taken from the '--pants-ignore' global option.
    :param str build_root: A path to be used as the build root. If None, then default is used.
    :param SymbolTable symbol_table_cls: A SymbolTable class to use for build file parsing, or
                                         None to use the default.
    :param list build_ignore_patterns: A list of paths ignore patterns used when searching for BUILD
                                       files, usually taken from the '--build-ignore' global option.
    :param list exclude_target_regexps: A list of regular expressions for excluding targets.
    :returns: A LegacyGraphHelper wrapping the scheduler, engine, symbol table class and
              change calculator.
    """
    build_root = build_root or get_buildroot()
    scm = get_scm()
    symbol_table_cls = symbol_table_cls or LegacySymbolTable

    project_tree = FileSystemProjectTree(build_root, pants_ignore_patterns)

    # Register "literal" subjects required for these tasks.
    # TODO: Replace with `Subsystems`.
    address_mapper = AddressMapper(symbol_table_cls=symbol_table_cls,
                                   parser_cls=LegacyPythonCallbacksParser,
                                   build_ignore_patterns=build_ignore_patterns,
                                   exclude_target_regexps=exclude_target_regexps)

    # Create a Scheduler containing graph and filesystem tasks, with no installed goals. The
    # LegacyBuildGraph will explicitly request the products it needs.
    tasks = (
        create_legacy_graph_tasks(symbol_table_cls) +
        create_fs_tasks() +
        create_graph_tasks(address_mapper, symbol_table_cls)
    )

    scheduler = LocalScheduler(dict(), tasks, project_tree)
    # TODO: Do not use the cache yet, as it incurs a high overhead.
    engine = LocalSerialEngine(scheduler, Storage.create(), use_cache=False)
    # Change calculation is only possible when an SCM is present.
    change_calculator = EngineChangeCalculator(engine, scm) if scm else None
    return LegacyGraphHelper(scheduler, engine, symbol_table_cls, change_calculator)
def setup_legacy_graph(pants_ignore_patterns, symbol_table_cls=None,
                       build_ignore_patterns=None, exclude_target_regexps=None):
    """Assemble the engine components backing a LegacyBuildGraph.

    :param list pants_ignore_patterns: Path ignore patterns for the FileSystemProjectTree,
      usually the '--pants-ignore' global option value.
    :param SymbolTable symbol_table_cls: Symbol table class for BUILD file parsing; the
      default LegacySymbolTable is used when None.
    :param list build_ignore_patterns: Ignore patterns applied while locating BUILD files,
      usually the '--build-ignore' global option value.
    :param list exclude_target_regexps: Regular expressions of targets to exclude.
    :returns: A LegacyGraphHelper bundling the scheduler, engine, symbol table class and
      change calculator.
    """
    symbol_table_cls = symbol_table_cls or LegacySymbolTable
    project_tree = FileSystemProjectTree(get_buildroot(), pants_ignore_patterns)

    # Register "literal" subjects required for these tasks.
    # TODO: Replace with `Subsystems`.
    address_mapper = AddressMapper(
        symbol_table_cls=symbol_table_cls,
        parser_cls=LegacyPythonCallbacksParser,
        build_ignore_patterns=build_ignore_patterns,
        exclude_target_regexps=exclude_target_regexps)

    # The Scheduler holds graph and filesystem tasks only; no goals are installed
    # because the LegacyBuildGraph explicitly requests the products it needs.
    graph_and_fs_tasks = (create_legacy_graph_tasks(symbol_table_cls) +
                          create_fs_tasks() +
                          create_graph_tasks(address_mapper, symbol_table_cls))
    scheduler = LocalScheduler(dict(), graph_and_fs_tasks, project_tree)

    # TODO: Do not use the cache yet, as it incurs a high overhead.
    engine = LocalSerialEngine(scheduler, Storage.create(), use_cache=False)

    scm = get_scm()
    change_calculator = EngineChangeCalculator(engine, scm) if scm else None
    return LegacyGraphHelper(scheduler, engine, symbol_table_cls, change_calculator)
def __init__(self, address=None, payload=None, image_name=None, image_tag=None, **kwargs):
    """Initialize the docker target, fingerprinting the image name and a commit-derived tag.

    :param address: The Address that maps to this Target in the BuildGraph.
    :param payload: The configuration encapsulated by this target; created when None.
    :param image_name: Name of the docker image.
    :param image_tag: Tag of the docker image.
    """
    payload = payload or Payload()
    # The fingerprinted tag is derived from the current SCM commit, prefixed with 'c'.
    commit_tag = 'c' + get_scm().commit_id
    payload.add_fields({
        'image_name': PrimitiveField(image_name),
        'image_tag': PrimitiveField(commit_tag),
    })
    self.image_name = image_name
    self.image_tag = image_tag
    super(DockerTargetBase, self).__init__(address=address, payload=payload, **kwargs)
def __init__(self, options, run_tracker, target_roots, requested_goals=None, target_base=None,
             build_graph=None, build_file_parser=None, address_mapper=None,
             console_outstream=None, scm=None, workspace=None, spec_excludes=None,
             invalidation_report=None):
    """Create the task execution context, wiring defaults for anything not supplied.

    SCM and workspace default to discovery; the console stream defaults to stdout.

    :API: public
    """
    # spec_excludes is deprecated in favor of address_mapper build_ignore_patterns.
    deprecated_conditional(
        lambda: spec_excludes is not None, '0.0.75',
        'Use address_mapper#build_ignore_patterns instead.')
    self._options = options
    self.build_graph = build_graph
    self.build_file_parser = build_file_parser
    self.address_mapper = address_mapper
    self.run_tracker = run_tracker
    self._log = self.Log(run_tracker)
    self._target_base = target_base or Target
    self._products = Products()
    self._buildroot = get_buildroot()
    self._source_roots = SourceRootConfig.global_instance(
    ).get_source_roots()
    # Process-level lock file guarding concurrent pants runs in this buildroot.
    self._lock = OwnerPrintingPIDLockFile(
        os.path.join(self._buildroot, '.pants.run'))
    self._java_sysprops = None  # Computed lazily.
    self.requested_goals = requested_goals or []
    self._console_outstream = console_outstream or sys.stdout
    self._scm = scm or get_scm()
    # Only wrap a workspace when an SCM actually exists.
    self._workspace = workspace or (ScmWorkspace(self._scm)
                                    if self._scm else None)
    self._spec_excludes = spec_excludes
    self._replace_targets(target_roots)
    self._invalidation_report = invalidation_report
def __init__(self, address=None, payload=None, binary=None, image_name=None,
             image_tags=None, base_image=None, dockerfile=None, **kwargs):
    """
    :param address: The Address that maps to this Target in the BuildGraph.
    :type address: :class:`pants.build_graph.address.Address`
    :param payload: The configuration encapsulated by this target. Also in charge of
      most fingerprinting details.
    :type payload: :class:`pants.base.payload.Payload`
    :param string binary: Target spec of the ``jvm_binary`` or the ``python_binary``
      that contains the app main.
    :param image_name: name of docker image; defaults to the basename of the address'
      spec path.
    :type image_name: str
    :param image_tags: tags of the docker image; a commit-derived tag is always appended.
    :type image_tags: str[]
    :param dockerfile: custom docker file
    :type dockerfile: str
    """
    # Bug fix: copy the caller's list before appending. The previous code appended
    # the commit tag directly to the `image_tags` argument, mutating the caller's
    # (possibly shared or BUILD-file-literal) list on every construction.
    tags = list(image_tags or [])
    # Tag derived from the first 8 chars of the current SCM commit, prefixed with 'c'.
    tags.append('c' + get_scm().commit_id[:8])
    payload = payload or Payload()
    payload.add_fields({
        'binary': PrimitiveField(binary),
        'image_name': PrimitiveField(image_name or basename(address.spec_path)),
        'image_tags': PrimitiveField(maybe_list(tags)),
        'base_image': PrimitiveField(base_image or "ubuntu:18.04"),
        'dockerfile': PrimitiveField(dockerfile)
    })
    super(DockerTargetBase, self).__init__(
        address=address, payload=payload, **kwargs)
def generate_project(self):
    """Render the IntelliJ project (.ipr) and workspace (.iws) files and move them into place.

    :returns: The path to the generated project file.
    :raises TaskError: If no target specs were supplied on the command line.
    """
    outdir = os.path.abspath(self.intellij_output_dir)
    if not os.path.exists(outdir):
        os.makedirs(outdir)

    # NOTE(review): get_scm() can return None outside a repo, which would make
    # `scm.worktree` below raise — presumably an SCM is always present here; confirm.
    scm = get_scm()
    configured_project = TemplateData(
        root_dir=get_buildroot(),
        outdir=outdir,
        git_root=scm.worktree,
        java=TemplateData(
            encoding=self.java_encoding,
            jdk=self.java_jdk,
            language_level='JDK_1_{}'.format(self.java_language_level)
        ),
        debug_port=self.get_options().debug_port,
    )

    if not self.context.options.target_specs:
        raise TaskError("No targets specified.")

    # Absolutize the specs so the IntelliJ plugin can resolve them regardless of cwd.
    abs_target_specs = [os.path.join(get_buildroot(), spec)
                        for spec in self.context.options.target_specs]
    configured_workspace = TemplateData(
        targets=json.dumps(abs_target_specs),
        project_path=os.path.join(get_buildroot(), abs_target_specs[0].split(':')[0]),
        idea_plugin_version=IDEA_PLUGIN_VERSION,
        incremental_import=self.get_options().incremental_import,
    )

    # Generate (without merging in any extra components).
    safe_mkdir(os.path.abspath(self.intellij_output_dir))
    ipr = self._generate_to_tempfile(
        Generator(pkgutil.get_data(__name__, self.project_template), project=configured_project))
    iws = self._generate_to_tempfile(
        Generator(pkgutil.get_data(__name__, self.workspace_template), workspace=configured_workspace))

    # Report the workdir before installing the rendered files.
    self._outstream.write(self.gen_project_workdir)

    shutil.move(ipr, self.project_filename)
    shutil.move(iws, self.workspace_filename)

    return self.project_filename
def _find_locally_changed_targets(self, sources_by_target):
    """Determine which targets own sources with local (uncommitted) modifications.

    :param sources_by_target: Mapping from target to the sources it owns.
    :returns: A list of affected targets, or None when no SCM is available.
    """
    scm = get_scm()
    if not scm:
        return None

    # Invert the mapping to src -> [targets]. There should only be one target per
    # source, but that's not yet a hard requirement, hence the list values.
    # TODO(benjy): Might this inverse mapping be needed elsewhere too?
    owning_targets = defaultdict(list)
    for target, sources in sources_by_target.items():
        for source in sources:
            owning_targets[source].append(target)

    affected = OrderedSet()
    for path in scm.changed_files(include_untracked=True, relative_to=get_buildroot()):
        affected.update(owning_targets.get(path, []))
    return list(affected)
def _find_locally_changed_targets(self, sources_by_target):
    """Find the targets whose sources have been modified locally.

    :param sources_by_target: Mapping from target to the sources it owns.
    :returns: A list of targets, or None if no SCM is available.
    """
    scm = get_scm()
    if not scm:
        return None

    # Build the inverse src -> [targets] mapping. There should only be one target
    # per source, but that's not yet a hard requirement, so values are lists.
    # TODO(benjy): Might this inverse mapping be needed elsewhere too?
    targets_for = defaultdict(list)
    for tgt, srcs in sources_by_target.items():
        for src in srcs:
            targets_for[src].append(tgt)

    result = OrderedSet()
    for changed in scm.changed_files(include_untracked=True):
        result.update(targets_for.get(changed, []))
    return list(result)
def change_calculator(cls, options, address_mapper, build_graph, scm=None, workspace=None,
                      spec_excludes=None):
    """Creates a ChangeCalculator wired to the provided (or discovered) SCM.

    :raises TaskError: When no SCM can be resolved.
    """
    # spec_excludes is on its way out; warn any caller still passing it.
    deprecated_conditional(lambda: spec_excludes is not None, '0.0.75',
                           'Use address_mapper#build_ignore_patterns instead.')

    active_scm = scm or get_scm()
    if active_scm is None:
        raise TaskError('No SCM available.')
    active_workspace = workspace or ScmWorkspace(active_scm)

    return ChangeCalculator(active_scm,
                            active_workspace,
                            address_mapper,
                            build_graph,
                            options.include_dependees,
                            fast=options.fast,
                            changes_since=options.changes_since,
                            diffspec=options.diffspec,
                            # NB: exclude_target_regexp is a global scope option registered
                            # elsewhere
                            exclude_target_regexp=options.exclude_target_regexp,
                            spec_excludes=spec_excludes)
def __init__(self, config, options, run_tracker, target_roots, requested_goals=None, log=None,
             target_base=None, build_graph=None, build_file_parser=None, address_mapper=None,
             console_outstream=None, scm=None, workspace=None, spec_excludes=None):
    """Create the task execution context, wiring defaults for anything not supplied.

    SCM and workspace default to discovery; the console stream defaults to stdout.
    """
    self._config = config
    self._options = options
    self.build_graph = build_graph
    self.build_file_parser = build_file_parser
    self.address_mapper = address_mapper
    self.run_tracker = run_tracker
    self._log = log or Context.Log(run_tracker)
    self._target_base = target_base or Target
    self._products = Products()
    self._buildroot = get_buildroot()
    # Process-level lock file guarding concurrent pants runs in this buildroot.
    self._lock = OwnerPrintingPIDLockFile(os.path.join(self._buildroot, '.pants.run'))
    self._java_sysprops = None  # Computed lazily.
    self.requested_goals = requested_goals or []
    self._console_outstream = console_outstream or sys.stdout
    self._scm = scm or get_scm()
    # Only wrap a workspace when an SCM actually exists.
    self._workspace = workspace or (ScmWorkspace(self._scm) if self._scm else None)
    self._spec_excludes = spec_excludes
    self._target_roots_have_been_accessed = False
    self.replace_targets(target_roots)
def __init__(self, context, workdir, scm=None):
    """Configure jar publishing from config and command-line options.

    Resolves either a local publish repo (--jar-publish-local) or the configured
    remote repos (including any auth credentials), then parses override and
    restart-at jar coordinates.

    :param context: The task context supplying config, options and logging.
    :param workdir: The task working directory (cache lives under it).
    :param scm: Optional SCM override; discovered via get_scm() when None.
    :raises TaskError: On missing repo config or malformed overrides/coordinates.
    """
    super(JarPublish, self).__init__(context, workdir)
    ScmPublish.__init__(self, scm or get_scm(),
                        self.context.config.getlist(
                            JarPublish._CONFIG_SECTION, 'restrict_push_branches'))
    self.cachedir = os.path.join(self.workdir, 'cache')
    self._jvmargs = context.config.getlist(JarPublish._CONFIG_SECTION, 'ivy_jvmargs', default=[])
    if context.options.jar_publish_local:
        # Local publishing: every repo name maps to the same local filesystem repo,
        # and no commit is made.
        local_repo = dict(
            resolver='publish_local',
            path=os.path.abspath(os.path.expanduser(context.options.jar_publish_local)),
            confs=['default'],
            auth=None
        )
        self.repos = defaultdict(lambda: local_repo)
        self.commit = False
        self.snapshot = context.options.jar_publish_local_snapshot
    else:
        self.repos = context.config.getdict(JarPublish._CONFIG_SECTION, 'repos')
        if not self.repos:
            raise TaskError("This repo is not yet set for publishing to the world!"
                            "Please re-run with --publish-local")
        # Resolve credentials for any repo that declares an auth target.
        for repo, data in self.repos.items():
            auth = data.get('auth')
            if auth:
                credentials = context.resolve(auth).next()
                user = credentials.username(data['resolver'])
                password = credentials.password(data['resolver'])
                self.context.log.debug('Found auth for repo=%s user=%s' % (repo, user))
                self.repos[repo]['username'] = user
                self.repos[repo]['password'] = password
        self.commit = context.options.jar_publish_commit
        self.snapshot = False
    self.ivycp = context.config.getlist('ivy', 'classpath')
    self.ivysettings = context.config.get('jar-publish', 'ivy_settings')
    self.dryrun = context.options.jar_publish_dryrun
    self.transitive = context.options.jar_publish_transitive
    self.force = context.options.jar_publish_force

    def parse_jarcoordinate(coordinate):
        # A coordinate is either 'org#name' or a target address whose provides
        # clause supplies the org/name pair.
        components = coordinate.split('#', 1)
        if len(components) == 2:
            org, name = components
            return org, name
        else:
            try:
                address = Address.parse(get_buildroot(), coordinate)  # TODO: This is broken.
                try:
                    target = Target.get(address)
                    if not target:
                        siblings = Target.get_all_addresses(address.buildfile)
                        prompt = 'did you mean' if len(siblings) == 1 else 'maybe you meant one of these'
                        raise TaskError('%s => %s?:\n %s' % (address, prompt,
                                                             '\n '.join(str(a) for a in siblings)))
                    if not target.is_exported:
                        raise TaskError('%s is not an exported target' % coordinate)
                    return target.provides.org, target.provides.name
                except (ImportError, SyntaxError, TypeError):
                    raise TaskError('Failed to parse %s' % address.buildfile.relpath)
            except IOError:
                raise TaskError('No BUILD file could be found at %s' % coordinate)

    self.overrides = {}
    if context.options.jar_publish_override:
        def parse_override(override):
            # Overrides take the form '<coordinate>=<semver>'.
            try:
                coordinate, rev = override.split('=', 1)
                try:
                    rev = Semver.parse(rev)
                except ValueError as e:
                    raise TaskError('Invalid version %s: %s' % (rev, e))
                return parse_jarcoordinate(coordinate), rev
            except ValueError:
                raise TaskError('Invalid override: %s' % override)
        self.overrides.update(parse_override(o) for o in context.options.jar_publish_override)

    self.restart_at = None
    if context.options.jar_publish_restart_at:
        self.restart_at = parse_jarcoordinate(context.options.jar_publish_restart_at)

    context.products.require('jars')
    context.products.require('source_jars')
def setup_legacy_graph(pants_ignore_patterns,
                       workdir,
                       build_file_imports_behavior,
                       build_root=None,
                       native=None,
                       build_file_aliases=None,
                       rules=None,
                       build_ignore_patterns=None,
                       exclude_target_regexps=None,
                       subproject_roots=None,
                       include_trace_on_error=True):
    """Construct and return the components necessary for LegacyBuildGraph construction.

    :param list pants_ignore_patterns: A list of path ignore patterns for FileSystemProjectTree,
                                       usually taken from the '--pants-ignore' global option.
    :param str workdir: The pants workdir.
    :param build_file_imports_behavior: How to behave if a BUILD file being parsed tries to use
                                        import statements. Valid values: "allow", "warn", "error".
    :type build_file_imports_behavior: string
    :param str build_root: A path to be used as the build root. If None, then default is used.
    :param Native native: An instance of the native-engine subsystem.
    :param build_file_aliases: BuildFileAliases to register.
    :type build_file_aliases: :class:`pants.build_graph.build_file_aliases.BuildFileAliases`
    :param list build_ignore_patterns: A list of paths ignore patterns used when searching for BUILD
                                       files, usually taken from the '--build-ignore' global option.
    :param list exclude_target_regexps: A list of regular expressions for excluding targets.
    :param list subproject_roots: Paths that correspond with embedded build roots under
                                  the current build root.
    :param bool include_trace_on_error: If True, when an error occurs, the error message will
                                        include the graph trace.
    :returns: A LegacyGraphHelper wrapping the scheduler, symbol table and change calculator.
    """
    build_root = build_root or get_buildroot()
    scm = get_scm()
    if not build_file_aliases:
        build_file_aliases = EngineInitializer.get_default_build_file_aliases()
    if not rules:
        rules = []

    symbol_table = LegacySymbolTable(build_file_aliases)

    project_tree = FileSystemProjectTree(build_root, pants_ignore_patterns)

    # Register "literal" subjects required for these tasks.
    parser = LegacyPythonCallbacksParser(
        symbol_table,
        build_file_aliases,
        build_file_imports_behavior
    )
    address_mapper = AddressMapper(parser=parser,
                                   build_ignore_patterns=build_ignore_patterns,
                                   exclude_target_regexps=exclude_target_regexps,
                                   subproject_roots=subproject_roots)

    # Load the native backend.
    native = native or Native.create()

    # Create a Scheduler containing graph and filesystem tasks, with no installed goals. The
    # LegacyBuildGraph will explicitly request the products it needs.
    tasks = (
        create_legacy_graph_tasks(symbol_table) +
        create_fs_rules() +
        create_graph_rules(address_mapper, symbol_table) +
        create_process_rules() +
        rules
    )

    scheduler = LocalScheduler(workdir, dict(), tasks, project_tree, native,
                               include_trace_on_error=include_trace_on_error)
    # Change calculation is only possible when an SCM is present.
    change_calculator = EngineChangeCalculator(scheduler, symbol_table, scm) if scm else None
    return LegacyGraphHelper(scheduler, symbol_table, change_calculator)
def generate_project(self, project):
    """Render the IntelliJ project and workspace files for the given exported project.

    :param project: The exported project model (sources, targets, debug port, etc.).
    :returns: The path to the generated project file.
    :raises TaskError: If no target specs were supplied on the command line.
    """
    def create_content_root(source_set):
        # Maps one SourceSet to a TemplateData content root, classifying resource
        # roots as java-resource / java-test-resource for the IDE.
        root_relative_path = os.path.join(source_set.source_base, source_set.path) \
            if source_set.path else source_set.source_base
        if source_set.resources_only:
            if source_set.is_test:
                content_type = 'java-test-resource'
            else:
                content_type = 'java-resource'
        else:
            content_type = ''

        sources = TemplateData(
            path=root_relative_path,
            package_prefix=source_set.path.replace('/', '.') if source_set.path else None,
            is_test=source_set.is_test,
            content_type=content_type
        )

        return TemplateData(
            path=root_relative_path,
            sources=[sources],
            exclude_paths=[os.path.join(source_set.source_base, x) for x in source_set.excludes],
        )

    content_roots = [create_content_root(source_set) for source_set in project.sources]
    if project.has_python:
        content_roots.extend(create_content_root(source_set)
                             for source_set in project.py_sources)

    # Use the highest JVM source level found across the project's targets.
    java_language_level = None
    for target in project.targets:
        if isinstance(target, JvmTarget):
            if java_language_level is None or java_language_level < target.platform.source_level:
                java_language_level = target.platform.source_level
    if java_language_level is not None:
        java_language_level = 'JDK_{0}_{1}'.format(*java_language_level.components[:2])

    outdir = os.path.abspath(self.intellij_output_dir)
    if not os.path.exists(outdir):
        os.makedirs(outdir)

    # NOTE(review): get_scm() can return None outside a repo, which would make
    # `scm.worktree` below raise — presumably an SCM is always present here; confirm.
    scm = get_scm()
    configured_project = TemplateData(
        root_dir=get_buildroot(),
        outdir=outdir,
        git_root=scm.worktree,
        java=TemplateData(
            encoding=self.java_encoding,
            jdk=self.java_jdk,
            language_level='JDK_1_{}'.format(self.java_language_level)
        ),
        resource_extensions=list(project.resource_extensions),
        debug_port=project.debug_port,
        extra_components=[],
        java_language_level=java_language_level,
    )

    if not self.context.options.target_specs:
        raise TaskError("No targets specified.")

    # Absolutize the specs so the IntelliJ plugin can resolve them regardless of cwd.
    abs_target_specs = [os.path.join(get_buildroot(), spec)
                        for spec in self.context.options.target_specs]
    configured_workspace = TemplateData(
        targets=json.dumps(abs_target_specs),
        project_path=os.path.join(get_buildroot(), abs_target_specs[0].split(':')[0]),
        idea_plugin_version=IDEA_PLUGIN_VERSION
    )

    # Generate (without merging in any extra components).
    safe_mkdir(os.path.abspath(self.intellij_output_dir))
    ipr = self._generate_to_tempfile(
        Generator(pkgutil.get_data(__name__, self.project_template), project=configured_project))
    iws = self._generate_to_tempfile(
        Generator(pkgutil.get_data(__name__, self.workspace_template), workspace=configured_workspace))

    # Report the workdir before installing the rendered files.
    self._outstream.write(self.gen_project_workdir)

    shutil.move(ipr, self.project_filename)
    shutil.move(iws, self.workspace_filename)

    return self.project_filename
def setup(self):
    """Bootstrap the pants run: options, plugins, reporting, parsing and the build graph.

    Order matters throughout: logging before backend loading (so backends can log),
    plugin/backend loading before scope gathering, options before subsystems, and
    bootstrap BUILD file parsing before SCM info is recorded.
    """
    options_bootstrapper = OptionsBootstrapper()
    bootstrap_options = options_bootstrapper.get_bootstrap_options()

    # Get logging setup prior to loading backends so that they can log as needed.
    self._setup_logging(bootstrap_options.for_global_scope())

    # Add any extra paths to python path (eg for loading extra source backends)
    for path in bootstrap_options.for_global_scope().pythonpath:
        sys.path.append(path)
        pkg_resources.fixup_namespace_packages(path)

    # Load plugins and backends.
    plugins = bootstrap_options.for_global_scope().plugins
    backend_packages = bootstrap_options.for_global_scope().backend_packages
    build_configuration = load_plugins_and_backends(plugins, backend_packages)

    # Now that plugins and backends are loaded, we can gather the known scopes.
    self.targets = []
    known_scope_infos = [ScopeInfo.for_global_scope()]

    # Add scopes for all needed subsystems.
    subsystems = (set(self.subsystems) |
                  Goal.subsystems() |
                  build_configuration.subsystems())
    for subsystem in subsystems:
        known_scope_infos.append(ScopeInfo(subsystem.options_scope, ScopeInfo.GLOBAL_SUBSYSTEM))

    # Add scopes for all tasks in all goals.
    for goal in Goal.all():
        known_scope_infos.extend(filter(None, goal.known_scope_infos()))

    # Now that we have the known scopes we can get the full options.
    self.options = options_bootstrapper.get_full_options(known_scope_infos)
    self.register_options(subsystems)

    # Make the options values available to all subsystems.
    Subsystem._options = self.options

    # Now that we have options we can instantiate subsystems.
    self.run_tracker = RunTracker.global_instance()
    self.reporting = Reporting.global_instance()
    report = self.reporting.initial_reporting(self.run_tracker)
    self.run_tracker.start(report)
    url = self.run_tracker.run_info.get_info('report_url')
    if url:
        self.run_tracker.log(Report.INFO, 'See a report at: {}'.format(url))
    else:
        self.run_tracker.log(Report.INFO, '(To run a reporting server: ./pants server)')

    self.build_file_parser = BuildFileParser(build_configuration=build_configuration,
                                             root_dir=self.root_dir,
                                             run_tracker=self.run_tracker)

    # A build_file_rev pins BUILD file reads to an SCM revision instead of the
    # working copy.
    rev = self.options.for_global_scope().build_file_rev
    if rev:
        ScmBuildFile.set_rev(rev)
        ScmBuildFile.set_scm(get_scm())
        build_file_type = ScmBuildFile
    else:
        build_file_type = FilesystemBuildFile
    self.address_mapper = BuildFileAddressMapper(self.build_file_parser, build_file_type)

    self.build_graph = BuildGraph(run_tracker=self.run_tracker,
                                  address_mapper=self.address_mapper)

    # TODO(John Sirois): Kill when source root registration is lifted out of BUILD files.
    with self.run_tracker.new_workunit(name='bootstrap', labels=[WorkUnit.SETUP]):
        source_root_bootstrapper = SourceRootBootstrapper.global_instance()
        source_root_bootstrapper.bootstrap(self.address_mapper, self.build_file_parser)

    self._expand_goals_and_specs()

    # Now that we've parsed the bootstrap BUILD files, and know about the SCM system.
    self.run_tracker.run_info.add_scm_info()
def __init__(self, *args, **kwargs):
    """Configure jar publishing from task options.

    Resolves either a local publish repo (--local) or the configured remote repos
    (including any auth credentials), then parses named-snapshot, override and
    restart-at settings.

    :raises TaskError: On missing repo config, conflicting snapshot/override options,
                       or malformed overrides/coordinates.
    """
    super(JarPublish, self).__init__(*args, **kwargs)
    ScmPublish.__init__(self, get_scm(),
                        self.get_options().restrict_push_branches)
    self.cachedir = os.path.join(self.workdir, 'cache')
    self._jvm_options = self.get_options().jvm_options
    if self.get_options().local:
        # Local publishing: every repo name maps to the same local filesystem repo,
        # and no commit is made.
        local_repo = dict(
            resolver='publish_local',
            path=os.path.abspath(os.path.expanduser(self.get_options().local)),
            confs=['default'],
            auth=None
        )
        self.repos = defaultdict(lambda: local_repo)
        self.commit = False
        self.local_snapshot = self.get_options().local_snapshot
    else:
        self.repos = self.get_options().repos
        if not self.repos:
            raise TaskError("This repo is not configured to publish externally! Please configure per\n"
                            "http://pantsbuild.github.io/publish.html#authenticating-to-the-artifact-repository,\n"
                            "or re-run with the '--publish-local' flag.")
        # Resolve credentials for any repo that declares an auth target.
        for repo, data in self.repos.items():
            auth = data.get('auth')
            if auth:
                credentials = next(iter(self.context.resolve(auth)))
                user = credentials.username(data['resolver'])
                password = credentials.password(data['resolver'])
                self.context.log.debug('Found auth for repo=%s user=%s' % (repo, user))
                self.repos[repo]['username'] = user
                self.repos[repo]['password'] = password
        self.commit = self.get_options().commit
        self.local_snapshot = False
    self.named_snapshot = self.get_options().named_snapshot
    if self.named_snapshot:
        self.named_snapshot = Namedver.parse(self.named_snapshot)
    self.dryrun = self.get_options().dryrun
    self.transitive = self.get_options().transitive
    self.force = self.get_options().force

    def parse_jarcoordinate(coordinate):
        # A coordinate is either 'org#name' or a target address whose provides
        # clause supplies the org/name pair.
        components = coordinate.split('#', 1)
        if len(components) == 2:
            org, name = components
            return org, name
        else:
            try:
                # TODO(Eric Ayers) This code is suspect. Target.get() is a very old method and almost certainly broken.
                # Refactor to use methods from BuildGraph or BuildFileAddressMapper
                address = Address.parse(get_buildroot(), coordinate)
                target = Target.get(address)
                if not target:
                    siblings = Target.get_all_addresses(address.build_file)
                    prompt = 'did you mean' if len(siblings) == 1 else 'maybe you meant one of these'
                    raise TaskError('%s => %s?:\n %s' % (address, prompt,
                                                         '\n '.join(str(a) for a in siblings)))
                if not target.is_exported:
                    raise TaskError('%s is not an exported target' % coordinate)
                return target.provides.org, target.provides.name
            except (BuildFile.BuildFileError, BuildFileParser.BuildFileParserError,
                    AddressLookupError) as e:
                raise TaskError('{message}\n  Problem with BUILD file at {coordinate}'
                                .format(message=e, coordinate=coordinate))

    self.overrides = {}
    if self.get_options().override:
        if self.named_snapshot:
            raise TaskError('Options --named-snapshot and --override are mutually exclusive!')

        def parse_override(override):
            # Overrides take the form '<coordinate>=<semver>'.
            try:
                coordinate, rev = override.split('=', 1)
                try:
                    # overrides imply semantic versioning
                    rev = Semver.parse(rev)
                except ValueError as e:
                    raise TaskError('Invalid version %s: %s' % (rev, e))
                return parse_jarcoordinate(coordinate), rev
            except ValueError:
                raise TaskError('Invalid override: %s' % override)
        self.overrides.update(parse_override(o) for o in self.get_options().override)

    self.restart_at = None
    if self.get_options().restart_at:
        self.restart_at = parse_jarcoordinate(self.get_options().restart_at)
def generate_project(self, project):
    """Render the IntelliJ project (.ipr) and module (.iml) files, optionally merging
    existing customized components back in.

    :param project: The exported project model (sources, targets, classpaths, etc.).
    :returns: The path to the generated project file when --open is set, else None.
    """
    def create_content_root(source_set):
        # Maps one SourceSet to a TemplateData content root, classifying resource
        # roots as java-resource / java-test-resource for the IDE.
        root_relative_path = os.path.join(source_set.source_base, source_set.path) \
            if source_set.path else source_set.source_base
        if source_set.resources_only:
            if source_set.is_test:
                content_type = 'java-test-resource'
            else:
                content_type = 'java-resource'
        else:
            content_type = ''

        sources = TemplateData(
            path=root_relative_path,
            package_prefix=source_set.path.replace('/', '.') if source_set.path else None,
            is_test=source_set.is_test,
            content_type=content_type
        )

        return TemplateData(
            path=root_relative_path,
            sources=[sources],
            exclude_paths=[os.path.join(source_set.source_base, x) for x in source_set.excludes],
        )

    content_roots = [create_content_root(source_set) for source_set in project.sources]
    if project.has_python:
        content_roots.extend(create_content_root(source_set)
                             for source_set in project.py_sources)

    scala = None
    if project.has_scala:
        scala = TemplateData(
            language_level=self.scala_language_level,
            maximum_heap_size=self.scala_maximum_heap_size,
            fsc=self.fsc,
            compiler_classpath=project.scala_compiler_classpath
        )

    exclude_folders = []
    if self.get_options().exclude_maven_target:
        exclude_folders += IdeaGen._maven_targets_excludes(get_buildroot())

    exclude_folders += self.get_options().exclude_folders

    # Use the highest JVM source level found across the project's targets.
    java_language_level = None
    for target in project.targets:
        if isinstance(target, JvmTarget):
            if java_language_level is None or java_language_level < target.platform.source_level:
                java_language_level = target.platform.source_level
    if java_language_level is not None:
        java_language_level = 'JDK_{0}_{1}'.format(*java_language_level.components[:2])

    configured_module = TemplateData(
        root_dir=get_buildroot(),
        path=self.module_filename,
        content_roots=content_roots,
        bash=self.bash,
        python=project.has_python,
        scala=scala,
        internal_jars=[cp_entry.jar for cp_entry in project.internal_jars],
        internal_source_jars=[cp_entry.source_jar for cp_entry in project.internal_jars
                              if cp_entry.source_jar],
        external_jars=[cp_entry.jar for cp_entry in project.external_jars],
        external_javadoc_jars=[cp_entry.javadoc_jar for cp_entry in project.external_jars
                               if cp_entry.javadoc_jar],
        external_source_jars=[cp_entry.source_jar for cp_entry in project.external_jars
                              if cp_entry.source_jar],
        annotation_processing=self.annotation_processing_template,
        extra_components=[],
        exclude_folders=exclude_folders,
        java_language_level=java_language_level,
    )

    outdir = os.path.abspath(self.intellij_output_dir)
    if not os.path.exists(outdir):
        os.makedirs(outdir)

    # NOTE(review): get_scm() can return None outside a repo, which would make
    # `scm.worktree` below raise — presumably an SCM is always present here; confirm.
    scm = get_scm()
    configured_project = TemplateData(
        root_dir=get_buildroot(),
        outdir=outdir,
        git_root=scm.worktree,
        modules=[configured_module],
        java=TemplateData(
            encoding=self.java_encoding,
            maximum_heap_size=self.java_maximum_heap_size,
            jdk=self.java_jdk,
            language_level='JDK_1_{}'.format(self.java_language_level)
        ),
        resource_extensions=list(project.resource_extensions),
        scala=scala,
        checkstyle_classpath=';'.join(project.checkstyle_classpath),
        debug_port=project.debug_port,
        annotation_processing=self.annotation_processing_template,
        extra_components=[],
    )

    existing_project_components = None
    existing_module_components = None
    if not self.nomerge:
        # Grab the existing components, which may include customized ones.
        existing_project_components = self._parse_xml_component_elements(self.project_filename)
        existing_module_components = self._parse_xml_component_elements(self.module_filename)

    # Generate (without merging in any extra components).
    safe_mkdir(os.path.abspath(self.intellij_output_dir))
    ipr = self._generate_to_tempfile(
        Generator(pkgutil.get_data(__name__, self.project_template), project=configured_project))
    iml = self._generate_to_tempfile(
        Generator(pkgutil.get_data(__name__, self.module_template), module=configured_module))

    if not self.nomerge:
        # Get the names of the components we generated, and then delete the
        # generated files.  Clunky, but performance is not an issue, and this
        # is an easy way to get those component names from the templates.
        extra_project_components = self._get_components_to_merge(existing_project_components, ipr)
        extra_module_components = self._get_components_to_merge(existing_module_components, iml)
        os.remove(ipr)
        os.remove(iml)

        # Generate again, with the extra components.
        ipr = self._generate_to_tempfile(Generator(pkgutil.get_data(__name__, self.project_template),
                                                   project=configured_project.extend(extra_components=extra_project_components)))
        iml = self._generate_to_tempfile(Generator(pkgutil.get_data(__name__, self.module_template),
                                                   module=configured_module.extend(extra_components=extra_module_components)))

    self.context.log.info('Generated IntelliJ project in {directory}'
                          .format(directory=self.gen_project_workdir))

    shutil.move(ipr, self.project_filename)
    shutil.move(iml, self.module_filename)
    return self.project_filename if self.open else None
def __init__(self, *args, **kwargs):
  """Set up publishing state from pants config and command-line options.

  Configures either a local publish repo (--publish-local) or the externally
  configured repos (including resolving per-repo credentials), and parses any
  version overrides and the optional restart coordinate.

  :raises TaskError: if no repos are configured and --publish-local was not
    supplied, or if an override or jar coordinate cannot be parsed.
  """
  super(JarPublish, self).__init__(*args, **kwargs)
  ScmPublish.__init__(self, get_scm(),
                      self.context.config.getlist(self._CONFIG_SECTION, 'restrict_push_branches'))
  self.cachedir = os.path.join(self.workdir, 'cache')
  self._jvmargs = self.context.config.getlist(self._CONFIG_SECTION, 'ivy_jvmargs', default=[])

  if self.context.options.jar_publish_local:
    # Local publishing: every repo name maps to the same local resolver, and
    # nothing is committed to the SCM.
    local_repo = dict(
      resolver='publish_local',
      path=os.path.abspath(os.path.expanduser(self.context.options.jar_publish_local)),
      confs=['default'],
      auth=None
    )
    self.repos = defaultdict(lambda: local_repo)
    self.commit = False
    self.snapshot = self.context.options.jar_publish_local_snapshot
  else:
    self.repos = self.context.config.getdict(self._CONFIG_SECTION, 'repos')
    if not self.repos:
      raise TaskError("This repo is not configured to publish externally! Please configure per\n"
                      "http://pantsbuild.github.io/publish.html#authenticating-to-the-artifact-repository,\n"
                      "or re-run with the '--publish-local' flag.")
    for repo, data in self.repos.items():
      auth = data.get('auth')
      if auth:
        # Bug fix: was `context.resolve(auth)` — a bare `context` name is not
        # in scope here and raised NameError; the Context lives on `self`.
        credentials = next(iter(self.context.resolve(auth)))
        user = credentials.username(data['resolver'])
        password = credentials.password(data['resolver'])
        self.context.log.debug('Found auth for repo=%s user=%s' % (repo, user))
        self.repos[repo]['username'] = user
        self.repos[repo]['password'] = password
    self.commit = self.context.options.jar_publish_commit
    self.snapshot = False

  self.ivycp = self.context.config.getlist('ivy', 'classpath')
  self.dryrun = self.context.options.jar_publish_dryrun
  self.transitive = self.context.options.jar_publish_transitive
  self.force = self.context.options.jar_publish_force

  def parse_jarcoordinate(coordinate):
    # An 'org#name' coordinate is used verbatim; anything else is treated as
    # a target address and resolved to its provided artifact.
    components = coordinate.split('#', 1)
    if len(components) == 2:
      org, name = components
      return org, name
    else:
      try:
        address = Address.parse(get_buildroot(), coordinate)  # TODO: This is broken.
        try:
          target = Target.get(address)
          if not target:
            siblings = Target.get_all_addresses(address.build_file)
            prompt = 'did you mean' if len(siblings) == 1 else 'maybe you meant one of these'
            raise TaskError('%s => %s?:\n %s' % (address, prompt,
                                                 '\n '.join(str(a) for a in siblings)))
          if not target.is_exported:
            raise TaskError('%s is not an exported target' % coordinate)
          return target.provides.org, target.provides.name
        except (ImportError, SyntaxError, TypeError):
          raise TaskError('Failed to parse %s' % address.build_file.relpath)
      except (IOError, AddressLookupError) as e:
        raise TaskError('{message}\n No BUILD file could be found at {coordinate}'
                        .format(message=e, coordinate=coordinate))

  self.overrides = {}
  if self.context.options.jar_publish_override:
    def parse_override(override):
      # Overrides take the form '<coordinate>=<semver>'.
      try:
        coordinate, rev = override.split('=', 1)
        try:
          rev = Semver.parse(rev)
        except ValueError as e:
          raise TaskError('Invalid version %s: %s' % (rev, e))
        return parse_jarcoordinate(coordinate), rev
      except ValueError:
        raise TaskError('Invalid override: %s' % override)

    self.overrides.update(parse_override(o) for o in self.context.options.jar_publish_override)

  self.restart_at = None
  if self.context.options.jar_publish_restart_at:
    self.restart_at = parse_jarcoordinate(self.context.options.jar_publish_restart_at)
def _get_project_tree(self, build_file_rev):
  """Return the project tree BUILD files are read from for this pants run.

  When ``build_file_rev`` is supplied, build files are read out of the SCM at
  that revision; otherwise they are read from the local filesystem.
  """
  if not build_file_rev:
    return FileSystemProjectTree(self._root_dir)
  return ScmProjectTree(self._root_dir, get_scm(), build_file_rev)
def __init__(self, *args, **kwargs):
  """Set up publishing state from task options.

  Configures either a local publish repo (--local) or the externally
  configured repos (resolving per-repo credentials), plus snapshot naming,
  version overrides, and the optional restart coordinate.

  :raises TaskError: if no repos are configured and --local was not supplied,
    if --named-snapshot and --override are both given, or if an override or
    jar coordinate cannot be parsed.
  """
  super(JarPublish, self).__init__(*args, **kwargs)
  self.cachedir = os.path.join(self.workdir, 'cache')
  self._jvm_options = self.get_options().jvm_options
  self.scm = get_scm()
  self.log = self.context.log

  if self.get_options().local:
    # Local publishing: every repo name maps to the same local resolver, and
    # nothing is committed to the SCM.
    local_repo = dict(
      resolver='publish_local',
      path=os.path.abspath(os.path.expanduser(self.get_options().local)),
      confs=['default'],
      auth=None
    )
    self.repos = defaultdict(lambda: local_repo)
    self.commit = False
    self.local_snapshot = self.get_options().local_snapshot
  else:
    self.repos = self.get_options().repos
    if not self.repos:
      raise TaskError(
        "This repo is not configured to publish externally! Please configure per\n"
        "http://pantsbuild.github.io/publish.html#authenticating-to-the-artifact-repository,\n"
        "by setting --publish-jar-repos=<dict> or re-run with '--publish-jar-local=<dir>'.")
    for repo, data in self.repos.items():
      auth = data.get('auth')
      if auth:
        # Resolve the credentials target and stash username/password on the
        # repo dict for the publish step.
        credentials = next(iter(self.context.resolve(auth)))
        user = credentials.username(data['resolver'])
        password = credentials.password(data['resolver'])
        self.context.log.debug('Found auth for repo={} user={}'.format(repo, user))
        self.repos[repo]['username'] = user
        self.repos[repo]['password'] = password
    self.commit = self.get_options().commit
    self.push_postscript = self.get_options().push_postscript or ''
    self.local_snapshot = False

  self.named_snapshot = self.get_options().named_snapshot
  if self.named_snapshot:
    self.named_snapshot = Namedver.parse(self.named_snapshot)

  self.dryrun = self.get_options().dryrun
  self.transitive = self.get_options().transitive
  self.force = self.get_options().force
  self.publish_changelog = self.get_options().changelog

  def parse_jarcoordinate(coordinate):
    # An 'org#name' coordinate is used verbatim; anything else is treated as
    # a target spec and resolved through the build graph.
    components = coordinate.split('#', 1)
    if len(components) == 2:
      org, name = components
      return org, name
    else:
      spec = components[0]
      address = Address.parse(spec)
      try:
        self.context.build_graph.inject_address_closure(address)
        target = self.context.build_graph.get_target(address)
        if not target:
          # Suggest nearby addresses when the spec doesn't resolve.
          siblings = self.context.address_mapper.addresses_in_spec_path(address.spec_path)
          prompt = 'did you mean' if len(siblings) == 1 else 'maybe you meant one of these'
          raise TaskError('{} => {}?:\n {}'.format(address, prompt,
                                                   '\n '.join(str(a) for a in siblings)))
        if not target.is_exported:
          raise TaskError('{} is not an exported target'.format(coordinate))
        return target.provides.org, target.provides.name
      except (BuildFile.BuildFileError, BuildFileParser.BuildFileParserError,
              AddressLookupError) as e:
        raise TaskError('{message}\n Problem identifying target at {spec}'
                        .format(message=e, spec=spec))

  self.overrides = {}
  if self.get_options().override:
    if self.named_snapshot:
      raise TaskError('Options --named-snapshot and --override are mutually exclusive!')

    def parse_override(override):
      # Overrides take the form '<coordinate>=<version>'.
      try:
        coordinate, rev = override.split('=', 1)
        try:
          # overrides imply semantic versioning
          rev = Semver.parse(rev)
        except ValueError as e:
          raise TaskError('Invalid version {}: {}'.format(rev, e))
        return parse_jarcoordinate(coordinate), rev
      except ValueError:
        raise TaskError('Invalid override: {}'.format(override))

    self.overrides.update(parse_override(o) for o in self.get_options().override)

  self.restart_at = None
  if self.get_options().restart_at:
    self.restart_at = parse_jarcoordinate(self.get_options().restart_at)