def _append_dependent_paths(self, paths, require_semicolon=False):
  """Adds the given paths to this rule's set of dependent paths.

  A dependent path is a file/rule that is required for execution and,
  if changed, will invalidate cached versions of this rule.

  Args:
    paths: A list of paths to depend on.
    require_semicolon: True if all of the given paths require a semicolon
        (so they must be rules).

  Raises:
    NameError: One of the given paths is invalid.
  """
  # Reject malformed names up front so state is never partially mutated.
  util.validate_names(paths, require_semicolon=require_semicolon)
  self._dependent_paths.update(paths)
def testRequireSemicolon(self):
  # Names containing a rule separator (':') are accepted.
  for names in (
      [':a'],
      [':a', ':b'],
      ['C:/a/:b'],
      ['C:\\a\\:b']):
    util.validate_names(names, require_semicolon=True)
  # Names lacking a rule part (or with path separators after the rule
  # separator) must be rejected.
  for names in (
      ['a'],
      [':a', 'b'],
      [':/a'],
      [':\\a'],
      ['C:\\a'],
      ['C:\\a:\\b'],
      [':a/b'],
      ['a:b/a']):
    with self.assertRaises(NameError):
      util.validate_names(names, require_semicolon=True)
def testNames(self):
  # Well-formed names (with or without a rule part) validate cleanly.
  for names in (
      ['a'],
      [':a'],
      ['xx:a'],
      ['/a/b:a'],
      ['/a/b.c:a'],
      ['/a/b.c/:a'],
      ['a', ':b']):
    util.validate_names(names)
  # Non-string or empty entries are a TypeError.
  for names in ([None], [''], [{}]):
    with self.assertRaises(TypeError):
      util.validate_names(names)
  # Leading/trailing whitespace is a NameError.
  for names in ([' a'], ['a '], [' a '], ['a', ' b']):
    with self.assertRaises(NameError):
      util.validate_names(names)
def testEmpty(self):
  # Both a missing list and an empty list are accepted as no-ops.
  for empty_value in (None, []):
    util.validate_names(empty_value)
def execute_async(self, target_rule_names):
  """Executes the given target rules in the context.

  Rules are executed in the order and, where possible, in parallel.

  Args:
    target_rule_names: A list of rule names that are to be executed.

  Returns:
    A Deferred that completes when all rules have completed. If an error
    occurs in any rule an errback will be called.

  Raises:
    KeyError: One of the given target rules was not found in the project.
    NameError: An invalid target rule was given.
    TypeError: An invalid target rule was given.
  """
  # Verify that target rules are valid and exist
  target_rule_names = list(target_rule_names)
  util.validate_names(target_rule_names, require_semicolon=True)
  for rule_name in target_rule_names:
    if not self.project.resolve_rule(rule_name):
      raise KeyError('Target rule "%s" not found in project' % (rule_name))

  # Calculate the sequence of rules to execute
  rule_sequence = self.rule_graph.calculate_rule_sequence(target_rule_names)

  # Shared mutable state captured by the nested closures below.
  # Single-element lists are used so the closures can rebind the values
  # (Python 2 has no 'nonlocal').
  any_failed = [False]
  main_deferred = Deferred()
  remaining_rules = rule_sequence[:]
  in_flight_rules = []
  pumping = [False]  # re-entrancy guard for _pump

  def _issue_rule(rule):
    """Issues a single rule into the current execution context.

    Updates the in_flight_rules list and pumps when the rule completes.

    Args:
      rule: Rule to issue.
    """
    def _rule_callback(*args, **kwargs):
      in_flight_rules.remove(rule)
      _pump(previous_succeeded=True)
    def _rule_errback(exception=None, *args, **kwargs):
      in_flight_rules.remove(rule)
      # TODO(benvanik): log result/exception/etc?
      if exception:  # pragma: no cover
        print exception
      any_failed[0] = True
      _pump(previous_succeeded=False)
    in_flight_rules.append(rule)
    rule_deferred = self._execute_rule(rule)
    rule_deferred.add_callback_fn(_rule_callback)
    rule_deferred.add_errback_fn(_rule_errback)
    return rule_deferred

  def _pump(previous_succeeded=True):
    """Attempts to run another rule and signals the main_deferred if done.

    Args:
      previous_succeeded: Whether the previous rule succeeded.
          NOTE(review): currently unused in the body below.
    """
    # If we're already done, gracefully exit
    if main_deferred.is_done():
      return
    # If we failed and we are supposed to stop, gracefully stop by
    # killing all future rules
    # This is better than terminating immediately, as it allows legit tasks
    # to finish
    if any_failed[0] and self.stop_on_error:
      remaining_rules[:] = []
      # TODO(benvanik): better error message
      main_deferred.errback()
      return
    # Rule deferreds may fire synchronously inside _issue_rule and re-enter
    # _pump; bail if a pump is already running on this stack.
    if pumping[0]:
      return
    pumping[0] = True
    # Scan through all remaining rules - if any are unblocked, issue them
    to_issue = []
    for i in range(0, len(remaining_rules)):
      next_rule = remaining_rules[i]
      # Ignore if any dependency on any rule before it in the list
      skip_rule = False
      if i:
        for old_rule in remaining_rules[:i]:
          if self.rule_graph.has_dependency(next_rule.path, old_rule.path):
            # Blocked on previous rule
            skip_rule = True
            break
      if skip_rule:
        continue
      # Ignore if any dependency on an in-flight rule
      for in_flight_rule in in_flight_rules:
        if self.rule_graph.has_dependency(next_rule.path,
                                          in_flight_rule.path):
          # Blocked on a previous rule, so pass and wait for the next pump
          skip_rule = True
          break
      if skip_rule:
        continue
      # If here then we found no conflicting rules, queue for running
      to_issue.append(next_rule)
    # Run all rules that we can
    # (removal is done in a separate pass so the scan above sees a stable
    # remaining_rules list)
    for rule in to_issue:
      remaining_rules.remove(rule)
    for rule in to_issue:
      _issue_rule(rule)
    if (not len(remaining_rules) and
        not len(in_flight_rules) and
        not main_deferred.is_done()):
      assert not len(remaining_rules)
      # Done!
      # TODO(benvanik): better errbacks? some kind of BuildResults?
      if not any_failed[0]:
        # Only save the cache when we have succeeded
        # This causes some stuff to be rebuilt in failure cases, but prevents
        # a lot of weirdness when things are partially broken
        self.cache.save()
        main_deferred.callback()
      else:
        main_deferred.errback()
    pumping[0] = False
    # Keep the queue pumping
    if not len(in_flight_rules) and len(remaining_rules):
      _pump()

  # Kick off execution (once for each rule as a heuristic for filling the
  # pipeline)
  for rule in rule_sequence:
    _pump()
  return main_deferred
def execute_async(self, target_rule_names): """Executes the given target rules in the context. Rules are executed in the order and, where possible, in parallel. Args: target_rule_names: A list of rule names that are to be executed. Returns: A Deferred that completes when all rules have completed. If an error occurs in any rule an errback will be called. Raises: KeyError: One of the given target rules was not found in the project. NameError: An invalid target rule was given. TypeError: An invalid target rule was given. """ # Verify that target rules are valid and exist target_rule_names = list(target_rule_names) util.validate_names(target_rule_names, require_semicolon=True) for rule_name in target_rule_names: if not self.project.resolve_rule(rule_name): raise KeyError('Target rule "%s" not found in project' % (rule_name)) # Calculate the sequence of rules to execute rule_sequence = self.rule_graph.calculate_rule_sequence( target_rule_names) remaining_rules = rule_sequence[:] def _issue_rule(rule, deferred=None): """Issues a single rule into the current execution context. Updates the in_flight_rules list and pumps when the rule completes. Args: rule: Rule to issue. deferred: Deferred to wait on before executing the rule. """ def _rule_callback(*args, **kwargs): remaining_rules.remove(rule) def _rule_errback(exception=None, *args, **kwargs): remaining_rules.remove(rule) if self.stop_on_error: self.error_encountered = True # TODO(benvanik): log result/exception/etc? if exception: # pragma: no cover print exception # All RuleContexts should be created by the time this method is called. 
assert self.rule_contexts[rule.path] rule_deferred = self.rule_contexts[rule.path].deferred rule_deferred.add_callback_fn(_rule_callback) rule_deferred.add_errback_fn(_rule_errback) def _execute(*args, **kwargs): self._execute_rule(rule) def _on_failure(*args, **kwards): self._execute_rule(rule) if deferred: deferred.add_callback_fn(_execute) deferred.add_errback_fn(_on_failure) else: _execute() return rule_deferred def _chain_rule_execution(target_rules): """Given a list of target rules, build them and all dependencies. This method builds the passed in target rules and all dependencies. It first assembles a list of the dependencies to target rules orded as: [dependencies -> target_rules] It then traverses the list, issuing execute commands for all rules that do not have dependencies within the list. For all rules that do have dependencies within the list, a deferred is used to trigger the rule's exeution once all dependencies have completed executing. Args: target_rules: A list of rules to be executed. Returns: A deferred that resolves once all target_rules have either executed successfully or failed. """ issued_rules = [] all_deferreds = [] for rule in target_rules: # Create the RuleContexts here so that failures can cascade and the # deferred is accessible by any rules that depend on this one. rule_ctx = rule.create_context(self) self.rule_contexts[rule.path] = rule_ctx # Make the execution of the current rule dependent on the execution # of all rules it depends on. 
dependent_deferreds = [] for executable_rule in issued_rules: if self.rule_graph.has_dependency(rule.path, executable_rule.path): executable_ctx = self.rule_contexts[ executable_rule.path] dependent_deferreds.append(executable_ctx.deferred) if dependent_deferreds: dependent_deferred = async .gather_deferreds( dependent_deferreds, errback_if_any_fail=True) all_deferreds.append(_issue_rule(rule, dependent_deferred)) else: all_deferreds.append(_issue_rule(rule)) return async .gather_deferreds(all_deferreds, errback_if_any_fail=True) return _chain_rule_execution(rule_sequence)
def execute_async(self, target_rule_names): """Executes the given target rules in the context. Rules are executed in the order and, where possible, in parallel. Args: target_rule_names: A list of rule names that are to be executed. Returns: A Deferred that completes when all rules have completed. If an error occurs in any rule an errback will be called. Raises: KeyError: One of the given target rules was not found in the project. NameError: An invalid target rule was given. TypeError: An invalid target rule was given. """ # Verify that target rules are valid and exist target_rule_names = list(target_rule_names) util.validate_names(target_rule_names, require_semicolon=True) for rule_name in target_rule_names: if not self.project.resolve_rule(rule_name): raise KeyError('Target rule "%s" not found in project' % (rule_name)) # Calculate the sequence of rules to execute rule_sequence = self.rule_graph.calculate_rule_sequence(target_rule_names) remaining_rules = rule_sequence[:] def _issue_rule(rule, deferred=None): """Issues a single rule into the current execution context. Updates the in_flight_rules list and pumps when the rule completes. Args: rule: Rule to issue. deferred: Deferred to wait on before executing the rule. """ def _rule_callback(*args, **kwargs): remaining_rules.remove(rule) def _rule_errback(exception=None, *args, **kwargs): remaining_rules.remove(rule) if self.stop_on_error: self.error_encountered = True # TODO(benvanik): log result/exception/etc? if exception: # pragma: no cover print exception # All RuleContexts should be created by the time this method is called. 
assert self.rule_contexts[rule.path] rule_deferred = self.rule_contexts[rule.path].deferred rule_deferred.add_callback_fn(_rule_callback) rule_deferred.add_errback_fn(_rule_errback) def _execute(*args, **kwargs): self._execute_rule(rule) def _on_failure(*args, **kwards): self._execute_rule(rule) if deferred: deferred.add_callback_fn(_execute) deferred.add_errback_fn(_on_failure) else: _execute() return rule_deferred def _chain_rule_execution(target_rules): """Given a list of target rules, build them and all dependencies. This method builds the passed in target rules and all dependencies. It first assembles a list of the dependencies to target rules orded as: [dependencies -> target_rules] It then traverses the list, issuing execute commands for all rules that do not have dependencies within the list. For all rules that do have dependencies within the list, a deferred is used to trigger the rule's exeution once all dependencies have completed executing. Args: target_rules: A list of rules to be executed. Returns: A deferred that resolves once all target_rules have either executed successfully or failed. """ issued_rules = [] all_deferreds = [] for rule in target_rules: # Create the RuleContexts here so that failures can cascade and the # deferred is accessible by any rules that depend on this one. rule_ctx = rule.create_context(self) self.rule_contexts[rule.path] = rule_ctx # Make the execution of the current rule dependent on the execution # of all rules it depends on. 
dependent_deferreds = [] for executable_rule in issued_rules: if self.rule_graph.has_dependency(rule.path, executable_rule.path): executable_ctx = self.rule_contexts[executable_rule.path] dependent_deferreds.append(executable_ctx.deferred) if dependent_deferreds: dependent_deferred = async.gather_deferreds(dependent_deferreds, errback_if_any_fail=True) all_deferreds.append(_issue_rule(rule, dependent_deferred)) else: all_deferreds.append(_issue_rule(rule)) return async.gather_deferreds(all_deferreds, errback_if_any_fail=True) return _chain_rule_execution(rule_sequence)