def from_yaml(file_path, params=None):
    """Load a pipeline definition from a YAML file and build a Pipeline.

    Args:
        file_path: Path to the YAML pipeline definition file.
        params: Optional dict merged into the definition's ``vars``
            section via ``deepmerge`` (params win on conflicts).

    Returns:
        A Pipeline built via ``Pipeline.form_dict``.

    Raises:
        PipelineError: If ``file_path`` is not a string, does not exist,
            or does not contain valid YAML.
    """
    # BUG FIX: was a mutable default argument (params={}), shared across
    # calls; use the None sentinel instead. The old code already coped
    # with falsy params via `params or {}`, so this is behavior-compatible.
    if params is None:
        params = {}
    # BUG FIX: `basestring` is Python 2 only and is a NameError on
    # Python 3; `str` is the modern equivalent.
    if not isinstance(file_path, str):
        raise PipelineError(
            'Unexpected argument type %s expecting string' % type(file_path))
    if not os.path.exists(file_path):
        raise PipelineError(
            'PipelineDefinition: Pipeline definition file does not exist: %s'
            % file_path)
    with open(file_path) as f:
        try:
            # SECURITY FIX: yaml.load without an explicit Loader can run
            # arbitrary constructors from untrusted files; safe_load only
            # builds plain Python objects, which is all a definition needs.
            pipeline_def = yaml.safe_load(f)
        except yaml.YAMLError as e:
            log.exception(e)
            err_msg = e.problem
            err_msg += ' (line: {})'.format(e.problem_mark.line)
            raise PipelineError(
                'PipelineDefinition: Pipeline definition file is not valid YAML: %s - %s'
                % (file_path, err_msg))
    # Runtime params override / extend the file's own `vars` section.
    pipeline_def['vars'] = deepmerge(pipeline_def.get('vars', {}), params)
    return Pipeline.form_dict(pipeline_def)
def _validate_executor_plugin(self, event_name): if self.plugin_mgr.get_plugin_count(event_name) > 1: raise PipelineError( 'More than one executor plugins with same name {}'.format( event_name)) if self.plugin_mgr.get_plugin_count(event_name) == 0: raise PipelineError( 'Can not find executor plugin for {}'.format(event_name))
def _run_task(self, task):
    """Execute one task through its executor plugin and return the result.

    Fires 'on_task_start' before execution and 'on_task_finish' after.
    Unexpected executor exceptions are converted into a failed TaskResult
    so the pipeline can report the failure instead of crashing.

    Args:
        task: Task with ``name``, ``executor`` and ``args`` attributes.

    Returns:
        The first TaskResult produced by the executor plugin.

    Raises:
        PipelineError: If the executor plugin is missing/duplicated, or it
            produced no usable result.
    """
    log.debug('Starting to execute task {}'.format(task.name))
    self.plugin_mgr.trigger('on_task_start', task)
    event_name = '{}.execute'.format(task.executor)
    self._validate_executor_plugin(event_name)
    try:
        results = self.plugin_mgr.trigger(event_name, task.args)  # Run the task
    except KeyboardInterrupt:
        # Let Ctrl-C propagate so the whole pipeline can be aborted.
        raise
    except Exception as e:
        log.warning('Unexpected error running task: %s' % e)
        log.exception(e)
        results = [TaskResult(EXECUTION_FAILED, 'Unknown Error: %s' % e)]
    # BUG FIX: the original indexed results[0] unconditionally, so a plugin
    # returning an empty list raised IndexError instead of the intended
    # PipelineError below.
    result = results[0] if results else None
    if not result:
        raise PipelineError('Result still missing %s' % result)
    self.plugin_mgr.trigger('on_task_finish', task, result)
    log.debug('Task finished. Result: %s' % result)
    return result
def make_app(cookie_secret=None, workspace='fixtures/workspace',
             title='Pipelines', auth=None):
    """Create the Tornado Application serving the pipelines API and UI.

    Args:
        cookie_secret: Secret used to sign secure cookies (required).
        workspace: Directory containing pipeline definitions.
        title: Application title.
        auth: Auth configuration handed to _get_auth_dict.

    Returns:
        A configured Application.

    Raises:
        PipelineError: If cookie_secret is missing.
        PipelinesError: If workspace is not a directory.
    """
    if cookie_secret is None:
        raise PipelineError('Cookie secret can not be empty')
    if not os.path.isdir(workspace):
        raise PipelinesError('Workspace is not a valid directory: %s' % workspace)
    auth_dict = _get_auth_dict(auth)
    # BUG FIX: '\-' in a non-raw string is an invalid escape sequence
    # (DeprecationWarning now, SyntaxError in future Python). The raw string
    # has the identical runtime value, so route matching is unchanged.
    slug_regexp = r'[0-9a-zA-Z_\-]+'
    endpoints = [
        url(r"/api/pipelines/", GetPipelinesHandler),
        url(r"/api/pipelines/({slug})/".format(slug=slug_regexp),
            GetPipelineHandler),
        url(r"/api/pipelines/({slug})/run".format(slug=slug_regexp),
            RunPipelineHandler),
        url(r"/api/pipelines/({slug})/({slug})/status".format(slug=slug_regexp),
            GetStatusHandler),
        url(r"/api/pipelines/({slug})/({slug})/log".format(slug=slug_regexp),
            GetLogsHandler),
        url(r"/api/pipelines/({slug})/triggers".format(slug=slug_regexp),
            GetTriggersHandler),
        url(r"/api/webhook/({slug})".format(slug=slug_regexp), WebhookHandler),
        url(r"/api/slackbot/({slug})".format(slug=slug_regexp), SlackbotHandler),
        (r"/login", LoginHandler),
        # Catch-all static route must stay last.
        (r'/(.*)', AuthStaticFileHandler,
         {'path': _get_static_path('app'), "default_filename": "index.html"}),
    ]
    if auth_dict and auth_dict.get('type') == 'gh':
        # GitHub OAuth callback, inserted just before the catch-all route.
        # BUG FIX: the original statement ended with a stray trailing comma,
        # turning it into a throwaway one-element tuple expression.
        endpoints.insert(len(endpoints) - 1, (r"/ghauth", GithubOAuth2LoginHandler))
    return Application(endpoints,
                       title=title,
                       workspace_path=workspace,
                       auth=auth_dict,
                       login_url="/login",
                       debug="True",  # NOTE: truthy string; Tornado treats it as debug on
                       cookie_secret=cookie_secret)
def form_dict(definition_dict):
    """Build a Pipeline from an already-parsed definition dictionary.

    The dict must contain an 'actions' key; 'plugins' is optional and
    defaults to an empty list. The full dict is passed along as context.

    Raises:
        PipelineError: If *definition_dict* is not a dict.
    """
    if not isinstance(definition_dict, dict):
        raise PipelineError('Unexpected argument type %s expecting dict'
                            % type(definition_dict))
    return Pipeline(
        definition_dict['actions'],
        definition_dict.get('plugins', []),
        context=definition_dict,
    )
def call(self, *args, **kwargs):
    """Run ``execute`` and verify it produced a TaskResult.

    Returns:
        The TaskResult produced by ``self.execute``.

    Raises:
        PipelineError: If the executor returned any other type.
    """
    outcome = self.execute(*args, **kwargs)
    if isinstance(outcome, TaskResult):
        return outcome
    raise PipelineError(
        'Executor did not return type ExecutionResult, got {}'.format(
            type(outcome)))
def load_tasks(self, tasks):
    """Normalize, validate and register the given task definitions.

    Each entry is normalized, checked against the supported executor
    types, converted to a Task, and appended to ``self.tasks``. Tasks
    without a name get a generated 'Task-<n>' name (1-based position).

    Raises:
        PipelineError: If a task's type has no matching executor.
    """
    for position, raw_task in enumerate(tasks, start=1):
        normalized = self._normalize_task_dict(raw_task)
        if not self._task_executor_valid(normalized['type']):
            raise PipelineError('Unsupported task type: %s' % normalized['type'])
        task_obj = Task.from_dict(normalized)
        if not task_obj.name:
            task_obj.name = 'Task-{}'.format(position)
        self.tasks.append(task_obj)
def form_dict(definition_dict):
    """Validate a pipeline definition dict and build a Pipeline from it.

    After schema validation, the 'vars', 'params' and 'triggers' sections
    are guaranteed to exist (defaulting to empty dicts).

    Raises:
        PipelineError: If *definition_dict* is not a dict.
    """
    # BUG FIX: the type check must run first. The original validated the
    # schema and probed keys on the value *before* confirming it was a
    # dict, so a non-dict argument failed with a TypeError (or schema
    # error) instead of the intended PipelineError.
    if not isinstance(definition_dict, dict):
        raise PipelineError('Unexpected argument type %s expecting dict'
                            % type(definition_dict))
    PIPELINES_SCHEMA.validate(definition_dict)
    for section in ('vars', 'params', 'triggers'):
        definition_dict.setdefault(section, {})
    tasks = definition_dict['actions']
    plugins = definition_dict.get('plugins', [])
    return Pipeline(tasks, plugins, context=definition_dict)
def replace_vars_func(token):
    """Return *token* with every variable placeholder substituted.

    Relies on the enclosing scope's ``pattern`` (a regex with a 'var'
    named group) and ``prepped_vars`` (name -> value mapping). Text
    between matches is preserved verbatim; values are stringified.

    Raises:
        PipelineError: If a referenced variable is not in prepped_vars.
    """
    pieces = []
    cursor = 0
    for match in re.finditer(pattern, token):
        pieces.append(token[cursor:match.start()])
        name = match.group('var')
        if name not in prepped_vars:
            raise PipelineError('Missing variable: {}'.format(name))
        pieces.append(str(prepped_vars[name]))
        cursor = match.end()
    pieces.append(token[cursor:])
    return ''.join(pieces)
# NOTE(review): this chunk begins mid-function — the enclosing `def` and the
# opening `try:` for the import below are outside the visible source.
        m = importlib.import_module(module)
    except ImportError:
        raise PluginError('Could not import plugin {}'.format(plugin_path))
    if not hasattr(m, class_name):
        raise PluginError(
            'Could not find class for plugin {}'.format(plugin_path))
    # NOTE(review): looks like a bug — the attribute checked above is
    # `class_name`, but the attribute fetched here is `plugin_path`;
    # presumably this should be `getattr(m, class_name)`. Confirm callers.
    return getattr(m, plugin_path)


if __name__ == '__main__':
    # Script entry point: run the pipeline YAML given as the sole argument.
    conf_logging()
    if len(sys.argv) != 2:
        raise PipelineError('Wrong number of arguments')
    log_file = None
    if 'LOG_FILE' in os.environ:
        log_file = os.environ['LOG_FILE']
    pipeline_yaml_path = sys.argv[-1]
    params = {}
    if log_file:
        params['log_file'] = log_file
    # NOTE(review): `params` is built above but `params=None` is passed here,
    # so any LOG_FILE setting is silently discarded — almost certainly should
    # be `params=params` (as the sibling variant of this script does).
    pipe = Pipeline.from_yaml(pipeline_yaml_path, params=None)
    pipe.run()
# NOTE(review): this chunk begins mid-function — the enclosing `def` and the
# opening `try:` for the two statements below are outside the visible source.
        module, class_name = plugin_path.rsplit('.', 1)
        m = importlib.import_module(module)
    except ImportError:
        raise PluginError('Could not import plugin {}'.format(plugin_path))
    if not hasattr(m, class_name):
        raise PluginError('Could not find class for plugin {}'.format(plugin_path))
    # NOTE(review): looks like a bug — the attribute checked above is
    # `class_name`, but the attribute fetched here is `plugin_path`;
    # presumably this should be `getattr(m, class_name)`. Confirm callers.
    return getattr(m, plugin_path)


if __name__ == '__main__':
    # Script entry point: run the pipeline YAML given as the sole argument.
    conf_logging()
    if len(sys.argv) != 2:
        raise PipelineError('Wrong number of arguments')
    log_file = None
    if 'LOG_FILE' in os.environ:
        log_file = os.environ['LOG_FILE']
    pipeline_yaml_path = sys.argv[1]
    # NOTE(review): endswith('yaml') rejects the common '.yml' extension —
    # confirm this is intentional.
    if not pipeline_yaml_path or not pipeline_yaml_path.endswith('yaml') or not os.path.exists(pipeline_yaml_path):
        raise PipelineError('Missing pipeline file')
    params = {}
    if log_file:
        params['log_file'] = log_file
    # NOTE(review): unlike the sibling variant, there is no `pipe.run()`
    # after this line — possibly truncated at the chunk boundary; verify.
    pipe = Pipeline.from_yaml(pipeline_yaml_path, params=params)
def make_app(cookie_secret=None, workspace='fixtures/workspace',
             title='Pipelines', auth=None, history_limit=0, prefix='/'):
    """Create the Tornado Application serving the pipelines API and UI.

    Args:
        cookie_secret: Secret used to sign secure cookies (required).
        workspace: Directory containing pipeline definitions.
        title: Application title.
        auth: Auth configuration handed to _get_auth_dict.
        history_limit: Passed through to the Application settings.
        prefix: URL prefix; handlers and the login URL are rewritten via
            add_prefix_to_handlers.

    Returns:
        A configured Application.

    Raises:
        PipelineError: If cookie_secret is missing.
        PipelinesError: If workspace is not a directory.
    """
    if cookie_secret is None:
        raise PipelineError('Cookie secret can not be empty')
    if not os.path.isdir(workspace):
        raise PipelinesError('Workspace is not a valid directory: %s' % workspace)

    auth_dict = _get_auth_dict(auth)
    slug_regexp = '[0-9a-zA-Z_\\-]+'

    def slug_rule(route_fmt, handler):
        # Build a Rule whose path pattern embeds the slug regexp.
        return Rule(PathMatches(route_fmt.format(slug=slug_regexp)), handler)

    endpoints = [
        Rule(PathMatches(r"/api/pipelines/"), GetPipelinesHandler,
             name='api_base'),
        slug_rule(r"/api/pipelines/({slug})/", GetPipelineHandler),
        slug_rule(r"/api/pipelines/({slug})/run", RunPipelineHandler),
        slug_rule(r"/api/pipelines/({slug})/({slug})/status", GetStatusHandler),
        slug_rule(r"/api/pipelines/({slug})/({slug})/log", GetLogsHandler),
        slug_rule(r"/api/pipelines/({slug})/triggers", GetTriggersHandler),
        slug_rule(r"/api/webhook/({slug})", WebhookHandler),
        slug_rule(r"/api/slackbot/({slug})", SlackbotHandler),
        Rule(PathMatches(r"/login"), LoginHandler, name='login'),
        # Catch-all static route must stay last.
        Rule(PathMatches(r'/(.*)'), AuthStaticFileHandler, {
            'path': _get_static_path('app'),
            "default_filename": "index.html"
        }),
    ]

    if auth_dict and auth_dict.get('type') == 'gh':
        # GitHub OAuth callback goes just before the catch-all static route.
        ghauth_rule = Rule(PathMatches('/ghauth'), GithubOAuth2LoginHandler,
                           name='ghauth')
        endpoints.insert(-1, ghauth_rule)

    # prefix support
    login_url, endpoints = add_prefix_to_handlers(prefix, auth_dict, endpoints)

    return Application(
        endpoints,
        title=title,
        workspace_path=workspace,
        auth=auth_dict,
        login_url=login_url,
        debug="True",
        cookie_secret=cookie_secret,
        history_limit=history_limit,
        prefix=prefix.rstrip('/'),
    )