def get(self):
    """Handle GET: return all pipeline definitions plus their run history as JSON.

    Scans the workspace for pipeline YAML files, validates each one,
    attaches up to ``history_limit`` recent run records, and writes a
    JSON document with the pipelines sorted alphabetically by slug.
    Invalid definitions are reported inline via ``_error`` entries.
    """
    log.debug('Get pipelines')
    workspace = self.settings['workspace_path']
    title = self.settings['title']
    log.debug('Getting all pipelines')
    tasks = []
    for path in _file_iterator(workspace, extensions=PIPELINES_EXT):
        full_path = os.path.join(workspace, path)
        # Validate first; invalid pipelines become error entries instead
        # of aborting the whole listing.
        try:
            Pipeline.from_yaml(full_path)
        except (PipelineError, SchemaError) as e:
            tasks.append({
                '_error': 'Invalid pipeline definition: %s' % e,
                '_filepath': path
            })
            continue
        # Read the raw YAML so it can be returned verbatim to the client.
        try:
            with open(full_path) as f:
                yaml_string = f.read()
        except IOError:
            log.error(
                'Can not read pipelines, file missing: {}'.format(path))
            continue
        try:
            pipeline_def = yaml.safe_load(yaml_string)
        except YAMLError:
            log.error(
                'Skipping pipeline. Could not load yaml for: {}'.format(
                    path))
            continue
        slug = _slugify_file(path)
        # Separate name for the run-history directory; the original
        # reused `full_path` for two different meanings.
        run_path = os.path.join(workspace, slug)
        run_dict = {
            'slug': slug,
            'run_ids': [],
            'runs': [],
            'definition': pipeline_def,
            'raw': yaml_string
        }
        if os.path.isdir(run_path):
            # The slug directory exists, so we expect run history there.
            ids = list(
                _run_id_iterator(run_path, self.settings['history_limit']))
            log.info('pipeline %s run history: %s', run_path, len(ids))
            runs = _fetch_runs(run_path, ids)
            run_dict['run_ids'] = ids
            run_dict['runs'] = runs
        tasks.append(run_dict)
    # Sort the pipelines alphabetically. Error entries carry no 'slug',
    # so default to '' — comparing None against str would raise
    # TypeError on Python 3.
    sorted_tasks = sorted(tasks, key=lambda x: x.get('slug', ''))
    pipelines = {'title': title, 'tasks': sorted_tasks}
    self.write(json.dumps(pipelines, indent=2))
    self.finish()
def load(self, pipeline_filepath, folder_path, params=None):
    """Load a pipeline from YAML, wiring its status/log files into folder_path.

    :param pipeline_filepath: path to the pipeline YAML definition
    :param folder_path: directory that receives status.json and output.log
    :param params: optional dict of extra params; overrides the defaults
    """
    base_params = {
        'status_file': os.path.join(folder_path, 'status.json'),
        'log_file': os.path.join(folder_path, 'output.log')
    }
    # `params=None` instead of a mutable default `{}`: a shared default
    # dict would leak state between calls if it were ever mutated.
    if params:
        base_params.update(params)
    self.pipe = Pipeline.from_yaml(pipeline_filepath, base_params)
def test_load_sample_pipelines(self):
    """Every sample pipeline returned by _get_pipelines must validate."""
    for pipeline_path in self._get_pipelines():
        try:
            Pipeline.from_yaml(pipeline_path, {})
        except (SchemaError, PipelineError) as e:
            print(e)
            # self.fail() is the idiomatic unittest way to report a
            # failure with a message; assertTrue(False, ...) obscures it.
            self.fail('Pipeline failed to validate: {}'.format(pipeline_path))
def run(self, pipeline_file, folder_path):
    """Execute the pipeline in pipeline_file, writing status and log files
    into folder_path, and return the run result."""
    run_params = {
        'status_file': os.path.join(folder_path, 'status.json'),
        'log_file': os.path.join(folder_path, 'output.log'),
    }
    pipeline = Pipeline.from_yaml(pipeline_file, params=run_params)
    return pipeline.run()
def test_variable_passing(self):
    """JSON emitted by one action is exposed to the next via prev_result."""
    actions = [
        'echo \'{"test22": 1}\'',
        'echo "{{ prev_result.return_obj.test22 }}"',
    ]
    pipeline = Pipeline.form_dict({'actions': actions})
    res = pipeline.run()
    self.assertEqual(res['status'], PIPELINE_STATUS_OK)
    # The second action rendered the value parsed from the first.
    self.assertEqual(res['results'][1].data['output'].strip(), "1")
def test_ignore_errors_false(self):
    """A failing action with ignore_errors=False aborts the pipeline."""
    failing_action = {
        'type': 'bash',
        'cmd': 'echo test && exit 1',
        'ignore_errors': False
    }
    pipeline = Pipeline.form_dict({'actions': [failing_action, 'echo second']})
    res = pipeline.run()
    self.assertEqual(res['status'], PIPELINE_STATUS_FAIL)
    self.assertEqual(res['results'][0].status, EXECUTION_FAILED)
    # Only one result: the second action never ran.
    self.assertEqual(len(res['results']), 1)
def test_ignore_errors(self):
    """A failing action with ignore_errors=True lets the pipeline continue."""
    failing_action = {
        'type': 'bash',
        'cmd': 'echo test && exit 1',
        'ignore_errors': True
    }
    pipeline = Pipeline.form_dict({'actions': [failing_action, 'echo second']})
    res = pipeline.run()
    self.assertEqual(res['status'], PIPELINE_STATUS_OK)
    # Both actions ran and both are reported successful.
    self.assertEqual(len(res['results']), 2)
    self.assertEqual(res['results'][0].status, EXECUTION_SUCCESSFUL)
    self.assertEqual(res['results'][1].status, EXECUTION_SUCCESSFUL)
def test_timeout_in_pipeline_def_timeouts(self):
    """An action exceeding its 'timeout' setting fails the pipeline promptly."""
    pipeline_def = {
        'actions': [{
            'type': 'bash',
            'cmd': 'sleep 2',
            'timeout': 1
        }],
    }
    start = datetime.now()
    pipeline = Pipeline.form_dict(pipeline_def)
    res = pipeline.run()
    seconds = (datetime.now() - start).total_seconds()
    # Single-arg print() prints identically on Python 2 and 3; the
    # original bare `print res` is a SyntaxError on Python 3.
    print(res)
    print(seconds)
    self.assertEqual(res['status'], PIPELINE_STATUS_FAIL)
    # Killed by the 1s timeout, well before the 2s sleep completes.
    self.assertLess(seconds, 2)
def test_variable_passing_python(self):
    """JSON printed by a python action is exposed to the next via prev_result."""
    # The script body is runtime data executed by the python action type;
    # it is reproduced verbatim (it uses a Python 2 print statement —
    # presumably the action runner is Python 2; verify against the runner).
    python_action = {
        'type': 'python',
        'script': '''
import json
a = {'testpy': 'tdd', 'array': [1,2,3]}
print json.dumps(a)
'''
    }
    actions = [python_action, 'echo "{{ prev_result.return_obj.testpy }}"']
    pipeline = Pipeline.form_dict({'actions': actions})
    res = pipeline.run()
    self.assertEqual(res['status'], PIPELINE_STATUS_OK)
    self.assertEqual(res['results'][1].data['output'].strip(), "tdd")
def test_get_pipelines(self):
    """Every sample pipeline loads into a truthy Pipeline object."""
    for pipeline_path in self._get_pipelines():
        loaded = Pipeline.from_yaml(pipeline_path, {})
        self.assertTrue(loaded)
def test_parsine_minimal(self):
    """A definition with an empty action list still parses into a Pipeline."""
    # NOTE(review): method name has a typo ("parsine" -> "parsing");
    # kept as-is so any external reference to the test id stays valid.
    minimal_def = {'actions': []}
    self.assertIsInstance(Pipeline.form_dict(minimal_def), Pipeline)