def v2_runner_on_ok(self, result, **kwargs):
    """Event executed after each command when it succeeds.

    Collect the command output and attach it as a file: JUnit results
    go on the job itself, plain output on the current jobstate.  The
    'setup' task and empty output are skipped.
    """
    super(CallbackModule, self).v2_runner_on_ok(result, **kwargs)

    res = result._result
    if 'stdout_lines' in res:
        output = '\n'.join(res['stdout_lines']) + '\n'
    elif 'msg' in res:
        # NOTE(review): if 'msg' is a plain string this join inserts a
        # newline between every character — presumably it is a list of
        # lines here; confirm against the callers.
        output = '\n'.join(res['msg']) + '\n'
    else:
        output = str(res)

    task_name = result._task.get_name()
    if task_name != 'setup' and self._mime_type == 'application/junit':
        dci_file.create(self._dci_context,
                        name=task_name,
                        content=output.encode('UTF-8'),
                        mime=self._mime_type,
                        job_id=self._job_id)
    elif task_name != 'setup' and output != '\n':
        dci_file.create(self._dci_context,
                        name=task_name,
                        content=output.encode('UTF-8'),
                        mime=self._mime_type,
                        jobstate_id=self._current_jobstate_id)
    self._current_step += 1
def main():
    """Ansible module entry point: upload a local file to the DCI server.

    Reads the file at ``path`` and attaches it to ``job_id`` under the
    given ``name`` (defaulting to the path) and ``mime`` type.
    """
    module = AnsibleModule(
        argument_spec=dict(
            state=dict(default='present',
                       choices=['present', 'absent'], type='str'),
            login=dict(required=False, type='str'),
            password=dict(required=False, type='str'),
            path=dict(type='str'),
            name=dict(required=False, type='str'),
            mime=dict(default='text/plain', type='str'),
            job_id=dict(type='str'),
            url=dict(required=False, type='str'),
        ),
    )

    if not requests_found:
        module.fail_json(msg='The python requests module is required')

    login, password, url = get_details(module)
    if not login or not password:
        module.fail_json(msg='login and/or password have not been specified')

    ctx = dci_context.build_dci_context(url, login, password, 'ansible')

    # The display name defaults to the source path unless overridden.
    name = module.params['name'] or module.params['path']

    # Context manager so the file handle is always released (the original
    # open(...).read() leaked the handle).
    with open(module.params['path'], 'r') as f:
        content = f.read()

    dci_file.create(ctx, name=name, content=content,
                    mime=module.params['mime'],
                    job_id=module.params['job_id'])

    module.exit_json(changed=True)
def run(self, state, data=None, context=None):
    """Upload configuration files.

    For every path listed under ``path`` in the plugin configuration,
    read the file and attach it to the last job of *context*.
    """
    if 'path' in self.conf:
        for path in self.conf['path']:
            # Context manager guarantees the handle is closed even if
            # the upload raises (the original open(...).read() leaked it).
            with open(path, 'r') as f:
                content = f.read()
            dci_file.create(context=context, name=path,
                            content=content,
                            job_id=context.last_job_id)
def _send_log_file(self):
    """Push the buffered log to the DCI server, then reset the buffer.

    No-op when no jobstate has been created yet.
    """
    state_id = self._dci_context.last_jobstate_id
    if not state_id:
        return
    log_name = 'chainsaw.log-%s' % self._idx_file
    dci_file.create(self._dci_context, log_name,
                    self._current_log.getvalue(), 'text/plain', state_id)
    # Empty the buffer for the next batch of log records and move on to
    # the next file index.
    self._current_log.truncate(0)
    self._current_log.seek(0)
    self._idx_file += 1
def flush_buffer(output):
    """Send the buffered text (if any) as a file attached to the current
    jobstate, then reset the buffer for reuse.

    ``context``, ``name`` and ``jobstate_id`` come from the enclosing scope.
    """
    if output.tell() == 0:
        # Nothing buffered since the last flush.
        return
    payload = output.getvalue()
    file.create(
        context,
        name=name.encode('utf-8'),
        content=payload.encode('utf-8'),
        mime='text/plain',
        jobstate_id=jobstate_id)
    # Rewind and truncate so the buffer can be filled again.
    output.seek(0)
    output.truncate()
def upload_file(context, path, job_id, mime=None):
    """Upload the file at *path* and attach it to *job_id*.

    :param context: DCI context used for the API call.
    :param path: local path of the file; also used as the remote name.
    :param job_id: job the file is attached to.
    :param mime: MIME type; guessed from the file name when omitted.
    :return: the response object returned by file.create().
    """
    if not mime:
        mime = mimetypes.guess_type(path)[0]
    # Read through a context manager so the handle is always closed
    # (the original open(...).read() leaked the handle).
    with open(path, 'r') as f:
        content = f.read()
    return file.create(context, name=path, content=content,
                       mime=mime, job_id=job_id)
def v2_runner_on_failed(self, result, ignore_errors=False):
    """Event executed after each command when it fails.

    Record a 'failure' jobstate for the job and attach the formatted
    command output to it as a file.
    """
    super(CallbackModule, self).v2_runner_on_failed(result, ignore_errors)
    output = self.format_output(result._result)

    failure = dci_jobstate.create(self._dci_context,
                                  status='failure',
                                  comment='',
                                  job_id=self._job_id).json()
    self._jobstate_id = failure['jobstate']['id']

    dci_file.create(self._dci_context,
                    name=result._task.get_name().encode('UTF-8'),
                    content=output.encode('UTF-8'),
                    mime=self._mime_type,
                    jobstate_id=self._jobstate_id)
def _send_log_file(self):
    """Ship the accumulated log buffer as 'logger.txt' on the current
    jobstate, raising DciLogPushFailure when the server refuses it.

    The buffer is replaced with a fresh one in every case.
    """
    buffered = self._current_log.getvalue()
    state_id = self._current_jobstate_id
    if buffered and state_id:
        resp = dci_file.create(self._dci_context, 'logger.txt',
                               buffered, 'text/plain', state_id)
        # 201 Created is the only acceptable outcome.
        if resp.status_code != 201:
            raise DciLogPushFailure(resp.text)
    # Start over with an empty buffer.
    self._current_log = io.StringIO()
def test_iter(dci_context, job_id):
    """Check that list_files_iter() paginates through every file of a job."""
    expected = ["file_%d" % i for i in range(100)]
    for fname in expected:
        dci_file.create(
            dci_context,
            name=fname,
            content="some content",
            mime="plain/text",
            job_id=job_id,
        )

    seen_names = [f["name"] for f in job.list_files_iter(dci_context, id=job_id)]

    # job already comes with 2 files
    all_files = len(job.list_files(dci_context, id=job_id).json()["files"])
    assert all_files == 100 + 2
    assert len(seen_names) == 100 + 2
    assert len(set(seen_names) - set(expected)) == 2
def v2_runner_on_ok(self, result, **kwargs):
    """Event executed after each command when it succeeds.

    Attach the formatted output either to the job (JUnit results) or to
    the current jobstate (plain output).  The 'setup' task is skipped,
    as is empty plain output.
    """
    super(CallbackModule, self).v2_runner_on_ok(result, **kwargs)
    output = self.format_output(result._result)
    task_name = result._task.get_name()

    # Nothing is recorded for the implicit fact-gathering task.
    if task_name == 'setup':
        return

    if self._mime_type == 'application/junit':
        dci_file.create(self._dci_context,
                        name=task_name.encode('UTF-8'),
                        content=output.encode('UTF-8'),
                        mime=self._mime_type,
                        job_id=self._job_id)
    elif output != '\n':
        dci_file.create(self._dci_context,
                        name=task_name.encode('UTF-8'),
                        content=output.encode('UTF-8'),
                        mime=self._mime_type,
                        jobstate_id=self._jobstate_id)
def create():
    """Schedule a job for the remoteci and seed it with fixtures.

    Creates a JUnit result file on the job, a 'pre-run' jobstate with an
    attached plain-text note, then a 'running' jobstate.  Returns the
    scheduled job document.
    """
    job = api_job.schedule(dci_context_remoteci, topic_id).json()
    job_id = job["job"]["id"]

    # JUnit results hang directly off the job.
    api_file.create(
        dci_context_remoteci,
        name="res_junit.xml",
        content=JUNIT,
        mime="application/junit",
        job_id=job_id,
    )

    pre_run = api_jobstate.create(dci_context_remoteci, "pre-run",
                                  "starting", job_id).json()
    jobstate_id = pre_run["jobstate"]["id"]

    # Plain-text note attached to the pre-run jobstate.
    api_file.create(
        dci_context_remoteci,
        name="pre-run",
        content="pre-run ongoing",
        mime="plain/text",
        jobstate_id=jobstate_id,
    )

    api_jobstate.create(dci_context_remoteci, "running",
                        "starting the build", job_id)
    return job
def v2_runner_on_failed(self, result, ignore_errors=False):
    """Event executed when a command failed.

    Create the final 'failure' jobstate and attach the command output
    (stderr, plus stdout when present) to it.
    """
    super(CallbackModule, self).v2_runner_on_failed(result, ignore_errors)

    stderr = result._result['stderr']
    stdout = result._result['stdout']
    if stdout:
        output = ('Error Output:\n\n%s\n\n'
                  'Standard Output:\n\n%s' % (stderr, stdout))
    else:
        output = stderr

    failure = jobstate.create(self._dci_context,
                              status='failure',
                              comment=self._current_comment,
                              job_id=self._job_id).json()
    self._current_jobstate_id = failure['jobstate']['id']

    if result._task.get_name() != 'setup' and output != '\n':
        dci_file.create(self._dci_context,
                        name=result._task.get_name(),
                        content=output.encode('UTF-8'),
                        mime=self._mime_type,
                        jobstate_id=self._current_jobstate_id)
    self._current_step += 1
def run_tests(context, undercloud_ip, key_filename, remoteci_id,
              user='******', stack_name='overcloud'):
    """Run the job's test suites on the undercloud and report to DCI.

    Verifies the undercloud/overcloud deployments exist, then downloads
    and executes each test that carries a 'url' in its data, uploading
    the resulting result.xml as a JUnit file on the job.  Every phase is
    reported as a jobstate on context.last_job_id.
    """
    # Retrieve the certification_id data field. In order to run
    # the rhcert test suite if enabled. If absent set to empty string.
    data = remoteci.get_data(
        context, remoteci_id, ['certification_id']).json()
    certification_id = data and data.get('certification_id', '')
    # redirect the log messages to the DCI Control Server
    # https://github.com/shazow/urllib3/issues/523
    requests.packages.urllib3.disable_warnings()
    dci_handler = DciHandler(context)
    logger = logging.getLogger('tripleohelper')
    logger.addHandler(dci_handler)
    undercloud = tripleohelper.undercloud.Undercloud(
        hostname=undercloud_ip,
        user=user,
        key_filename=key_filename)
    undercloud.create_stack_user()
    final_status = 'success'
    # stackrc missing means the undercloud deployment never completed;
    # report the failure and stop here.
    if undercloud.run(
            'test -f stackrc',
            user='******',
            success_status=(0, 1,))[1] != 0:
        msg = 'undercloud deployment failure'
        jobstate.create(context, 'failure', msg, context.last_job_id)
        return
    jobstate.create(
        context, 'running', 'Running tripleo-stack-dump',
        context.last_job_id)
    push_stack_details(context, undercloud, stack_name=stack_name)
    # The overcloud rc file is named after the stack (e.g. 'overcloudrc').
    rcfile = stack_name + 'rc'
    if undercloud.run(
            'test -f ' + rcfile,
            user='******',
            success_status=(0, 1,))[1] != 0:
        msg = 'overcloud deployment failure'
        jobstate.create(context, 'failure', msg, context.last_job_id)
        return
    tests = job.list_tests(context, context.last_job_id).json()['tests']
    try:
        for t in tests['tests']:
            # Tests without a download URL cannot be executed remotely.
            if 'url' not in t['data']:
                continue
            jobstate.create(
                context, 'running', 'Running test ' + t['name'],
                context.last_job_id)
            url = t['data']['url']
            undercloud.add_environment_file(
                user='******',
                filename=rcfile)
            undercloud.run('curl -O ' + url, user='******')
            # Export the DCI identifiers so run.sh can report back.
            undercloud.run((
                'DCI_CERTIFICATION_ID=%s '
                'DCI_REMOTECI_ID=%s '
                'DCI_JOB_ID=%s '
                'DCI_OVERCLOUD_STACK_NAME=%s '
                'bash -x run.sh') % (
                    certification_id,
                    remoteci_id,
                    context.last_job_id,
                    stack_name), user='******')
            # Upload the JUnit result produced by the test run.
            with undercloud.open('result.xml', user='******') as fd:
                file.create(
                    context,
                    t['name'],
                    fd.read(),
                    mime='application/junit',
                    job_id=context.last_job_id)
    except Exception:
        msg = traceback.format_exc()
        final_status = 'failure'
        print(msg)
    else:
        msg = 'test(s) success'
    # Flush any pending log records, then record the final jobstate.
    dci_handler.emit(None)
    jobstate.create(context, final_status, msg, context.last_job_id)
# Schedule a new job on the control server and remember its id.
dci_context.job_id = dci_job.schedule(
    dci_context,
    remoteci_id=dci_context.remoteci_id,
    topic_id=dci_context.topic_id).json()['job']['id']
job_full_data = dci_job.get_full_data(dci_context, dci_context.job_id)

# create initial jobstate of pre-run
jobstate = dci_jobstate.create(dci_context, 'pre-run',
                               'Initializing the environment',
                               dci_context.job_id)
# print() call form works on both Python 2 and 3 (the original used
# Python-2-only print statements).
print("This is where we'd do some stuff to init the environment")

# update the jobstate to start the job run
dci_jobstate.create(dci_context, 'running', 'Running the test',
                    dci_context.job_id)
jobstate_id = dci_context.last_jobstate_id

result = execute_testing()

# read our testing log and push to the DCI control server
home = expanduser('~')
with io.open(home + '/.ansible/logs/run.log', encoding='utf-8') as f:
    content = f.read(20 * 1024 * 1024)  # default file size is 20MB
dci_file.create(dci_context, home + '/.ansible/logs/run.log', content,
                'text/plain', jobstate_id)

# Check if our test passed successfully
print("Submit result")
final_status = 'success' if result else 'failure'

# Set final job state based on test pass/fail
dci_jobstate.create(dci_context, final_status, "Job has been processed.",
                    dci_context.job_id)
def main(argv=None):
    """dci-agent entry point: schedule a job and drive it to completion.

    Schedules a job for the configured remoteci/topic, runs the
    provisioning / undercloud / overcloud hooks, runs the tests, always
    attempts the teardown hooks, then records the final jobstate and
    exits 0 on success, 1 on failure.
    """
    logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
    parser = argparse.ArgumentParser()
    parser.add_argument('--topic')
    parser.add_argument('--config', default='/etc/dci/dci_agent.yaml')
    parser.add_argument('--version', action='version',
                        version=('dci-agent %s' % version))
    args = parser.parse_args(argv)
    dci_conf = load_config(args.config)
    ctx = get_dci_context(**dci_conf['auth'])
    # --topic on the command line overrides the configuration file.
    topic_name = args.topic if args.topic else dci_conf['topic']
    topic = dci_topic.get(ctx, topic_name).json()['topic']
    remoteci = dci_remoteci.get(ctx, dci_conf['remoteci']).json()['remoteci']
    r = dci_job.schedule(ctx, remoteci['id'], topic_id=topic['id'])
    # 412 means the scheduler has no job for this remoteci right now.
    if r.status_code == 412:
        logging.info('Nothing to do')
        exit(0)
    elif r.status_code != 201:
        logging.error('Unexpected code: %d' % r.status_code)
        logging.error(r.text)
        exit(1)
    components = dci_job.get_components(ctx,
                                        ctx.last_job_id).json()['components']
    logging.debug(components)
    try:
        prepare_local_mirror(ctx, dci_conf['mirror']['directory'],
                             dci_conf['mirror']['url'], components)
        dci_jobstate.create(ctx, 'pre-run', 'director node provisioning',
                            ctx.last_job_id)
        for c in dci_conf['hooks']['provisioning']:
            dci_helper.run_command(ctx, c, shell=True)
        init_undercloud_host(dci_conf['undercloud_ip'],
                             dci_conf['key_filename'])
        dci_jobstate.create(ctx, 'running', 'undercloud deployment',
                            ctx.last_job_id)
        for c in dci_conf['hooks']['undercloud']:
            dci_helper.run_command(ctx, c, shell=True)
        dci_jobstate.create(ctx, 'running', 'overcloud deployment',
                            ctx.last_job_id)
        for c in dci_conf['hooks']['overcloud']:
            dci_helper.run_command(ctx, c, shell=True)
        dci_tripleo_helper.run_tests(ctx,
                                     undercloud_ip=dci_conf['undercloud_ip'],
                                     key_filename=dci_conf['key_filename'],
                                     remoteci_id=remoteci['id'],
                                     stack_name=dci_conf.get(
                                         'stack_name', 'overcloud'))
        final_status = 'success'
        backtrace = ''
        msg = ''
    except Exception as e:
        final_status = 'failure'
        backtrace = traceback.format_exc()
        msg = str(e)
        pass
    # Teardown should happen even in case of failure and should not make
    # the agent run fail.
    try:
        teardown_commands = dci_conf['hooks'].get('teardown')
        if teardown_commands:
            dci_jobstate.create(ctx, 'post-run', 'teardown',
                                ctx.last_job_id)
            for c in teardown_commands:
                dci_helper.run_command(ctx, c, shell=True)
    except Exception as e:
        # A teardown failure is reported as a file but does not change
        # the job's final status.
        backtrace_teardown = str(e) + '\n' + traceback.format_exc()
        logging.error(backtrace_teardown)
        dci_file.create(ctx, 'backtrace_teardown', backtrace_teardown,
                        mime='text/plain',
                        jobstate_id=ctx.last_jobstate_id)
        pass
    dci_jobstate.create(ctx, final_status, msg, ctx.last_job_id)
    logging.info('Final status: ' + final_status)
    if backtrace:
        logging.error(backtrace)
        dci_file.create(ctx, 'backtrace', backtrace, mime='text/plain',
                        jobstate_id=ctx.last_jobstate_id)
    sys.exit(0 if final_status == 'success' else 1)
def main():
    """Ansible module entry point: manage DCI files.

    Supports listing all files, getting or deleting one by id, and
    creating a file (from inline content or a local path) attached to a
    job or a jobstate.
    """
    module = AnsibleModule(
        argument_spec=dict(
            state=dict(default='present',
                       choices=['present', 'absent'], type='str'),
            # Authentication related parameters
            #
            dci_login=dict(required=False, type='str'),
            dci_password=dict(required=False, type='str'),
            dci_cs_url=dict(required=False, type='str'),
            # Resource related parameters
            #
            id=dict(type='str'),
            content=dict(type='str'),
            path=dict(type='str'),
            name=dict(type='str'),
            job_id=dict(type='str'),
            jobstate_id=dict(type='str'),
            mime=dict(default='text/plain', type='str'),
        ),
    )

    if not dciclient_found:
        module.fail_json(msg='The python dciclient module is required')

    login, password, url = get_details(module)
    if not login or not password:
        module.fail_json(msg='login and/or password have not been specified')

    ctx = dci_context.build_dci_context(url, login, password, 'Ansible')

    # Action required: List all files
    # Endpoint called: /files GET via dci_file.list()
    if module_params_empty(module.params):
        res = dci_file.list(ctx)

    # Action required: Delete the file matching the file id
    # Endpoint called: /files/<file_id> DELETE via dci_file.delete()
    #
    # changed is True when the file existed and was deleted.
    elif module.params['state'] == 'absent':
        if not module.params['id']:
            module.fail_json(msg='id parameter is required')
        res = dci_file.delete(ctx, module.params['id'])

    # Action required: Retrieve file information
    # Endpoint called: /files/<file_id> GET via dci_file.get()
    elif module.params['id']:
        res = dci_file.get(ctx, module.params['id'])

    # Action required: Create a file with the specified content
    # Endpoint called: /files POST via dci_file.create()
    #
    # Attach the file to either a jobstate or a job, using the provided
    # content or the content of the file at path.
    #
    # NOTE: /files does not support PUT, hence no update is possible.
    else:
        if not module.params['job_id'] and not module.params['jobstate_id']:
            module.fail_json(
                msg='Either job_id or jobstate_id must be specified')
        if (not module.params['content'] and not module.params['path']) or \
                (module.params['content'] and module.params['path']):
            module.fail_json(msg='Either content or path must be specified')
        if module.params['content'] and not module.params['name']:
            module.fail_json(msg='name parameter must be specified when content has been specified')

        # The remote name defaults to the source path.
        if module.params['path'] and not module.params['name']:
            name = module.params['path']
        else:
            name = module.params['name']

        if module.params['path']:
            try:
                # Context manager so the handle is always closed (the
                # original open(...).read() leaked it).
                with open(module.params['path'], 'r') as f:
                    content = f.read()
            except IOError:
                module.fail_json(msg='The path specified cannot be read')
        else:
            content = module.params['content']

        kwargs = {'name': name, 'content': content,
                  'mime': module.params['mime']}
        if module.params['job_id']:
            kwargs['job_id'] = module.params['job_id']
        if module.params['jobstate_id']:
            kwargs['jobstate_id'] = module.params['jobstate_id']
        res = dci_file.create(ctx, **kwargs)

    try:
        result = res.json()
        if res.status_code == 404:
            module.fail_json(msg='The resource does not exist')
        if res.status_code == 409:
            result['changed'] = False
        else:
            result['changed'] = True
    except Exception:
        # Narrowed from a bare except: so SystemExit/KeyboardInterrupt
        # are no longer swallowed; a JSON decode failure still yields a
        # minimal 'changed' result.
        result = {}
        result['changed'] = True

    module.exit_json(**result)