def __init__(self, config):
    """Initialize executor state from the supplied config dict.

    Sets up the adapter, in-memory console buffering, per-run console
    state, and a pre-built error console group. Assumes failure by
    default (``exit_code = 1``) until execution proves otherwise.
    """
    # -------
    # Private
    # -------
    self._config = config
    self._shippable_adapter = ShippableAdapter(config)
    self._is_executing = False

    # Consoles
    # --------
    # Console lines are buffered in memory; the lock guards concurrent
    # access (presumably from a flush thread/timer — confirm at call site).
    self._console_buffer = []
    self._console_buffer_lock = threading.Lock()

    # Console state: tracks the group/cmd currently being parsed from
    # script output.
    self._current_group_info = None
    self._current_group_name = None
    self._current_cmd_info = None
    self._show_group = None

    # Errors: a synthetic "Error" group console that parents any error
    # messages appended later; the buffer always starts with it.
    self._error_grp = {
        'consoleId': str(uuid.uuid4()),
        'parentConsoleId': 'root',
        'type': 'grp',
        'message': 'Error',
        'timestamp': Executor._get_timestamp(),
        'isSuccess': False
    }
    self._error_buffer = [self._error_grp]
    self._has_errors = False

    # ------
    # Public
    # ------
    # Assume failure until the script reports success.
    self.exit_code = 1
def __init__(self):
    """Load and validate the job message, then build the adapter.

    NOTE: call order matters — ``__load_message_from_file`` must run
    before ``__validate_message``, which in turn populates
    ``builder_api_token`` before the adapter is constructed.
    """
    Base.__init__(self, __name__)
    self.user_headers = None
    self.publish_queue = None
    self.script_runner = None
    self.raw_message = None
    self.steps = None
    # Reads the raw message JSON from disk into self.raw_message.
    self.__load_message_from_file()
    self.builder_api_token = None
    self.job_id = None
    self.parsed_message = None
    # Relative artifact file names consumed by the push-results helpers.
    self.test_results_file = 'testresults.json'
    self.coverage_results_file = 'coverageresults.json'
    # Parses raw_message and fills builder_api_token / job_id / steps;
    # raises on an invalid message.
    self.__validate_message()
    self.shippable_adapter = ShippableAdapter(self.builder_api_token)
    # Assume success until a script reports a non-zero exit code.
    self.exit_code = 0
def __init__(self, config):
    """Initialize executor state, log files, and thread placeholders.

    Console output is exchanged between the script-runner thread
    (writer) and the logger thread (reader) through a temp log file
    opened twice: once unbuffered for writing, once for reading.
    """
    # -------
    # Private
    # -------
    # Configs obtained from the job.env file
    self._config = config
    self._shippable_adapter = ShippableAdapter(config)

    # Threads (created later, in execute()).
    self._logger_thread = None
    self._script_runner_thread = None

    # Error buffer state
    self._has_errors = False

    # Log directory and file. The same file is opened for write and
    # read so the logger thread can tail what the runner writes.
    self._temporary_log_directory = tempfile.mkdtemp()
    self._log_file_path = \
        os.path.join(self._temporary_log_directory, 'logs')
    # buffer_size 0 => unbuffered writes, so the reader sees lines
    # immediately. NOTE(review): unbuffered text-mode open is Python 2
    # only; under Python 3 this raises ValueError — confirm runtime.
    buffer_size = 0
    self._write_log_file = open(self._log_file_path, 'w', buffer_size)
    self._read_log_file = open(self._log_file_path, 'r')

    # Console state: group/cmd currently being parsed from script output.
    self._current_group_info = None
    self._current_group_name = None
    self._current_cmd_info = None
    self._show_group = None

    # Execution error consoles: synthetic "Error" group that parents any
    # error messages appended during the run.
    self._error_grp = {
        'consoleId': str(uuid.uuid4()),
        'parentConsoleId': 'root',
        'type': 'grp',
        'message': 'Error',
        'timestamp': Executor._get_timestamp(),
        'isSuccess': False
    }
    self._error_buffer = [self._error_grp]

    # ------
    # Public
    # ------
    # Assume failure by default
    self.exit_code = 1
def __init__(self):
    """Load and validate the job message, then build the adapter.

    NOTE: call order matters — ``__load_message_from_file`` must run
    before ``__validate_message``, which populates
    ``builder_api_token`` before the adapter is constructed.
    """
    Base.__init__(self, __name__)
    self.user_headers = None
    self.publish_queue = None
    self.script_runner = None
    self.raw_message = None
    self.steps = None
    # Reads the raw message JSON from disk into self.raw_message.
    self.__load_message_from_file()
    self.builder_api_token = None
    self.job_id = None
    # Parses raw_message and fills builder_api_token / job_id / steps;
    # raises on an invalid message.
    self.__validate_message()
    self.shippable_adapter = ShippableAdapter(self.builder_api_token)
class Execute(Base):
    """Runs the job steps described by a message file and pushes results.

    Loads a JSON message from disk, validates it, executes each step's
    script via ScriptRunner/ScriptRunner2, and finally posts test
    results, coverage results, and post-job environment variables
    through the Shippable adapter.
    """

    def __init__(self):
        # NOTE: call order matters — the message must be loaded and
        # validated before the adapter is built from builder_api_token.
        Base.__init__(self, __name__)
        self.user_headers = None
        self.publish_queue = None
        self.script_runner = None
        self.raw_message = None
        self.steps = None
        self.__load_message_from_file()
        self.builder_api_token = None
        self.job_id = None
        self.parsed_message = None
        self.test_results_file = 'testresults.json'
        self.coverage_results_file = 'coverageresults.json'
        self.__validate_message()
        self.shippable_adapter = ShippableAdapter(self.builder_api_token)
        # Assume success until a script reports a non-zero exit code.
        self.exit_code = 0

    def __load_message_from_file(self):
        """Read the raw message JSON from MESSAGE_DIR into raw_message.

        Raises:
            Exception: if the message file does not exist.
        """
        # self.config is presumably provided by Base — confirm.
        message_json_full_path = os.path.join(self.config['MESSAGE_DIR'],
                                              self.config['MESSAGE_JSON_NAME'])
        if not os.path.isfile(message_json_full_path):
            error_message = 'The file {0} was not found'.format(
                message_json_full_path)
            raise Exception(error_message)
        with open(message_json_full_path, 'r') as message_json_file:
            raw_message = message_json_file.read()
        self.log.debug('Loaded raw_message from {0} with length {1}'.format(
            message_json_full_path, len(raw_message)))
        self.raw_message = raw_message

    def __validate_message(self):
        """Parse raw_message and populate steps, builder_api_token, job_id.

        Steps are sorted by their 'execOrder'. Any parse/validation
        failure is logged and re-raised as a generic Exception.
        """
        self.log.debug('Validating message')
        error_message = ''
        error_occurred = False
        try:
            self.parsed_message = json.loads(self.raw_message)
            steps = self.parsed_message.get('steps')
            if not steps:
                error_message = 'No "steps" property present'
                raise Exception(error_message)
            for step in steps:
                if not step['execOrder']:
                    # NOTE(review): this message is built but never
                    # raised, so a missing/falsy execOrder is silently
                    # ignored — confirm whether that is intended.
                    error_message = 'Missing "execOrder" property in step ' \
                        '{0}'.format(step)
            steps = sorted(steps, key=lambda step: step.get('execOrder'), \
                reverse=False)
            self.steps = steps
            self.builder_api_token = self.parsed_message.get(
                'builderApiToken', None)
            if self.builder_api_token is None:
                error_message = 'No "builderApiToken" property present'
                raise Exception(error_message)
            self.job_id = self.parsed_message.get('jobId', None)
            if self.job_id is None:
                error_message = 'No "jobId" property present'
                raise Exception(error_message)
        except ValueError as verr:
            # json.loads failures land here.
            error_message = 'Invalid message received: ' \
                'Error : {0} : {1}'.format(
                    str(verr), self.raw_message)
            error_occurred = True
        except Exception as err:
            error_message = 'Invalid message received: ' \
                'Error : {0} : {1}'.format(
                    str(err), self.raw_message)
            error_occurred = True
        finally:
            if error_occurred:
                self.log.error(error_message, self.log.logtype['USER'])
                raise Exception(error_message)

    def run(self):
        """Execute the steps assigned to this runner and push results.

        Returns:
            int: 0 on success, otherwise the first non-zero script exit
            code (or the ssh-agent check's exit code).
        """
        self.log.debug('Inside Execute')
        exit_code = 0
        # Console-flush bookkeeping threaded through successive
        # ScriptRunner calls.
        flushed_consoles_size_in_bytes = 0
        sent_console_truncated_message = False
        exit_code = self._check_for_ssh_agent()
        if exit_code > 0:
            return exit_code
        if self.config['IS_NEW_BUILD_RUNNER_SUBSCRIPTION']:
            self.log.warn('Using new build runner')
        for step in self.steps:
            # Only run steps addressed to this runner ('who'); stop at
            # the first step that is not.
            if step.get('who', None) == self.config['WHO']:
                script = step.get('script', None)
                if not script:
                    error_message = 'No script to execute in step ' \
                        ' {0}'.format(step)
                    raise Exception(error_message)
                if self.config['IS_NEW_BUILD_RUNNER_SUBSCRIPTION']:
                    script_runner = ScriptRunner2(
                        self.job_id, self.shippable_adapter,
                        flushed_consoles_size_in_bytes,
                        sent_console_truncated_message)
                else:
                    script_runner = ScriptRunner(
                        self.job_id, self.shippable_adapter,
                        flushed_consoles_size_in_bytes,
                        sent_console_truncated_message)
                script_status, script_exit_code, should_continue, \
                    flushed_consoles_size_in_bytes, \
                    sent_console_truncated_message = \
                    script_runner.execute_script(script)
                self._update_exit_code(script_exit_code)
                self.log.debug(script_status)
                if should_continue is False:
                    break
            else:
                break
        self._push_test_results()
        self._push_coverage_results()
        self._push_post_job_envs()
        return self.exit_code

    def _update_exit_code(self, new_exit_code):
        """Record the first non-zero exit code; later codes are ignored.

        NOTE(review): ``is 0`` is an identity check on an int literal;
        it works in CPython for small ints but should be ``== 0``.
        """
        if self.exit_code is 0:
            self.exit_code = new_exit_code

    def _push_post_job_envs(self):
        """Collect postjobenvs/*.json artifacts and PUT them on the job."""
        self.log.debug('Inside _push_post_job_envs')
        job_env_dir = '{0}/postjobenvs/'.format(self.config['ARTIFACTS_DIR'])
        if os.path.exists(job_env_dir):
            self.log.debug('postJobEnvs exist, reading dir')
            err, job = self.shippable_adapter.get_job_by_id(self.job_id)
            if err is not None:
                self.log.error('Failed to GET job_by_id: {0}, {1}'.format(
                    self.job_id, err))
                return
            env_results = []
            for filename in os.listdir(job_env_dir):
                if filename.endswith(".json"):
                    # NOTE(review): file handle is never closed.
                    f = open(os.path.join(job_env_dir, filename), "r")
                    try:
                        env_json = json.loads(f.read())
                    except ValueError as err:
                        # Skip unparseable files but keep going.
                        env_json = None
                        self.log.error('Error posting job envs: {0}'.format(
                            str(err)))
                    if env_json is not None:
                        env_results.append(env_json)
            if env_results:
                job['postJobEnvs'] = env_results
                self.shippable_adapter.put_job_by_id(self.job_id, job)
        else:
            self.log.debug('No postJobEnvs exist, skipping')

    def _push_test_results(self):
        """POST testresults.json (if present) tagged with this jobId."""
        self.log.debug('Inside _push_test_reports')
        test_results_file = '{0}/testresults/{1}'.format(
            self.config['ARTIFACTS_DIR'], self.test_results_file)
        if os.path.exists(test_results_file):
            self.log.debug('Test results exist, reading file')
            test_results = ''
            with open(test_results_file, 'r') as results_file:
                test_results = results_file.read()
            self.log.debug('Successfully read test results, parsing')
            try:
                test_results = json.loads(test_results)
                test_results['jobId'] = self.job_id
            except ValueError as err:
                # Invalid JSON: log and skip the upload.
                test_results = None
                self.log.error('Error parsing test results: {0}'.format(
                    str(err)))
            if test_results is not None:
                self.shippable_adapter.post_test_results(test_results)
        else:
            self.log.debug('No test results exist, skipping')

    def _push_coverage_results(self):
        """POST coverageresults.json (if present) tagged with this jobId."""
        self.log.debug('Inside _push_coverage_results')
        coverage_results_file = '{0}/coverageresults/{1}'.format(
            self.config['ARTIFACTS_DIR'], self.coverage_results_file)
        if os.path.exists(coverage_results_file):
            self.log.debug('Coverage results exist, reading file')
            coverage_results = ''
            with open(coverage_results_file, 'r') as results_file:
                coverage_results = results_file.read()
            self.log.debug('Successfully read coverage results, parsing')
            try:
                coverage_results = json.loads(coverage_results)
                coverage_results['jobId'] = self.job_id
            except ValueError as err:
                # Invalid JSON: log and skip the upload.
                coverage_results = None
                self.log.error('Error parsing coverage results: {0}'.format(
                    str(err)))
            if coverage_results is not None:
                self.shippable_adapter.post_coverage_results(coverage_results)
        else:
            self.log.debug('No coverage results exist,skipping')

    def _check_for_ssh_agent(self):
        """Verify ssh-agent can run; return its exit code (0 = present)."""
        self.log.debug('Inside _check_for_ssh_agent')
        devnull = open(os.devnull, 'wb')
        # Unset LD_LIBRARY_PATH so the bundled (pyinstaller) libraries
        # do not shadow the system ones for the child process.
        env = dict(os.environ)
        env.pop('LD_LIBRARY_PATH', None)
        p = subprocess.Popen('ssh-agent', shell=True, stdout=devnull, env=env)
        p.communicate()
        return p.returncode
class Executor(object):
    """ Sets up attributes that will be used to execute the steplet

    Two threads cooperate through a temp log file: _script_runner writes
    parsed console JSON lines, logger tails the same file and posts
    batches of consoles via the Shippable adapter.
    """

    def __init__(self, config):
        # -------
        # Private
        # -------
        # Configs obtained from the job.env file
        self._config = config
        self._shippable_adapter = ShippableAdapter(config)

        # Threads (created in execute()).
        self._logger_thread = None
        self._script_runner_thread = None

        # Error buffer state
        self._has_errors = False

        # Log directory and file. Opened twice: unbuffered writer plus a
        # reader so the logger thread sees new lines immediately.
        # NOTE(review): buffering=0 on a text-mode open is Python 2 only.
        self._temporary_log_directory = tempfile.mkdtemp()
        self._log_file_path = \
            os.path.join(self._temporary_log_directory, 'logs')
        buffer_size = 0
        self._write_log_file = open(self._log_file_path, 'w', buffer_size)
        self._read_log_file = open(self._log_file_path, 'r')

        # Console state: group/cmd currently being parsed from output.
        self._current_group_info = None
        self._current_group_name = None
        self._current_cmd_info = None
        self._show_group = None

        # Execution error consoles: synthetic "Error" group parenting
        # all error messages appended during the run.
        self._error_grp = {
            'consoleId': str(uuid.uuid4()),
            'parentConsoleId': 'root',
            'type': 'grp',
            'message': 'Error',
            'timestamp': Executor._get_timestamp(),
            'isSuccess': False
        }
        self._error_buffer = [self._error_grp]

        # ------
        # Public
        # ------
        # Assume failure by default
        self.exit_code = 1

    def __del__(self):
        # Best-effort cleanup of the temp log directory.
        shutil.rmtree(self._temporary_log_directory, ignore_errors=True)

    def execute(self):
        """ Starts the script runner and logger threads and waits for
        them to finish.
        """
        # Instantiate script runner and logger threads
        self._script_runner_thread = \
            threading.Thread(target=self._script_runner)
        self._logger_thread = threading.Thread(target=self.logger)

        # Start both the threads.
        self._script_runner_thread.start()
        self._logger_thread.start()

        # Wait until the threads are completed
        self._script_runner_thread.join()
        self._logger_thread.join()

        if self._has_errors:
            self._flush_error_buffer()

    def _script_runner(self):
        """ Runs the script, handles console output and finally sets the
        exit code
        """
        # We need to unset the LD_LIBRARY_PATH set by pyinstaller. This
        # will ensure the script prefers libraries on system rather
        # than the ones bundled during runtime.
        env = dict(os.environ)
        env.pop('LD_LIBRARY_PATH', None)
        cmd = self._config['SCRIPT_PATH']
        if self._config.get('REQEXEC_SHELL'):
            # Run the script through an explicit shell if configured.
            cmd = [self._config['REQEXEC_SHELL'], self._config['SCRIPT_PATH']]
        try:
            proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                                    stderr=subprocess.STDOUT,
                                    cwd=self._config['STEPLET_DIR'],
                                    env=env)
        except Exception as ex:
            trace = traceback.format_exc()
            error = '{0}: {1}'.format(str(ex), trace)
            self._append_to_error_buffer(error)
            return
        try:
            # Stream stdout line by line until the script signals
            # completion via the __SH__ markers.
            for line in iter(proc.stdout.readline, ''):
                is_script_success, is_complete, exit_code = \
                    self._handle_console_line(line)
                if is_script_success:
                    self.exit_code = 0
                if is_complete:
                    if not is_script_success:
                        self.exit_code = exit_code
                    break
        except Exception as ex:
            trace = traceback.format_exc()
            error = '{0}: {1}'.format(str(ex), trace)
            self._append_to_error_buffer(error)
            proc.kill()

    def logger(self):
        """ Reads from the log file and flushes consoles periodically or
        if a limit is hit
        """
        logs_to_post = {
            'stepletId': self._config['STEPLET_ID'],
            'stepletConsoles': []
        }
        logs_last_posted_at = datetime.now()
        while True:
            post_logs = False
            and_break = False
            log_line = self._read_log_file.readline()
            if log_line:
                try:
                    parsed_log_line = json.loads(log_line)
                    logs_to_post['stepletConsoles'].append(parsed_log_line)
                except Exception as ex:
                    trace = traceback.format_exc()
                    error = '{0}: {1}'.format(str(ex), trace)
                    self._append_to_error_buffer(error)
                # We added a new line, if the new array length exceeds max
                # log lines, post logs.
                if len(logs_to_post['stepletConsoles']) \
                        >= self._config['MAX_LOG_LINES_TO_FLUSH']:
                    post_logs = True
            else:
                # If the script runner is dead and there are no more logs to
                # read, attempt to post any remaining logs and break.
                # NOTE(review): isAlive() was removed in Python 3.9 in
                # favour of is_alive() — confirm target runtime.
                if not self._script_runner_thread.isAlive():
                    post_logs = True
                    and_break = True
                # If its been a while since we posted logs, post.
                elif (datetime.now() - logs_last_posted_at).total_seconds() \
                        > self._config['MAX_LOGS_FLUSH_WAIT_TIME_IN_S'] \
                        and logs_to_post['stepletConsoles']:
                    post_logs = True
                # Sleep a bit if there hasn't been any activity.
                else:
                    time.sleep(self._config['LOGS_FILE_READ_WAIT_TIME_IN_S'])
            # Post logs if asked and there is something to post.
            if post_logs and logs_to_post['stepletConsoles']:
                logs_last_posted_at = datetime.now()
                data = json.dumps(logs_to_post)
                self._shippable_adapter.post_step_let_consoles(data)
                logs_to_post['stepletConsoles'] = []
            if and_break:
                break

    def _handle_console_line(self, line):
        """ Parses a single line of console output and pushes it to a file

        This also returns whether the console line is successful and the
        script is complete
        """
        is_script_success = False
        is_complete = False
        exit_code = 0
        timestamp = Executor._get_timestamp()
        # Marker lines are '|'-separated: MARKER|json_info|name...
        line_split = line.split('|')
        if line.startswith('__SH__GROUP__START__'):
            self._current_group_info = line_split[1]
            # Re-join in case the name itself contained '|'.
            self._current_group_name = '|'.join(line_split[2:])
            self._current_group_info = json.loads(self._current_group_info)
            self._show_group = self._current_group_info.get('is_shown', True)
            if self._show_group == 'false':
                self._show_group = False
            console_out = {
                'consoleId': self._current_group_info.get('id'),
                'parentConsoleId': 'root',
                'type': 'grp',
                'message': self._current_group_name,
                'timestamp': timestamp,
                'isShown': self._show_group
            }
            self._append_to_log_file(console_out)
        elif line.startswith('__SH__CMD__START__'):
            self._current_cmd_info = line_split[1]
            current_cmd_name = '|'.join(line_split[2:])
            self._current_cmd_info = json.loads(self._current_cmd_info)
            parent_id = self._current_group_info.get('id') if \
                self._current_group_info else None
            console_out = {
                'consoleId': self._current_cmd_info.get('id'),
                'parentConsoleId': parent_id,
                'type': 'cmd',
                'message': current_cmd_name,
                'timestamp': timestamp,
            }
            # Orphan cmds (no enclosing group) are dropped.
            if parent_id:
                self._append_to_log_file(console_out)
        elif line.startswith('__SH__CMD__END__'):
            current_cmd_end_info = line_split[1]
            current_cmd_end_name = '|'.join(line_split[2:])
            current_cmd_end_info = json.loads(current_cmd_end_info)
            parent_id = self._current_group_info.get('id') if \
                self._current_group_info else None
            is_cmd_success = False
            # exitcode is carried as a string in the marker JSON.
            if current_cmd_end_info.get('exitcode') == '0':
                is_cmd_success = True
            console_out = {
                'consoleId': self._current_cmd_info.get('id'),
                'parentConsoleId': parent_id,
                'type': 'cmd',
                'message': current_cmd_end_name,
                'timestamp': timestamp,
                'timestampEndedAt': timestamp,
                'isSuccess': is_cmd_success,
                'isShown': self._show_group
            }
            if parent_id:
                self._append_to_log_file(console_out)
        elif line.startswith('__SH__GROUP__END__'):
            current_grp_end_info = line_split[1]
            current_grp_end_name = '|'.join(line_split[2:])
            current_grp_end_info = json.loads(current_grp_end_info)
            is_cmd_success = False
            if current_grp_end_info.get('exitcode') == '0':
                is_cmd_success = True
            console_out = {
                'consoleId': self._current_group_info.get('id'),
                'parentConsoleId': 'root',
                'type': 'grp',
                'message': current_grp_end_name,
                'timestamp': timestamp,
                'timestampEndedAt': timestamp,
                'isSuccess': is_cmd_success,
                'isShown': self._show_group
            }
            self._append_to_log_file(console_out)
        elif line.startswith('__SH__SCRIPT_END_SUCCESS__'):
            is_script_success = True
            is_complete = True
        elif line.startswith('__SH__SCRIPT_END_FAILURE__'):
            # NOTE(review): bare except — any parse failure silently
            # falls back to exit code 1.
            try:
                exit_code = int(line_split[1])
            except:
                exit_code = 1
            is_script_success = False
            is_complete = True
        else:
            # Plain output line: attach to the current cmd if any,
            # otherwise treat it as an error message.
            parent_id = self._current_cmd_info.get('id') if \
                self._current_cmd_info else None
            console_out = {
                'consoleId': str(uuid.uuid4()),
                'parentConsoleId': parent_id,
                'type': 'msg',
                'message': line,
                'timestamp': timestamp,
            }
            if parent_id:
                self._append_to_log_file(console_out)
            else:
                self._append_to_error_buffer(line)

        return is_script_success, is_complete, exit_code

    def _append_to_log_file(self, console_out):
        """ Pushes a console line to the log file """
        try:
            self._write_log_file.write(json.dumps(console_out) + '\n')
        except Exception as ex:
            trace = traceback.format_exc()
            error = '{0}: {1}'.format(str(ex), trace)
            self._append_to_error_buffer(error)

    def _append_to_error_buffer(self, error):
        """ Appends an error into errors buffer after ensuring it is not
        empty
        """
        if not error.strip():
            return

        self._has_errors = True
        error_msg = {
            'consoleId': str(uuid.uuid4()),
            'parentConsoleId': self._error_grp['consoleId'],
            'type': 'msg',
            'message': error,
            'timestamp': Executor._get_timestamp(),
            'isSuccess': False
        }
        self._error_buffer.append(error_msg)

    def _flush_error_buffer(self):
        """ Flushes error buffer """
        req_body = {
            'stepletId': self._config['STEPLET_ID'],
            'stepletConsoles': self._error_buffer
        }
        data = json.dumps(req_body)
        self._shippable_adapter.post_step_let_consoles(data)
        del self._error_buffer
        self._error_buffer = []

    @staticmethod
    def _get_timestamp():
        """ Helper method to return timestamp in a format which is
        acceptable for consoles
        """
        # Microseconds since the epoch.
        return int(time.time() * 1000000)
class Execute(Base):
    """Runs job steps from a message file, reporting per-step status.

    Unlike sibling variants, this one PUTs each step's status back on
    the job before and after execution via _report_step_status.
    """

    def __init__(self):
        # NOTE: call order matters — the message must be loaded and
        # validated before the adapter is built from builder_api_token.
        Base.__init__(self, __name__)
        self.user_headers = None
        self.publish_queue = None
        self.script_runner = None
        self.raw_message = None
        self.steps = None
        self.__load_message_from_file()
        self.builder_api_token = None
        self.job_id = None
        self.parsed_message = None
        self.test_results_file = 'testresults.json'
        self.coverage_results_file = 'coverageresults.json'
        self.__validate_message()
        self.shippable_adapter = ShippableAdapter(self.builder_api_token)

    def __load_message_from_file(self):
        """Read the raw message JSON from MESSAGE_DIR into raw_message.

        Raises:
            Exception: if the message file does not exist.
        """
        message_json_full_path = os.path.join(
            self.config['MESSAGE_DIR'], self.config['MESSAGE_JSON_NAME'])
        if not os.path.isfile(message_json_full_path):
            error_message = 'The file {0} was not found'.format(
                message_json_full_path)
            raise Exception(error_message)
        with open(message_json_full_path, 'r') as message_json_file:
            raw_message = message_json_file.read()
        self.log.debug('Loaded raw_message from {0} with length {1}'.format(
            message_json_full_path, len(raw_message)))
        self.raw_message = raw_message

    def __validate_message(self):
        """Parse raw_message; populate steps, builder_api_token, job_id.

        Steps are sorted by 'execOrder'. Failures are logged and
        re-raised as a generic Exception.
        """
        self.log.debug('Validating message')
        error_message = ''
        error_occurred = False
        try:
            self.parsed_message = json.loads(self.raw_message)
            steps = self.parsed_message.get('steps')
            if not steps:
                error_message = 'No "steps" property present'
                raise Exception(error_message)
            for step in steps:
                if not step['execOrder']:
                    # NOTE(review): message built but never raised; a
                    # falsy execOrder is silently ignored.
                    error_message = 'Missing "execOrder" property in step ' \
                        '{0}'.format(step)
            steps = sorted(steps, key=lambda step: step.get('execOrder'), \
                reverse=False)
            self.steps = steps
            self.builder_api_token = self.parsed_message.get('builderApiToken',
                                                             None)
            if self.builder_api_token is None:
                error_message = 'No "builderApiToken" property present'
                raise Exception(error_message)
            self.job_id = self.parsed_message.get('jobId', None)
            if self.job_id is None:
                error_message = 'No "jobId" property present'
                raise Exception(error_message)
        except ValueError as verr:
            # json.loads failures land here.
            error_message = 'Invalid message received: ' \
                'Error : {0} : {1}'.format(
                    str(verr), self.raw_message)
            error_occurred = True
        except Exception as err:
            error_message = 'Invalid message received: ' \
                'Error : {0} : {1}'.format(
                    str(err), self.raw_message)
            error_occurred = True
        finally:
            if error_occurred:
                self.log.error(error_message, self.log.logtype['USER'])
                raise Exception(error_message)

    def run(self):
        """Execute this runner's steps, report status, push results.

        Returns:
            int: exit code of the last executed script (or of the
            ssh-agent check if it failed).
        """
        self.log.debug('Inside Execute')
        exit_code = 0
        exit_code = self._check_for_ssh_agent()
        if exit_code > 0:
            return exit_code
        for step in self.steps:
            # Only run steps addressed to this runner ('who'); stop at
            # the first step that is not.
            if step.get('who', None) == self.config['WHO']:
                script = step.get('script', None)
                if not script:
                    error_message = 'No script to execute in step ' \
                        ' {0}'.format(step)
                    raise Exception(error_message)
                # self.STATUS presumably defined on Base — confirm.
                self._report_step_status(step.get('id'), \
                    self.STATUS['PROCESSING'])
                script_runner = ScriptRunner(self.job_id,
                                             self.shippable_adapter)
                script_status, exit_code, should_continue = \
                    script_runner.execute_script(script)
                self.log.debug(script_status)
                self._report_step_status(step.get('id'), script_status)
                if should_continue is False:
                    break
            else:
                break
        self._push_test_results()
        self._push_coverage_results()
        return exit_code

    def _push_test_results(self):
        """POST testresults.json (if present) tagged with this jobId.

        NOTE(review): json.loads here is unguarded, unlike sibling
        variants — a malformed results file raises out of run().
        """
        self.log.debug('Inside _push_test_reports')
        test_results_file = '{0}/testresults/{1}'.format(
            self.config['ARTIFACTS_DIR'], self.test_results_file)
        if os.path.exists(test_results_file):
            self.log.debug('Test results exist, reading file')
            test_results = ''
            with open(test_results_file, 'r') as results_file:
                test_results = results_file.read()
            self.log.debug('Successfully read test results, parsing')
            test_results = json.loads(test_results)
            test_results['jobId'] = self.job_id
            self.shippable_adapter.post_test_results(test_results)
        else:
            self.log.debug('No test results exist, skipping')

    def _push_coverage_results(self):
        """POST coverageresults.json (if present) tagged with this jobId.

        NOTE(review): json.loads here is unguarded as well.
        """
        self.log.debug('Inside _push_coverage_results')
        coverage_results_file = '{0}/coverageresults/{1}'.format(
            self.config['ARTIFACTS_DIR'], self.coverage_results_file)
        if os.path.exists(coverage_results_file):
            self.log.debug('Coverage results exist, reading file')
            coverage_results = ''
            with open(coverage_results_file, 'r') as results_file:
                coverage_results = results_file.read()
            self.log.debug('Successfully read coverage results, parsing')
            coverage_results = json.loads(coverage_results)
            coverage_results['jobId'] = self.job_id
            self.shippable_adapter.post_coverage_results(coverage_results)
        else:
            self.log.debug('No coverage results exist,skipping')

    def _check_for_ssh_agent(self):
        """Verify ssh-agent can run; return its exit code (0 = present)."""
        self.log.debug('Inside _check_for_ssh_agent')
        devnull = open(os.devnull, 'wb')
        p = subprocess.Popen('ssh-agent', shell=True, stdout=devnull)
        p.communicate()
        return p.returncode

    def _report_step_status(self, step_id, step_status):
        """GET the job, set the matching step's status, and PUT it back."""
        self.log.debug('Inside report_job_status')
        err, job = self.shippable_adapter.get_job_by_id(self.job_id)
        if err is not None:
            self.log.error('Failed to GET job_by_id: {0}'.format(self.job_id))
            return
        all_steps = job.get('steps')
        for step in all_steps:
            if step['id'] == step_id:
                step['status'] = step_status
                break
        self.shippable_adapter.put_job_by_id(self.job_id, job)
class Execute(Base):
    """Runs the job steps described by a message file.

    Loads a JSON message from disk, validates it, executes each step's
    script via ScriptRunner, and reports per-step status back on the
    job through the Shippable adapter.
    """

    def __init__(self):
        # NOTE: call order matters — the message must be loaded and
        # validated before the adapter is built from builder_api_token.
        Base.__init__(self, __name__)
        self.user_headers = None
        self.publish_queue = None
        self.script_runner = None
        self.raw_message = None
        self.steps = None
        self.__load_message_from_file()
        self.builder_api_token = None
        self.job_id = None
        self.__validate_message()
        self.shippable_adapter = ShippableAdapter(self.builder_api_token)

    def __load_message_from_file(self):
        """Read the raw message JSON from MESSAGE_DIR into raw_message.

        Raises:
            Exception: if the message file does not exist.
        """
        message_json_full_path = os.path.join(
            self.config['MESSAGE_DIR'], self.config['MESSAGE_JSON_NAME'])
        if not os.path.isfile(message_json_full_path):
            error_message = 'The file {0} was not found'.format(
                message_json_full_path)
            raise Exception(error_message)
        with open(message_json_full_path, 'r') as message_json_file:
            raw_message = message_json_file.read()
        self.log.debug('Loaded raw_message from {0} with length {1}'.format(
            message_json_full_path, len(raw_message)))
        self.raw_message = raw_message

    def __validate_message(self):
        """Parse raw_message; populate steps, builder_api_token, job_id.

        Steps are sorted by 'execOrder'. Failures are logged and
        re-raised as a generic Exception.
        """
        self.log.debug('Validating message')
        error_message = ''
        error_occurred = False
        try:
            self.parsed_message = json.loads(self.raw_message)
            steps = self.parsed_message.get('steps')
            if not steps:
                error_message = 'No "steps" property present'
                raise Exception(error_message)
            for step in steps:
                if not step['execOrder']:
                    # NOTE(review): message built but never raised (same
                    # in the sibling variants); a falsy execOrder is
                    # silently tolerated and sorts via step.get() below.
                    error_message = 'Missing "execOrder" property in step ' \
                        '{0}'.format(step)
            steps = sorted(steps, key=lambda step: step.get('execOrder'),
                           reverse=False)
            self.steps = steps
            self.builder_api_token = self.parsed_message.get('builderApiToken',
                                                             None)
            if self.builder_api_token is None:
                error_message = 'No "builderApiToken" property present'
                raise Exception(error_message)
            self.job_id = self.parsed_message.get('jobId', None)
            if self.job_id is None:
                error_message = 'No "jobId" property present'
                raise Exception(error_message)
        except ValueError as verr:
            # json.loads failures land here.
            error_message = 'Invalid message received: ' \
                'Error : {0} : {1}'.format(
                    str(verr), self.raw_message)
            error_occurred = True
        except Exception as err:
            error_message = 'Invalid message received: ' \
                'Error : {0} : {1}'.format(
                    str(err), self.raw_message)
            error_occurred = True
        finally:
            if error_occurred:
                self.log.error(error_message, self.log.logtype['USER'])
                raise Exception(error_message)

    def run(self):
        """Execute this runner's steps and report their status.

        Returns:
            int: exit code of the last executed script (0 if none ran).

        Raises:
            Exception: if a step assigned to this runner has no script.
        """
        self.log.debug('Inside Execute')
        exit_code = 0
        for step in self.steps:
            # Only run steps addressed to this runner ('who'); stop at
            # the first step that is not.
            if step.get('who', None) == self.config['WHO']:
                script = step.get('script', None)
                if not script:
                    error_message = 'No script to execute in step ' \
                        ' {0}'.format(step)
                    # FIX: previously the message was built but never
                    # raised, so execute_script(None) would run. Sibling
                    # variants of this class raise here.
                    raise Exception(error_message)
                script_runner = ScriptRunner(self.job_id,
                                             self.shippable_adapter)
                script_status, exit_code, should_continue = \
                    script_runner.execute_script(script)
                self.log.debug(script_status)
                self._report_step_status(step.get('id'), script_status)
                if should_continue is False:
                    break
            else:
                break
        return exit_code

    def _report_step_status(self, step_id, step_status):
        """GET the job, set the matching step's status, and PUT it back."""
        self.log.debug('Inside report_job_status')
        err, job = self.shippable_adapter.get_job_by_id(self.job_id)
        if err is not None:
            self.log.error('Failed to GET job_by_id: {0}'.format(self.job_id))
            return
        all_steps = job.get('steps')
        for step in all_steps:
            if step['id'] == step_id:
                step['status'] = step_status
                break
        self.shippable_adapter.put_job_by_id(self.job_id, job)
class Execute(Base):
    """Runs the job steps described by a message file and pushes results.

    Loads a JSON message from disk, validates it, executes each step's
    script via ScriptRunner, and finally posts test results, coverage
    results, and post-job environment variables through the adapter.
    """

    def __init__(self):
        # NOTE: call order matters — the message must be loaded and
        # validated before the adapter is built from builder_api_token.
        Base.__init__(self, __name__)
        self.user_headers = None
        self.publish_queue = None
        self.script_runner = None
        self.raw_message = None
        self.steps = None
        self.__load_message_from_file()
        self.builder_api_token = None
        self.job_id = None
        self.parsed_message = None
        self.test_results_file = "testresults.json"
        self.coverage_results_file = "coverageresults.json"
        self.__validate_message()
        self.shippable_adapter = ShippableAdapter(self.builder_api_token)
        # Assume success until a script reports a non-zero exit code.
        self.exit_code = 0

    def __load_message_from_file(self):
        """Read the raw message JSON from MESSAGE_DIR into raw_message.

        Raises:
            Exception: if the message file does not exist.
        """
        message_json_full_path = os.path.join(self.config["MESSAGE_DIR"],
                                              self.config["MESSAGE_JSON_NAME"])
        if not os.path.isfile(message_json_full_path):
            error_message = "The file {0} was not found".format(
                message_json_full_path)
            raise Exception(error_message)
        with open(message_json_full_path, "r") as message_json_file:
            raw_message = message_json_file.read()
        self.log.debug("Loaded raw_message from {0} with length {1}".format(
            message_json_full_path, len(raw_message)))
        self.raw_message = raw_message

    def __validate_message(self):
        """Parse raw_message; populate steps, builder_api_token, job_id.

        Steps are sorted by 'execOrder'. Failures are logged and
        re-raised as a generic Exception.
        """
        self.log.debug("Validating message")
        error_message = ""
        error_occurred = False
        try:
            self.parsed_message = json.loads(self.raw_message)
            steps = self.parsed_message.get("steps")
            if not steps:
                error_message = 'No "steps" property present'
                raise Exception(error_message)
            for step in steps:
                if not step["execOrder"]:
                    # NOTE(review): message built but never raised; a
                    # falsy execOrder is silently ignored.
                    error_message = ('Missing "execOrder" property in step '
                                     "{0}".format(step))
            steps = sorted(steps, key=lambda step: step.get("execOrder"),
                           reverse=False)
            self.steps = steps
            self.builder_api_token = self.parsed_message.get("builderApiToken",
                                                             None)
            if self.builder_api_token is None:
                error_message = 'No "builderApiToken" property present'
                raise Exception(error_message)
            self.job_id = self.parsed_message.get("jobId", None)
            if self.job_id is None:
                error_message = 'No "jobId" property present'
                raise Exception(error_message)
        except ValueError as verr:
            # json.loads failures land here.
            error_message = ("Invalid message received: "
                            "Error : {0} : {1}".format(str(verr),
                                                       self.raw_message))
            error_occurred = True
        except Exception as err:
            error_message = ("Invalid message received: "
                            "Error : {0} : {1}".format(str(err),
                                                       self.raw_message))
            error_occurred = True
        finally:
            if error_occurred:
                self.log.error(error_message, self.log.logtype["USER"])
                raise Exception(error_message)

    def run(self):
        """Execute this runner's steps and push all result artifacts.

        Returns:
            int: 0 on success, otherwise the first non-zero script exit
            code (or the ssh-agent check's exit code).
        """
        self.log.debug("Inside Execute")
        exit_code = 0
        exit_code = self._check_for_ssh_agent()
        if exit_code > 0:
            return exit_code
        for step in self.steps:
            # Only run steps addressed to this runner ('who'); stop at
            # the first step that is not.
            if step.get("who", None) == self.config["WHO"]:
                script = step.get("script", None)
                if not script:
                    error_message = ("No script to execute in step "
                                     " {0}".format(step))
                    raise Exception(error_message)
                script_runner = ScriptRunner(self.job_id,
                                             self.shippable_adapter)
                script_status, script_exit_code, should_continue = \
                    script_runner.execute_script(script)
                self._update_exit_code(script_exit_code)
                self.log.debug(script_status)
                if should_continue is False:
                    break
            else:
                break
        self._push_test_results()
        self._push_coverage_results()
        self._push_post_job_envs()
        return self.exit_code

    def _update_exit_code(self, new_exit_code):
        """Record the first non-zero exit code; later codes are ignored.

        NOTE(review): ``is 0`` is an identity check on an int literal;
        it works in CPython for small ints but should be ``== 0``.
        """
        if self.exit_code is 0:
            self.exit_code = new_exit_code

    def _push_post_job_envs(self):
        """Collect postjobenvs/*.json artifacts and PUT them on the job."""
        self.log.debug("Inside _push_post_job_envs")
        job_env_dir = "{0}/postjobenvs/".format(self.config["ARTIFACTS_DIR"])
        if os.path.exists(job_env_dir):
            self.log.debug("postJobEnvs exist, reading dir")
            err, job = self.shippable_adapter.get_job_by_id(self.job_id)
            if err is not None:
                self.log.error("Failed to GET job_by_id: {0}, {1}".format(
                    self.job_id, err))
                return
            env_results = []
            for filename in os.listdir(job_env_dir):
                if filename.endswith(".json"):
                    # NOTE(review): file handle is never closed.
                    f = open(os.path.join(job_env_dir, filename), "r")
                    try:
                        env_json = json.loads(f.read())
                    except ValueError as err:
                        # Skip unparseable files but keep going.
                        env_json = None
                        self.log.error("Error posting job envs: {0}".format(
                            str(err)))
                    if env_json is not None:
                        env_results.append(env_json)
            if env_results:
                job["postJobEnvs"] = env_results
                self.shippable_adapter.put_job_by_id(self.job_id, job)
        else:
            self.log.debug("No postJobEnvs exist, skipping")

    def _push_test_results(self):
        """POST testresults.json (if present) tagged with this jobId."""
        self.log.debug("Inside _push_test_reports")
        test_results_file = "{0}/testresults/{1}".format(
            self.config["ARTIFACTS_DIR"], self.test_results_file)
        if os.path.exists(test_results_file):
            self.log.debug("Test results exist, reading file")
            test_results = ""
            with open(test_results_file, "r") as results_file:
                test_results = results_file.read()
            self.log.debug("Successfully read test results, parsing")
            try:
                test_results = json.loads(test_results)
                test_results["jobId"] = self.job_id
            except ValueError as err:
                # Invalid JSON: log and skip the upload.
                test_results = None
                self.log.error("Error parsing test results: {0}".format(
                    str(err)))
            if test_results is not None:
                self.shippable_adapter.post_test_results(test_results)
        else:
            self.log.debug("No test results exist, skipping")

    def _push_coverage_results(self):
        """POST coverageresults.json (if present) tagged with this jobId."""
        self.log.debug("Inside _push_coverage_results")
        coverage_results_file = "{0}/coverageresults/{1}".format(
            self.config["ARTIFACTS_DIR"], self.coverage_results_file
        )
        if os.path.exists(coverage_results_file):
            self.log.debug("Coverage results exist, reading file")
            coverage_results = ""
            with open(coverage_results_file, "r") as results_file:
                coverage_results = results_file.read()
            self.log.debug("Successfully read coverage results, parsing")
            try:
                coverage_results = json.loads(coverage_results)
                coverage_results["jobId"] = self.job_id
            except ValueError as err:
                # Invalid JSON: log and skip the upload.
                coverage_results = None
                self.log.error("Error parsing coverage results: {0}".format(
                    str(err)))
            if coverage_results is not None:
                self.shippable_adapter.post_coverage_results(coverage_results)
        else:
            self.log.debug("No coverage results exist,skipping")

    def _check_for_ssh_agent(self):
        """Verify ssh-agent can run; return its exit code (0 = present)."""
        self.log.debug("Inside _check_for_ssh_agent")
        devnull = open(os.devnull, "wb")
        p = subprocess.Popen("ssh-agent", shell=True, stdout=devnull)
        p.communicate()
        return p.returncode
class Executor(object):
    """Runs the build script and streams its consoles to Shippable.

    A script-runner thread executes the configured script and parses the
    ``__SH__*`` marker lines it emits into console groups/commands; a
    recurring timer flushes the buffered consoles through the
    ShippableAdapter. ``exit_code`` starts at 1 (assume failure) and is
    set to 0 only when the script reports success.
    """

    def __init__(self, config):
        # -------
        # Private
        # -------
        self._config = config
        self._shippable_adapter = ShippableAdapter(config)
        # True while the script thread runs; the flush timer re-arms
        # itself only while this flag is set.
        self._is_executing = False

        # Consoles
        # --------
        self._console_buffer = []
        self._console_buffer_lock = threading.Lock()

        # Console state: the group/command markers most recently parsed.
        self._current_group_info = None
        self._current_group_name = None
        self._current_cmd_info = None
        self._show_group = None

        # Errors: all execution errors hang off one synthetic "Error" group.
        self._error_grp = {
            'consoleId': str(uuid.uuid4()),
            'parentConsoleId': 'root',
            'type': 'grp',
            'message': 'Error',
            'timestamp': Executor._get_timestamp(),
            'isSuccess': False
        }
        self._error_buffer = [self._error_grp]
        self._has_errors = False

        # ------
        # Public
        # ------
        # Assume failure until the script signals success.
        self.exit_code = 1

    def execute(self):
        """ Starts threads to execute the script and flush consoles """
        script_runner_thread = threading.Thread(target=self._script_runner)
        script_runner_thread.start()

        self._is_executing = True
        console_flush_timer = threading.Timer(
            self._config['CONSOLE_FLUSH_INTERVAL_SECONDS'],
            self._set_console_flush_timer)
        console_flush_timer.start()

        # Wait for the execution to complete.
        script_runner_thread.join()
        self._is_executing = False

        if self._has_errors:
            self._flush_error_buffer()
        # Final flush so nothing buffered since the last timer tick is lost.
        self._flush_console_buffer()

    def _script_runner(self):
        """ Runs the script, handles console output and finally sets
        the exit code """
        # We need to unset the LD_LIBRARY_PATH set by pyinstaller. This
        # will ensure the script prefers libraries on system rather
        # than the ones bundled during build time.
        env = dict(os.environ)
        env.pop('LD_LIBRARY_PATH', None)
        try:
            proc = subprocess.Popen(self._config['SCRIPT_PATH'],
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.STDOUT,
                                    cwd=self._config['BUILD_DIR'],
                                    env=env)
        except Exception as ex:
            trace = traceback.format_exc()
            error = '{0}: {1}'.format(str(ex), trace)
            self._append_to_error_buffer(error)
            return

        try:
            # '' sentinel: readline returns '' only at EOF (text-mode pipe).
            for line in iter(proc.stdout.readline, ''):
                is_script_success, is_complete = \
                    self._handle_console_line(line)
                if is_script_success:
                    self.exit_code = 0
                if is_complete:
                    break
        except Exception as ex:
            trace = traceback.format_exc()
            error = '{0}: {1}'.format(str(ex), trace)
            self._append_to_error_buffer(error)

        # Ensure the child is not left running after completion or error.
        proc.kill()

    def _handle_console_line(self, line):
        """ Parses a single line of console output and pushes it to buffer

        Returns a pair (is_script_success, is_complete): whether this
        line marks a successful script end, and whether the script is
        done either way.
        """
        is_script_success = False
        is_complete = False
        timestamp = Executor._get_timestamp()
        line_split = line.split('|')
        if line.startswith('__SH__GROUP__START__'):
            self._current_group_info = line_split[1]
            # Re-join the tail: the name itself may contain '|'.
            self._current_group_name = '|'.join(line_split[2:])
            self._current_group_info = json.loads(self._current_group_info)
            self._show_group = self._current_group_info.get('is_shown', True)
            if self._show_group == 'false':
                self._show_group = False
            console_out = {
                'consoleId': self._current_group_info.get('id'),
                'parentConsoleId': 'root',
                'type': 'grp',
                'message': self._current_group_name,
                'timestamp': timestamp,
                'isShown': self._show_group
            }
            self._append_to_console_buffer(console_out)
        elif line.startswith('__SH__CMD__START__'):
            self._current_cmd_info = line_split[1]
            current_cmd_name = '|'.join(line_split[2:])
            self._current_cmd_info = json.loads(self._current_cmd_info)
            parent_id = self._current_group_info.get('id') if \
                self._current_group_info else None
            console_out = {
                'consoleId': self._current_cmd_info.get('id'),
                'parentConsoleId': parent_id,
                'type': 'cmd',
                'message': current_cmd_name,
                'timestamp': timestamp,
            }
            # Commands outside any group are dropped.
            if parent_id:
                self._append_to_console_buffer(console_out)
        elif line.startswith('__SH__CMD__END__'):
            current_cmd_end_info = line_split[1]
            current_cmd_end_name = '|'.join(line_split[2:])
            current_cmd_end_info = json.loads(current_cmd_end_info)
            parent_id = self._current_group_info.get('id') if \
                self._current_group_info else None
            is_cmd_success = False
            if current_cmd_end_info.get('exitcode') == '0':
                is_cmd_success = True
            console_out = {
                'consoleId': self._current_cmd_info.get('id'),
                'parentConsoleId': parent_id,
                'type': 'cmd',
                'message': current_cmd_end_name,
                'timestamp': timestamp,
                'timestampEndedAt': timestamp,
                'isSuccess': is_cmd_success,
                'isShown': self._show_group
            }
            if parent_id:
                self._append_to_console_buffer(console_out)
        elif line.startswith('__SH__GROUP__END__'):
            current_grp_end_info = line_split[1]
            current_grp_end_name = '|'.join(line_split[2:])
            current_grp_end_info = json.loads(current_grp_end_info)
            is_cmd_success = False
            if current_grp_end_info.get('exitcode') == '0':
                is_cmd_success = True
            console_out = {
                'consoleId': self._current_group_info.get('id'),
                'parentConsoleId': 'root',
                'type': 'grp',
                'message': current_grp_end_name,
                'timestamp': timestamp,
                'timestampEndedAt': timestamp,
                'isSuccess': is_cmd_success,
                'isShown': self._show_group
            }
            self._append_to_console_buffer(console_out)
        elif line.startswith('__SH__SCRIPT_END_SUCCESS__'):
            is_script_success = True
            is_complete = True
        elif line.startswith('__SH__SCRIPT_END_FAILURE__'):
            is_script_success = False
            is_complete = True
        else:
            # Plain output: attach to the current command, or treat as an
            # execution error when no command is in progress.
            parent_id = self._current_cmd_info.get('id') if \
                self._current_cmd_info else None
            console_out = {
                'consoleId': str(uuid.uuid4()),
                'parentConsoleId': parent_id,
                'type': 'msg',
                'message': line,
                'timestamp': timestamp,
            }
            if parent_id:
                self._append_to_console_buffer(console_out)
            else:
                self._append_to_error_buffer(line)

        return is_script_success, is_complete

    def _append_to_console_buffer(self, console_out):
        """ Pushes a console line to buffer after taking over lock """
        with self._console_buffer_lock:
            self._console_buffer.append(console_out)

        # Flush outside the `with`: _flush_console_buffer re-acquires the
        # same non-reentrant Lock, so flushing inside it would deadlock.
        if len(self._console_buffer) > self._config['CONSOLE_BUFFER_LENGTH']:
            self._flush_console_buffer()

    def _set_console_flush_timer(self):
        """ Calls _flush_console_buffer to flush console buffers in
        constant intervals and stops when the script has finished
        execution """
        if not self._is_executing:
            return

        self._flush_console_buffer()
        # Re-arm: Timer is one-shot, so each tick schedules the next.
        console_flush_timer = threading.Timer(
            self._config['CONSOLE_FLUSH_INTERVAL_SECONDS'],
            self._set_console_flush_timer)
        console_flush_timer.start()

    def _flush_console_buffer(self):
        """ Flushes console buffer after taking over lock """
        if not self._console_buffer:
            return

        with self._console_buffer_lock:
            try:
                req_body = {
                    'buildJobId': self._config['BUILD_JOB_ID'],
                    'buildJobConsoles': self._console_buffer
                }
                # Single serialization; the previous version dumped the
                # same body twice and discarded the first result.
                data = json.dumps(req_body)
            except Exception:
                # If there is an exception in stringifying the data, test
                # each line to ensure only the sanitized ones are sent.
                # Errors are pushed to the error buffer. Testing on failure
                # will ensure that we don't test unnecessarily.
                req_body = {
                    'buildJobId': self._config['BUILD_JOB_ID'],
                    'buildJobConsoles': []
                }
                for console in self._console_buffer:
                    try:
                        json.dumps(console)
                        req_body['buildJobConsoles'].append(console)
                    except Exception as ex:
                        trace = traceback.format_exc()
                        error = '{0}: {1}'.format(str(ex), trace)
                        self._append_to_error_buffer(error)
                data = json.dumps(req_body)

            self._shippable_adapter.post_build_job_consoles(data)
            self._console_buffer = []

    def _append_to_error_buffer(self, error):
        """ Appends an error into errors buffer after ensuring it is
        not empty """
        if not error.strip():
            return

        self._has_errors = True
        error_msg = {
            'consoleId': str(uuid.uuid4()),
            'parentConsoleId': self._error_grp['consoleId'],
            'type': 'msg',
            'message': error,
            'timestamp': Executor._get_timestamp(),
            'isSuccess': False
        }
        self._error_buffer.append(error_msg)

    def _flush_error_buffer(self):
        """ Flushes error buffer """
        req_body = {
            'buildJobId': self._config['BUILD_JOB_ID'],
            'buildJobConsoles': self._error_buffer
        }
        data = json.dumps(req_body)
        self._shippable_adapter.post_build_job_consoles(data)
        self._error_buffer = []

    @staticmethod
    def _get_timestamp():
        """ Helper method to return timestamp in a format which is
        acceptable for consoles """
        # Microseconds since epoch, as an integer.
        return int(time.time() * 1000000)
class Executor(object):
    """ Sets up config for the job, defaults for exit code and consoles """

    def __init__(self, config):
        # -------
        # Private
        # -------
        self._config = config
        self._shippable_adapter = ShippableAdapter(config)
        # True while the script thread runs; the flush timer re-arms
        # itself only while this flag is set.
        self._is_executing = False

        # Consoles
        # --------
        self._console_buffer = []
        self._console_buffer_lock = threading.Lock()

        # Console state: the most recently parsed group/command markers.
        self._current_group_info = None
        self._current_group_name = None
        self._current_cmd_info = None
        self._show_group = None

        # Errors: all execution errors hang off one synthetic "Error" group.
        self._error_grp = {
            'consoleId': str(uuid.uuid4()),
            'parentConsoleId': 'root',
            'type': 'grp',
            'message': 'Error',
            'timestamp': Executor._get_timestamp(),
            'isSuccess': False
        }
        self._error_buffer = [self._error_grp]
        self._has_errors = False

        # ------
        # Public
        # ------
        # Assume failure until the script signals success.
        self.exit_code = 1

    def execute(self):
        """ Starts threads to execute the script and flush consoles """
        script_runner_thread = threading.Thread(target=self._script_runner)
        script_runner_thread.start()

        # Wait for the execution to complete.
        self._is_executing = True
        console_flush_timer = threading.Timer(
            self._config['CONSOLE_FLUSH_INTERVAL_SECONDS'],
            self._set_console_flush_timer
        )
        console_flush_timer.start()
        script_runner_thread.join()
        self._is_executing = False

        if self._has_errors:
            self._flush_error_buffer()
        # Final flush so anything buffered since the last timer tick is sent.
        self._flush_console_buffer()

    def _script_runner(self):
        """ Runs the script, handles console output and finally sets
        the exit code """
        # We need to unset the LD_LIBRARY_PATH set by pyinstaller. This
        # will ensure the script prefers libraries on system rather
        # than the ones bundled during build time.
        env = dict(os.environ)
        env.pop('LD_LIBRARY_PATH', None)
        # When REQEXEC_SHELL is configured, run the script through that
        # shell instead of executing it directly.
        cmd = self._config['SCRIPT_PATH']
        if self._config.get('REQEXEC_SHELL'):
            cmd = [self._config['REQEXEC_SHELL'], self._config['SCRIPT_PATH']]
        try:
            proc = subprocess.Popen(
                cmd,
                stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT,
                cwd=self._config['BUILD_DIR'],
                env=env
            )
        except Exception as ex:
            trace = traceback.format_exc()
            error = '{0}: {1}'.format(str(ex), trace)
            self._append_to_error_buffer(error)
            return

        try:
            # '' sentinel: readline returns '' only at EOF on a text-mode
            # pipe (Python 2 semantics).
            for line in iter(proc.stdout.readline, ''):
                is_script_success, is_complete = self._handle_console_line(line)
                if is_script_success:
                    self.exit_code = 0
                if is_complete:
                    break
        except Exception as ex:
            trace = traceback.format_exc()
            error = '{0}: {1}'.format(str(ex), trace)
            self._append_to_error_buffer(error)

        # Ensure the child is not left running after completion or error.
        proc.kill()

    def _handle_console_line(self, line):
        """ Parses a single line of console output and pushes it to buffer

        Returns (is_script_success, is_complete) for the parsed line.
        """
        is_script_success = False
        is_complete = False
        timestamp = Executor._get_timestamp()
        line_split = line.split('|')
        if line.startswith('__SH__GROUP__START__'):
            self._current_group_info = line_split[1]
            # Re-join the tail: the group name itself may contain '|'.
            self._current_group_name = '|'.join(line_split[2:])
            self._current_group_info = json.loads(self._current_group_info)
            self._show_group = self._current_group_info.get('is_shown', True)
            if self._show_group == 'false':
                self._show_group = False
            console_out = {
                'consoleId': self._current_group_info.get('id'),
                'parentConsoleId': 'root',
                'type': 'grp',
                'message': self._current_group_name,
                'timestamp': timestamp,
                'isShown': self._show_group
            }
            self._append_to_console_buffer(console_out)
        elif line.startswith('__SH__CMD__START__'):
            self._current_cmd_info = line_split[1]
            current_cmd_name = '|'.join(line_split[2:])
            self._current_cmd_info = json.loads(self._current_cmd_info)
            parent_id = self._current_group_info.get('id') if \
                self._current_group_info else None
            console_out = {
                'consoleId': self._current_cmd_info.get('id'),
                'parentConsoleId': parent_id,
                'type': 'cmd',
                'message': current_cmd_name,
                'timestamp': timestamp,
            }
            # Commands that arrive outside any group are dropped.
            if parent_id:
                self._append_to_console_buffer(console_out)
        elif line.startswith('__SH__CMD__END__'):
            current_cmd_end_info = line_split[1]
            current_cmd_end_name = '|'.join(line_split[2:])
            current_cmd_end_info = json.loads(current_cmd_end_info)
            parent_id = self._current_group_info.get('id') if \
                self._current_group_info else None
            is_cmd_success = False
            if current_cmd_end_info.get('exitcode') == '0':
                is_cmd_success = True
            console_out = {
                'consoleId': self._current_cmd_info.get('id'),
                'parentConsoleId': parent_id,
                'type': 'cmd',
                'message': current_cmd_end_name,
                'timestamp': timestamp,
                'timestampEndedAt': timestamp,
                'isSuccess': is_cmd_success,
                'isShown': self._show_group
            }
            if parent_id:
                self._append_to_console_buffer(console_out)
        elif line.startswith('__SH__GROUP__END__'):
            current_grp_end_info = line_split[1]
            current_grp_end_name = '|'.join(line_split[2:])
            current_grp_end_info = json.loads(current_grp_end_info)
            is_cmd_success = False
            if current_grp_end_info.get('exitcode') == '0':
                is_cmd_success = True
            console_out = {
                'consoleId': self._current_group_info.get('id'),
                'parentConsoleId': 'root',
                'type': 'grp',
                'message': current_grp_end_name,
                'timestamp': timestamp,
                'timestampEndedAt': timestamp,
                'isSuccess': is_cmd_success,
                'isShown': self._show_group
            }
            self._append_to_console_buffer(console_out)
        elif line.startswith('__SH__SCRIPT_END_SUCCESS__'):
            is_script_success = True
            is_complete = True
        elif line.startswith('__SH__SCRIPT_END_FAILURE__'):
            is_script_success = False
            is_complete = True
        else:
            # Plain output line: attach to the current command if any,
            # otherwise treat it as an execution error.
            parent_id = self._current_cmd_info.get('id') if \
                self._current_cmd_info else None
            console_out = {
                'consoleId': str(uuid.uuid4()),
                'parentConsoleId': parent_id,
                'type': 'msg',
                'message': line,
                'timestamp': timestamp,
            }
            if parent_id:
                self._append_to_console_buffer(console_out)
            else:
                self._append_to_error_buffer(line)
        return is_script_success, is_complete

    def _append_to_console_buffer(self, console_out):
        """ Pushes a console line to buffer after taking over lock """
        with self._console_buffer_lock:
            self._console_buffer.append(console_out)

        # NOTE(review): the flush check must stay outside the `with` —
        # _flush_console_buffer re-acquires this non-reentrant Lock.
        if len(self._console_buffer) > self._config['CONSOLE_BUFFER_LENGTH']:
            self._flush_console_buffer()

    def _set_console_flush_timer(self):
        """ Calls _flush_console_buffer to flush console buffers in
        constant intervals and stops when the script has finished
        execution """
        if not self._is_executing:
            return

        self._flush_console_buffer()
        # Re-arm: threading.Timer is one-shot, so each tick schedules
        # the next.
        console_flush_timer = threading.Timer(
            self._config['CONSOLE_FLUSH_INTERVAL_SECONDS'],
            self._set_console_flush_timer
        )
        console_flush_timer.start()

    def _flush_console_buffer(self):
        """ Flushes console buffer after taking over lock """
        if self._console_buffer:
            with self._console_buffer_lock:
                # If there is an exception in stringifying the data, test
                # each line to ensure only the sanitized ones are sent.
                # Errors are pushed to the error buffer. Testing on failure
                # will ensure that we don't test unnecessarily.
                try:
                    req_body = {
                        'buildJobId': self._config['BUILD_JOB_ID'],
                        'buildJobConsoles': self._console_buffer
                    }
                    # NOTE(review): the first dumps result is discarded;
                    # one call would suffice.
                    json.dumps(req_body)
                    data = json.dumps(req_body)
                except Exception as ex:
                    req_body = {
                        'buildJobId': self._config['BUILD_JOB_ID'],
                        'buildJobConsoles': []
                    }
                    for console in self._console_buffer:
                        try:
                            json.dumps(console)
                            req_body['buildJobConsoles'].append(console)
                        except Exception as ex:
                            trace = traceback.format_exc()
                            error = '{0}: {1}'.format(str(ex), trace)
                            self._append_to_error_buffer(error)
                    data = json.dumps(req_body)
                self._shippable_adapter.post_build_job_consoles(data)
                del self._console_buffer
                self._console_buffer = []

    def _append_to_error_buffer(self, error):
        """ Appends an error into errors buffer after ensuring it is
        not empty """
        if not error.strip():
            return

        self._has_errors = True
        error_msg = {
            'consoleId': str(uuid.uuid4()),
            # Parent every error under the synthetic "Error" group.
            'parentConsoleId': self._error_grp['consoleId'],
            'type': 'msg',
            'message': error,
            'timestamp': Executor._get_timestamp(),
            'isSuccess': False
        }
        self._error_buffer.append(error_msg)

    def _flush_error_buffer(self):
        """ Flushes error buffer """
        req_body = {
            'buildJobId': self._config['BUILD_JOB_ID'],
            'buildJobConsoles': self._error_buffer
        }
        data = json.dumps(req_body)
        self._shippable_adapter.post_build_job_consoles(data)
        del self._error_buffer
        self._error_buffer = []

    @staticmethod
    def _get_timestamp():
        """ Helper method to return timestamp in a format which is
        acceptable for consoles """
        # Microseconds since epoch, as an integer.
        return int(time.time() * 1000000)