def send_build_data(buildjob, detail=None):
    """
    Send build data generated by client to keen.io.

    Parameters:
    - buildjob : BuildJob instance
    - detail : Data storage detail level :
               'minimal', 'basic', 'full', 'extended'
    """
    if not isinstance(buildjob, BuildJob):
        raise TypeError("param buildjob should be a BuildJob instance")

    # explicit detail parameter takes precedence over the stored setting
    data_detail = Settings().get_value_or_setting("data_detail", detail)

    # nothing to do when Keen.io is not writable
    if not is_writable():
        return

    logger.info(
        "Sending client build job data to Keen.io (data detail: %s)",
        data_detail
    )

    # store the build job itself
    add_event("build_jobs", {"job": buildjob.to_dict()})

    # store the individual build stages at the higher detail levels
    if data_detail in ("full", "extended"):
        add_events("build_stages", buildjob.stages_to_list())
def create_worker_app():
    """
    Create and configure a Celery worker app.

    Loads the client settings, and when a task queue is configured,
    connects a Celery app to it (JSON serialisation only). When no
    task queue is enabled, an unconfigured Celery app is returned so
    callers always get a valid app object.

    Returns a Celery application instance.
    """
    # load settings
    settings = Settings()
    settings.load_settings(config_file=constants.CONFIG_FILE)
    settings.set_client(constants.CLIENT_NAME, constants.CLIENT_VERSION)

    if is_worker_enabled():
        task_queue = settings.get_setting("task_queue")
        worker_app = Celery(
            'tasks',
            backend=task_queue["backend"],
            broker=task_queue["broker_url"]
        )

        # configure worker : accept and produce JSON only
        worker_app.conf.update(
            CELERY_TASK_SERIALIZER='json',
            CELERY_ACCEPT_CONTENT=['json']
        )

        # NOTE: Celery() never returns None and connects to the broker
        # lazily, so connection errors surface on first use, not here.
        logger.info(
            "Connected to task queue : %s", task_queue["broker_url"]
        )
    else:
        # fall back to an unconfigured app so callers get a usable object
        worker_app = Celery()
        logger.warning(
            "Task queue is not defined,"
            " check README.md to configure task queue"
        )

    return worker_app
def process_end_stage(self, tags_dict):
    """
    Process parsed end_stage tags.

    Parameters:
    - tags_dict : dictionary with parsed tags
    """
    # parameter must be a dictionary containing both end tags
    required_tags = ['end_stage', 'end_substage']
    if not check_dict(tags_dict, "tags_dict", required_tags):
        return False

    logger.debug("End stage : %s", tags_dict)

    # full substage name, built from the parsed tags
    end_stagename = "{stage}.{substage}".format(
        stage=tags_dict['end_stage'],
        substage=tags_dict['end_substage'])

    # a stage must have been started, under the very same name
    if not self.has_name() or self.stage.data["name"] != end_stagename:
        logger.info("Substage was not started or name doesn't match")
        self.finished_incomplete = True
        return False

    # mark stage as finished successfully
    self.finished = True
    logger.info("Stage %s finished successfully", self.get_name())
    return True
def create_stage(self, name, start_time, end_time):
    """
    Create and add a stage.

    Parameters :
    - name : stage name
    - start_time : start of stage timestamp
    - end_time : end of stage timestamp

    Returns the created Stage, or None for non-numeric timestamps.
    """
    # both timestamps must be numeric (int or float)
    timestamps_valid = all(
        isinstance(ts, (int, float)) for ts in (start_time, end_time)
    )
    if not timestamps_valid:
        return None

    # duration is simply the difference of the two timestamps
    duration = end_time - start_time
    logger.info('Duration %s : %fs', name, duration)

    # build the stage and attach it
    stage = Stage()
    stage.set_name(name)
    stage.set_started_at(start_time)
    stage.set_finished_at(end_time)
    stage.set_duration(duration)
    self.add_stage(stage)
    return stage
def process_start_stage(self, tags_dict):
    """
    Process parsed start_stage tags.

    Parameters:
    - tags_dict : dictionary with parsed tags
    """
    # parameter must be a dictionary containing both start tags
    if not check_dict(tags_dict, "tags_dict",
                      ['start_stage', 'start_substage']):
        return False

    logger.debug("Start stage : %s", tags_dict)

    # ignore a second start tag for the same substage
    if self.has_started():
        logger.info("Substage already started")
        return False

    # store the full substage name
    return self.set_name("{stage:s}.{substage:s}".format(
        stage=tags_dict['start_stage'],
        substage=tags_dict['start_substage']))
def process_end_stage(self, tags_dict):
    """
    Process parsed end_stage tags.

    Parameters:
    - tags_dict : dictionary with parsed tags
    """
    # parameter must be a dictionary carrying both end tags
    if not check_dict(tags_dict, "tags_dict",
                      ['end_stage', 'end_substage']):
        return False

    logger.debug("End stage : %s", tags_dict)

    # compose the expected substage name from the parsed tags
    expected_name = "{stage}.{substage}".format(
        stage=tags_dict['end_stage'],
        substage=tags_dict['end_substage']
    )

    # the stage must have been started under the same name;
    # otherwise flag the substage as incomplete
    name_matches = (
        self.has_name() and self.stage.data["name"] == expected_name
    )
    if not name_matches:
        logger.info("Substage was not started or name doesn't match")
        self.finished_incomplete = True
        return False

    # stage finished successfully
    self.finished = True
    logger.info("Stage %s finished successfully", self.get_name())
    return True
def process_start_stage(self, tags_dict):
    """
    Process parsed start_stage tags.

    Parameters:
    - tags_dict : dictionary with parsed tags
    """
    # parameter must be a dictionary carrying both start tags
    required = ['start_stage', 'start_substage']
    if not check_dict(tags_dict, "tags_dict", required):
        return False

    logger.debug("Start stage : %s", tags_dict)

    if self.has_started():
        # a substage is already in progress : refuse to restart
        logger.info("Substage already started")
        result = False
    else:
        # store the full "stage.substage" name
        substage_name = "{stage:s}.{substage:s}".format(
            stage=tags_dict['start_stage'],
            substage=tags_dict['start_substage']
        )
        result = self.set_name(substage_name)

    return result
def set_name(self, name):
    """
    Set stage name.

    Returns True when the name was stored, False for a None name.
    """
    if name is not None:
        # names are always stored as strings
        self.data["name"] = str(name)
        logger.info("Set name : %s", name)
        return True

    return False
def process_travis_buildlog(self, repo, build):
    """
    Process Travis CI buildlog.

    Check parameters, load build data from Travis CI,
    process it and send to Keen.io for storage.

    Runs as a Celery task (uses self.request / self.retry) :
    when not called directly, failures trigger a task retry
    instead of returning an error message.

    Parameters:
    - repo : repository slug (fe. owner/repo)
    - build : build number

    Returns a human readable status message (or raises Retry).
    """
    # normalise parameters to strings before validation
    repo = str(repo)
    build = str(build)
    try:
        result = check_process_parameters(repo, build)
    except Exception as msg:
        # NOTE(review): broad except — presumably meant to catch
        # Keen.io API errors only; confirm and narrow if possible
        if self.request.called_directly:
            return msg
        else:
            # When checking if build exists fails, retry later
            # Keen.io API might be down
            raise self.retry()
    # a non-None result is a validation error message : stop here
    if result is not None:
        logger.warning(result)
        return result
    travis_data = TravisData(repo, build)
    # data detail level is configured per repository
    data_detail = service.get_repo_data_detail(repo)
    # retrieve build data using Travis CI API
    message = "Retrieving build #%s data of %s from Travis CI"
    logger.info(message, build, repo)
    # escape user-supplied values before embedding in the return message
    # NOTE(review): cgi.escape was removed in Python 3.8 —
    # html.escape(s, quote=False) is the modern equivalent; confirm
    # the supported Python version before migrating
    ret_msg = message % (cgi.escape(build), cgi.escape(repo))
    if not travis_data.get_build_data():
        if self.request.called_directly:
            ret_msg += "\nError retrieving build data."
            return ret_msg
        else:
            # retry if retrieving build data failed.
            raise self.retry()
    # process all build jobs and
    # send build job data to Keen.io
    for build_job in travis_data.process_build_jobs():
        build_job_id = build_job.properties.get_items()["job"]
        message = "Send %s build job #%s data to Keen.io"
        logger.warning(message, repo, build_job_id)
        ret_msg += "\n" + message % \
            (cgi.escape(repo), cgi.escape(build_job_id))
        keenio.send_build_data_service(build_job, data_detail)
    # check if collection is empty
    if travis_data.build_jobs:
        message = "Successfully retrieved build #%s data of %s" \
            " from Travis CI and sent to Keen.io"
    else:
        message = "No data found for build #%s of %s"
    logger.warning(message, build, repo)
    ret_msg += "\n" + message % (cgi.escape(build), cgi.escape(repo))
    return ret_msg
def download_job_log(self, job_id):
    """
    Retrieve Travis CI job log.

    Parameters:
    - job_id : ID of the job to process
    """
    job_id = str(job_id)
    logger.info("Request build job log #%s", job_id)
    # delegate the actual API call to the shared request handler
    return self._handle_request('jobs/{}/log'.format(job_id))
def set_end_timestamp(self, timestamp):
    """
    Set end timestamp.

    Parameters:
    - timestamp : end timestamp (positive int or float);
      anything else is silently ignored
    """
    # accept only numeric timestamps
    if not isinstance(timestamp, (int, float)):
        return
    # accept only positive values
    if timestamp <= 0:
        return

    logger.info("Set end_timestamp : %f", timestamp)
    self.end_timestamp = timestamp
def generate_write_key():
    """
    Create scoped key for write access to Keen.io database.

    Returns the encrypted scoped key, or None when no master key is set.
    """
    if not has_master_key():
        logger.warning("Keen.io Write Key was not created,"
                       " keen.master_key is not defined.")
        return None

    # fall back to the environment variable when keen.master_key is unset
    master_key = keen.master_key or os.environ.get("KEEN_MASTER_KEY")

    logger.info("Keen.io Write Key is created")
    return scoped_keys.encrypt(
        master_key, {"allowed_operations": ["write"]}
    )
def add_event(event_collection, payload):
    """
    Wrapper for keen.add_event(), adds project info.

    Param event_collection : collection event data is submitted to
    Param payload : data that is submitted
    """
    # enrich the event with project info, then submit it to Keen.io
    keen.add_event(event_collection, add_project_info_dict(payload))

    logger.info(
        "Sent single event to '%s' collection (Keen.io)", event_collection
    )
def add_events(event_collection, payload):
    """
    Wrapper for keen.add_events(), adds project info to each event.

    Param event_collection : collection event data is submitted to
    Param payload : array of events that is submitted
    """
    # enrich every event with project info, then submit the batch
    enriched = add_project_info_list(payload)
    keen.add_events({event_collection: enriched})

    logger.info(
        "Sent multiple events to '%s' collection (Keen.io)",
        event_collection
    )
def _handle_request(self, request, params=None):
    """
    Retrieve Travis CI data using API.

    Parameters:
    - request : request to be sent to API
    - params : HTTP request parameters
    """
    url = self.api_url + request

    # start from the default parameters and merge in any valid extras
    merged_params = self.request_params.copy()
    if params is not None and check_dict(params, "params"):
        merged_params.update(params)

    logger.info("Request from Travis CI API : %s", url)
    return build_opener().open(Request(url, None, merged_params))
def check_authorization(repo, auth_header):
    """
    Check if Travis CI notification has a correct Authorization header.

    This check is enabled if travis_account_token is defined in settings.

    More information on the Authorization header :
    http://docs.travis-ci.com/user/notifications/#Authorization-for-Webhooks

    Returns true if Authorization header is valid,
    but also if travis_account_token is not defined.

    Parameters:
    - repo : git repo name
    - auth_header : Travis CI notification Authorization header
    """
    # constant-time comparison, the header is attacker-controlled input
    from hmac import compare_digest

    # get Travis account token from Settings
    token = Settings().get_setting("travis_account_token")

    # return True if token is not set
    if token is None:
        logger.info("Setting travis_account_token is not defined,"
                    " Travis CI notification Authorization header"
                    " is not checked.")
        return True

    # check if parameters are strings
    if is_string(repo) and is_string(auth_header) and is_string(token):
        # generate hash (encode string to bytes first)
        auth_hash = sha256((repo + token).encode('utf-8')).hexdigest()

        # compare hash with Authorization header;
        # compare as bytes, in constant time, to avoid
        # leaking the correct hash through a timing side channel
        if compare_digest(auth_hash.encode('utf-8'),
                          auth_header.encode('utf-8')):
            logger.info("Travis CI notification Authorization header"
                        " is correct.")
            return True
        else:
            logger.error("Travis CI notification Authorization header"
                         " is incorrect.")
            return False
    else:
        logger.debug("repo, auth_header and travis_auth_token"
                     " should be strings.")
        return False
def process_end_time(self, tags_dict):
    """
    Process parsed end_time tags.

    Parameters:
    - tags_dict : dictionary with parsed tags
    """
    # parameter must be a dictionary with all four timing tags
    required = [
        'end_hash', 'start_timestamp', 'finish_timestamp', 'duration'
    ]
    if not check_dict(tags_dict, "tags_dict", required):
        return False

    logger.debug("End time : %s", tags_dict)

    # timing must have been started and the hash must match
    if (not self.has_timing_hash() or
            self.timing_hash != tags_dict['end_hash']):
        logger.info("Substage timing was not started or"
                    " hash doesn't match")
        self.finished_incomplete = True
        return False

    # Set started timestamp
    set_started = False
    if self.stage.set_started_at_nano(tags_dict['start_timestamp']):
        logger.info("Stage started at %s",
                    self.stage.data["started_at"]["isotimestamp"])
        set_started = True

    # Set finished timestamp
    set_finished = False
    if self.stage.set_finished_at_nano(tags_dict['finish_timestamp']):
        logger.info("Stage finished at %s",
                    self.stage.data["finished_at"]["isotimestamp"])
        set_finished = True

    # Set duration
    set_duration = False
    if self.stage.set_duration_nano(tags_dict['duration']):
        logger.info("Stage duration : %ss",
                    self.stage.data['duration'])
        set_duration = True

    # succeed only when all three values were stored
    return set_started and set_finished and set_duration
def _handle_request(self, request, params=None):
    """
    Retrieve Travis CI data using API.

    Parameters:
    - request : request to be sent to API
    - params : HTTP request parameters
    """
    # default parameters, extended with any valid caller-supplied ones
    effective_params = self.request_params.copy()
    if params is not None and check_dict(params, "params"):
        effective_params.update(params)

    request_url = self.api_url + request
    api_request = Request(request_url, None, effective_params)

    logger.info("Request from Travis CI API : %s", request_url)
    opener = build_opener()
    return opener.open(api_request)
def process_start_time(self, tags_dict):
    """
    Process parsed start_time tags.

    Parameters:
    - tags_dict : dictionary with parsed tags
    """
    # parameter must be a dictionary carrying the start hash
    if not check_dict(tags_dict, "tags_dict", 'start_hash'):
        return False

    logger.debug("Start time : %s", tags_dict)

    if self.has_timing_hash():
        # a second start_time tag for the same substage is refused
        logger.info("Substage timing already set")
        result = False
    else:
        # remember the hash, it is matched against the end_time tag
        self.timing_hash = tags_dict['start_hash']
        logger.info("Set timing hash : %s", self.timing_hash)
        result = True

    return result
def process_end_time(self, tags_dict):
    """
    Process parsed end_time tags.

    Parameters:
    - tags_dict : dictionary with parsed tags
    """
    # parameter must be a dictionary with all four timing tags
    if not check_dict(tags_dict, "tags_dict",
                      ['end_hash', 'start_timestamp',
                       'finish_timestamp', 'duration']):
        return False

    logger.debug("End time : %s", tags_dict)

    result = False

    # timing must have been started and the hash must match,
    # otherwise flag the substage as incomplete
    hash_matches = (
        self.has_timing_hash() and
        self.timing_hash == tags_dict['end_hash']
    )
    if not hash_matches:
        logger.info("Substage timing was not started or"
                    " hash doesn't match")
        self.finished_incomplete = True
    else:
        # Set started timestamp
        started_ok = self.stage.set_started_at_nano(
            tags_dict['start_timestamp'])
        if started_ok:
            logger.info("Stage started at %s",
                        self.stage.data["started_at"]["isotimestamp"])

        # Set finished timestamp
        finished_ok = self.stage.set_finished_at_nano(
            tags_dict['finish_timestamp'])
        if finished_ok:
            logger.info("Stage finished at %s",
                        self.stage.data["finished_at"]["isotimestamp"])

        # Set duration
        duration_ok = self.stage.set_duration_nano(tags_dict['duration'])
        if duration_ok:
            logger.info("Stage duration : %ss",
                        self.stage.data['duration'])

        # succeed only when all three values were stored
        result = started_ok and finished_ok and duration_ok

    return result
def generate_read_key(repo):
    """
    Create scoped key for reading only the build-stages related data.

    Param repo : github repository slug (fe. buildtimetrend/python-lib)

    Returns the encrypted scoped key, or None when no master key is set.
    """
    if not has_master_key():
        logger.warning("Keen.io Read Key was not created,"
                       " keen.master_key is not defined.")
        return None

    # fall back to the environment variable when keen.master_key is unset
    master_key = keen.master_key or os.environ.get("KEEN_MASTER_KEY")

    # limit the key to read operations, optionally scoped to one repo
    privileges = {"allowed_operations": ["read"]}
    if repo is not None:
        privileges["filters"] = [get_repo_filter(repo)]

    logger.info("Keen.io Read Key is created for %s", repo)
    return scoped_keys.encrypt(master_key, privileges)
def process_command(self, tags_dict):
    """
    Process parsed command tag.

    Parameters:
    - tags_dict : dictionary with parsed tags
    """
    # parameter must be a dictionary carrying the command tag
    if not check_dict(tags_dict, "tags_dict", 'command'):
        return False

    logger.debug("Command : %s", tags_dict)

    # a command can only be set once per substage
    if self.has_command():
        logger.info("Command is already set")
        return False

    if self.stage.set_command(tags_dict['command']):
        logger.info("Set command : %s", tags_dict['command'])
        return True

    return False
def modify_index(file_original, file_modified):
    """
    Modify html file for Buildtime Trend as a Service.

    Adjust paths to 'assets' :
    the relative path is changed to an absolute path.

    Parameters:
    - file_original : Path of the original file
    - file_modified : Path of the modified file hosted on the service

    Returns True when the modified file exists afterwards.
    """
    # regenerate only when the modified copy is stale
    if not file_is_newer(file_modified, file_original):
        with open(file_original, 'r') as infile, \
                open(file_modified, 'w') as outfile:
            for line in infile:
                # rewrite relative asset paths to the absolute URL
                outfile.write(line.replace("assets", ASSETS_URL))

    if not check_file(file_modified):
        return False

    logger.info("Created index service file : %s", file_modified)
    return True
def process_notification_payload(payload):
    """
    Extract repo slug and build number from Travis notification payload.

    Returns a dictionary with "repo" and "build" information,
    or an empty dictionary if the payload could not be processed.

    Deprecated behaviour :
    Currently the repo and build information are also stored in
    the "settings" object, but this will be removed in the near future.

    Parameters:
    - payload : Travis CI notification payload (JSON string)
    """
    settings = Settings()
    parameters = {}

    if payload is None:
        logger.warning("Travis notification payload is not set")
        return parameters

    if not is_string(payload):
        logger.warning(
            "Travis notification payload is incorrect :"
            " string expected, got %s", type(payload))
        return parameters

    # reject malformed JSON instead of raising, to honour the
    # documented contract of returning an empty dict on bad payloads
    # (ValueError covers json.JSONDecodeError)
    try:
        json_payload = json.loads(payload)
    except ValueError:
        logger.warning("Travis notification payload is not valid JSON")
        return parameters

    logger.info("Travis Payload : %r.", json_payload)

    # get repo name from payload
    if ("repository" in json_payload
            and "owner_name" in json_payload["repository"]
            and "name" in json_payload["repository"]):
        repo = get_repo_slug(json_payload["repository"]["owner_name"],
                             json_payload["repository"]["name"])
        logger.info("Build repo : %s", repo)
        # deprecated : also stored in settings (see docstring)
        settings.set_project_name(repo)
        parameters["repo"] = repo

    # get build number from payload
    if "number" in json_payload:
        logger.info("Build number : %s", str(json_payload["number"]))
        # deprecated : also stored in settings (see docstring)
        settings.add_setting('build', json_payload['number'])
        parameters["build"] = json_payload['number']

    return parameters