def get_patchsets(self, patchlist):
    """
    Retrieve a list of applicable series summaries for the specified
    list of patch IDs. Patches whose names match one of the skip
    patterns (self.skip) are excluded from the series.

    Args:
        patchlist: List of patch IDs to retrieve series summaries for,
                   or skip over.

    Returns:
        A list of SeriesSummary objects.
    """
    logging.debug("get_patchsets: %s", patchlist)
    summaries = list()
    processed_sids = set()
    for patch_id in patchlist:
        patch = self.get_patch_by_id(patch_id)
        if not patch:
            continue
        # Fetch each series only once, even if several patches share it.
        for series_info in patch.get("series"):
            series_id = series_info.get("id")
            if series_id in processed_sids:
                continue
            series_url = join_with_slash(self.apiurls.get("series"),
                                         str(series_id))
            summaries += self.__get_series_from_url(series_url)
            processed_sids.add(series_id)
    return summaries
def __get_patch_message(self, patch_id):
    """
    Retrieve patch's mbox as email object.

    Args:
        patch_id: The ID of the patch which mbox should be retrieved.

    Returns:
        Email object created from the mbox file.

    Raises:
        requests.exceptions.RequestException (and subexceptions) in case
        of requests exceptions,
        Exception in case of unexpected return code (eg. nonexistent
        patch).
    """
    mbox_url = join_with_slash(self.baseurl, 'patch', str(patch_id),
                               self._get_mbox_url_sfx())
    # RequestException propagates to the caller unchanged; the previous
    # "try: ... except RequestException as exc: raise exc" wrapper was a
    # no-op and has been removed.
    response = requests.get(mbox_url)
    if response.status_code != requests.codes.ok:
        raise Exception('Failed to retrieve patch from %s, returned %d'
                        % (mbox_url, response.status_code))
    # NOTE(review): response.content is bytes on Python 3, where
    # message_from_bytes would be required; this code assumes Python 2
    # (the module also uses xmlrpclib) — confirm target interpreter.
    return email.message_from_string(response.content)
def __substitute_and_attach(self, text, directory, name_label=None):
    """
    Substitute the placeholders for attachment filenames in the report
    text and record the referenced attachments in self.attachments.

    Args:
        text:       Original text of the report.
        directory:  Parent directory of the logs.
        name_label: Label to add to the filename attachment, to be able
                    to differentiate between logs from different runs
                    which have the same name. Defaults to None (eg. for
                    merge reports).

    Returns:
        String representing the modified text.
    """
    for placeholder in SUBSTITUTE_RE.findall(text):
        filename = placeholder.strip('}{')
        file_path = join_with_slash(directory, filename)
        # Note: a falsy label (None, 0, '') leaves the name unchanged.
        if name_label:
            try:
                base, extension = filename.rsplit('.', 1)
            except ValueError:
                # No extension to preserve; just append the label.
                attachment_name = '{}_{}'.format(filename, name_label)
            else:
                attachment_name = '{}_{}.{}'.format(base, name_label,
                                                    extension)
        else:
            attachment_name = filename
        text = text.replace(placeholder, attachment_name)
        self.attachments.append(MailAttachment(attachment_name,
                                               file_path))
    return text
def get_mbox_url(self):
    """
    Build the URL pointing at the object's mbox.

    Returns:
        URL pointing at the object's mbox.
    """
    mbox_url = join_with_slash(self.url, self.mbox_sfx)
    return mbox_url
def __create_data(self, merge_report, result_set_list=None):
    """
    Format data from logs into a report, attach the body of the report
    (including the template header and footer) and populate
    self.attachments with any attachments specified.

    Args:
        merge_report:    Absolute path to the merge report file.
        result_set_list: List of sets of reports, each set representing
                         one run to report; defaults to None (no runs).
                         Each set contains absolute paths to the .report
                         and .result files of the test run, for all
                         stages that were executed.
    """
    # Use a None sentinel instead of a mutable default argument, which
    # would be shared between calls.
    if result_set_list is None:
        result_set_list = []

    test_summary = SUMMARY_PASS

    with open(merge_report, 'r') as merge_file:
        full_report = merge_file.read()
    merge_dir = os.path.dirname(merge_report)
    full_report = self.__substitute_and_attach(full_report, merge_dir)
    with open(join_with_slash(merge_dir, 'merge.result')) as merge_result:
        if merge_result.read().startswith('false'):
            test_summary = SUMMARY_MERGE_FAILURE

    # Map each stage to the summary reported when that stage fails.
    stage_to_summary_map = {
        'build': SUMMARY_BUILD_FAILURE,
        'run': SUMMARY_TEST_FAILURE,
        'console_check': SUMMARY_TRACE_FOUND
    }
    for index, test_run in enumerate(result_set_list):
        test_result_dir = os.path.dirname(next(iter(test_run)))
        # Keep the right order of reports
        for stage in ['build', 'run', 'console_check']:
            stage_summary, stage_report = self.__get_stage_report(
                stage, test_run, stage_to_summary_map[stage])
            # NOTE(review): index 0 is falsy, so the first run's
            # attachments are left unlabeled by __substitute_and_attach
            # — confirm this is intentional.
            stage_report = self.__substitute_and_attach(
                stage_report, test_result_dir, index)
            # Only the first failure encountered defines the summary.
            if test_summary == SUMMARY_PASS:
                test_summary = stage_summary
            if stage == 'build':
                full_report += '\n'
            full_report += stage_report

    summary = self.__create_summary(test_summary)
    with open(self.report_intro, 'r') as report_intro_file:
        report_intro_text = report_intro_file.read()
    with open(self.report_footer, 'r') as report_footer_file:
        report_footer_text = report_footer_file.read()

    self.report.attach(
        MIMEText('\n'.join(
            [report_intro_text, summary, full_report,
             report_footer_text])))
def create_report(self):
    """
    Build the report by merging all info found in the self.assets_dir.

    Raises:
        Exception if no merge results are present in the directory.
    """
    filename_list = os.listdir(self.assets_dir)
    if 'merge.result' not in filename_list:
        raise Exception('No merge results found in %s! Please check if the'
                        ' provided directory is correct and the testing '
                        'completed without errors.' % self.assets_dir)

    merge_report = join_with_slash(self.assets_dir, 'merge.report')
    has_toplevel_results = any(
        filename.startswith(('build', 'run')) for filename in filename_list
    )

    if has_toplevel_results:
        logging.info('Creating single report from %s', self.assets_dir)
        self.__create_data(merge_report,
                           [self.__get_results(self.assets_dir)])
    else:
        # Try to grab build / run files from subdirectories
        result_set_list = []
        for filename in filename_list:
            subdir = join_with_slash(self.assets_dir, filename)
            if os.path.isdir(subdir):
                result_set = self.__get_results(subdir)
                # Skip subdirectories which yielded no result files.
                if result_set:
                    result_set_list.append(result_set)
        if result_set_list:
            logging.info('Data from multiple runs expected, creating '
                         'multireport.')
            self.__create_data(merge_report, result_set_list)
        else:
            # Only merge stage ran
            logging.info('Reporting merge results.')
            self.__create_data(merge_report)

    for attachment in self.attachments:
        self.report.attach(attachment.data)
def _get_patch_url(self, patch):
    """
    Build a Patchwork URL for passed patch object.

    Args:
        patch: Patch object, either Patchwork2's JSON object or
               Patchwork1's XMLRPC object.

    Returns:
        Patch URL.
    """
    patch_id = str(patch.get('id'))
    return join_with_slash(self.baseurl, 'patch', patch_id)
def get_result_url(self, buildid):
    """
    Get the URL of the web representation of the specified build.

    Args:
        buildid: Jenkins build ID.

    Returns:
        The URL of the build result.
    """
    # NOTE(review): the docstring previously documented a "jobname"
    # argument which the signature does not take; presumably
    # __base_server_url() already encodes the project/job part of the
    # URL — confirm, since "job/<buildid>" alone looks unusual for
    # Jenkins URLs.
    return join_with_slash(self.__base_server_url(), "job", str(buildid))
def __get_apiurls(self, baseurl):
    """
    Retrieve JSON representation of the list of API URLs supported by the
    Patchwork server.

    Args:
        baseurl: Patchwork base URL to query.

    Returns:
        The JSON representation of the API URLs.

    Raises:
        Exception in case of unexpected return code.
    """
    response = requests.get(join_with_slash(baseurl, "api"))
    # Use requests.codes.ok instead of a bare 200 for consistency with
    # the other HTTP status checks in this module.
    if response.status_code != requests.codes.ok:
        raise Exception("Can't get apiurls: %d" % response.status_code)
    return response.json()
def __get_patchsets_by_patch(self, url, seen=None):
    """
    Retrieve a list of series summaries, which weren't already "seen",
    and which contain the patch or patches available at the specified
    URL.

    Args:
        url:  The URL pointing to a patch or a patch list to retrieve the
              list of patch series from.
        seen: A set of IDs of patch series which should be ignored, and
              which should have patch series IDs added once they're
              processed. Defaults to None (a fresh empty set).

    Returns:
        A list of SeriesSummary objects.

    Raises:
        Exception in case of unexpected return code.
    """
    # A mutable default argument ("seen=set()") is created once and
    # shared across ALL calls, so series seen in one invocation would be
    # silently skipped in later, unrelated invocations. Use a None
    # sentinel instead; the recursive call below passes "seen"
    # explicitly, so the pagination behavior is unchanged.
    if seen is None:
        seen = set()

    series_list = list()
    logging.debug("get_patchsets_by_patch %s", url)
    response = requests.get(url)
    if response.status_code != 200:
        raise Exception("Can't get series from url %s (%d)" %
                        (url, response.status_code))

    pdata = response.json()
    # If there is a single patch returned we get a dict, not a list with
    # a single element. Fix this inconsistency for easier processing.
    if not isinstance(pdata, list):
        pdata = [pdata]

    for patch in pdata:
        # For each patch series the patch belongs to
        for series in patch.get("series"):
            sid = series.get("id")
            if sid not in seen:
                series_list += self.__get_series_from_url(
                    join_with_slash(self.apiurls.get("series"), str(sid)))
                seen.add(sid)

    # Follow the "next" pagination link from the Link header, if any.
    link = response.headers.get("Link")
    if link:
        match = re.match('<(.*)>; rel="next"', link)
        if match:
            next_url = match.group(1)
            # TODO Limit recursion
            series_list += self.__get_patchsets_by_patch(next_url, seen)

    return series_list
def _get_project_id(self, project_name):
    """
    Retrieve project ID based on project's name.

    Args:
        project_name: The name of the project to retrieve.

    Returns:
        Integer representing project's ID.

    Raises:
        Exception in case of unexpected return code.
    """
    project_url = join_with_slash(self.apiurls.get("projects"),
                                  project_name)
    response = requests.get(project_url)
    if response.status_code != requests.codes.ok:
        raise Exception("Can't get project data: %s %d" %
                        (project_name, response.status_code))
    project_data = response.json()
    return project_data.get('id')
def get_patch_by_id(self, pid):
    """
    Retrieve a patch object by patch ID.

    Args:
        pid: ID of the patch to retrieve.

    Returns:
        Parsed JSON object representing the patch and its attributes.
        The set of supported attributes depends on which API versions
        are supported by a specific Patchwork instance.

    Raises:
        Exception in case of unexpected return code.
    """
    response = requests.get(
        join_with_slash(self.apiurls.get("patches"), str(pid)))
    # Use requests.codes.ok instead of a bare 200 for consistency with
    # the other HTTP status checks in this module.
    if response.status_code != requests.codes.ok:
        raise Exception("Can't get patch by id %d (%d)" %
                        (pid, response.status_code))
    return response.json()
def __get_results(self, dir_path):
    """
    Retrieve a set of results for a single run from specified directory.

    Args:
        dir_path: Absolute path to the directory result files are
                  supposed to be in.

    Returns:
        A set of absolute file paths of results retrieved from the
        directory.
    """
    # Set comprehension instead of set([...]): same result, without
    # building a throwaway intermediate list.
    results = {
        join_with_slash(dir_path, filename)
        for filename in os.listdir(dir_path)
        if filename.endswith(('.result', '.report'))
    }
    logging.debug('Results retrieved from %s: %s', dir_path, results)
    return results
def __get_rpc(self, baseurl):
    """
    Create an XML RPC interface for a Patchwork base URL and initialize
    compatibility information.

    Args:
        baseurl: Patchwork base URL to create the interface with.

    Returns:
        The XML RPC interface for the Patchwork.

    Raises:
        Exception if the server reports an unknown or unsupported XML
        RPC version, or raises an unexpected XML RPC fault.
    """
    rpc = xmlrpclib.ServerProxy(join_with_slash(baseurl, "xmlrpc/"))
    try:
        ver = rpc.pw_rpc_version()
        # check for normal patchwork1 xmlrpc version numbers
        if not (ver == [1, 3, 0] or ver == 1):
            # Interpolate explicitly: Exception("... %s", ver) (the old
            # logging-style call) would have stored the tuple verbatim
            # instead of formatting the version into the message.
            raise Exception("Unknown xmlrpc version %s" % ver)
    except xmlrpclib.Fault as err:
        if err.faultCode == 1 and \
                re.search("index out of range", err.faultString):
            # possible internal RH instance
            rpc = RpcWrapper(rpc)
            ver = rpc.pw_rpc_version()
            if ver < 1010:
                raise Exception("Unsupported xmlrpc version %s" % ver)

            # grab extra info for later parsing
            self.fields = ['id', 'name', 'submitter', 'msgid',
                           ['root_comment', ['headers']],
                           'date', 'project_id']
        else:
            raise Exception("Unknown xmlrpc fault: %s" % err.faultString)
    return rpc