def load_bugs(self, filename):
    """Load the module/check -> bug-number mapping from *filename*.

    Bug file format:
        $(module)/$(checkname) $(bugnumber)
    Sample bug file:
        evolution/LibGnomeCanvas 571742

    Alternatively, the $(checkname) can be replaced by a column number,
    like: evolution/col:2 543234

    Also, if there is only a single check, the /$(checkname) part
    can be skipped.

    *filename* may be an http:// URL, fetched through httpcache.  After
    parsing, the status of every numeric bug is fetched from GNOME
    Bugzilla and stored in self.bug_status.
    """
    self.bugs = {}
    if not filename:
        return
    if filename.startswith('http://'):
        # live.gnome.org wiki pages need ?action=raw to serve plain text
        if filename.startswith('http://live.gnome.org') and not filename.endswith('?action=raw'):
            filename += '?action=raw'
        try:
            filename = httpcache.load(filename, age=0)
        except Exception as e:
            logging.warning('could not download %s: %s' % (filename, e))
            return
    # file() is a Python 2-only builtin (removed in Python 3, where this
    # block's "except ... as" syntax is required anyway); open() behaves
    # identically and the with-block closes the file deterministically.
    with open(filename) as fp:
        for line in fp:
            line = line.strip()
            if not line:
                continue
            if line.startswith('#'):
                continue
            part, bugnumber = line.split()
            if '/' in part:
                module_name, check = part.split('/')
                if check.startswith('col:'):
                    # "col:N" refers to the Nth configured check (1-based)
                    check = self.checks[int(check[4:]) - 1].__name__
            elif len(self.checks) == 1:
                module_name = part
                check = self.checks[0].__name__
            else:
                continue
            self.bugs[(module_name, check)] = bugnumber
    self.bug_status = {}
    bug_status = httpcache.load(
        'http://bugzilla.gnome.org/show_bug.cgi?%s&'
        'ctype=xml&field=bug_id&field=bug_status&field=emblems&'
        'field=resolution' % '&'.join(
            ['id=' + x for x in self.bugs.values() if x.isdigit()]),
        age=0)
    tree = ET.parse(bug_status)
    for bug in tree.findall('bug'):
        bug_id = bug.find('bug_id').text
        # a <resolution> element is only present on resolved bugs
        bug_resolved = (bug.find('resolution') is not None)
        # 'P' in <emblems> marks a bug carrying a patch
        bug_has_patch = (bug.find('emblems') is not None
                         and 'P' in bug.find('emblems').text)
        self.bug_status[bug_id] = {
            'resolution': bug_resolved,
            'patch': bug_has_patch,
        }
def _do_patches(self, buildscript):
    """Apply every (patch, patchstrip) pair in self.patches to the source tree.

    Patch files are located, in order: downloaded from the network when
    the patch name carries a URL scheme, resolved relative to the
    moduleset URI (same directory or a patches/ subdirectory), then
    looked up in the jhbuild-provided patch directories.  Raises
    BuildStateError when a network patch cannot be downloaded and
    CommandError when no patch file can be found at all.
    """
    # now patch the working tree
    for (patch, patchstrip) in self.patches:
        patchfile = ''
        if urlparse.urlparse(patch)[0]:
            # patch name has scheme, get patch from network
            try:
                patchfile = httpcache.load(
                    patch, nonetwork=buildscript.config.nonetwork)
            except urllib2.HTTPError as e:
                raise BuildStateError(
                    _('could not download patch (error: %s)') % e.code)
            except urllib2.URLError as e:
                raise BuildStateError(_('could not download patch'))
        elif self.repository.moduleset_uri:
            # get it relative to the moduleset uri, either in the same
            # directory or a patches/ subdirectory
            for patch_prefix in ('.', 'patches', '../patches'):
                uri = urlparse.urljoin(self.repository.moduleset_uri,
                                       os.path.join(patch_prefix, patch))
                try:
                    patchfile = httpcache.load(
                        uri, nonetwork=buildscript.config.nonetwork)
                except Exception as e:
                    # candidate location failed; try the next prefix
                    continue
                if not os.path.isfile(patchfile):
                    continue
                break
            else:
                # no candidate worked; fall through to the local lookup below
                patchfile = ''
        if not patchfile:
            # nothing else, use jhbuild provided patches
            possible_locations = []
            if self.config.modulesets_dir:
                possible_locations.append(
                    os.path.join(self.config.modulesets_dir, 'patches'))
                possible_locations.append(
                    os.path.join(self.config.modulesets_dir, '../patches'))
            if PKGDATADIR:
                possible_locations.append(
                    os.path.join(PKGDATADIR, 'patches'))
            if SRCDIR:
                possible_locations.append(os.path.join(SRCDIR, 'patches'))
            for dirname in possible_locations:
                patchfile = os.path.join(dirname, patch)
                if os.path.exists(patchfile):
                    break
            else:
                raise CommandError(_('Failed to find patch: %s') % patch)
        buildscript.set_action(_('Applying patch'), self, action_target=patch)
        # patchfile can be a relative file
        buildscript.execute('patch -p%d < "%s"' % (patchstrip,
                                                   os.path.abspath(patchfile)),
                            cwd=self.raw_srcdir)
def _get_patch_files(self, buildscript): patch_files = [] # now patch the working tree for (patch, patchstrip) in self.patches: patchfile = '' if urlutils.urlparse(patch)[0]: # patch name has scheme, get patch from network try: patchfile = httpcache.load( patch, nonetwork=buildscript.config.nonetwork) except urlutils.HTTPError as e: raise BuildStateError( _('could not download patch (error: %s)') % e.code) except urlutils.URLError: raise BuildStateError(_('could not download patch')) elif self.repository.moduleset_uri: # get it relative to the moduleset uri, either in the same # directory or a patches/ subdirectory for patch_prefix in ('.', 'patches', '../patches'): uri = urlutils.urljoin(self.repository.moduleset_uri, os.path.join(patch_prefix, patch)) try: patchfile = httpcache.load( uri, nonetwork=buildscript.config.nonetwork) except Exception: continue if not os.path.isfile(patchfile): continue break else: patchfile = '' if not patchfile: # nothing else, use jhbuild provided patches possible_locations = [] if self.config.modulesets_dir: possible_locations.append( os.path.join(self.config.modulesets_dir, 'patches')) possible_locations.append( os.path.join(self.config.modulesets_dir, '../patches')) if PKGDATADIR: possible_locations.append( os.path.join(PKGDATADIR, 'patches')) if SRCDIR: possible_locations.append(os.path.join(SRCDIR, 'patches')) for dirname in possible_locations: patchfile = os.path.join(dirname, patch) if os.path.exists(patchfile): break else: raise CommandError(_('Failed to find patch: %s') % patch) patch_files.append((patchfile, patch, patchstrip)) return patch_files
def load_bugs(self, filename):
    """Load the module/check -> bug-number mapping from *filename*.

    Bug file format:
        $(module)/$(checkname) $(bugnumber)
    Sample bug file:
        evolution/LibGnomeCanvas 571742

    Alternatively, the $(checkname) can be replaced by a column number,
    like: evolution/col:2 543234

    Also, if there is only a single check, the /$(checkname) part
    can be skipped.

    *filename* may be an http:// URL, fetched through httpcache.  After
    parsing, the status of every numeric bug is fetched from GNOME
    Bugzilla and stored in self.bug_status.
    """
    self.bugs = {}
    if not filename:
        return
    if filename.startswith('http://'):
        # live.gnome.org wiki pages need ?action=raw to serve plain text
        if filename.startswith('http://live.gnome.org') and not filename.endswith('?action=raw'):
            filename += '?action=raw'
        try:
            filename = httpcache.load(filename, age=0)
        except Exception as e:
            logging.warning('could not download %s: %s' % (filename, e))
            return
    # file() is a Python 2-only builtin (removed in Python 3, where the
    # "except ... as" syntax above is required anyway); open() behaves
    # identically and the with-block closes the file deterministically.
    with open(filename) as fp:
        for line in fp:
            line = line.strip()
            if not line:
                continue
            if line.startswith('#'):
                continue
            part, bugnumber = line.split()
            if '/' in part:
                module_name, check = part.split('/')
                if check.startswith('col:'):
                    # "col:N" refers to the Nth configured check (1-based)
                    check = self.checks[int(check[4:]) - 1].__name__
            elif len(self.checks) == 1:
                module_name = part
                check = self.checks[0].__name__
            else:
                continue
            self.bugs[(module_name, check)] = bugnumber
    self.bug_status = {}
    bug_status = httpcache.load(
        'http://bugzilla.gnome.org/show_bug.cgi?%s&'
        'ctype=xml&field=bug_id&field=bug_status&field=emblems&'
        'field=resolution' % '&'.join(
            ['id=' + x for x in self.bugs.values() if x.isdigit()]),
        age=0)
    tree = ET.parse(bug_status)
    for bug in tree.findall('bug'):
        bug_id = bug.find('bug_id').text
        # a <resolution> element is only present on resolved bugs
        bug_resolved = (bug.find('resolution') is not None)
        # 'P' in <emblems> marks a bug carrying a patch
        bug_has_patch = (bug.find('emblems') is not None
                         and 'P' in bug.find('emblems').text)
        self.bug_status[bug_id] = {
            'resolution': bug_resolved,
            'patch': bug_has_patch,
        }
def _do_patches(self, buildscript):
    """Apply every (patch, patchstrip) pair in self.patches to the source tree.

    Patch files are located, in order: downloaded from the network when
    the patch name carries a URL scheme, resolved relative to the
    moduleset URI (same directory or a patches/ subdirectory), then
    looked up in the jhbuild-provided patch directories.  Raises
    BuildStateError when a network patch cannot be downloaded and
    CommandError when no patch file can be found at all.
    """
    # now patch the working tree
    for (patch, patchstrip) in self.patches:
        patchfile = ''
        if urlparse.urlparse(patch)[0]:
            # patch name has scheme, get patch from network
            try:
                patchfile = httpcache.load(patch,
                                           nonetwork=buildscript.config.nonetwork)
            except urllib2.HTTPError as e:
                raise BuildStateError(
                    _('could not download patch (error: %s)') % e.code)
            except urllib2.URLError as e:
                raise BuildStateError(_('could not download patch'))
        elif self.repository.moduleset_uri:
            # get it relative to the moduleset uri, either in the same
            # directory or a patches/ subdirectory
            for patch_prefix in ('.', 'patches', '../patches'):
                uri = urlparse.urljoin(self.repository.moduleset_uri,
                                       os.path.join(patch_prefix, patch))
                try:
                    patchfile = httpcache.load(uri,
                                               nonetwork=buildscript.config.nonetwork)
                except Exception as e:
                    # candidate location failed; try the next prefix
                    continue
                if not os.path.isfile(patchfile):
                    continue
                break
            else:
                # no candidate worked; fall through to the local lookup below
                patchfile = ''
        if not patchfile:
            # nothing else, use jhbuild provided patches
            possible_locations = []
            if self.config.modulesets_dir:
                possible_locations.append(
                    os.path.join(self.config.modulesets_dir, 'patches'))
                possible_locations.append(
                    os.path.join(self.config.modulesets_dir, '../patches'))
            if PKGDATADIR:
                possible_locations.append(os.path.join(PKGDATADIR, 'patches'))
            if SRCDIR:
                possible_locations.append(os.path.join(SRCDIR, 'patches'))
            for dirname in possible_locations:
                patchfile = os.path.join(dirname, patch)
                if os.path.exists(patchfile):
                    break
            else:
                raise CommandError(_('Failed to find patch: %s') % patch)
        buildscript.set_action(_('Applying patch'), self, action_target=patch)
        # patchfile can be a relative file
        buildscript.execute('patch -p%d < "%s"' % (patchstrip,
                                                   os.path.abspath(patchfile)),
                            cwd=self.raw_srcdir)
def load_false_positives(self, filename):
    """Load the module/check -> note mapping of known false positives.

    File format is one entry per line: "$(module)/$(checkname) [note]".
    Blank lines and lines starting with '#' are ignored; entries with no
    note are stored with '-'.  The checkname may be a column reference
    ("col:N", 1-based) or omitted entirely when only one check is
    configured.  *filename* may be an http:// URL, fetched through
    httpcache.
    """
    self.false_positives = {}
    if not filename:
        return
    if filename.startswith('http://'):
        # live.gnome.org wiki pages need ?action=raw to serve plain text
        if filename.startswith('http://live.gnome.org') and not filename.endswith('?action=raw'):
            filename += '?action=raw'
        try:
            filename = httpcache.load(filename, age=0)
        except Exception as e:
            logging.warning('could not download %s: %s' % (filename, e))
            return
    # file() is a Python 2-only builtin (removed in Python 3); open()
    # behaves identically and the with-block closes the file.
    with open(filename) as fp:
        for line in fp:
            line = line.strip()
            if not line:
                continue
            if line.startswith('#'):
                continue
            if ' ' in line:
                part, extra = line.split(' ', 1)
            else:
                part, extra = line, '-'
            if '/' in part:
                module_name, check = part.split('/')
                if check.startswith('col:'):
                    # "col:N" refers to the Nth configured check (1-based)
                    check = self.checks[int(check[4:]) - 1].__name__
            elif len(self.checks) == 1:
                module_name = part
                check = self.checks[0].__name__
            else:
                continue
            self.false_positives[(module_name, check)] = extra
def load_false_positives(self, filename):
    """Load the module/check -> note mapping of known false positives.

    File format is one entry per line: "$(module)/$(checkname) [note]".
    Blank lines and lines starting with '#' are ignored; entries with no
    note are stored with '-'.  The checkname may be a column reference
    ("col:N", 1-based) or omitted entirely when only one check is
    configured.  *filename* may be an http:// URL, fetched through
    httpcache.
    """
    self.false_positives = {}
    if not filename:
        return
    if filename.startswith('http://'):
        # live.gnome.org wiki pages need ?action=raw to serve plain text
        if filename.startswith('http://live.gnome.org') and not filename.endswith('?action=raw'):
            filename += '?action=raw'
        try:
            filename = httpcache.load(filename, age=0)
        except Exception as e:
            logging.warning('could not download %s: %s' % (filename, e))
            return
    # file() is a Python 2-only builtin (removed in Python 3); open()
    # behaves identically and the with-block closes the file.
    with open(filename) as fp:
        for line in fp:
            line = line.strip()
            if not line:
                continue
            if line.startswith('#'):
                continue
            if ' ' in line:
                part, extra = line.split(' ', 1)
            else:
                part, extra = line, '-'
            if '/' in part:
                module_name, check = part.split('/')
                if check.startswith('col:'):
                    # "col:N" refers to the Nth configured check (1-based)
                    check = self.checks[int(check[4:]) - 1].__name__
            elif len(self.checks) == 1:
                module_name = part
                check = self.checks[0].__name__
            else:
                continue
            self.false_positives[(module_name, check)] = extra
def install_mingw_w32(self, config, buildscript):
    """Download, extract and fix up the 32-bit MinGW-w64 toolchain.

    The archive referenced by MINGW_W32_i686_LINUX is fetched, unpacked
    under the 'w32' root next to config.prefix, and the library paths of
    the extracted tree are rewritten for the local layout.
    """
    buildscript.set_action(_("Downloading MinGW W32"), self)
    # load() is presumed to return the local path of the downloaded
    # archive (it is passed to os.path.basename and unpack_tar_file) --
    # TODO confirm against its definition
    path = load(MINGW_W32_i686_LINUX)
    mingw_root = self.mingw_root(os.path.join(config.prefix, '..'), 'w32')
    buildscript.set_action(_("Extracting %s") % os.path.basename(path), self)
    unpack_tar_file(path, mingw_root)
    buildscript.set_action(_("Fixing lib paths"), self)
    # rewrite paths recorded against the build sysroot so they point at
    # the freshly extracted i686-w64-mingw32 tree
    self.fix_lib_paths(buildscript, W32_i686_LINUX_SYSROOT,
                       os.path.join(mingw_root, 'i686-w64-mingw32', 'lib'),
                       '%s/i686-w64-mingw32/lib32' % mingw_root)
def _do_patches(self, buildscript):
    """Fetch network patches listed in self.patches.

    Patch names carrying a URL scheme are downloaded through httpcache;
    a download failure raises BuildStateError.
    """
    # now patch the working tree
    for (patch, patchstrip) in self.patches:
        patchfile = ''
        if urlparse.urlparse(patch)[0]:
            # patch name has scheme, get patch from network
            try:
                patchfile = httpcache.load(patch,
                                           nonetwork=buildscript.config.nonetwork)
            # "except X, e" is Python 2-only syntax; the "as" form is
            # valid on Python 2.6+ and Python 3.
            except urllib2.HTTPError as e:
                raise BuildStateError(
                    _('could not download patch (error: %s)') % e.code)
            except urllib2.URLError:
                # exception value was never used, so no binding needed
                raise BuildStateError(_('could not download patch'))
def load_false_positives(self, filename):
    """Reset self.false_positives and resolve *filename* for loading.

    http:// filenames are fetched through httpcache (live.gnome.org wiki
    pages get ?action=raw appended so plain text is served); a download
    failure is logged and the mapping is left empty.
    """
    self.false_positives = {}
    if not filename:
        return
    if filename.startswith('http://'):
        if filename.startswith('http://live.gnome.org') and not filename.endswith('?action=raw'):
            filename += '?action=raw'
        try:
            filename = httpcache.load(filename, age=0)
        # "except X, e" is Python 2-only syntax; use the portable "as" form
        except Exception as e:
            logging.warning('could not download %s: %s' % (filename, e))
            return
def load_false_positives(self, filename):
    """Reset self.false_positives and resolve *filename* for loading.

    http:// filenames are fetched through httpcache (live.gnome.org wiki
    pages get ?action=raw appended so plain text is served); a download
    failure is logged and the mapping is left empty.
    """
    self.false_positives = {}
    if not filename:
        return
    if filename.startswith('http://'):
        if filename.startswith('http://live.gnome.org') and not filename.endswith('?action=raw'):
            filename += '?action=raw'
        try:
            filename = httpcache.load(filename, age=0)
        # "except X, e" is Python 2-only syntax; use the portable "as" form
        except Exception as e:
            logging.warning('could not download %s: %s' % (filename, e))
            return
def _do_patches(self, buildscript):
    """Fetch network patches listed in self.patches.

    Patch names carrying a URL scheme are downloaded through httpcache;
    a download failure raises BuildStateError.
    """
    # now patch the working tree
    for (patch, patchstrip) in self.patches:
        patchfile = ''
        if urlparse.urlparse(patch)[0]:
            # patch name has scheme, get patch from network
            try:
                patchfile = httpcache.load(
                    patch, nonetwork=buildscript.config.nonetwork)
            # "except X, e" is Python 2-only syntax; the "as" form is
            # valid on Python 2.6+ and Python 3.
            except urllib2.HTTPError as e:
                raise BuildStateError(
                    _('could not download patch (error: %s)') % e.code)
            except urllib2.URLError:
                # exception value was never used, so no binding needed
                raise BuildStateError(_('could not download patch'))
def load_bugs(self, filename):
    """Reset self.bugs and resolve *filename* for loading.

    Bug file format:
        $(module)/$(checkname) $(bugnumber)
    e.g.: evolution/LibGnomeCanvas 571742

    The $(checkname) may instead be a column number ("col:N"), or be
    omitted when only a single check is configured.  http:// filenames
    are fetched through httpcache; a download failure is logged and the
    mapping is left empty.
    """
    self.bugs = {}
    if not filename:
        return
    if filename.startswith('http://'):
        # live.gnome.org wiki pages need ?action=raw to serve plain text
        if filename.startswith('http://live.gnome.org') and not filename.endswith('?action=raw'):
            filename += '?action=raw'
        try:
            filename = httpcache.load(filename, age=0)
        # "except X, e" is Python 2-only syntax; use the portable "as" form
        except Exception as e:
            logging.warning('could not download %s: %s' % (filename, e))
            return
def load_bugs(self, filename):
    """Reset self.bugs and resolve *filename* for loading.

    Bug file format:
        $(module)/$(checkname) $(bugnumber)
    e.g.: evolution/LibGnomeCanvas 571742

    The $(checkname) may instead be a column number ("col:N"), or be
    omitted when only a single check is configured.  http:// filenames
    are fetched through httpcache; a download failure is logged and the
    mapping is left empty.
    """
    self.bugs = {}
    if not filename:
        return
    if filename.startswith('http://'):
        # live.gnome.org wiki pages need ?action=raw to serve plain text
        if filename.startswith('http://live.gnome.org') and not filename.endswith('?action=raw'):
            filename += '?action=raw'
        try:
            filename = httpcache.load(filename, age=0)
        # "except X, e" is Python 2-only syntax; use the portable "as" form
        except Exception as e:
            logging.warning('could not download %s: %s' % (filename, e))
            return
def _parse_module_set(config, uri):
    """Fetch the moduleset document at *uri* through httpcache.

    Raises FatalError when the download fails.
    """
    try:
        filename = httpcache.load(uri, nonetwork=config.nonetwork, age=0)
    # "except X, e" is Python 2-only syntax; the "as" form works on 2.6+ and 3.x
    except Exception as e:
        raise FatalError(_('could not download %s: %s') % (uri, e))
class TarballBranch(Branch):
    """A class representing a Tarball."""

    def __init__(self, repository, module, version, checkoutdir,
                 source_size, source_hash, branch_id, source_subdir=None,
                 expect_standard_tarball=True):
        Branch.__init__(self, repository, module, checkoutdir)
        self.version = version                      # upstream version string
        self.source_size = source_size              # expected download size, or None
        self.source_hash = source_hash              # "algo:hexdigest", or None
        self.patches = []                           # list of (patch, patchstrip)
        self.quilt = None
        self.branch_id = branch_id
        self.source_subdir = source_subdir          # optional subdir inside the tarball
        self.expect_standard_tarball = expect_standard_tarball

    def _local_tarball(self):
        """Path under config.tarballdir where the tarball is stored."""
        basename = os.path.basename(self.module)
        if not basename:
            raise FatalError(
                _('URL has no filename component: %s') % self.module)
        localfile = os.path.join(self.config.tarballdir, basename)
        return localfile
    _local_tarball = property(_local_tarball)

    def raw_srcdir(self):
        """Directory the tarball unpacks to (ignoring source_subdir)."""
        if self.checkoutdir:
            return os.path.join(self.checkoutroot, self.checkoutdir)
        localdir = os.path.join(self.checkoutroot,
                                os.path.basename(self.module))
        # strip off packaging extension ...
        if localdir.endswith('.tar.gz'):
            localdir = localdir[:-7]
        elif localdir.endswith('.tar.bz2'):
            localdir = localdir[:-8]
        elif localdir.endswith('.tar.lzma'):
            localdir = localdir[:-9]
        elif localdir.endswith('.tar.xz'):
            localdir = localdir[:-7]
        elif localdir.endswith('.tgz'):
            localdir = localdir[:-4]
        elif localdir.endswith('.zip'):
            localdir = localdir[:-4]
        # a trailing '.src' (e.g. foo-1.0.src.tar.gz) is stripped as well
        if localdir.endswith('.src'):
            localdir = localdir[:-4]
        return localdir
    raw_srcdir = property(raw_srcdir)

    def srcdir(self):
        """Effective source directory, honouring source_subdir."""
        if self.source_subdir:
            return os.path.join(self.raw_srcdir, self.source_subdir)
        return self.raw_srcdir
    srcdir = property(srcdir)

    def branchname(self):
        return self.version
    branchname = property(branchname)

    def _check_tarball(self):
        """Check whether the tarball has been downloaded correctly."""
        localfile = self._local_tarball
        if not os.path.exists(localfile):
            raise BuildStateError(_('file not downloaded'))
        if self.source_size is not None:
            local_size = os.stat(localfile).st_size
            if local_size != self.source_size:
                raise BuildStateError(
                    _('downloaded file size is incorrect (expected %(size1)d, got %(size2)d)') % {
                        'size1': self.source_size, 'size2': local_size})
        if self.source_hash is not None:
            try:
                algo, hash = self.source_hash.split(':')
            except ValueError:
                logging.warning(
                    _('invalid hash attribute on module %s') % self.module)
                return
            if hasattr(hashlib, algo):
                local_hash = getattr(hashlib, algo)()
                # hash the file in 32 KiB chunks to bound memory use
                fp = open(localfile, 'rb')
                data = fp.read(32768)
                while data:
                    local_hash.update(data)
                    data = fp.read(32768)
                fp.close()
                if local_hash.hexdigest() != hash:
                    raise BuildStateError(
                        _('file hash is incorrect (expected %(sum1)s, got %(sum2)s)') % {
                            'sum1': hash, 'sum2': local_hash.hexdigest()})
            else:
                logging.warning(
                    _('skipped hash check (missing support for %s)') % algo)

    def _download_and_unpack(self, buildscript):
        """Ensure the tarball is downloaded, verified and unpacked."""
        localfile = self._local_tarball
        if not os.path.exists(self.config.tarballdir):
            try:
                os.makedirs(self.config.tarballdir)
            except OSError:
                raise FatalError(
                    _('tarball dir (%s) can not be created') % self.config.tarballdir)
        if not os.access(self.config.tarballdir, os.R_OK | os.W_OK | os.X_OK):
            raise FatalError(
                _('tarball dir (%s) must be writable') % self.config.tarballdir)
        try:
            self._check_tarball()
        except BuildStateError:
            # don't have the tarball, try downloading it and check again
            # the UNMANGLED_* variables restore the environment jhbuild
            # had before it adjusted PATH/LD_LIBRARY_PATH for the build
            if has_command('wget'):
                res = buildscript.execute(
                    ['wget', '--continue', self.module, '-O', localfile],
                    extra_env={
                        'LD_LIBRARY_PATH': os.environ.get('UNMANGLED_LD_LIBRARY_PATH'),
                        'PATH': os.environ.get('UNMANGLED_PATH')})
            elif has_command('curl'):
                res = buildscript.execute(
                    ['curl', '--continue-at', '-', '-L', self.module,
                     '-o', localfile],
                    extra_env={
                        'LD_LIBRARY_PATH': os.environ.get('UNMANGLED_LD_LIBRARY_PATH'),
                        'PATH': os.environ.get('UNMANGLED_PATH')})
            else:
                raise FatalError(_("unable to find wget or curl"))
            self._check_tarball()
        # now to unpack it.
        unpack_dir = self.checkoutroot
        if not os.path.exists(unpack_dir):
            os.makedirs(unpack_dir)
        # now to unpack it
        try:
            unpack_archive(buildscript, localfile, unpack_dir, self.checkoutdir)
        except CommandError:
            raise FatalError(_('failed to unpack %s') % localfile)
        if self.expect_standard_tarball and not os.path.exists(self.srcdir):
            raise BuildStateError(
                _('could not unpack tarball (expected %s dir)') %
                os.path.basename(self.srcdir))
        if self.patches:
            self._do_patches(buildscript)

    # NOTE(review): this method appears truncated in this copy of the
    # file -- it only handles locating a patch file, never applies one.
    def _do_patches(self, buildscript):
        # now patch the working tree
        for (patch, patchstrip) in self.patches:
            patchfile = ''
            if urlparse.urlparse(patch)[0]:
                # patch name has scheme, get patch from network
                try:
                    patchfile = httpcache.load(
                        patch, nonetwork=buildscript.config.nonetwork)
                except urllib2.HTTPError, e:
                    raise BuildStateError(
                        _('could not download patch (error: %s)') % e.code)
                except urllib2.URLError, e:
                    raise BuildStateError(_('could not download patch'))
            elif self.repository.moduleset_uri:
                # get it relative to the moduleset uri, either in the same
                # directory or a patches/ subdirectory
                for patch_prefix in ('.', 'patches', '../patches'):
                    uri = urlparse.urljoin(self.repository.moduleset_uri,
                                           os.path.join(patch_prefix, patch))
                    try:
                        patchfile = httpcache.load(
                            uri, nonetwork=buildscript.config.nonetwork)
                    except Exception, e:
                        # candidate location failed; try the next prefix
                        continue
                    if not os.path.isfile(patchfile):
                        continue
                    break
class cmd_goalreport(Command):
    """Generate an HTML report of GNOME module status against a set of goals."""
    doc = _('Report GNOME modules status wrt various goals')
    name = 'goalreport'
    checks = None          # list of check classes; populated from --check options
    page_intro = None      # optional HTML inserted below the page title
    title = 'GNOME Goal Report'

    def __init__(self):
        Command.__init__(self, [
            make_option('-o', '--output', metavar='FILE',
                        action='store', dest='output', default=None),
            make_option('--bugs-file', metavar='BUGFILE',
                        action='store', dest='bugfile', default=None),
            make_option('--false-positives-file', metavar='FILE',
                        action='store', dest='falsepositivesfile', default=None),
            make_option('--devhelp-dirname', metavar='DIR',
                        action='store', dest='devhelp_dirname', default=None),
            make_option('--cache', metavar='FILE',
                        action='store', dest='cache', default=None),
            make_option('--all-modules',
                        action='store_true', dest='list_all_modules', default=False),
            make_option('--check', metavar='CHECK',
                        action='append', dest='checks', default=[],
                        help=_('check to perform')),
        ])

    def load_checks_from_options(self, checks):
        """Instantiate check classes from "ClassName:arg1,arg2" option strings."""
        self.checks = []
        for check_option in checks:
            check_class_name, args = check_option.split(':', 1)
            args = args.split(',')
            # the class is looked up by name in this module's globals
            check_base_class = globals().get(check_class_name)
            check = check_base_class.create_from_args(*args)
            self.checks.append(check)

    def run(self, config, options, args, help=None):
        """Run every check on every selected module and emit the HTML report."""
        if options.output:
            output = StringIO()
            global curses
            if curses and config.progress_bar:
                try:
                    curses.setupterm()
                except:
                    # NOTE(review): bare except silently disables curses on
                    # any error, including KeyboardInterrupt
                    curses = None
        else:
            output = sys.stdout
        if not self.checks:
            self.load_checks_from_options(options.checks)
        self.load_bugs(options.bugfile)
        self.load_false_positives(options.falsepositivesfile)
        config.devhelp_dirname = options.devhelp_dirname
        config.partial_build = False
        module_set = jhbuild.moduleset.load(config)
        if options.list_all_modules:
            self.module_list = module_set.modules.values()
        else:
            self.module_list = module_set.get_module_list(
                args or config.modules, config.skip)
        results = {}
        try:
            cachedir = os.path.join(os.environ['XDG_CACHE_HOME'], 'jhbuild')
        except KeyError:
            cachedir = os.path.join(os.environ['HOME'], '.cache', 'jhbuild')
        if options.cache:
            # best effort: a missing/corrupt cache just means a cold start
            try:
                results = cPickle.load(
                    file(os.path.join(cachedir, options.cache)))
            except:
                pass
        # repeat the module name in a trailing column for wide tables
        self.repeat_row_header = 0
        if len(self.checks) > 4:
            self.repeat_row_header = 1
        for module_num, mod in enumerate(self.module_list):
            if mod.type in ('meta', 'tarball'):
                continue
            # NOTE(review): "not x in (...)" binds as "not (x in ...)", so
            # this skips modules whose repository is neither svn nor git,
            # unless they come from a gnome-external-deps moduleset
            if not mod.branch or not mod.branch.repository.__class__.__name__ in (
                    'SubversionRepository', 'GitRepository'):
                if not mod.moduleset_name.startswith('gnome-external-deps'):
                    continue
            if not os.path.exists(mod.branch.srcdir):
                continue
            tree_id = mod.branch.tree_id()
            # cached results are reused only while the tree is unchanged
            valid_cache = (tree_id and
                           results.get(mod.name, {}).get('tree-id') == tree_id)
            if not mod.name in results:
                results[mod.name] = {'results': {}}
            results[mod.name]['tree-id'] = tree_id
            r = results[mod.name]['results']
            for check in self.checks:
                if valid_cache and check.__name__ in r:
                    continue
                try:
                    c = check(config, mod)
                except ExcludedModuleException:
                    continue
                if output != sys.stdout and config.progress_bar:
                    progress_percent = 1.0 * (module_num - 1) / len(
                        self.module_list)
                    msg = '%s: %s' % (mod.name, check.__name__)
                    self.display_status_line(progress_percent, module_num, msg)
                try:
                    c.run()
                except CouldNotPerformCheckException:
                    continue
                except ExcludedModuleException:
                    continue
                try:
                    c.fix_false_positive(
                        self.false_positives.get((mod.name, check.__name__)))
                except ExcludedModuleException:
                    continue
                r[check.__name__] = [c.status, c.complexity, c.result_comment]
        if not os.path.exists(cachedir):
            os.makedirs(cachedir)
        if options.cache:
            cPickle.dump(results,
                         file(os.path.join(cachedir, options.cache), 'w'))
        print >> output, HTML_AT_TOP % {'title': self.title}
        if self.page_intro:
            print >> output, self.page_intro
        print >> output, '<table>'
        print >> output, '<thead>'
        print >> output, '<tr><td></td>'
        for check in self.checks:
            print >> output, '<th>%s</th>' % check.__name__
        print >> output, '<td></td></tr>'
        if [x for x in self.checks if x.header_note]:
            print >> output, '<tr><td></td>'
            for check in self.checks:
                print >> output, '<td>%s</td>' % (check.header_note or '')
            print >> output, '</tr>'
        print >> output, '</thead>'
        print >> output, '<tbody>'
        suites = []
        for module_key, module in module_set.modules.items():
            if not isinstance(module_set.get_module(module_key), MetaModule):
                continue
            if module_key.endswith('upcoming-deprecations'):
                # mark deprecated modules as processed, so they don't show in "Others"
                # NOTE(review): 'meta_key' is not defined in this scope
                # (probably meant 'module_key'), and 'processed_modules' is
                # only assigned after this loop -- this branch looks broken
                try:
                    metamodule = module_set.get_module(meta_key)
                except KeyError:
                    continue
                for module_name in metamodule.dependencies:
                    processed_modules[module_name] = True
            else:
                suites.append([module_key, module_key.replace('meta-', '')])
        processed_modules = {'gnome-common': True}
        not_other_module_names = []
        for suite_key, suite_label in suites:
            metamodule = module_set.get_module(suite_key)
            module_names = [x for x in metamodule.dependencies if x in results]
            if not module_names:
                continue
            print >> output, '<tr><td class="heading" colspan="%d">%s</td></tr>' % (
                1 + len(self.checks) + self.repeat_row_header, suite_label)
            for module_name in module_names:
                if module_name in not_other_module_names:
                    continue
                r = results[module_name].get('results')
                print >> output, self.get_mod_line(module_name, r)
                processed_modules[module_name] = True
            not_other_module_names.extend(module_names)
        external_deps = [x for x in results.keys() if \
                         x in [y.name for y in self.module_list] and \
                         not x in processed_modules and \
                         module_set.get_module(x).moduleset_name.startswith('gnome-external-deps')]
        if external_deps:
            print >> output, '<tr><td class="heading" colspan="%d">%s</td></tr>' % (
                1 + len(self.checks) + self.repeat_row_header,
                'External Dependencies')
            for module_name in sorted(external_deps):
                if not module_name in results:
                    continue
                r = results[module_name].get('results')
                try:
                    version = module_set.get_module(module_name).branch.version
                except:
                    version = None
                print >> output, self.get_mod_line(module_name, r,
                                                   version_number=version)
        other_module_names = [x for x in results.keys() if \
                              not x in processed_modules and not x in external_deps]
        if other_module_names:
            print >> output, '<tr><td class="heading" colspan="%d">%s</td></tr>' % (
                1 + len(self.checks) + self.repeat_row_header, 'Others')
            for module_name in sorted(other_module_names):
                if not module_name in results:
                    continue
                r = results[module_name].get('results')
                print >> output, self.get_mod_line(module_name, r)
        print >> output, '</tbody>'
        print >> output, '<tfoot>'
        print >> output, '<tr><td></td>'
        for check in self.checks:
            print >> output, '<th>%s</th>' % check.__name__
        print >> output, '<td></td></tr>'
        print >> output, self.get_stat_line(results, not_other_module_names)
        print >> output, '</tfoot>'
        print >> output, '</table>'
        if (options.bugfile and options.bugfile.startswith('http://')) or \
           (options.falsepositivesfile and
                options.falsepositivesfile.startswith('http://')):
            print >> output, '<div id="data">'
            print >> output, '<p>The following data sources are used:</p>'
            print >> output, '<ul>'
            if options.bugfile.startswith('http://'):
                print >> output, ' <li><a href="%s">Bugs</a></li>' % options.bugfile
            if options.falsepositivesfile.startswith('http://'):
                print >> output, ' <li><a href="%s">False positives</a></li>' % options.falsepositivesfile
            print >> output, '</ul>'
            print >> output, '</div>'
        print >> output, '<div id="footer">'
        print >> output, 'Generated:', time.strftime('%Y-%m-%d %H:%M:%S %z')
        print >> output, 'on ', socket.getfqdn()
        print >> output, '</div>'
        print >> output, '</body>'
        print >> output, '</html>'
        if output != sys.stdout:
            file(options.output, 'w').write(output.getvalue())
        if output != sys.stdout and config.progress_bar:
            sys.stdout.write('\n')
            sys.stdout.flush()

    def get_mod_line(self, module_name, r, version_number=None):
        """Return one <tr> of HTML for *module_name* from its results dict *r*."""
        s = []
        s.append('<tr>')
        if version_number:
            s.append('<th>%s (%s)</th>' % (module_name, version_number))
        else:
            s.append('<th>%s</th>' % module_name)
        for check in self.checks:
            ri = r.get(check.__name__)
            if not ri:
                classname = 'n-a'
                label = 'n/a'
                comment = ''
            else:
                # ri is [status, complexity, result_comment]
                classname = ri[0]
                if classname == 'todo':
                    classname += '-' + ri[1]
                    label = ri[1]
                else:
                    label = ri[0]
                comment = ri[2] or ''
                if label == 'ok':
                    label = ''
            s.append('<td class="%s" title="%s">' % (classname, comment))
            k = (module_name, check.__name__)
            if k in self.bugs:
                bug_classes = []
                if self.bug_status.get(self.bugs[k], {}).get('resolution'):
                    bug_classes.append('bug-closed')
                    # closed bug but the check is still not done: flag it
                    if label:
                        bug_classes.append('warn-bug-status')
                if label and self.bug_status.get(self.bugs[k], {}).get('patch'):
                    bug_classes.append('has-patch')
                bug_class = ''
                if bug_classes:
                    bug_class = ' class="%s"' % ' '.join(bug_classes)
                # numeric entries are bugzilla ids; anything else is a raw URL
                if self.bugs[k].isdigit():
                    s.append(
                        '<a href="http://bugzilla.gnome.org/show_bug.cgi?id=%s"%s>' % (
                            self.bugs[k], bug_class))
                else:
                    s.append('<a href="%s"%s>' % (self.bugs[k], bug_class))
            if label == '':
                label = 'done'
            s.append(label)
            if k in self.bugs:
                s.append('</a>')
            s.append('</td>')
        if self.repeat_row_header:
            s.append('<th>%s</th>' % module_name)
        s.append('</tr>')
        return '\n'.join(s)

    def get_stat_line(self, results, module_names):
        """Return the per-check statistics <tr> for the report footer."""
        s = []
        s.append('<tr>')
        s.append('<td>Stats<br/>(excluding "Others")</td>')
        for check in self.checks:
            s.append('<td>')
            # count todo modules broken down by complexity
            for complexity in ('low', 'average', 'complex'):
                nb_modules = len([x for x in module_names if \
                                  results[x].get('results') and
                                  results[x]['results'].get(check.__name__) and
                                  results[x]['results'][check.__name__][0] == 'todo' and
                                  results[x]['results'][check.__name__][1] == complexity])
                s.append('%s: %s' % (complexity, nb_modules))
                s.append('<br/>')
            # percentage of bug-tracked modules whose check now passes
            nb_with_bugs = 0
            nb_with_bugs_done = 0
            for module_name in module_names:
                k = (module_name, check.__name__)
                if not k in self.bugs or not check.__name__ in results[
                        module_name]['results']:
                    continue
                nb_with_bugs += 1
                if results[module_name]['results'][check.__name__][0] == 'ok':
                    nb_with_bugs_done += 1
            if nb_with_bugs:
                s.append('<br/>')
                s.append('fixed: %d%%' % (100. * nb_with_bugs_done / nb_with_bugs))
            s.append('</td>')
        s.append('<td></td>')
        s.append('</tr>')
        return '\n'.join(s)

    def load_bugs(self, filename):
        # Bug file format:
        # $(module)/$(checkname) $(bugnumber)
        # Sample bug file:
        # evolution/LibGnomeCanvas 571742
        #
        # Alternatively, the $(checkname) can be replaced by a column number,
        # like: evolution/col:2 543234
        #
        # also, if there is only a single check, the /$(checkname) part
        # can be skipped.
        self.bugs = {}
        if not filename:
            return
        if filename.startswith('http://'):
            # live.gnome.org wiki pages need ?action=raw to serve plain text
            if filename.startswith('http://live.gnome.org'
                    ) and not filename.endswith('?action=raw'):
                filename += '?action=raw'
            try:
                filename = httpcache.load(filename, age=0)
            except Exception, e:
                logging.warning('could not download %s: %s' % (filename, e))
                return
        for line in file(filename):
            line = line.strip()
            if not line:
                continue
            if line.startswith('#'):
                continue
            part, bugnumber = line.split()
            if '/' in part:
                module_name, check = part.split('/')
                if check.startswith('col:'):
                    # "col:N" refers to the Nth configured check (1-based)
                    check = self.checks[int(check[4:]) - 1].__name__
            elif len(self.checks) == 1:
                module_name = part
                check = self.checks[0].__name__
            else:
                continue
            self.bugs[(module_name, check)] = bugnumber
        self.bug_status = {}
        # fetch resolution/patch status of all numeric bug ids in one request
        bug_status = httpcache.load(
            'http://bugzilla.gnome.org/show_bug.cgi?%s&'
            'ctype=xml&field=bug_id&field=bug_status&field=emblems&'
            'field=resolution' % '&'.join(
                ['id=' + x for x in self.bugs.values() if x.isdigit()]),
            age=0)
        tree = ET.parse(bug_status)
        for bug in tree.findall('bug'):
            bug_id = bug.find('bug_id').text
            # a <resolution> element is only present on resolved bugs
            bug_resolved = (bug.find('resolution') is not None)
            # 'P' in <emblems> marks a bug carrying a patch
            bug_has_patch = (bug.find('emblems') is not None
                             and 'P' in bug.find('emblems').text)
            self.bug_status[bug_id] = {
                'resolution': bug_resolved,
                'patch': bug_has_patch,
            }
def _parse_module_set(config, uri):
    """Parse the moduleset XML document at *uri* into a ModuleSet.

    The document is fetched through httpcache (so *uri* may be local or
    remote; config.nonetwork suppresses downloads), parsed with
    xml.dom.minidom, and converted into repository objects and module
    definitions.  A <redirect> element restarts parsing at its href;
    <include> elements are parsed recursively and merged in.

    Raises FatalError when the document cannot be downloaded or parsed.
    Side effect: updates the module-global _default_repo when the
    moduleset marks a repository as default.
    """
    try:
        filename = httpcache.load(uri, nonetwork=config.nonetwork, age=0)
    except Exception as e:
        raise FatalError(_('could not download %s: %s') % (uri, e))
    filename = os.path.normpath(filename)
    try:
        document = xml.dom.minidom.parse(filename)
    except IOError as e:
        raise FatalError(_('failed to parse %s: %s') % (filename, e))
    except xml.parsers.expat.ExpatError as e:
        raise FatalError(_('failed to parse %s: %s') % (uri, e))

    assert document.documentElement.nodeName == 'moduleset'

    # A <redirect> points at the moduleset's new home; follow it instead
    # of parsing the rest of this document.
    for node in _child_elements_matching(document.documentElement, ['redirect']):
        new_url = node.getAttribute('href')
        logging.info('moduleset is now located at %s', new_url)
        return _parse_module_set(config, new_url)

    _handle_conditions(config, document.documentElement)

    moduleset = ModuleSet(config = config)
    moduleset_name = document.documentElement.getAttribute('name')
    if not moduleset_name:
        # fall back to the file name, minus any '.modules' suffix
        moduleset_name = os.path.basename(uri)
        if moduleset_name.endswith('.modules'):
            moduleset_name = moduleset_name[:-len('.modules')]

    # load up list of repositories
    repositories = {}
    default_repo = None
    for node in _child_elements_matching(
            document.documentElement,
            ['repository', 'cvsroot', 'svnroot', 'arch-archive']):
        name = node.getAttribute('name')
        if node.getAttribute('default') == 'yes':
            default_repo = name
        if node.nodeName == 'repository':
            repo_type = node.getAttribute('type')
            repo_class = get_repo_type(repo_type)
            # collect only the XML attributes this repository class
            # declares, mapping 'foo-bar' attributes to foo_bar kwargs
            kws = {}
            for attr in repo_class.init_xml_attrs:
                if node.hasAttribute(attr):
                    kws[attr.replace('-', '_')] = node.getAttribute(attr)
            if name in repositories:
                logging.warning(_('Duplicate repository:') + ' '+ name)
            repositories[name] = repo_class(config, name, **kws)
            repositories[name].moduleset_uri = uri
            mirrors = {}
            for mirror in _child_elements_matching(node, ['mirror']):
                mirror_type = mirror.getAttribute('type')
                mirror_class = get_repo_type(mirror_type)
                kws = {}
                for attr in mirror_class.init_xml_attrs:
                    if mirror.hasAttribute(attr):
                        kws[attr.replace('-','_')] = mirror.getAttribute(attr)
                mirrors[mirror_type] = mirror_class(config, name, **kws)
                # mirrors[mirror_type].moduleset_uri = uri
            setattr(repositories[name], "mirrors", mirrors)
        if node.nodeName == 'cvsroot':
            # legacy <cvsroot> element
            cvsroot = node.getAttribute('root')
            if node.hasAttribute('password'):
                password = node.getAttribute('password')
            else:
                password = None
            repo_type = get_repo_type('cvs')
            repositories[name] = repo_type(config, name,
                                           cvsroot=cvsroot,
                                           password=password)
        elif node.nodeName == 'svnroot':
            # legacy <svnroot> element
            svnroot = node.getAttribute('href')
            repo_type = get_repo_type('svn')
            repositories[name] = repo_type(config, name, href=svnroot)
        elif node.nodeName == 'arch-archive':
            # legacy <arch-archive> element
            archive_uri = node.getAttribute('href')
            repo_type = get_repo_type('arch')
            repositories[name] = repo_type(config, name,
                                           archive=name, href=archive_uri)

    # and now module definitions
    for node in _child_elements(document.documentElement):
        if node.nodeName == 'include':
            href = node.getAttribute('href')
            inc_uri = urlutils.urljoin(uri, href)
            try:
                inc_moduleset = _parse_module_set(config, inc_uri)
            except UndefinedRepositoryError:
                raise
            except FatalError as e:
                if inc_uri[0] == '/':
                    # absolute path: nothing else to try
                    raise e
                # look up in local modulesets
                inc_uri = os.path.join(os.path.dirname(__file__), '..',
                                       'modulesets', href)
                inc_moduleset = _parse_module_set(config, inc_uri)
            moduleset.modules.update(inc_moduleset.modules)
        elif node.nodeName in ['repository', 'cvsroot', 'svnroot',
                               'arch-archive']:
            # repositories were handled in the first pass above
            pass
        else:
            module = modtypes.parse_xml_node(node, config, uri,
                                             repositories, default_repo)
            if moduleset_name:
                module.tags.append(moduleset_name)
                module.moduleset_name = moduleset_name
            moduleset.add(module)

    # keep default repository around, used when creating automatic modules
    global _default_repo
    if default_repo:
        _default_repo = repositories[default_repo]

    return moduleset
def _parse_module_set(config, uri):
    """Parse the moduleset XML document at *uri* into a ModuleSet.

    The document is fetched through httpcache (so *uri* may be local or
    remote; config.nonetwork suppresses downloads), parsed with
    xml.dom.minidom, and converted into repository objects and module
    definitions.  A <redirect> element restarts parsing at its href;
    <include> elements are parsed recursively and merged in.

    Raises FatalError when the document cannot be downloaded or parsed.
    Side effect: updates the module-global _default_repo when the
    moduleset marks a repository as default.
    """
    try:
        filename = httpcache.load(uri, nonetwork=config.nonetwork, age=0)
    except Exception as e:
        raise FatalError(_('could not download %s: %s') % (uri, e))
    filename = os.path.normpath(filename)
    try:
        document = xml.dom.minidom.parse(filename)
    except IOError as e:
        raise FatalError(_('failed to parse %s: %s') % (filename, e))
    except xml.parsers.expat.ExpatError as e:
        raise FatalError(_('failed to parse %s: %s') % (uri, e))

    assert document.documentElement.nodeName == 'moduleset'

    # A <redirect> points at the moduleset's new home; follow it instead
    # of parsing the rest of this document.
    for node in _child_elements_matching(document.documentElement, ['redirect']):
        new_url = node.getAttribute('href')
        logging.info('moduleset is now located at %s', new_url)
        return _parse_module_set(config, new_url)

    _handle_conditions(config, document.documentElement)

    moduleset = ModuleSet(config = config)
    moduleset_name = document.documentElement.getAttribute('name')
    if not moduleset_name:
        # fall back to the file name, minus any '.modules' suffix
        moduleset_name = os.path.basename(uri)
        if moduleset_name.endswith('.modules'):
            moduleset_name = moduleset_name[:-len('.modules')]

    # load up list of repositories
    repositories = {}
    default_repo = None
    for node in _child_elements_matching(
            document.documentElement,
            ['repository', 'cvsroot', 'svnroot', 'arch-archive']):
        name = node.getAttribute('name')
        if node.getAttribute('default') == 'yes':
            default_repo = name
        if node.nodeName == 'repository':
            repo_type = node.getAttribute('type')
            repo_class = get_repo_type(repo_type)
            # collect only the XML attributes this repository class
            # declares, mapping 'foo-bar' attributes to foo_bar kwargs
            kws = {}
            for attr in repo_class.init_xml_attrs:
                if node.hasAttribute(attr):
                    kws[attr.replace('-', '_')] = node.getAttribute(attr)
            if name in repositories:
                logging.warning(_('Duplicate repository:') + ' '+ name)
            repositories[name] = repo_class(config, name, **kws)
            repositories[name].moduleset_uri = uri
            mirrors = {}
            for mirror in _child_elements_matching(node, ['mirror']):
                mirror_type = mirror.getAttribute('type')
                mirror_class = get_repo_type(mirror_type)
                kws = {}
                for attr in mirror_class.init_xml_attrs:
                    if mirror.hasAttribute(attr):
                        kws[attr.replace('-','_')] = mirror.getAttribute(attr)
                mirrors[mirror_type] = mirror_class(config, name, **kws)
                #mirrors[mirror_type].moduleset_uri = uri
            setattr(repositories[name], "mirrors", mirrors)
        if node.nodeName == 'cvsroot':
            # legacy <cvsroot> element
            cvsroot = node.getAttribute('root')
            if node.hasAttribute('password'):
                password = node.getAttribute('password')
            else:
                password = None
            repo_type = get_repo_type('cvs')
            repositories[name] = repo_type(config, name,
                                           cvsroot=cvsroot,
                                           password=password)
        elif node.nodeName == 'svnroot':
            # legacy <svnroot> element
            svnroot = node.getAttribute('href')
            repo_type = get_repo_type('svn')
            repositories[name] = repo_type(config, name, href=svnroot)
        elif node.nodeName == 'arch-archive':
            # legacy <arch-archive> element
            archive_uri = node.getAttribute('href')
            repo_type = get_repo_type('arch')
            repositories[name] = repo_type(config, name,
                                           archive=name, href=archive_uri)

    # and now module definitions
    for node in _child_elements(document.documentElement):
        if node.nodeName == 'include':
            href = node.getAttribute('href')
            # Fixed: use urlutils.urljoin, matching the other copy of
            # this function in the file; the old urlparse module is
            # Python-2 only and does not exist on Python 3.
            inc_uri = urlutils.urljoin(uri, href)
            try:
                inc_moduleset = _parse_module_set(config, inc_uri)
            except UndefinedRepositoryError:
                raise
            except FatalError as e:
                if inc_uri[0] == '/':
                    # absolute path: nothing else to try
                    raise e
                # look up in local modulesets
                inc_uri = os.path.join(os.path.dirname(__file__), '..',
                                       'modulesets', href)
                inc_moduleset = _parse_module_set(config, inc_uri)
            moduleset.modules.update(inc_moduleset.modules)
        elif node.nodeName in ['repository', 'cvsroot', 'svnroot',
                               'arch-archive']:
            # repositories were handled in the first pass above
            pass
        else:
            module = modtypes.parse_xml_node(node, config, uri,
                                             repositories, default_repo)
            if moduleset_name:
                module.tags.append(moduleset_name)
                module.moduleset_name = moduleset_name
            moduleset.add(module)

    # keep default repository around, used when creating automatic modules
    global _default_repo
    if default_repo:
        _default_repo = repositories[default_repo]

    return moduleset