Exemplo n.º 1
0
class Project(object):
    """Tracks the openQA state of a project's staging projects and posts
    'openqa:*' check results back to the OBS status_reports API.

    ``self.listener`` must be assigned by the owner before openQA jobs are
    fetched (it provides ``jobs_for_iso`` and ``test_url``).
    """

    def __init__(self, name):
        self.name = name
        Config(apiurl, name)
        self.api = StagingAPI(apiurl, name)
        # staging project name -> {'isos': [...], 'id': buildid}
        self.staging_projects = dict()
        self.listener = None
        self.logger = logging.getLogger(__name__)
        # s/old/new/ style mapping applied to published ISO names
        self.replace_string = self.api.attribute_value_load('OpenQAMapping')

    def init(self):
        """Load the initial state of all non-adi staging projects."""
        for p in self.api.get_staging_projects():
            if self.api.is_adi_project(p):
                continue
            self.staging_projects[p] = self.initial_staging_state(p)
            self.update_staging_status(p)

    def staging_letter(self, name):
        """Return the suffix after the last ':' (e.g. 'A' for 'X:Staging:A')."""
        return name.split(':')[-1]

    def map_iso(self, staging_project, iso):
        """Apply the project's s/old/new/ mapping to an ISO file name.

        '$LETTER' in the replacement is substituted with the staging letter.

        :raises Exception: if the mapping attribute is not of the form s/.../.../
        """
        parts = self.replace_string.split('/')
        if parts[0] != 's':
            raise Exception("{}'s iso_replace_string does not start with s/".format(self.name))
        old = parts[1]
        new = parts[2]
        new = new.replace('$LETTER', self.staging_letter(staging_project))
        return re.compile(old).sub(new, iso)

    def gather_isos(self, name, repository):
        """Return the (mapped) names of all published ISOs of a staging project."""
        url = self.api.makeurl(['published', name, repository, 'iso'])
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        ret = []
        for entry in root.findall('entry'):
            if entry.get('name').endswith('iso'):
                ret.append(self.map_iso(name, entry.get('name')))
        return ret

    def gather_buildid(self, name, repository):
        """Return the buildid of the published repository, or None if absent."""
        url = self.api.makeurl(['published', name, repository], {'view': 'status'})
        f = self.api.retried_GET(url)
        # named buildid (not 'id') to avoid shadowing the builtin
        buildid = ET.parse(f).getroot().find('buildid')
        if buildid is not None:
            return buildid.text

    def initial_staging_state(self, name):
        """Snapshot the current ISOs and buildid of a staging project."""
        return {'isos': self.gather_isos(name, 'images'),
                'id': self.gather_buildid(name, 'images')}

    def fetch_openqa_jobs(self, staging, iso):
        """Fetch the openQA jobs for *iso* and POST one check per job to OBS."""
        buildid = self.staging_projects[staging].get('id')
        if not buildid:
            self.logger.info("I don't know the build id of " + staging)
            return
        # all openQA jobs are created at the same URL
        url = self.api.makeurl(['status_reports', 'published', staging, 'images', 'reports', buildid])
        openqa = self.listener.jobs_for_iso(iso)
        # collect job infos to pick names
        openqa_infos = dict()
        for job in openqa:
            print(staging, iso, job['id'], job['state'], job['result'],
                  job['settings']['MACHINE'], job['settings']['TEST'])
            openqa_infos[job['id']] = {'url': self.listener.test_url(job)}
            openqa_infos[job['id']]['state'] = self.map_openqa_result(job)
            openqa_infos[job['id']]['name'] = job['settings']['TEST']
            openqa_infos[job['id']]['machine'] = job['settings']['MACHINE']

        # make sure the names are unique: on a collision both jobs get their
        # machine appended to the check name
        taken_names = dict()
        for job_id in openqa_infos:
            name = openqa_infos[job_id]['name']
            if name in taken_names:
                openqa_infos[job_id]['name'] = openqa_infos[job_id]['name'] + "@" + openqa_infos[job_id]['machine']
                # also disambiguate the job that first claimed this name
                # NOTE(review): with three or more jobs sharing a name the first
                # job's name gets a machine suffix appended repeatedly — confirm
                # that is intended
                job_id = taken_names[name]
                openqa_infos[job_id]['name'] = openqa_infos[job_id]['name'] + "@" + openqa_infos[job_id]['machine']
            taken_names[name] = job_id

        for info in openqa_infos.values():
            xml = self.openqa_check_xml(info['url'], info['state'], 'openqa:' + info['name'])
            try:
                http_POST(url, data=xml)
            except HTTPError:
                self.logger.error('failed to post status to ' + url)

    def update_staging_status(self, project):
        """Refresh the openQA checks for every known ISO of *project*."""
        for iso in self.staging_projects[project]['isos']:
            self.fetch_openqa_jobs(project, iso)

    def update_staging_buildid(self, project, repository, buildid):
        """Record a new buildid, re-gather the ISOs and refresh the checks."""
        self.staging_projects[project]['id'] = buildid
        self.staging_projects[project]['isos'] = self.gather_isos(project, repository)
        self.update_staging_status(project)

    def check_published_repo(self, project, repository, buildid):
        """React to a repo-publish event; only the 'images' repository matters."""
        if repository != 'images':
            return
        for p in self.staging_projects:
            if project == p:
                self.update_staging_buildid(project, repository, buildid)

    def matching_project(self, iso):
        """Return the staging project that owns *iso*, or None."""
        for p in self.staging_projects:
            if iso in self.staging_projects[p]['isos']:
                return p

    def map_openqa_result(self, job):
        """Map an openQA job result to an OBS check state."""
        if job['result'] in ['passed', 'softfailed']:
            return 'success'
        if job['result'] == 'none':
            return 'pending'
        return 'failure'

    def openqa_job_change(self, iso):
        """React to an openQA job event for *iso*."""
        staging = self.matching_project(iso)
        if not staging:
            return
        # we fetch all openqa jobs so we can avoid long job names
        self.fetch_openqa_jobs(staging, iso)

    def openqa_check_xml(self, url, state, name):
        """Build the <check> XML payload posted to status_reports."""
        check = ET.Element('check')
        se = ET.SubElement(check, 'url')
        se.text = url
        se = ET.SubElement(check, 'state')
        se.text = state
        se = ET.SubElement(check, 'name')
        se.text = name
        return ET.tostring(check)
Exemplo n.º 2
0
class ToTestBase(object):
    """Base class to store the basic interface"""

    def __init__(self, project, dryrun=False):
        """:param project: openSUSE flavor, e.g. 'Factory' (used as 'openSUSE:%s')
        :param dryrun: if True, mutating OBS calls are only logged, not executed
        """
        self.project = project
        self.dryrun = dryrun
        self.api = StagingAPI(osc.conf.config['apiurl'], project='openSUSE:%s' % project)
        self.known_failures = self.known_failures_from_dashboard(project)

    def openqa_group(self):
        """openQA job group to query; subclasses may override."""
        return self.project

    def iso_prefix(self):
        """Infix used in snapshot ISO names; subclasses may override."""
        return self.project

    def jobs_num(self):
        """Minimum number of openQA jobs expected once a snapshot is fully scheduled."""
        return 70

    def current_version(self):
        """The version that would be released next."""
        return self.release_version()

    def binaries_of_product(self, project, product):
        """Return the binary filenames built for *product*, or [] on HTTP error."""
        url = self.api.makeurl(['build', project, 'images', 'local', product])
        try:
            f = self.api.retried_GET(url)
        except urllib2.HTTPError:
            return []

        ret = []
        root = ET.parse(f).getroot()
        for binary in root.findall('binary'):
            ret.append(binary.get('filename'))

        return ret

    def get_current_snapshot(self):
        """Return the current snapshot in :ToTest"""

        # for now we hardcode all kind of things
        for binary in self.binaries_of_product('openSUSE:%s:ToTest' % self.project, '_product:openSUSE-cd-mini-%s' % self.arch()):
            result = re.match(r'openSUSE-%s-NET-.*-Snapshot(.*)-Media.iso' % self.iso_prefix(),
                              binary)
            if result:
                return result.group(1)

        return None

    def ftp_build_version(self, tree):
        """Extract the Build number from the ftp tree's Media1.report file name.

        Only the match against the *last* listed binary is kept.
        """
        # initialize so an empty binary list raises the Exception below
        # instead of a NameError
        result = None
        for binary in self.binaries_of_product('openSUSE:%s' % self.project, tree):
            result = re.match(r'openSUSE.*Build(.*)-Media1.report', binary)

        if result:
            return result.group(1)
        raise Exception("can't find %s version" % self.project)

    def release_version(self):
        """Return the version parsed from the -release source rpm name.

        :raises Exception: if no binary matches the expected rpm pattern
        """
        url = self.api.makeurl(['build', 'openSUSE:%s' % self.project, 'standard', self.arch(),
                                '_product:openSUSE-release'])
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        for binary in root.findall('binary'):
            binary = binary.get('filename', '')
            result = re.match(r'.*-([^-]*)-[^-]*.src.rpm', binary)
            if result:
                return result.group(1)

        raise Exception("can't find %s version" % self.project)

    def find_openqa_results(self, snapshot):
        """Return the openqa jobs of a given snapshot and filter out the
        cloned jobs

        """

        url = makeurl('https://openqa.opensuse.org', ['api', 'v1', 'jobs'], { 'group': self.openqa_group(), 'build': snapshot } )
        f = self.api.retried_GET(url)
        jobs = []
        for job in json.load(f)['jobs']:
            if job['clone_id'] or job['result'] == 'obsoleted':
                continue
            # strip the snapshot from the name so names are stable across builds
            job['name'] = job['name'].replace(snapshot, '')
            jobs.append(job)
        return jobs

    def _result2str(self, result):
        """Human-readable form of a QAResult constant."""
        if result == QA_INPROGRESS:
            return 'inprogress'
        elif result == QA_FAILED:
            return 'failed'
        else:
            return 'passed'

    def find_failed_module(self, testmodules):
        """Return the name of the first failed fatal/important module, or None.

        Non-blocking failed modules are only logged.
        """
        # print json.dumps(testmodules, sort_keys=True, indent=4)
        for module in testmodules:
            if module['result'] != 'failed':
                continue
            flags = module['flags']
            if 'fatal' in flags or 'important' in flags:
                return module['name']
            logger.info('%s %s %s'%(module['name'], module['result'], module['flags']))

    def overall_result(self, snapshot):
        """Analyze the openQA jobs of a given snapshot Returns a QAResult"""

        if snapshot is None:
            return QA_FAILED

        jobs = self.find_openqa_results(snapshot)

        if len(jobs) < self.jobs_num():  # not yet scheduled
            logger.warning('we have only %s jobs' % len(jobs))
            return QA_INPROGRESS

        number_of_fails = 0
        in_progress = False
        machines = []
        for job in jobs:
            machines.append(job['settings']['MACHINE'])
            # print json.dumps(job, sort_keys=True, indent=4)
            if job['result'] in ('failed', 'incomplete', 'skipped', 'user_cancelled', 'obsoleted'):
                jobname = job['name'] + '@' + job['settings']['MACHINE']
                # Record machines we have tests for
                if jobname in self.known_failures:
                    # known failures don't count against the snapshot;
                    # remove them so leftovers can be reported as passing
                    self.known_failures.remove(jobname)
                    continue
                number_of_fails += 1
                # print json.dumps(job, sort_keys=True, indent=4), jobname
                failedmodule = self.find_failed_module(job['modules'])
                url = 'https://openqa.opensuse.org/tests/%s' % job['id']
                print (jobname, url, failedmodule, job['retry_avbl'])
                # if number_of_fails < 3: continue
            elif job['result'] == 'passed' or job['result'] == 'softfailed':
                continue
            elif job['result'] == 'none':
                if job['state'] != 'cancelled':
                    in_progress = True
            else:
                raise Exception(job['result'])

        if number_of_fails > 0:
            return QA_FAILED

        if in_progress:
            return QA_INPROGRESS

        # known failures that did not show up as failed on a machine we
        # actually tested are now passing
        machines = list(set(machines))
        for item in machines:
            for item2 in self.known_failures:
                if item2.split('@')[1] == item:
                    logger.info('now passing %s'%item2)
        return QA_PASSED

    def all_repos_done(self, project, codes=None):
        """Check the build result of the project and only return True if all
        repos of that project are either published or unpublished

        """

        # coolo's experience says that 'finished' won't be
        # sufficient here, so don't try to add it :-)
        codes = ['published', 'unpublished'] if not codes else codes

        url = self.api.makeurl(['build', project, '_result'], {'code': 'failed'})
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        ready = True
        for repo in root.findall('result'):
            # ignore ports. 'factory' is used by arm for repos that are not
            # meant to use the totest manager.
            if repo.get('repository') in ('ports', 'factory', 'images_staging'):
                continue
            # ignore 32bit for now. We're only interesed in aarch64 here
            if repo.get('arch') in ('armv6l', 'armv7l'):
                continue
            if repo.get('dirty', '') == 'true':
                logger.info('%s %s %s -> %s'%(repo.get('project'), repo.get('repository'), repo.get('arch'), 'dirty'))
                ready = False
            if repo.get('code') not in codes:
                logger.info('%s %s %s -> %s'%(repo.get('project'), repo.get('repository'), repo.get('arch'), repo.get('code')))
                ready = False
        return ready

    def maxsize_for_package(self, package):
        """Return the maximum allowed ISO size (bytes) for *package*.

        None means unlimited; unknown packages raise.
        """
        if re.match(r'.*-mini-.*', package):
            return 737280000  # a CD needs to match

        if re.match(r'.*-dvd5-.*', package):
            return 4700372992  # a DVD needs to match

        if re.match(r'.*-image-livecd-x11.*', package):
            return 681574400  # not a full CD

        if re.match(r'.*-image-livecd.*', package):
            return 999999999  # a GB stick

        if re.match(r'.*-dvd9-dvd-.*', package):
            return 8539996159

        if package.startswith('_product:openSUSE-ftp-ftp-'):
            return None

        if package == '_product:openSUSE-Addon-NonOss-ftp-ftp-i586_x86_64':
            return None

        raise Exception('No maxsize for {}'.format(package))

    def package_ok(self, project, package, repository, arch):
        """Checks one package in a project and returns True if it's succeeded

        """

        query = {'package': package, 'repository': repository, 'arch': arch}

        url = self.api.makeurl(['build', project, '_result'], query)
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        for repo in root.findall('result'):
            status = repo.find('status')
            if status.get('code') != 'succeeded':
                logger.info('%s %s %s %s -> %s'%(project, package, repository, arch, status.get('code')))
                return False

        maxsize = self.maxsize_for_package(package)
        if not maxsize:
            return True

        # additionally enforce the media size limit on the produced ISOs
        url = self.api.makeurl(['build', project, repository, arch, package])
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        for binary in root.findall('binary'):
            if not binary.get('filename', '').endswith('.iso'):
                continue
            isosize = int(binary.get('size', 0))
            if isosize > maxsize:
                logger.error('%s %s %s %s: %s'%(project, package, repository, arch, 'too large by %s bytes' % (isosize-maxsize)))
                return False

        return True

    def factory_snapshottable(self):
        """Check various conditions required for factory to be snapshotable

        """

        if not self.all_repos_done('openSUSE:%s' % self.project):
            return False

        for product in self.ftp_products + self.main_products:
            if not self.package_ok('openSUSE:%s' % self.project, product, 'images', 'local'):
                return False

            # NOTE(review): the live-CD check is nested inside the product
            # loop and so runs once per product — confirm that is intended
            if len(self.livecd_products):

                if not self.all_repos_done('openSUSE:%s:Live' % self.project):
                    return False

                for arch in ['i586', 'x86_64' ]:
                    for product in self.livecd_products:
                        if not self.package_ok('openSUSE:%s:Live' % self.project, product, 'standard', arch):
                            return False

        return True

    def release_package(self, project, package, set_release=None):
        """Trigger an OBS 'release' of *package* (or just log it in dryrun mode)."""
        query = {'cmd': 'release'}

        if set_release:
            query['setrelease'] = set_release

        # FIXME: make configurable. openSUSE:Factory:ARM currently has multiple
        # repos with release targets, so obs needs to know which one to release
        if project == 'openSUSE:Factory:ARM':
            query['repository'] = 'images'

        baseurl = ['source', project, package]

        url = self.api.makeurl(baseurl, query=query)
        if self.dryrun:
            logger.info("release %s/%s (%s)"%(project, package, set_release))
        else:
            self.api.retried_POST(url)

    def update_totest(self, snapshot):
        """Release all products into :ToTest with publishing disabled."""
        logger.info('Updating snapshot %s' % snapshot)
        if not self.dryrun:
            self.api.switch_flag_in_prj('openSUSE:%s:ToTest' % self.project, flag='publish', state='disable')

        for product in self.ftp_products:
            self.release_package('openSUSE:%s' % self.project, product)

        for cd in self.livecd_products:
            self.release_package('openSUSE:%s:Live' % self.project, cd, set_release='Snapshot%s' % snapshot)

        for cd in self.main_products:
            self.release_package('openSUSE:%s' % self.project, cd, set_release='Snapshot%s' % snapshot)

    def publish_factory_totest(self):
        """Re-enable publishing of :ToTest."""
        logger.info('Publish ToTest')
        if not self.dryrun:
            self.api.switch_flag_in_prj('openSUSE:%s:ToTest' % self.project, flag='publish', state='enable')

    def totest_is_publishing(self):
        """Find out if the publishing flag is set in totest's _meta"""

        url = self.api.makeurl(['source', 'openSUSE:%s:ToTest' % self.project, '_meta'])
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        publish = root.find('publish')
        # no <publish> section, or an empty one, means the default: enabled
        # (explicit emptiness check instead of deprecated Element truthiness)
        if publish is None or len(publish) == 0:
            return True

        for flag in publish:
            # repo/arch-specific flags don't decide the project default
            if flag.get('repository', None) or flag.get('arch', None):
                continue
            if flag.tag == 'enable':
                return True
        return False

    def totest(self):
        """Main state machine: decide whether to publish and/or release a snapshot."""
        current_snapshot = self.get_current_snapshot()
        new_snapshot = self.current_version()

        current_result = self.overall_result(current_snapshot)
        current_qa_version = self.api.load_file_content("%s:Staging" % self.api.project, "dashboard", "version_totest")

        logger.info('current_snapshot %s: %s'%(current_snapshot, self._result2str(current_result)))
        logger.debug('new_snapshot %s', new_snapshot)
        logger.debug('current_qa_version %s', current_qa_version)

        snapshotable = self.factory_snapshottable()
        logger.debug("snapshotable: %s", snapshotable)
        can_release = (current_result != QA_INPROGRESS and snapshotable)

        # not overwriting
        if new_snapshot == current_snapshot:
            logger.debug("no change in snapshot version")
            can_release = False
        elif not self.all_repos_done('openSUSE:%s:ToTest' % self.project):
            logger.debug("not all repos done, can't release")
            # the repos have to be done, otherwise we better not touch them with a new release
            can_release = False

        can_publish = (current_result == QA_PASSED)

        # already published
        if self.totest_is_publishing():
            logger.debug("totest already publishing")
            can_publish = False

        if can_publish:
            if current_qa_version == current_snapshot:
                self.publish_factory_totest()
                self.write_version_to_dashboard("snapshot", current_snapshot)
                can_release = False  # we have to wait
            else:
                # We reached a very bad status: openQA testing is 'done', but not of the same version
                # currently in :ToTest. This can happen when 'releasing' the product failed
                raise Exception("Publishing stopped: tested version (%s) does not match :ToTest version (%s)"
                    % (current_qa_version, current_snapshot))

        if can_release:
            self.update_totest(new_snapshot)
            self.write_version_to_dashboard("totest", new_snapshot)

    def release(self):
        """Unconditionally release the current version into :ToTest."""
        new_snapshot = self.current_version()
        self.update_totest(new_snapshot)

    def known_failures_from_dashboard(self, project):
        """Fetch the known-failures list from the staging dashboard.

        NOTE(review): the *project* argument is ignored; the value is always
        derived from self.project — confirm that is intended.
        """
        known_failures = []
        if self.project == "Factory:PowerPC":
            project = "Factory"
        else:
            project = self.project

        url = self.api.makeurl(['source', 'openSUSE:%s:Staging' % project, 'dashboard', 'known_failures'])
        f = self.api.retried_GET(url)
        for line in f:
            # '#' starts a comment line
            if not line[0] == '#':
                known_failures.append(line.strip())
        return known_failures

    def write_version_to_dashboard(self, target, version):
        """Store *version* in the dashboard file 'version_<target>' (unless dryrun)."""
        if not self.dryrun:
            url = self.api.makeurl(['source', 'openSUSE:%s:Staging' % self.project, 'dashboard', 'version_%s' % target])
            osc.core.http_PUT(url + '?comment=Update+version', data=version)
Exemplo n.º 3
0
class StagingWorkflow(object):
    """Sets up and tears down projects, users, groups and requests in a local
    OBS test instance so the staging tooling can be exercised against it."""

    def __init__(self, project=PROJECT):
        """
        Initialize the configuration
        """
        THIS_DIR = os.path.dirname(os.path.abspath(__file__))
        oscrc = os.path.join(THIS_DIR, 'test.oscrc')

        # set to None so we return the destructor early in case of exceptions
        self.api = None
        self.apiurl = APIURL
        self.project = project
        self.projects = {}
        self.requests = []
        self.groups = []
        self.users = []
        logging.basicConfig()

        # clear cache from other tests - otherwise the VCR is replayed depending
        # on test order, which can be harmful
        memoize_session_reset()

        osc.core.conf.get_config(override_conffile=oscrc,
                                 override_no_keyring=True,
                                 override_no_gnome_keyring=True)
        os.environ['OSC_CONFIG'] = oscrc

        if os.environ.get('OSC_DEBUG'):
            osc.core.conf.config['debug'] = 1

        CacheManager.test = True
        # disable caching, the TTLs break any reproduciblity
        Cache.CACHE_DIR = None
        Cache.PATTERNS = {}
        Cache.init()
        self.setup_remote_config()
        self.load_config()
        self.api = StagingAPI(APIURL, project)

    def load_config(self, project=None):
        """(Re)load the remote config for *project* (default: the target project)."""
        if project is None:
            project = self.project
        self.config = Config(APIURL, project)

    def create_attribute_type(self, namespace, name, values=None):
        """Create an attribute namespace and an attribute definition in it.

        :param values: optional value count for the definition
        """
        meta = """
        <namespace name='{}'>
            <modifiable_by user='******'/>
        </namespace>""".format(namespace)
        url = osc.core.makeurl(APIURL, ['attribute', namespace, '_meta'])
        osc.core.http_PUT(url, data=meta)

        meta = "<definition name='{}' namespace='{}'><description/>".format(
            name, namespace)
        if values:
            meta += "<count>{}</count>".format(values)
        meta += "<modifiable_by role='maintainer'/></definition>"
        url = osc.core.makeurl(APIURL, ['attribute', namespace, name, '_meta'])
        osc.core.http_PUT(url, data=meta)

    def setup_remote_config(self):
        """Create the target project and seed its OSRT:Config attribute."""
        self.create_target()
        self.create_attribute_type('OSRT', 'Config', 1)

        config = {
            'overridden-by-local': 'remote-nope',
            'staging-group': 'factory-staging',
            'remote-only': 'remote-indeed',
        }
        self.remote_config_set(config, replace_all=True)

    def remote_config_set(self, config, replace_all=False):
        """Write *config* (a dict) to the remote OSRT:Config attribute.

        Unless *replace_all*, existing remote keys are kept and merged.
        """
        if not replace_all:
            config_existing = Config.get(self.apiurl, self.project)
            config_existing.update(config)
            config = config_existing

        config_lines = []
        for key, value in config.items():
            config_lines.append(f'{key} = {value}')

        attribute_value_save(APIURL, self.project, 'Config',
                             '\n'.join(config_lines))

    def create_group(self, name, users=None):
        """Create (or update) a group, optionally with member users."""
        # avoid a mutable default argument
        if users is None:
            users = []

        meta = """
        <group>
          <title>{}</title>
        </group>
        """.format(name)

        if len(users):
            root = ET.fromstring(meta)
            persons = ET.SubElement(root, 'person')
            for user in users:
                ET.SubElement(persons, 'person', {'userid': user})
            meta = ET.tostring(root)

        # remember the group so remove() can delete it later
        if name not in self.groups:
            self.groups.append(name)
        url = osc.core.makeurl(APIURL, ['group', name])
        osc.core.http_PUT(url, data=meta)

    def create_user(self, name):
        """Create a confirmed user (idempotent) along with its home project stub."""
        if name in self.users:
            return
        meta = """
        <person>
          <login>{}</login>
          <email>{}@example.com</email>
          <state>confirmed</state>
        </person>
        """.format(name, name)
        self.users.append(name)
        url = osc.core.makeurl(APIURL, ['person', name])
        osc.core.http_PUT(url, data=meta)
        url = osc.core.makeurl(APIURL, ['person', name],
                               {'cmd': 'change_password'})
        osc.core.http_POST(url, data='opensuse')
        home_project = 'home:' + name
        self.projects[home_project] = Project(home_project, create=False)

    def create_target(self):
        """Create the target project, staging bot/group and staging workflow."""
        if self.projects.get('target'):
            return
        self.create_user('staging-bot')
        self.create_group('factory-staging', users=['staging-bot'])
        p = Project(name=self.project,
                    reviewer={'groups': ['factory-staging']})
        self.projects['target'] = p
        self.projects[self.project] = p

        url = osc.core.makeurl(APIURL, ['staging', self.project, 'workflow'])
        data = "<workflow managers='factory-staging'/>"
        osc.core.http_POST(url, data=data)
        # creates A and B as well
        self.projects['staging:A'] = Project(self.project + ':Staging:A',
                                             create=False)
        self.projects['staging:B'] = Project(self.project + ':Staging:B',
                                             create=False)

    def setup_rings(self):
        """Create the ring projects and link a 'wine' package into ring 1."""
        self.create_target()
        self.projects['ring0'] = Project(name=self.project +
                                         ':Rings:0-Bootstrap')
        self.projects['ring1'] = Project(name=self.project +
                                         ':Rings:1-MinimalX')
        target_wine = Package(name='wine', project=self.projects['target'])
        target_wine.create_commit()
        self.create_link(target_wine, self.projects['ring1'])

    def create_package(self, project, package):
        """Create *package* in *project* (creating the project if needed)."""
        project = self.create_project(project)
        return Package(name=package, project=project)

    def create_link(self, source_package, target_project, target_package=None):
        """Create a _link from *source_package* into *target_project*.

        :returns: the target Package object
        """
        if not target_package:
            target_package = source_package.name
        target_package = Package(name=target_package, project=target_project)
        url = self.api.makeurl(
            ['source', target_project.name, target_package.name, '_link'])
        osc.core.http_PUT(url,
                          data='<link project="{}" package="{}"/>'.format(
                              source_package.project.name,
                              source_package.name))
        return target_package

    def create_project(self,
                       name,
                       reviewer=None,
                       maintainer=None,
                       project_links=None):
        """Create a project (idempotent); *name* may already be a Project."""
        # avoid mutable default arguments
        if reviewer is None:
            reviewer = {}
        if maintainer is None:
            maintainer = {}
        if project_links is None:
            project_links = []
        if isinstance(name, Project):
            return name
        if name in self.projects:
            return self.projects[name]
        self.projects[name] = Project(name,
                                      reviewer=reviewer,
                                      maintainer=maintainer,
                                      project_links=project_links)
        return self.projects[name]

    def submit_package(self, package=None, project=None):
        """Create a submit request for *package* against *project* (default target)."""
        if not project:
            project = self.project
        request = Request(source_package=package, target_project=project)
        self.requests.append(request)
        return request

    def request_package_delete(self, package, project=None):
        """Create a delete request for *package* (default: its own project)."""
        if not project:
            project = package.project
        request = Request(target_package=package,
                          target_project=project,
                          type='delete')
        self.requests.append(request)
        return request

    def create_submit_request(self, project, package, text=None):
        """Create project+package, commit *text* and submit it to the target."""
        project = self.create_project(project)
        package = Package(name=package, project=project)
        package.create_commit(text=text)
        return self.submit_package(package)

    def create_staging(self,
                       suffix,
                       freeze=False,
                       rings=None,
                       with_repo=False):
        """Create (or reattach) a staging project ':Staging:<suffix>'.

        :param freeze: run the freeze command on the new staging
        :param rings: 0 or 1 to link in the corresponding ring projects
        """
        staging_key = 'staging:{}'.format(suffix)
        # do not reattach if already present
        if staging_key not in self.projects:
            staging_name = self.project + ':Staging:' + suffix
            staging = Project(staging_name, create=False, with_repo=with_repo)
            url = osc.core.makeurl(
                APIURL, ['staging', self.project, 'staging_projects'])
            data = '<workflow><staging_project>{}</staging_project></workflow>'
            osc.core.http_POST(url, data=data.format(staging_name))
            self.projects[staging_key] = staging
        else:
            staging = self.projects[staging_key]

        project_links = []
        if rings == 0:
            project_links.append(self.project + ":Rings:0-Bootstrap")
        # ring 0 stagings also link ring 1
        if rings == 1 or rings == 0:
            project_links.append(self.project + ":Rings:1-MinimalX")
        staging.update_meta(project_links=project_links,
                            maintainer={'groups': ['factory-staging']},
                            with_repo=with_repo)

        if freeze:
            FreezeCommand(self.api).perform(staging.name)

        return staging

    def __del__(self):
        if not self.api:
            return
        try:
            self.remove()
        except Exception:
            # normally exceptions in destructors are ignored but a info
            # message is displayed. Make this a little more useful by
            # printing it into the capture log
            traceback.print_exc(None, sys.stdout)

    def remove(self):
        """Delete everything this workflow created: projects, requests, groups."""
        print('deleting staging workflow')
        for project in self.projects.values():
            project.remove()
        for request in self.requests:
            request.revoke()
        for group in self.groups:
            url = osc.core.makeurl(APIURL, ['group', group])
            try:
                osc.core.http_DELETE(url)
            except HTTPError:
                pass
        print('done')
        if hasattr(self.api, '_invalidate_all'):
            self.api._invalidate_all()
Exemplo n.º 4
0
class StagingWorkflow(ABC):
    """This abstract base class is intended to setup and manipulate the environment (projects,
    users, etc.) in the local OBS instance used to test the release tools. Thus, the derivative
    classes make it easy to setup scenarios similar to the ones used during the real (open)SUSE
    development.
    """
    def __init__(self, project=PROJECT):
        """Initializes the configuration

        Note this constructor calls :func:`create_target`, which implies several projects and users
        are created right away.

        :param project: default target project
        :type project: str
        """
        THIS_DIR = os.path.dirname(os.path.abspath(__file__))
        oscrc = os.path.join(THIS_DIR, 'test.oscrc')

        # set to None so we return the destructor early in case of exceptions
        self.api = None
        self.apiurl = APIURL
        self.project = project
        # bookkeeping of everything created, so remove() can undo it
        self.projects = {}
        self.requests = []
        self.groups = []
        self.users = []
        self.attr_types = {}
        logging.basicConfig()

        # clear cache from other tests - otherwise the VCR is replayed depending
        # on test order, which can be harmful
        memoize_session_reset()

        osc.core.conf.get_config(override_conffile=oscrc,
                                 override_no_keyring=True,
                                 override_no_gnome_keyring=True)
        os.environ['OSC_CONFIG'] = oscrc

        if os.environ.get('OSC_DEBUG'):
            osc.core.conf.config['debug'] = 1

        CacheManager.test = True
        # disable caching, the TTLs break any reproduciblity
        Cache.CACHE_DIR = None
        Cache.PATTERNS = {}
        Cache.init()
        # Note this implicitly calls create_target()
        self.setup_remote_config()
        self.load_config()
        self.api = StagingAPI(APIURL, project)

    @abstractmethod
    def initial_config(self):
        """Values to use to initialize the 'Config' attribute at :func:`setup_remote_config`"""
        pass

    @abstractmethod
    def staging_group_name(self):
        """Name of the group in charge of the staging workflow"""
        pass

    def load_config(self, project=None):
        """Loads the corresponding :class:`osclib.Config` object into the attribute ``config``

        Such an object represents the set of values stored on the attribute 'Config' of the
        target project. See :func:`remote_config_set`.

        :param project: target project name, None means the default target project
        :type project: str
        """

        if project is None:
            project = self.project
        self.config = Config(APIURL, project)

    def create_attribute_type(self, namespace, name, values=None):
        """Creates a new attribute type in the OBS instance.

        :param namespace: attribute namespace, e.g. 'OSRT'
        :type namespace: str
        :param name: name of the attribute type within the namespace
        :type name: str
        :param values: if given, number of values the attribute accepts (the <count> element)
        :type values: int or None
        """
        # record what was created so remove_attribute_types() can clean it up
        self.attr_types.setdefault(namespace, [])
        if name not in self.attr_types[namespace]:
            self.attr_types[namespace].append(name)

        # NOTE(review): the user name in this meta looks scrubbed ('******') —
        # confirm the expected value against the test fixture it came from
        meta = """
        <namespace name='{}'>
            <modifiable_by user='******'/>
        </namespace>""".format(namespace)
        url = osc.core.makeurl(APIURL, ['attribute', namespace, '_meta'])
        osc.core.http_PUT(url, data=meta)

        meta = "<definition name='{}' namespace='{}'><description/>".format(
            name, namespace)
        if values:
            meta += "<count>{}</count>".format(values)
        meta += "<modifiable_by role='maintainer'/></definition>"
        url = osc.core.makeurl(APIURL, ['attribute', namespace, name, '_meta'])
        osc.core.http_PUT(url, data=meta)

    def setup_remote_config(self):
        """Creates the attribute 'Config' for the target project, with proper initial content.

        See :func:`remote_config_set` for more information about that attribute.

        Note this calls :func:`create_target` to ensure the target project exists.
        """
        # First ensure the existence of both the target project and the 'Config' attribute type
        self.create_target()
        self.create_attribute_type('OSRT', 'Config', 1)

        self.remote_config_set(self.initial_config(), replace_all=True)

    def remote_config_set(self, config, replace_all=False):
        """Sets the values of the 'Config' attribute for the target project.

        That attribute stores a set of values that are useful to influence the behavior of several
        tools and bots in the context of the given project. For convenience, such a collection of
        values is usually accessed using a :class:`osclib.Config` object. See :func:`load_config`.

        :param config: values to write into the attribute
        :type config: dict[str, str]
        :param replace_all: whether the previous content of 'Config' should be cleared up
        :type replace_all: bool
        """

        if not replace_all:
            # merge the new values on top of whatever is already stored remotely
            config_existing = Config.get(self.apiurl, self.project)
            config_existing.update(config)
            config = config_existing

        config_lines = [f'{key} = {value}' for key, value in config.items()]

        attribute_value_save(APIURL, self.project, 'Config',
                             '\n'.join(config_lines))

    def create_group(self, name, users=None):
        """Creates a group and assigns users to it.

        If the group already exists then it just updates its users.

        :param name: name of group
        :type name: str
        :param users: list of users to be in group (None means no members)
        :type users: list(str) or None
        """
        # avoid a mutable default argument; None is equivalent to the empty list
        users = users or []
        meta = """
        <group>
          <title>{}</title>
        </group>
        """.format(name)

        if users:
            root = ET.fromstring(meta)
            persons = ET.SubElement(root, 'person')
            for user in users:
                ET.SubElement(persons, 'person', {'userid': user})
            meta = ET.tostring(root)

        if name not in self.groups:
            self.groups.append(name)
        url = osc.core.makeurl(APIURL, ['group', name])
        osc.core.http_PUT(url, data=meta)

    def create_user(self, name):
        """Creates a user and their home project.

        Do nothing if the user already exists.
        Password is always "opensuse".

        The home project is not really created in the OBS instance, but :func:`Project.update_meta`
        can be used to create it.

        :param name: name of the user
        :type name: str
        """
        if name in self.users:
            return
        meta = """
        <person>
          <login>{}</login>
          <email>{}@example.com</email>
          <state>confirmed</state>
        </person>
        """.format(name, name)
        self.users.append(name)
        url = osc.core.makeurl(APIURL, ['person', name])
        osc.core.http_PUT(url, data=meta)
        url = osc.core.makeurl(APIURL, ['person', name],
                               {'cmd': 'change_password'})
        osc.core.http_POST(url, data='opensuse')
        # register a handle for the (not yet created) home project
        home_project = 'home:' + name
        self.projects[home_project] = Project(home_project, create=False)

    def create_target(self):
        """Creates the main project that represents the product being developed and, as such, is
        expected to be the target for requests. It also creates all the associated projects, users
        and groups involved in the development workflow.

        In the base implementation, that includes:

            - The target project (see :func:`create_target_project`)
            - A group of staging managers including the "staging-bot" user
              (see :func:`create_staging_users`)
            - A couple of staging projects for the target one
            - The ProductVersion attribute type, that is used by the staging tools

        After the execution, the target project is indexed in the projects dictionary twice,
        by its name and as 'target'.
        """
        # idempotent: skip everything if the target was already created
        if self.projects.get('target'): return

        self.create_target_project()
        self.create_staging_users()

        self.projects['staging:A'] = Project(self.project + ':Staging:A',
                                             create=False)
        self.projects['staging:B'] = Project(self.project + ':Staging:B',
                                             create=False)

        # The ProductVersion is required for some actions, like accepting a staging project
        self.create_attribute_type('OSRT', 'ProductVersion', 1)

    def create_package(self, project, package):
        """Creates a package in the given project (creating the project if needed).

        :param project: project to hold the package; Project instance or name
        :param package: package name
        :return: the created package handle
        :rtype: Package
        """
        project = self.create_project(project)
        return Package(name=package, project=project)

    def create_link(self, source_package, target_project, target_package=None):
        """Creates a source link from a new package in target_project to source_package.

        :param source_package: package the link points to
        :type source_package: Package
        :param target_project: project that will hold the link package
        :param target_package: name for the link package, defaults to the source package name
        :return: the package holding the _link
        :rtype: Package
        """
        if not target_package:
            target_package = source_package.name
        target_package = Package(name=target_package, project=target_project)
        url = self.api.makeurl(
            ['source', target_project.name, target_package.name, '_link'])
        osc.core.http_PUT(url,
                          data='<link project="{}" package="{}"/>'.format(
                              source_package.project.name,
                              source_package.name))
        return target_package

    def create_project(self,
                       name,
                       reviewer=None,
                       maintainer=None,
                       project_links=None):
        """Creates project if it does not already exist.

        For params see the constructor of :class:`Project`

        :return: the project instance representing the given project
        :rtype: Project
        """
        if isinstance(name, Project):
            return name
        if name in self.projects:
            return self.projects[name]
        # None defaults replace the previous mutable default arguments;
        # normalize them to the same empty containers before passing on
        self.projects[name] = Project(name,
                                      reviewer=reviewer or {},
                                      maintainer=maintainer or {},
                                      project_links=project_links or [])
        return self.projects[name]

    def submit_package(self, package, project=None):
        """Creates submit request from package to target project.

        Both have to exist (Use :func:`create_submit_request` otherwise).

        :param package: package to submit
        :type package: Package
        :param project: project where to send submit request, None means use the default.
        :type project: Project or str or None
        :return: created request.
        :rtype: Request
        """
        if not project:
            project = self.project
        request = Request(source_package=package, target_project=project)
        self.requests.append(request)
        return request

    def request_package_delete(self, package, project=None):
        """Creates a delete request for the given package.

        :param package: package to be deleted
        :type package: Package
        :param project: target project, defaults to the package's own project
        :return: created request.
        :rtype: Request
        """
        if not project:
            project = package.project
        request = Request(target_package=package,
                          target_project=project,
                          type='delete')
        self.requests.append(request)
        return request

    def create_submit_request(self,
                              project,
                              package,
                              text=None,
                              add_commit=True):
        """Creates submit request from package in specified project to default project.

        It creates project if not exist and also package.
        Package is commited with optional text.
        Note different parameters than submit_package.

        :param project: project where package will live
        :type project: Project or str
        :param package: package name to create
        :type package: str
        :param text: commit message for initial package creation
        :type text: str
        :param add_commit: whether add initial package commit. Useful to disable
               if package already exists
        :type add_commit: bool
        :return: created request.
        :rtype: Request
        """
        project = self.create_project(project)
        package = Package(name=package, project=project)
        if add_commit:
            package.create_commit(text=text)
        return self.submit_package(package)

    def __del__(self):
        """Best-effort teardown when the workflow object is garbage collected."""
        # self.api stays None until the constructor finishes, so a partially
        # built instance skips cleanup
        if not self.api:
            return
        try:
            self.remove()
        except Exception:
            # normally exceptions in destructors are ignored but an info
            # message is displayed. Make this a little more useful by
            # printing it into the capture log.
            # Catch Exception (not bare except) so SystemExit/KeyboardInterrupt
            # still propagate.
            traceback.print_exc(None, sys.stdout)

    def remove(self):
        """Deletes every project, request, group and attribute type created by this workflow."""
        print('deleting staging workflow')

        for project in self.projects.values():
            project.remove()
        for request in self.requests:
            request.revoke()
        for group in self.groups:
            self.remove_group(group)
        for namespace in self.attr_types:
            self.remove_attribute_types(namespace)

        print('done')

        # drop any cached state held by the API wrapper, if it supports that
        if hasattr(self.api, '_invalidate_all'):
            self.api._invalidate_all()

    def remove_group(self, group):
        """Removes a group from the OBS instance

        :param group: name of the group to remove
        :type group: str
        """
        print('deleting group', group)
        url = osc.core.makeurl(APIURL, ['group', group])
        self._safe_delete(url)

    def remove_attribute_types(self, namespace):
        """Removes an attributes namespace and all the attribute types it contains

        :param namespace: attributes namespace to remove
        :type namespace: str
        """
        for name in self.attr_types[namespace]:
            print('deleting attribute type {}:{}'.format(namespace, name))
            url = osc.core.makeurl(APIURL,
                                   ['attribute', namespace, name, '_meta'])
            self._safe_delete(url)
        print('deleting namespace', namespace)
        url = osc.core.makeurl(APIURL, ['attribute', namespace, '_meta'])
        self._safe_delete(url)

    def _safe_delete(self, url):
        """Performs a delete request to the OBS instance, ignoring possible http errors

        :param url: url to use for the http delete request
        :type url: str
        """
        try:
            osc.core.http_DELETE(url)
        except HTTPError:
            pass

    def create_target_project(self):
        """Creates the main target project (see :func:`create_target`)"""
        p = Project(name=self.project)
        # index the target project both by name and under the 'target' alias
        self.projects['target'] = p
        self.projects[self.project] = p

    def create_staging_users(self):
        """Creates users and groups for the staging workflow for the target project
        (see :func:`create_target`)
        """
        group = self.staging_group_name()

        self.create_user('staging-bot')
        self.create_group(group, users=['staging-bot'])
        self.projects['target'].add_reviewers(groups=[group])

        # register the group as manager of the staging workflow in OBS
        url = osc.core.makeurl(APIURL, ['staging', self.project, 'workflow'])
        data = f"<workflow managers='{group}'/>"
        osc.core.http_POST(url, data=data)
# Exemplo n.º 5
# 0
class ToTestBase(object):

    """Base class to store the basic interface"""

    # defaults; subclasses may override these class attributes
    product_repo = 'images'
    product_arch = 'local'
    livecd_repo = 'images'
    livecd_archs = ['i586', 'x86_64']

    def __init__(self, project, dryrun=False, api_url=None, openqa_server='https://openqa.opensuse.org', test_subproject=None):
        """Set up API clients and load the list of issues to ignore.

        :param project: OBS project to manage (e.g. 'openSUSE:Factory')
        :param dryrun: if True, mutating operations are only logged
        :param api_url: OBS API url; defaults to the configured osc apiurl
        :param openqa_server: openQA instance to query
        :param test_subproject: name of the test subproject, defaults to 'ToTest'
        """
        self.project = project
        self.dryrun = dryrun
        if not api_url:
            api_url = osc.conf.config['apiurl']
        self.api = StagingAPI(api_url, project=project)
        self.openqa_server = openqa_server
        if not test_subproject:
            test_subproject = 'ToTest'
        self.test_project = '%s:%s' % (self.project, test_subproject)
        self.openqa = OpenQA_Client(server=openqa_server)
        # issues listed in the per-project issue file are not counted as failures
        self.issues_to_ignore = []
        self.issuefile = "{}_{}".format(self.project, ISSUE_FILE)
        if os.path.isfile(self.issuefile):
            with open(self.issuefile, 'r') as f:
                for line in f:
                    self.issues_to_ignore.append(line.strip())
        self.project_base = project.split(':')[0]
        self.update_pinned_descr = False

    def openqa_group(self):
        """openQA job group name; subclasses may override."""
        return self.project

    def iso_prefix(self):
        """Prefix of the iso file names; subclasses may override."""
        return self.project

    def jobs_num(self):
        """Minimum number of openQA jobs expected for a fully scheduled snapshot."""
        return 70

    def current_version(self):
        """Version currently being built, taken from the release package."""
        return self.release_version()

    def binaries_of_product(self, project, product):
        """Return the list of binary file names of the given product package.

        Returns an empty list if the build result cannot be fetched.
        """
        url = self.api.makeurl(['build', project, self.product_repo, self.product_arch, product])
        try:
            f = self.api.retried_GET(url)
        except urllib2.HTTPError:
            return []

        ret = []
        root = ET.parse(f).getroot()
        for binary in root.findall('binary'):
            ret.append(binary.get('filename'))

        return ret

    def get_current_snapshot(self):
        """Return the current snapshot in the test project"""

        for binary in self.binaries_of_product(self.test_project, '_product:%s-cd-mini-%s' % (self.project_base, self.arch())):
            result = re.match(r'%s-%s-NET-.*-Snapshot(.*)-Media.iso' % (self.project_base, self.iso_prefix()),
                              binary)
            if result:
                return result.group(1)

        return None

    def ftp_build_version(self, project, tree, base=None):
        """Extract the build number from the ftp tree's Media1.report file name.

        :raises NotFoundException: if no matching report binary exists
        """
        if not base:
            base = self.project_base
        for binary in self.binaries_of_product(project, tree):
            result = re.match(r'%s.*Build(.*)-Media1.report' % base, binary)
            if result:
                return result.group(1)
        raise NotFoundException("can't find %s ftp version" % project)

    def iso_build_version(self, project, tree, base=None):
        """Extract the build number from an iso file name of the given tree.

        :raises NotFoundException: if no matching iso exists
        """
        if not base:
            base = self.project_base
        for binary in self.binaries_of_product(project, tree):
            result = re.match(r'%s.*Build(.*)-Media(.*).iso' % base, binary)
            if result:
                return result.group(1)
        raise NotFoundException("can't find %s iso version" % project)

    def release_version(self):
        """Version of the release package in the project's standard repo.

        :raises NotFoundException: if no release src.rpm is found
        """
        url = self.api.makeurl(['build', self.project, 'standard', self.arch(),
                                '_product:%s-release' % self.project_base])
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        for binary in root.findall('binary'):
            binary = binary.get('filename', '')
            result = re.match(r'.*-([^-]*)-[^-]*.src.rpm', binary)
            if result:
                return result.group(1)

        raise NotFoundException("can't find %s version" % self.project)

    def current_qa_version(self):
        """Version currently under QA, as recorded on the dashboard."""
        return self.api.dashboard_content_load('version_totest')

    def find_openqa_results(self, snapshot):
        """Return the openqa jobs of a given snapshot and filter out the
        cloned jobs

        """

        url = makeurl(self.openqa_server,
                      ['api', 'v1', 'jobs'], {'group': self.openqa_group(), 'build': snapshot, 'latest': 1})
        f = self.api.retried_GET(url)
        jobs = []
        for job in json.load(f)['jobs']:
            if job['clone_id'] or job['result'] == 'obsoleted':
                continue
            # strip the snapshot from the name so jobs are comparable across builds
            job['name'] = job['name'].replace(snapshot, '')
            jobs.append(job)
        return jobs

    def _result2str(self, result):
        """Human readable form of a QA_* result constant."""
        if result == QA_INPROGRESS:
            return 'inprogress'
        elif result == QA_FAILED:
            return 'failed'
        else:
            return 'passed'

    def find_failed_module(self, testmodules):
        """Return the name of the first failed module flagged 'fatal' or 'important'.

        Other failed modules are only logged; returns None if no flagged failure exists.
        """
        for module in testmodules:
            if module['result'] != 'failed':
                continue
            flags = module['flags']
            if 'fatal' in flags or 'important' in flags:
                # the unreachable 'break' that used to follow this return was removed
                return module['name']
            logger.info('%s %s %s' %
                        (module['name'], module['result'], module['flags']))

    def update_openqa_status_message(self):
        """Update (or create) the pinned status comment in the openQA job group."""
        url = makeurl(self.openqa_server,
                      ['api', 'v1', 'job_groups'])
        f = self.api.retried_GET(url)
        job_groups = json.load(f)
        group_id = 0
        for jg in job_groups:
            if jg['name'] == self.openqa_group():
                group_id = jg['id']
                break

        if not group_id:
            logger.debug('No openQA group id found for status comment update, ignoring')
            return

        pinned_ignored_issue = 0
        issues = ' , '.join(self.issues_to_ignore)
        status_flag = 'publishing' if self.status_for_openqa['is_publishing'] else \
            'preparing' if self.status_for_openqa['can_release'] else \
                'testing' if self.status_for_openqa['snapshotable'] else \
                'building'
        status_msg = "tag:{}:{}:{}".format(self.status_for_openqa['new_snapshot'], status_flag, status_flag)
        msg = "pinned-description: Ignored issues\r\n\r\n{}\r\n\r\n{}".format(issues, status_msg)
        data = {'text': msg}

        # look for an existing pinned comment by the 'ttm' user to update in place
        url = makeurl(self.openqa_server,
                      ['api', 'v1', 'groups', str(group_id), 'comments'])
        f = self.api.retried_GET(url)
        comments = json.load(f)
        for comment in comments:
            if comment['userName'] == 'ttm' and \
                    comment['text'].startswith('pinned-description: Ignored issues'):
                pinned_ignored_issue = comment['id']

        logger.debug('Writing openQA status message: {}'.format(data))
        if not self.dryrun:
            if pinned_ignored_issue:
                self.openqa.openqa_request(
                    'PUT', 'groups/%s/comments/%d' % (group_id, pinned_ignored_issue), data=data)
            else:
                self.openqa.openqa_request(
                    'POST', 'groups/%s/comments' % group_id, data=data)

    def overall_result(self, snapshot):
        """Analyze the openQA jobs of a given snapshot Returns a QAResult"""

        if snapshot is None:
            return QA_FAILED

        jobs = self.find_openqa_results(snapshot)

        if len(jobs) < self.jobs_num():  # not yet scheduled
            logger.warning('we have only %s jobs' % len(jobs))
            return QA_INPROGRESS

        number_of_fails = 0
        in_progress = False
        for job in jobs:
            if job['result'] in ('failed', 'incomplete', 'skipped', 'user_cancelled', 'obsoleted', 'parallel_failed'):
                jobname = job['name']
                # collect bug references and special 'ttm' markers from the job comments
                url = makeurl(self.openqa_server,
                              ['api', 'v1', 'jobs', str(job['id']), 'comments'])
                f = self.api.retried_GET(url)
                comments = json.load(f)
                refs = set()
                labeled = 0
                to_ignore = False
                for comment in comments:
                    for ref in comment['bugrefs']:
                        refs.add(str(ref))
                    if comment['userName'] == 'ttm' and comment['text'] == 'label:unknown_failure':
                        labeled = comment['id']
                    if re.search(r'@ttm:? ignore', comment['text']):
                        to_ignore = True
                # a failure only counts as ignored when every referenced issue is on the ignore list
                ignored = len(refs) > 0
                for ref in refs:
                    if ref not in self.issues_to_ignore:
                        if to_ignore:
                            # '@ttm ignore' in a comment adds the issue to the ignore file
                            self.issues_to_ignore.append(ref)
                            self.update_pinned_descr = True
                            with open(self.issuefile, 'a') as f:
                                f.write("%s\n" % ref)
                        else:
                            ignored = False

                if not ignored:
                    number_of_fails += 1
                    if not labeled and len(refs) > 0 and not self.dryrun:
                        data = {'text': 'label:unknown_failure'}
                        self.openqa.openqa_request(
                            'POST', 'jobs/%s/comments' % job['id'], data=data)
                elif labeled:
                    # remove flag - unfortunately can't delete comment unless admin
                    data = {'text': 'Ignored issue'}
                    self.openqa.openqa_request(
                        'PUT', 'jobs/%s/comments/%d' % (job['id'], labeled), data=data)

                if ignored:
                    logger.info("job %s failed, but was ignored", jobname)
                else:
                    joburl = '%s/tests/%s' % (self.openqa_server, job['id'])
                    logger.info("job %s failed, see %s", jobname, joburl)

            elif job['result'] == 'passed' or job['result'] == 'softfailed':
                continue
            elif job['result'] == 'none':
                if job['state'] != 'cancelled':
                    in_progress = True
            else:
                raise Exception(job['result'])

        if number_of_fails > 0:
            return QA_FAILED

        if in_progress:
            return QA_INPROGRESS

        return QA_PASSED

    def all_repos_done(self, project, codes=None):
        """Check the build result of the project and only return True if all
        repos of that project are either published or unpublished

        """

        # coolo's experience says that 'finished' won't be
        # sufficient here, so don't try to add it :-)
        codes = ['published', 'unpublished'] if not codes else codes

        url = self.api.makeurl(
            ['build', project, '_result'], {'code': 'failed'})
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        ready = True
        for repo in root.findall('result'):
            # ignore ports. 'factory' is used by arm for repos that are not
            # meant to use the totest manager.
            if repo.get('repository') in ('ports', 'factory', 'images_staging'):
                continue
            # ignore 32bit for now. We're only interesed in aarch64 here
            if repo.get('arch') in ('armv6l', 'armv7l'):
                continue
            if repo.get('dirty', '') == 'true':
                logger.info('%s %s %s -> %s' % (repo.get('project'),
                                                repo.get('repository'), repo.get('arch'), 'dirty'))
                ready = False
            if repo.get('code') not in codes:
                logger.info('%s %s %s -> %s' % (repo.get('project'),
                                                repo.get('repository'), repo.get('arch'), repo.get('code')))
                ready = False
        return ready

    def maxsize_for_package(self, package):
        """Maximum allowed iso size in bytes for the given package name.

        Returns None for packages without a size limit.

        :raises Exception: if the package name matches no known medium type
        """
        if re.match(r'.*-mini-.*', package):
            return 737280000  # a CD needs to match

        if re.match(r'.*-dvd5-.*', package):
            return 4700372992  # a DVD needs to match

        if re.match(r'livecd-x11', package):
            return 681574400  # not a full CD

        if re.match(r'livecd-.*', package):
            return 999999999  # a GB stick

        if re.match(r'.*-(dvd9-dvd|cd-DVD)-.*', package):
            return 8539996159

        if re.match(r'.*-ftp-(ftp|POOL)-', package):
            return None

        if ':%s-Addon-NonOss-ftp-ftp' % self.base in package:
            return None

        raise Exception('No maxsize for {}'.format(package))

    def package_ok(self, project, package, repository, arch):
        """Checks one package in a project and returns True if it's succeeded

        Also verifies that no produced iso exceeds the package's size limit.
        """

        query = {'package': package, 'repository': repository, 'arch': arch}

        url = self.api.makeurl(['build', project, '_result'], query)
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        for repo in root.findall('result'):
            status = repo.find('status')
            if status.get('code') != 'succeeded':
                logger.info(
                    '%s %s %s %s -> %s' % (project, package, repository, arch, status.get('code')))
                return False

        maxsize = self.maxsize_for_package(package)
        if not maxsize:
            return True

        url = self.api.makeurl(['build', project, repository, arch, package])
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        for binary in root.findall('binary'):
            if not binary.get('filename', '').endswith('.iso'):
                continue
            isosize = int(binary.get('size', 0))
            if isosize > maxsize:
                logger.error('%s %s %s %s: %s' % (
                    project, package, repository, arch, 'too large by %s bytes' % (isosize - maxsize)))
                return False

        return True

    def is_snapshottable(self):
        """Check various conditions required for factory to be snapshotable

        """

        if not self.all_repos_done(self.project):
            return False

        for product in self.ftp_products + self.main_products:
            if not self.package_ok(self.project, product, self.product_repo, self.product_arch):
                return False

        # NOTE: this block used to be nested inside the loop above, which re-checked the
        # live CDs once per product (and shadowed the 'product' loop variable); checking
        # them once yields the same overall verdict
        if len(self.livecd_products):

            if not self.all_repos_done('%s:Live' % self.project):
                return False

            for arch in self.livecd_archs:
                for livecd in self.livecd_products:
                    if not self.package_ok('%s:Live' % self.project, livecd, self.livecd_repo, arch):
                        return False

        return True

    def _release_package(self, project, package, set_release=None):
        """Trigger a 'release' of a package, optionally with a forced release string."""
        query = {'cmd': 'release'}

        if set_release:
            query['setrelease'] = set_release

        # FIXME: make configurable. openSUSE:Factory:ARM currently has multiple
        # repos with release targets, so obs needs to know which one to release
        if project == 'openSUSE:Factory:ARM':
            query['repository'] = 'images'

        baseurl = ['source', project, package]

        url = self.api.makeurl(baseurl, query=query)
        if self.dryrun:
            logger.info("release %s/%s (%s)" % (project, package, set_release))
        else:
            self.api.retried_POST(url)

    def _release(self, set_release=None):
        """Release all ftp, live CD and main products."""
        for product in self.ftp_products:
            self._release_package(self.project, product)

        for cd in self.livecd_products:
            self._release_package('%s:Live' %
                                  self.project, cd, set_release=set_release)

        for cd in self.main_products:
            self._release_package(self.project, cd, set_release=set_release)

    def update_totest(self, snapshot=None):
        """Push the given snapshot to the test project (publishing disabled first)."""
        release = 'Snapshot%s' % snapshot if snapshot else None
        logger.info('Updating snapshot %s' % snapshot)
        if not self.dryrun:
            self.api.switch_flag_in_prj(self.test_project, flag='publish', state='disable')

        self._release(set_release=release)

    def publish_factory_totest(self):
        """Enable publishing of the test project."""
        logger.info('Publish test project content')
        if not self.dryrun:
            self.api.switch_flag_in_prj(
                self.test_project, flag='publish', state='enable')

    def totest_is_publishing(self):
        """Find out if the publishing flag is set in totest's _meta"""

        url = self.api.makeurl(
            ['source', self.test_project, '_meta'])
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        # NOTE(review): this relies on Element truthiness - a <publish/> element with no
        # children is treated the same as a missing one (publishing defaults to true);
        # confirm this is intended before changing it to 'is None'
        if not root.find('publish'):  # default true
            return True

        for flag in root.find('publish'):
            # repository/arch-specific flags don't decide the project-wide default
            if flag.get('repository', None) or flag.get('arch', None):
                continue
            if flag.tag == 'enable':
                return True
        return False

    def totest(self):
        """Main state machine: decide whether to release a new snapshot and/or publish."""
        try:
            current_snapshot = self.get_current_snapshot()
        except NotFoundException as e:
            # nothing in test project (yet)
            logger.warning(e)
            current_snapshot = None
        new_snapshot = self.current_version()
        self.update_pinned_descr = False
        current_result = self.overall_result(current_snapshot)
        current_qa_version = self.current_qa_version()

        logger.info('current_snapshot %s: %s' %
                    (current_snapshot, self._result2str(current_result)))
        logger.debug('new_snapshot %s', new_snapshot)
        logger.debug('current_qa_version %s', current_qa_version)

        snapshotable = self.is_snapshottable()
        logger.debug("snapshotable: %s", snapshotable)
        can_release = ((current_snapshot is None or current_result != QA_INPROGRESS) and snapshotable)

        # not overwriting
        if new_snapshot == current_snapshot:
            logger.debug("no change in snapshot version")
            can_release = False
        elif not self.all_repos_done(self.test_project):
            logger.debug("not all repos done, can't release")
            # the repos have to be done, otherwise we better not touch them
            # with a new release
            can_release = False

        can_publish = (current_result == QA_PASSED)

        # already published
        totest_is_publishing = self.totest_is_publishing()
        if totest_is_publishing:
            logger.debug("totest already publishing")
            can_publish = False

        if self.update_pinned_descr:
            self.status_for_openqa = {
                'current_snapshot': current_snapshot,
                'new_snapshot': new_snapshot,
                'snapshotable': snapshotable,
                'can_release': can_release,
                'is_publishing': totest_is_publishing,
            }
            self.update_openqa_status_message()

        if can_publish:
            if current_qa_version == current_snapshot:
                self.publish_factory_totest()
                self.write_version_to_dashboard("snapshot", current_snapshot)
                can_release = False  # we have to wait
            else:
                # We reached a very bad status: openQA testing is 'done', but not of the same version
                # currently in test project. This can happen when 'releasing' the
                # product failed
                raise Exception("Publishing stopped: tested version (%s) does not match version in test project (%s)"
                                % (current_qa_version, current_snapshot))

        if can_release:
            self.update_totest(new_snapshot)
            self.write_version_to_dashboard("totest", new_snapshot)

    def release(self):
        """Unconditionally release the currently built version to the test project."""
        new_snapshot = self.current_version()
        self.update_totest(new_snapshot)

    def write_version_to_dashboard(self, target, version):
        """Record the given version under 'version_<target>' on the dashboard."""
        if not self.dryrun:
            self.api.dashboard_content_ensure('version_%s' % target, version, comment='Update version')
# Exemplo n.º 6
# 0
class ToTestManager(ToolBase.ToolBase):
    """Shared base for the ToTest workflow tools.

    Wraps a StagingAPI connection for one project and provides the helpers
    (version bookkeeping, status attribute, release commands, repo state)
    used by the releaser/publisher subcommands.
    """

    def __init__(self, tool):
        ToolBase.ToolBase.__init__(self)
        # copy attributes
        self.logger = logging.getLogger(__name__)
        self.apiurl = tool.apiurl
        self.debug = tool.debug
        self.caching = tool.caching
        self.dryrun = tool.dryrun

    def setup(self, project):
        # Bind this manager to a concrete project: ToTest config object plus
        # a StagingAPI connection scoped to that project.
        self.project = ToTest(project, self.apiurl)
        self.api = StagingAPI(self.apiurl, project=project)

    def version_file(self, target):
        # Name of the pseudometa file that records the version of *target*.
        return 'version_%s' % target

    def write_version_to_dashboard(self, target, version):
        # Persist *version* for *target* unless dry-running or releasing is
        # disabled for this project.
        if self.dryrun or self.project.do_not_release:
            return
        self.api.pseudometa_file_ensure(self.version_file(target), version,
                                        comment='Update version')

    def current_qa_version(self):
        # Version last handed to openQA (the 'totest' version file).
        return self.api.pseudometa_file_load(self.version_file('totest'))

    def iso_build_version(self, project, tree, repo=None, arch=None):
        """Extract the Build/Snapshot number from a product binary filename.

        Matches iso, docker tarball and raw image names; raises
        NotFoundException when no binary of *tree* matches.
        """
        for binary in self.binaries_of_product(project, tree, repo=repo, arch=arch):
            result = re.match(r'.*-(?:Build|Snapshot)([0-9.]+)(?:-Media.*\.iso|\.docker\.tar\.xz|\.raw\.xz)', binary)
            if result:
                return result.group(1)
        raise NotFoundException("can't find %s iso version" % project)

    def version_from_totest_project(self):
        # Prefer the main product; fall back to the first image product
        # (using its first architecture) when no main products are configured.
        if len(self.project.main_products):
            return self.iso_build_version(self.project.test_project, self.project.main_products[0])

        return self.iso_build_version(self.project.test_project, self.project.image_products[0].package,
                                      arch=self.project.image_products[0].archs[0])

    def binaries_of_product(self, project, product, repo=None, arch=None):
        """Return the binary filenames built for *product*.

        Returns [] when the build result cannot be fetched (HTTP error),
        so callers can treat a missing product as 'no binaries'.
        """
        if repo is None:
            repo = self.project.product_repo
        if arch is None:
            arch = self.project.product_arch

        url = self.api.makeurl(['build', project, repo, arch, product])
        try:
            f = self.api.retried_GET(url)
        except HTTPError:
            return []

        ret = []
        root = ET.parse(f).getroot()
        for binary in root.findall('binary'):
            ret.append(binary.get('filename'))

        return ret

    def ftp_build_version(self, project, tree):
        # Build number parsed from the FTP tree's Media1 report filename.
        for binary in self.binaries_of_product(project, tree):
            result = re.match(r'.*-Build(.*)-Media1.report', binary)
            if result:
                return result.group(1)
        raise NotFoundException("can't find %s ftp version" % project)

    # make sure to update the attribute as atomic as possible - as such
    # only update the snapshot and don't erase anything else. The snapshots
    # have very different update times within the pipeline, so there is
    # normally no chance that releaser and publisher overwrite states
    def update_status(self, status, snapshot):
        status_dict = self.get_status_dict()
        if self.dryrun:
            self.logger.info('setting {} snapshot to {}'.format(status, snapshot))
            return
        # only write when the value actually changed, to keep attribute
        # updates (and their timestamps) minimal
        if status_dict.get(status) != snapshot:
            status_dict[status] = snapshot
            text = yaml.safe_dump(status_dict)
            self.api.attribute_value_save('ToTestManagerStatus', text)

    def get_status_dict(self):
        # Deserialized ToTestManagerStatus attribute; {} when unset.
        text = self.api.attribute_value_load('ToTestManagerStatus')
        if text:
            return yaml.safe_load(text)
        return dict()

    def get_status(self, status):
        # Snapshot currently recorded for *status*, or None.
        return self.get_status_dict().get(status)

    def release_package(self, project, package, set_release=None, repository=None,
                         target_project=None, target_repository=None):
        """POST an OBS 'release' command for project/package.

        In dry-run (or when releasing is disabled) the command is only
        logged, never sent.
        """
        query = {'cmd': 'release'}

        if set_release:
            query['setrelease'] = set_release

        if repository is not None:
            query['repository'] = repository

        if target_project is not None:
            # Both need to be set
            query['target_project'] = target_project
            query['target_repository'] = target_repository

        baseurl = ['source', project, package]

        url = self.api.makeurl(baseurl, query=query)
        if self.dryrun or self.project.do_not_release:
            self.logger.info('release %s/%s (%s)' % (project, package, query))
        else:
            self.api.retried_POST(url)

    def all_repos_done(self, project, codes=None):
        """Check the build result of the project and only return True if all
        repos of that project are either published or unpublished

        """

        # coolo's experience says that 'finished' won't be
        # sufficient here, so don't try to add it :-)
        codes = ['published', 'unpublished'] if not codes else codes

        url = self.api.makeurl(
            ['build', project, '_result'], {'code': 'failed'})
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        ready = True
        for repo in root.findall('result'):
            # ignore ports. 'factory' is used by arm for repos that are not
            # meant to use the totest manager.
            if repo.get('repository') in ('ports', 'factory', 'images_staging'):
                continue
            if repo.get('dirty') == 'true':
                self.logger.info('%s %s %s -> %s' % (repo.get('project'),
                                                repo.get('repository'), repo.get('arch'), 'dirty'))
                ready = False
            if repo.get('code') not in codes:
                self.logger.info('%s %s %s -> %s' % (repo.get('project'),
                                                repo.get('repository'), repo.get('arch'), repo.get('code')))
                ready = False
        return ready
Example #7
0
class ToTestBase(object):
    """Base class to store the basic interface"""
    def __init__(self, project, dryrun):
        self.project = project
        self.dryrun = dryrun
        self.api = StagingAPI(osc.conf.config['apiurl'],
                              project='openSUSE:%s' % project)
        self.known_failures = self.known_failures_from_dashboard(project)

    def openqa_group(self):
        return self.project

    def iso_prefix(self):
        return self.project

    def jobs_num(self):
        return 90

    def current_version(self):
        return self.release_version()

    def binaries_of_product(self, project, product):
        url = self.api.makeurl(['build', project, 'images', 'local', product])
        try:
            f = self.api.retried_GET(url)
        except urllib2.HTTPError:
            return []

        ret = []
        root = ET.parse(f).getroot()
        for binary in root.findall('binary'):
            ret.append(binary.get('filename'))

        return ret

    def get_current_snapshot(self):
        """Return the current snapshot in :ToTest"""

        # for now we hardcode all kind of things
        for binary in self.binaries_of_product(
                'openSUSE:%s:ToTest' % self.project,
                '_product:openSUSE-cd-mini-%s' % self.arch()):
            result = re.match(
                r'openSUSE-%s-NET-.*-Snapshot(.*)-Media.iso' %
                self.iso_prefix(), binary)
            if result:
                return result.group(1)

        return None

    def ftp_build_version(self, tree):
        for binary in self.binaries_of_product('openSUSE:%s' % self.project,
                                               tree):
            result = re.match(r'openSUSE.*Build(.*)-Media1.report', binary)

        if result:
            return result.group(1)
        raise Exception("can't find %s version" % self.project)

    def release_version(self):
        url = self.api.makeurl([
            'build',
            'openSUSE:%s' % self.project, 'standard',
            self.arch(), '_product:openSUSE-release'
        ])
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        for binary in root.findall('binary'):
            binary = binary.get('filename', '')
            result = re.match(r'.*-([^-]*)-[^-]*.src.rpm', binary)
            if result:
                return result.group(1)

        raise Exception("can't find %s version" % self.project)

    def find_openqa_results(self, snapshot):
        """Return the openqa jobs of a given snapshot and filter out the
        cloned jobs

        """

        url = makeurl('https://openqa.opensuse.org', ['api', 'v1', 'jobs'], {
            'group': self.openqa_group(),
            'build': snapshot
        })
        f = self.api.retried_GET(url)
        jobs = []
        for job in json.load(f)['jobs']:
            if job['clone_id']:
                continue
            job['name'] = job['name'].replace(snapshot, '')
            jobs.append(job)
        return jobs

    def _result2str(self, result):
        if result == QA_INPROGRESS:
            return 'inprogress'
        elif result == QA_FAILED:
            return 'failed'
        else:
            return 'passed'

    def find_failed_module(self, testmodules):
        # print json.dumps(testmodules, sort_keys=True, indent=4)
        for module in testmodules:
            if module['result'] != 'failed':
                continue
            flags = module['flags']
            if 'fatal' in flags or 'important' in flags:
                return module['name']
                break
            print module['name'], module['result'], module['flags']

    def overall_result(self, snapshot):
        """Analyze the openQA jobs of a given snapshot Returns a QAResult"""

        if snapshot is None:
            return QA_FAILED

        jobs = self.find_openqa_results(snapshot)

        if len(jobs) < self.jobs_num():  # not yet scheduled
            print 'we have only %s jobs' % len(jobs)
            return QA_INPROGRESS

        number_of_fails = 0
        in_progress = False
        machines = []
        for job in jobs:
            # print json.dumps(job, sort_keys=True, indent=4)
            if job['result'] in ('failed', 'incomplete', 'skipped',
                                 'user_cancelled', 'obsoleted'):
                jobname = job['name'] + '@' + job['settings']['MACHINE']
                # Record machines we have tests for
                machines.append(job['settings']['MACHINE'])
                if jobname in self.known_failures:
                    self.known_failures.remove(jobname)
                    continue
                number_of_fails += 1
                # print json.dumps(job, sort_keys=True, indent=4), jobname
                failedmodule = self.find_failed_module(job['modules'])
                url = 'https://openqa.opensuse.org/tests/%s' % job['id']
                print jobname, url, failedmodule, job['retry_avbl']
                # if number_of_fails < 3: continue
            elif job['result'] == 'passed':
                continue
            elif job['result'] == 'none':
                if job['state'] != 'cancelled':
                    in_progress = True
            else:
                raise Exception(job['result'])

        if number_of_fails > 0:
            return QA_FAILED

        if in_progress:
            return QA_INPROGRESS

        machines = list(set(machines))
        for item in machines:
            for item2 in self.known_failures:
                if item2.split('@')[1] == item:
                    print 'now passing', item2
        return QA_PASSED

    def all_repos_done(self, project, codes=None):
        """Check the build result of the project and only return True if all
        repos of that project are either published or unpublished

        """

        codes = ['published', 'unpublished'] if not codes else codes

        url = self.api.makeurl(['build', project, '_result'],
                               {'code': 'failed'})
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        ready = True
        for repo in root.findall('result'):
            # ignore ports. 'factory' is used by arm for repos that are not
            # meant to use the totest manager.
            if repo.get('repository') in ('ports', 'factory'):
                continue
            # ignore 32bit for now. We're only interesed in aarch64 here
            if repo.get('arch') in ('armv6l', 'armv7l'):
                continue
            if repo.get('dirty', '') == 'true':
                print repo.get('project'), repo.get('repository'), repo.get(
                    'arch'), 'dirty'
                ready = False
            if repo.get('code') not in codes:
                print repo.get('project'), repo.get('repository'), repo.get(
                    'arch'), repo.get('code')
                ready = False
        return ready

    def maxsize_for_package(self, package):
        if re.match(r'.*-mini-.*', package):
            return 737280000  # a CD needs to match

        if re.match(r'.*-dvd5-.*', package):
            return 4700372992  # a DVD needs to match

        if re.match(r'.*-image-livecd-x11.*', package):
            return 681574400  # not a full CD

        if re.match(r'.*-image-livecd.*', package):
            return 999999999  # a GB stick

        if re.match(r'.*-dvd9-dvd-.*', package):
            return 8539996159

        if package.startswith('_product:openSUSE-ftp-ftp-'):
            return None

        if package == '_product:openSUSE-Addon-NonOss-ftp-ftp-i586_x86_64':
            return None

        raise Exception('No maxsize for {}'.format(package))

    def package_ok(self, project, package, repository, arch):
        """Checks one package in a project and returns True if it's succeeded

        """

        query = {'package': package, 'repository': repository, 'arch': arch}

        url = self.api.makeurl(['build', project, '_result'], query)
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        for repo in root.findall('result'):
            status = repo.find('status')
            if status.get('code') != 'succeeded':
                print project, package, repository, arch, status.get('code')
                return False

        maxsize = self.maxsize_for_package(package)
        if not maxsize:
            return True

        url = self.api.makeurl(['build', project, repository, arch, package])
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        for binary in root.findall('binary'):
            if not binary.get('filename', '').endswith('.iso'):
                continue
            isosize = int(binary.get('size', 0))
            if isosize > maxsize:
                print project, package, repository, arch, 'too large by %s bytes' % (
                    isosize - maxsize)
                return False

        return True

    def factory_snapshottable(self):
        """Check various conditions required for factory to be snapshotable

        """

        if not self.all_repos_done('openSUSE:%s' % self.project):
            return False

        for product in self.ftp_products + self.main_products:
            if not self.package_ok('openSUSE:%s' % self.project, product,
                                   'images', 'local'):
                return False

            if len(self.livecd_products):

                if not self.all_repos_done('openSUSE:%s:Live' % self.project):
                    return False

                for arch in ['i586', 'x86_64']:
                    for product in self.livecd_products:
                        if not self.package_ok(
                                'openSUSE:%s:Live' % self.project, product,
                                'standard', arch):
                            return False

        return True

    def release_package(self, project, package, set_release=None):
        query = {'cmd': 'release'}

        if set_release:
            query['setrelease'] = set_release

        baseurl = ['source', project, package]

        url = self.api.makeurl(baseurl, query=query)
        if self.dryrun:
            print "release %s/%s (%s)" % (project, package, set_release)
        else:
            self.api.retried_POST(url)

    def update_totest(self, snapshot):
        print 'Updating snapshot %s' % snapshot
        if not self.dryrun:
            self.api.switch_flag_in_prj('openSUSE:%s:ToTest' % self.project,
                                        flag='publish',
                                        state='disable')

        for product in self.ftp_products:
            self.release_package('openSUSE:%s' % self.project, product)

        for cd in self.livecd_products:
            self.release_package('openSUSE:%s:Live' % self.project,
                                 cd,
                                 set_release='Snapshot%s' % snapshot)

        for cd in self.main_products:
            self.release_package('openSUSE:%s' % self.project,
                                 cd,
                                 set_release='Snapshot%s' % snapshot)

    def publish_factory_totest(self):
        print 'Publish ToTest'
        if not self.dryrun:
            self.api.switch_flag_in_prj('openSUSE:%s:ToTest' % self.project,
                                        flag='publish',
                                        state='enable')

    def totest_is_publishing(self):
        """Find out if the publishing flag is set in totest's _meta"""

        url = self.api.makeurl(
            ['source', 'openSUSE:%s:ToTest' % self.project, '_meta'])
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        if not root.find('publish'):  # default true
            return True

        for flag in root.find('publish'):
            if flag.get('repository', None) or flag.get('arch', None):
                continue
            if flag.tag == 'enable':
                return True
        return False

    def totest(self):
        current_snapshot = self.get_current_snapshot()
        new_snapshot = self.current_version()

        current_result = self.overall_result(current_snapshot)

        print 'current_snapshot', current_snapshot, self._result2str(
            current_result)

        can_release = (current_result != QA_INPROGRESS
                       and self.factory_snapshottable())

        # not overwriting
        if new_snapshot == current_snapshot:
            can_release = False
        elif not self.all_repos_done('openSUSE:%s:ToTest' % self.project):
            # the repos have to be done, otherwise we better not touch them with a new release
            can_release = False

        can_publish = (current_result == QA_PASSED)

        # already published
        if self.totest_is_publishing():
            can_publish = False

        if can_publish:
            self.publish_factory_totest()
            can_release = False  # we have to wait

        if can_release:
            self.update_totest(new_snapshot)

    def release(self):
        new_snapshot = self.current_version()
        self.update_totest(new_snapshot)

    def known_failures_from_dashboard(self, project):
        known_failures = []
        if self.project in ("Factory:PowerPC", "Factory:ARM"):
            project = "Factory"
        else:
            project = self.project

        url = self.api.makeurl([
            'source',
            'openSUSE:%s:Staging' % project, 'dashboard', 'known_failures'
        ])
        f = self.api.retried_GET(url)
        for line in f:
            if not line[0] == '#':
                known_failures.append(line.strip())
        return known_failures
Example #8
0
class ToTestManager(ToolBase.ToolBase):
    """Shared ToTest tooling.

    Binds a StagingAPI connection to one project and offers the helpers
    (version files, status attribute, release commands, repo readiness)
    used by the releaser and publisher.
    """

    def __init__(self, tool):
        ToolBase.ToolBase.__init__(self)
        # mirror the relevant settings of the wrapping tool
        self.logger = logging.getLogger(__name__)
        self.apiurl = tool.apiurl
        self.debug = tool.debug
        self.caching = tool.caching
        self.dryrun = tool.dryrun

    def setup(self, project):
        """Bind this manager to *project*."""
        self.project = ToTest(project, self.apiurl)
        self.api = StagingAPI(self.apiurl, project=project)

    def version_file(self, target):
        """Pseudometa file name tracking the version of *target*."""
        return 'version_%s' % target

    def write_version_to_dashboard(self, target, version):
        """Record *version* for *target*, unless dry-running or releases are off."""
        if self.dryrun or self.project.do_not_release:
            return
        self.api.pseudometa_file_ensure(
            self.version_file(target), version, comment='Update version')

    def current_qa_version(self):
        """Version currently under openQA test."""
        return self.api.pseudometa_file_load(self.version_file('totest'))

    def iso_build_version(self, project, tree, repo=None, arch=None):
        """Build/Snapshot number parsed from an iso/docker/raw/appx binary name."""
        pattern = re.compile(
            r'.*-(?:Build|Snapshot)([0-9.]+)(?:-Media.*\.iso|\.docker\.tar\.xz|\.raw\.xz|\.appx)')
        for name in self.binaries_of_product(project, tree, repo=repo, arch=arch):
            match = pattern.match(name)
            if match:
                return match.group(1)
        raise NotFoundException("can't find %s iso version" % project)

    def version_from_totest_project(self):
        """Version of the main (or first image) product in the test project."""
        if self.project.main_products:
            return self.iso_build_version(self.project.test_project,
                                          self.project.main_products[0])

        image = self.project.image_products[0]
        return self.iso_build_version(self.project.test_project, image.package,
                                      arch=image.archs[0])

    def binaries_of_product(self, project, product, repo=None, arch=None):
        """Binary filenames of *product*; [] when the listing can't be fetched."""
        repo = self.project.product_repo if repo is None else repo
        arch = self.project.product_arch if arch is None else arch

        url = self.api.makeurl(['build', project, repo, arch, product])
        try:
            listing = self.api.retried_GET(url)
        except HTTPError:
            return []

        root = ET.parse(listing).getroot()
        return [node.get('filename') for node in root.findall('binary')]

    def ftp_build_version(self, project, tree):
        """Build number parsed from the FTP tree's Media1 report."""
        for name in self.binaries_of_product(project, tree):
            match = re.match(r'.*-Build(.*)-Media1.report', name)
            if match:
                return match.group(1)
        raise NotFoundException("can't find %s ftp version" % project)

    # make sure to update the attribute as atomic as possible - as such
    # only update the snapshot and don't erase anything else. The snapshots
    # have very different update times within the pipeline, so there is
    # normally no chance that releaser and publisher overwrite states
    def update_status(self, status, snapshot):
        current = self.get_status_dict()
        if self.dryrun:
            self.logger.info('setting {} snapshot to {}'.format(
                status, snapshot))
            return
        if current.get(status) == snapshot:
            return
        current[status] = snapshot
        self.api.attribute_value_save('ToTestManagerStatus',
                                      yaml.safe_dump(current))

    def get_status_dict(self):
        """Deserialized ToTestManagerStatus attribute ({} when unset)."""
        text = self.api.attribute_value_load('ToTestManagerStatus')
        return yaml.safe_load(text) if text else dict()

    def get_status(self, status):
        """Snapshot recorded for *status*, or None."""
        return self.get_status_dict().get(status)

    def release_package(self,
                        project,
                        package,
                        set_release=None,
                        repository=None,
                        target_project=None,
                        target_repository=None):
        """POST an OBS 'release' command (only logged in dry run)."""
        query = {'cmd': 'release'}

        if set_release:
            query['setrelease'] = set_release

        if repository is not None:
            query['repository'] = repository

        if target_project is not None:
            # Both need to be set
            query['target_project'] = target_project
            query['target_repository'] = target_repository

        url = self.api.makeurl(['source', project, package], query=query)
        if self.dryrun or self.project.do_not_release:
            self.logger.info('release %s/%s (%s)' % (project, package, query))
        else:
            self.api.retried_POST(url)

    def all_repos_done(self, project, codes=None):
        """Check the build result of the project and only return True if all
        repos of that project are either published or unpublished

        """

        # coolo's experience says that 'finished' won't be
        # sufficient here, so don't try to add it :-)
        codes = ['published', 'unpublished'] if not codes else codes

        url = self.api.makeurl(['build', project, '_result'],
                               {'code': 'failed'})
        root = ET.parse(self.api.retried_GET(url)).getroot()
        ready = True
        for result in root.findall('result'):
            # ignore ports. 'factory' is used by arm for repos that are not
            # meant to use the totest manager.
            if result.get('repository') in ('ports', 'factory',
                                            'images_staging'):
                continue
            problems = []
            if result.get('dirty') == 'true':
                problems.append('dirty')
            if result.get('code') not in codes:
                problems.append(result.get('code'))
            for state in problems:
                self.logger.info('%s %s %s -> %s' %
                                 (result.get('project'),
                                  result.get('repository'),
                                  result.get('arch'), state))
                ready = False
        return ready
class ToTestBase(object):
    """Base class to store the basic interface"""

    product_repo = 'images'
    product_arch = 'local'
    livecd_repo = 'images'
    totest_container_repo = 'containers'

    main_products = []
    ftp_products = []
    container_products = []
    livecd_products = []
    image_products = []

    def __init__(self,
                 project,
                 dryrun=False,
                 norelease=False,
                 api_url=None,
                 openqa_server='https://openqa.opensuse.org',
                 test_subproject=None):
        """Bind to an OBS project and an openQA server.

        :param project: OBS project name, e.g. 'openSUSE:Factory'
        :param dryrun: log intended changes instead of performing them
        :param norelease: disable the release step
        :param api_url: OBS API root; defaults to the osc configuration
        :param openqa_server: openQA instance used for job queries
        :param test_subproject: suffix of the test project (default 'ToTest')
        """
        self.project = project
        self.dryrun = dryrun
        self.norelease = norelease
        if not api_url:
            api_url = osc.conf.config['apiurl']
        self.api = StagingAPI(api_url, project=project)
        self.openqa_server = openqa_server
        if not test_subproject:
            test_subproject = 'ToTest'
        self.test_project = '%s:%s' % (self.project, test_subproject)
        self.openqa = OpenQA_Client(server=openqa_server)
        # reads the IgnoredIssues attribute; requires self.api set above
        self.load_issues_to_ignore()
        # e.g. 'openSUSE' from 'openSUSE:Factory'
        self.project_base = project.split(':')[0]
        self.update_pinned_descr = False
        self.amqp_url = osc.conf.config.get('ttm_amqp_url')

    def load_issues_to_ignore(self):
        """Load the per-issue 'last_seen' map from the IgnoredIssues attribute.

        Uses yaml.safe_load instead of yaml.load: the text comes from a
        remote OBS attribute, and yaml.load without a Loader can construct
        arbitrary Python objects (and is deprecated). This also matches the
        safe_load/safe_dump used elsewhere in this file. Additionally falls
        back to an empty dict when the attribute lacks a 'last_seen' key,
        which previously left issues_to_ignore as None.
        """
        text = self.api.attribute_value_load('IgnoredIssues')
        if text:
            root = yaml.safe_load(text)
            self.issues_to_ignore = root.get('last_seen') or dict()
        else:
            self.issues_to_ignore = dict()

    def save_issues_to_ignore(self):
        """Write the ignored-issue map back to the IgnoredIssues attribute."""
        if self.dryrun:
            return
        payload = yaml.dump({'last_seen': self.issues_to_ignore},
                            default_flow_style=False)
        self.api.attribute_value_save('IgnoredIssues', payload)

    def openqa_group(self):
        """openQA job group to query; by default the project name itself."""
        return self.project

    def iso_prefix(self):
        """Prefix used in iso filenames; by default the project name itself."""
        return self.project

    def jobs_num(self):
        """Minimum number of openQA jobs expected before results count."""
        return 70

    def current_version(self):
        """Version of the project right now (taken from the release package)."""
        return self.release_version()

    def binaries_of_product(self, project, product, repo=None, arch=None):
        """List the binary filenames built for *product*.

        Falls back to the class-level product repo/arch, and returns []
        when the build listing cannot be fetched (HTTP error).
        """
        repo = self.product_repo if repo is None else repo
        arch = self.product_arch if arch is None else arch

        url = self.api.makeurl(['build', project, repo, arch, product])
        try:
            listing = self.api.retried_GET(url)
        except HTTPError:
            return []

        root = ET.parse(listing).getroot()
        return [node.get('filename') for node in root.findall('binary')]

    def get_current_snapshot(self):
        """Return the snapshot number currently in the test project, or None."""
        product = '000product:%s-cd-mini-%s' % (self.project_base, self.arch())
        pattern = re.compile(r'%s-%s-NET-.*-Snapshot(.*)-Media.iso' %
                             (self.project_base, self.iso_prefix()))
        for binary in self.binaries_of_product(self.test_project, product):
            match = pattern.match(binary)
            if match:
                return match.group(1)

        return None

    def ftp_build_version(self, project, tree, base=None):
        """Return the Build number parsed from the tree's Media1 report."""
        base = base or self.project_base
        pattern = re.compile(r'%s.*Build(.*)-Media1.report' % base)
        for binary in self.binaries_of_product(project, tree):
            match = pattern.match(binary)
            if match:
                return match.group(1)
        raise NotFoundException("can't find %s ftp version" % project)

    def iso_build_version(self,
                          project,
                          tree,
                          base=None,
                          repo=None,
                          arch=None):
        """Build/Snapshot number parsed from an iso or docker binary name.

        *base* is accepted (and defaulted) for symmetry with
        ftp_build_version, but the pattern itself does not use it.
        """
        if not base:
            base = self.project_base
        pattern = re.compile(
            r'.*-(?:Build|Snapshot)([0-9.]+)(?:-Media.*\.iso|\.docker\.tar\.xz)')
        for binary in self.binaries_of_product(project, tree,
                                               repo=repo, arch=arch):
            match = pattern.match(binary)
            if match:
                return match.group(1)
        raise NotFoundException("can't find %s iso version" % project)

    def release_version(self):
        """Version taken from the release package's source rpm filename."""
        url = self.api.makeurl([
            'build', self.project, 'standard',
            self.arch(),
            '000release-packages:%s-release' % self.project_base
        ])
        listing = self.api.retried_GET(url)
        root = ET.parse(listing).getroot()
        for node in root.findall('binary'):
            filename = node.get('filename', '')
            match = re.match(r'.*-([^-]*)-[^-]*.src.rpm', filename)
            if match:
                return match.group(1)

        raise NotFoundException("can't find %s version" % self.project)

    def current_qa_version(self):
        """Snapshot last handed to openQA (from the version_totest file)."""
        return self.api.pseudometa_file_load('version_totest')

    def find_openqa_results(self, snapshot):
        """Return the openQA jobs of *snapshot*, skipping cloned and
        obsoleted ones, with the snapshot stripped from each job name so
        names are comparable across builds.
        """
        url = makeurl(self.openqa_server, ['api', 'v1', 'jobs'], {
            'group': self.openqa_group(),
            'build': snapshot,
            'latest': 1
        })
        listing = self.api.retried_GET(url)
        jobs = []
        for job in json.load(listing)['jobs']:
            if not job['clone_id'] and job['result'] != 'obsoleted':
                job['name'] = job['name'].replace(snapshot, '')
                jobs.append(job)
        return jobs

    def _result2str(self, result):
        """Human-readable name for a QAResult constant."""
        if result == QA_INPROGRESS:
            return 'inprogress'
        if result == QA_FAILED:
            return 'failed'
        return 'passed'

    def find_failed_module(self, testmodules):
        """Return the name of the first fatal/important failed module.

        Non-fatal failures are only logged; returns None when no
        fatal/important failure exists. (Removed an unreachable ``break``
        that followed the ``return``.)
        """
        for module in testmodules:
            if module['result'] != 'failed':
                continue
            flags = module['flags']
            if 'fatal' in flags or 'important' in flags:
                return module['name']
            logger.info('%s %s %s' %
                        (module['name'], module['result'], module['flags']))

    def update_openqa_status_message(self):
        """Create or refresh the pinned 'Ignored issues' comment in our
        openQA job group.

        Looks up the job group matching openqa_group(), builds a status
        message from the ignored-issue list and the current totest state,
        and writes it as a comment owned by the 'ttm' user.
        """
        url = makeurl(self.openqa_server, ['api', 'v1', 'job_groups'])
        f = self.api.retried_GET(url)
        job_groups = json.load(f)
        group_id = 0
        for jg in job_groups:
            if jg['name'] == self.openqa_group():
                group_id = jg['id']
                break

        if not group_id:
            logger.debug(
                'No openQA group id found for status comment update, ignoring')
            return

        pinned_ignored_issue = 0
        issues = ' , '.join(self.issues_to_ignore.keys())
        # pick the most advanced state that currently applies
        status_flag = 'publishing' if self.status_for_openqa['is_publishing'] else \
            'preparing' if self.status_for_openqa['can_release'] else \
            'testing' if self.status_for_openqa['snapshotable'] else \
            'building'
        # NOTE(review): status_flag appears twice in the tag -- presumably
        # openQA's tag:<build>:<type>:<description> syntax; confirm intended.
        status_msg = "tag:{}:{}:{}".format(
            self.status_for_openqa['new_snapshot'], status_flag, status_flag)
        msg = "pinned-description: Ignored issues\r\n\r\n{}\r\n\r\n{}".format(
            issues, status_msg)
        data = {'text': msg}

        url = makeurl(self.openqa_server,
                      ['api', 'v1', 'groups',
                       str(group_id), 'comments'])
        f = self.api.retried_GET(url)
        comments = json.load(f)
        # find an existing pinned comment of ours so we update it in place
        # instead of piling up new comments
        for comment in comments:
            if comment['userName'] == 'ttm' and \
                    comment['text'].startswith('pinned-description: Ignored issues'):
                pinned_ignored_issue = comment['id']

        logger.debug('Writing openQA status message: {}'.format(data))
        if not self.dryrun:
            if pinned_ignored_issue:
                self.openqa.openqa_request('PUT',
                                           'groups/%s/comments/%d' %
                                           (group_id, pinned_ignored_issue),
                                           data=data)
            else:
                self.openqa.openqa_request('POST',
                                           'groups/%s/comments' % group_id,
                                           data=data)

    def overall_result(self, snapshot):
        """Analyze the openQA jobs of a given snapshot Returns a QAResult"""

        if snapshot is None:
            return QA_FAILED

        jobs = self.find_openqa_results(snapshot)

        # collected as a side effect; consumed later by send_amqp_event()
        self.failed_relevant_jobs = []
        self.failed_ignored_jobs = []

        if len(jobs) < self.jobs_num():  # not yet scheduled
            logger.warning('we have only %s jobs' % len(jobs))
            return QA_INPROGRESS

        in_progress = False
        for job in jobs:
            # print json.dumps(job, sort_keys=True, indent=4)
            if job['result'] in ('failed', 'incomplete', 'skipped',
                                 'user_cancelled', 'obsoleted',
                                 'parallel_failed'):
                # print json.dumps(job, sort_keys=True, indent=4), jobname
                # inspect the job's comments: bug references and explicit
                # '@ttm ignore' markers decide whether this failure counts
                url = makeurl(
                    self.openqa_server,
                    ['api', 'v1', 'jobs',
                     str(job['id']), 'comments'])
                f = self.api.retried_GET(url)
                comments = json.load(f)
                refs = set()
                labeled = 0  # id of our own 'label:unknown_failure' comment, if any
                to_ignore = False
                for comment in comments:
                    for ref in comment['bugrefs']:
                        refs.add(str(ref))
                    if comment['userName'] == 'ttm' and comment[
                            'text'] == 'label:unknown_failure':
                        labeled = comment['id']
                    if re.search(r'@ttm:? ignore', comment['text']):
                        to_ignore = True
                # to_ignore can happen with or without refs
                ignored = True if to_ignore else len(refs) > 0
                build_nr = str(job['settings']['BUILD'])
                for ref in refs:
                    if ref not in self.issues_to_ignore:
                        if to_ignore:
                            # newly ignored issue: the pinned group comment
                            # needs a refresh afterwards
                            self.issues_to_ignore[ref] = build_nr
                            self.update_pinned_descr = True
                        else:
                            # known bugref nobody asked to ignore: the
                            # failure stays relevant
                            ignored = False
                    else:
                        # update reference
                        self.issues_to_ignore[ref] = build_nr

                if ignored:
                    self.failed_ignored_jobs.append(job['id'])
                    if labeled:
                        text = 'Ignored issue' if len(
                            refs) > 0 else 'Ignored failure'
                        # remove flag - unfortunately can't delete comment unless admin
                        data = {'text': text}
                        if self.dryrun:
                            logger.info("Would label {} with: {}".format(
                                job['id'], text))
                        else:
                            self.openqa.openqa_request('PUT',
                                                       'jobs/%s/comments/%d' %
                                                       (job['id'], labeled),
                                                       data=data)

                    logger.info("job %s failed, but was ignored", job['name'])
                else:
                    self.failed_relevant_jobs.append(job['id'])
                    # NOTE(review): the label is only posted when bugrefs
                    # exist (len(refs) > 0) although its text says
                    # 'unknown_failure' -- confirm this is intended
                    if not labeled and len(refs) > 0:
                        data = {'text': 'label:unknown_failure'}
                        if self.dryrun:
                            logger.info("Would label {} as unknown".format(
                                job['id']))
                        else:
                            self.openqa.openqa_request('POST',
                                                       'jobs/%s/comments' %
                                                       job['id'],
                                                       data=data)

                    joburl = '%s/tests/%s' % (self.openqa_server, job['id'])
                    logger.info("job %s failed, see %s", job['name'], joburl)

            elif job['result'] == 'passed' or job['result'] == 'softfailed':
                continue
            elif job['result'] == 'none':
                # still running unless it was cancelled
                if job['state'] != 'cancelled':
                    in_progress = True
            else:
                raise Exception(job['result'])

        self.save_issues_to_ignore()

        if len(self.failed_relevant_jobs) > 0:
            return QA_FAILED

        if in_progress:
            return QA_INPROGRESS

        return QA_PASSED

    def all_repos_done(self, project, codes=None):
        """Check the build result of the project and only return True if all
        repos of that project are either published or unpublished

        """

        # coolo's experience says that 'finished' won't be
        # sufficient here, so don't try to add it :-)
        acceptable = codes if codes else ['published', 'unpublished']

        url = self.api.makeurl(['build', project, '_result'],
                               {'code': 'failed'})
        root = ET.parse(self.api.retried_GET(url)).getroot()
        # ignore ports. 'factory' is used by arm for repos that are not
        # meant to use the totest manager.
        skipped_repos = ('ports', 'factory', 'images_staging')
        ready = True
        for result in root.findall('result'):
            if result.get('repository') in skipped_repos:
                continue
            if result.get('dirty', '') == 'true':
                logger.info('%s %s %s -> %s' %
                            (result.get('project'), result.get('repository'),
                             result.get('arch'), 'dirty'))
                ready = False
            if result.get('code') not in acceptable:
                logger.info('%s %s %s -> %s' %
                            (result.get('project'), result.get('repository'),
                             result.get('arch'), result.get('code')))
                ready = False
        return ready

    def maxsize_for_package(self, package):
        """Return the size limit in bytes for the given medium/package name.

        None means the package has no size limit. Raises for packages the
        manager does not know about. Pattern order matters: the first
        match wins.
        """
        size_limits = (
            (r'.*-mini-.*', 737280000),  # a CD needs to match
            (r'.*-dvd5-.*', 4700372992),  # a DVD needs to match
            (r'livecd-x11', 681574400),  # not a full CD
            (r'livecd-.*', 999999999),  # a GB stick
            (r'.*-(dvd9-dvd|cd-DVD)-.*', 8539996159),
            (r'.*-ftp-(ftp|POOL)-', None),
            # docker container has no size limit
            (r'opensuse-.*-image.*', None),
        )
        for pattern, limit in size_limits:
            if re.match(pattern, package):
                return limit

        if '-Addon-NonOss-ftp-ftp' in package:
            return None

        if 'JeOS' in package:
            return 4700372992

        raise Exception('No maxsize for {}'.format(package))

    def package_ok(self, project, package, repository, arch):
        """Checks one package in a project and returns True if it's succeeded

        """

        query = {'package': package, 'repository': repository, 'arch': arch}
        url = self.api.makeurl(['build', project, '_result'], query)
        root = ET.parse(self.api.retried_GET(url)).getroot()
        # [@code!='succeeded'] is not supported by ET
        bad = next((status for status in root.findall("result/status")
                    if status.get('code') != 'succeeded'), None)
        if bad is not None:
            logger.info(
                '%s %s %s %s -> %s' %
                (project, package, repository, arch, bad.get('code')))
            return False

        if not root.findall('result/status[@code="succeeded"]'):
            logger.info('No "succeeded" for %s %s %s %s' %
                        (project, package, repository, arch))
            return False

        maxsize = self.maxsize_for_package(package)
        if not maxsize:
            return True

        # verify the produced ISOs fit on their medium
        url = self.api.makeurl(['build', project, repository, arch, package])
        root = ET.parse(self.api.retried_GET(url)).getroot()
        for binary in root.findall('binary'):
            filename = binary.get('filename', '')
            if not filename.endswith('.iso'):
                continue
            isosize = int(binary.get('size', 0))
            if isosize > maxsize:
                logger.error('%s %s %s %s: %s' %
                             (project, package, repository, arch,
                              'too large by %s bytes' % (isosize - maxsize)))
                return False

        return True

    def is_snapshottable(self):
        """Check various conditions required for factory to be snapshotable

        """

        if not self.all_repos_done(self.project):
            return False

        if not all(self.package_ok(self.project, product, self.product_repo,
                                   self.product_arch)
                   for product in self.ftp_products + self.main_products):
            return False

        for product in self.image_products + self.container_products:
            if not all(self.package_ok(self.project, product.package,
                                       self.product_repo, arch)
                       for arch in product.archs):
                return False

        if self.livecd_products:
            live_project = '%s:Live' % self.project
            if not self.all_repos_done(live_project):
                return False

            for product in self.livecd_products:
                if not all(self.package_ok(live_project, product.package,
                                           self.product_repo, arch)
                           for arch in product.archs):
                    return False

        return True

    def _release_package(self,
                         project,
                         package,
                         set_release=None,
                         repository=None,
                         target_project=None,
                         target_repository=None):
        """Trigger OBS' release mechanism for a single package.

        The optional arguments narrow down the source repository, pin a
        release number and redirect the result to another project/repo.
        In dryrun/norelease mode only log what would happen.
        """
        query = {'cmd': 'release'}
        if set_release:
            query['setrelease'] = set_release
        if repository is not None:
            query['repository'] = repository
        if target_project is not None:
            # Both need to be set
            query['target_project'] = target_project
            query['target_repository'] = target_repository

        url = self.api.makeurl(['source', project, package], query=query)
        if self.dryrun or self.norelease:
            logger.info("release %s/%s (%s)" % (project, package, query))
        else:
            self.api.retried_POST(url)

    def _release(self, set_release=None):
        """Release every product into :ToTest, keeping the usual order."""
        # FTP trees never carry a release number
        for ftp_tree in self.ftp_products:
            self._release_package(self.project,
                                  ftp_tree,
                                  repository=self.product_repo)

        live_project = '%s:Live' % self.project
        for livecd in self.livecd_products:
            self._release_package(live_project,
                                  livecd.package,
                                  set_release=set_release,
                                  repository=self.livecd_repo)

        for image in self.image_products:
            self._release_package(self.project,
                                  image.package,
                                  set_release=set_release,
                                  repository=self.product_repo)

        for medium in self.main_products:
            self._release_package(self.project,
                                  medium,
                                  set_release=set_release,
                                  repository=self.product_repo)

        # Containers are built in the same repo as other image products,
        # but released into a different repo in :ToTest
        for container in self.container_products:
            self._release_package(self.project,
                                  container.package,
                                  repository=self.product_repo,
                                  target_project=self.test_project,
                                  target_repository=self.totest_container_repo)

    def update_totest(self, snapshot=None):
        """Disable publishing of the test project, then release into it."""
        set_release = 'Snapshot%s' % snapshot if snapshot else None
        logger.info('Updating snapshot %s' % snapshot)
        dry = self.dryrun or self.norelease
        if not dry:
            self.api.switch_flag_in_prj(self.test_project,
                                        flag='publish',
                                        state='disable',
                                        repository=self.product_repo)

        self._release(set_release=set_release)

    def publish_factory_totest(self):
        """Re-enable publishing of :ToTest and release containers out of it."""
        logger.info('Publish test project content')
        dry = self.dryrun or self.norelease
        if not dry:
            self.api.switch_flag_in_prj(self.test_project,
                                        flag='publish',
                                        state='enable',
                                        repository=self.product_repo)
        if not self.container_products:
            return
        logger.info('Releasing container products from ToTest')
        for container in self.container_products:
            self._release_package(self.test_project,
                                  container.package,
                                  repository=self.totest_container_repo)

    def totest_is_publishing(self):
        """Find out if the publishing flag is set in totest's _meta"""

        url = self.api.makeurl(['source', self.test_project, '_meta'])
        f = self.api.retried_GET(url)
        root = ET.parse(f).getroot()
        publish = root.find('publish')
        # A missing <publish> element -- or an empty one -- means OBS'
        # default: publishing enabled. The original used `not root.find(...)`
        # which relies on Element truthiness (falsy when childless); that
        # raises a FutureWarning in newer ElementTree, so test explicitly.
        if publish is None or len(publish) == 0:  # default true
            return True

        for flag in publish:
            if flag.get('repository', None) not in [None, self.product_repo]:
                continue
            if flag.get('arch', None):
                continue
            if flag.tag == 'enable':
                return True
        return False

    def totest(self):
        """Run one manager iteration: evaluate the openQA state of the
        current snapshot and decide whether to publish it and/or release a
        new snapshot into the test project.
        """
        try:
            current_snapshot = self.get_current_snapshot()
        except NotFoundException as e:
            # nothing in test project (yet)
            logger.warn(e)
            current_snapshot = None
        new_snapshot = self.current_version()
        self.update_pinned_descr = False
        current_result = self.overall_result(current_snapshot)
        current_qa_version = self.current_qa_version()

        logger.info('current_snapshot %s: %s' %
                    (current_snapshot, self._result2str(current_result)))
        logger.debug('new_snapshot %s', new_snapshot)
        logger.debug('current_qa_version %s', current_qa_version)

        snapshotable = self.is_snapshottable()
        logger.debug("snapshotable: %s", snapshotable)
        # a new snapshot may only go out once the current one left QA
        can_release = ((current_snapshot is None
                        or current_result != QA_INPROGRESS) and snapshotable)

        # not overwriting
        if new_snapshot == current_qa_version:
            logger.debug("no change in snapshot version")
            can_release = False
        elif not self.all_repos_done(self.test_project):
            logger.debug("not all repos done, can't release")
            # the repos have to be done, otherwise we better not touch them
            # with a new release
            can_release = False

        self.send_amqp_event(current_snapshot, current_result)

        can_publish = (current_result == QA_PASSED)

        # already published
        totest_is_publishing = self.totest_is_publishing()
        if totest_is_publishing:
            logger.debug("totest already publishing")
            can_publish = False

        if self.update_pinned_descr:
            # overall_result() picked up new ignored issues; refresh the
            # pinned openQA group comment with the current state
            self.status_for_openqa = {
                'current_snapshot': current_snapshot,
                'new_snapshot': new_snapshot,
                'snapshotable': snapshotable,
                'can_release': can_release,
                'is_publishing': totest_is_publishing,
            }
            self.update_openqa_status_message()

        if can_publish:
            if current_qa_version == current_snapshot:
                self.publish_factory_totest()
                self.write_version_to_dashboard("snapshot", current_snapshot)
                can_release = False  # we have to wait
            else:
                # We reached a very bad status: openQA testing is 'done', but not of the same version
                # currently in test project. This can happen when 'releasing' the
                # product failed
                raise Exception(
                    "Publishing stopped: tested version (%s) does not match version in test project (%s)"
                    % (current_qa_version, current_snapshot))

        if can_release:
            self.update_totest(new_snapshot)
            self.write_version_to_dashboard("totest", new_snapshot)

    def send_amqp_event(self, current_snapshot, current_result):
        """Publish the snapshot's build state on the AMQP bus (best effort).

        Retries on closed connections and only logs an error when all
        attempts failed; never raises for AMQP trouble.
        """
        if not self.amqp_url:
            logger.debug(
                'No ttm_amqp_url configured in oscrc - skipping amqp event emission'
            )
            return

        logger.debug('Sending AMQP message')
        # 'passed'/'failed' -> 'pass'/'fail'; 'inprogress' stays unchanged
        inf = re.sub(r"ed$", '', self._result2str(current_result))
        msg_topic = '%s.ttm.build.%s' % (self.project_base.lower(), inf)
        msg_body = json.dumps({
            'build': current_snapshot,
            'project': self.project,
            'failed_jobs': {
                'relevant': self.failed_relevant_jobs,
                'ignored': self.failed_ignored_jobs,
            }
        })

        # send amqp event
        tries = 7  # arbitrary
        for t in range(tries):
            try:
                notify_connection = pika.BlockingConnection(
                    pika.URLParameters(self.amqp_url))
                notify_channel = notify_connection.channel()
                notify_channel.exchange_declare(exchange='pubsub',
                                                exchange_type='topic',
                                                passive=True,
                                                durable=True)
                notify_channel.basic_publish(exchange='pubsub',
                                             routing_key=msg_topic,
                                             body=msg_body)
                notify_connection.close()
                break
            except pika.exceptions.ConnectionClosed as e:
                # NOTE(review): no delay between retries, and 't' is
                # zero-based in the message -- confirm whether intended
                logger.warn(
                    'Sending AMQP event did not work: %s. Retrying try %s out of %s'
                    % (e, t, tries))
        else:
            # for/else: only reached when the loop never hit 'break',
            # i.e. every attempt failed
            logger.error(
                'Could not send out AMQP event for %s tries, aborting.' %
                tries)

    def release(self):
        """Release the current version into :ToTest, bypassing QA gating."""
        self.update_totest(self.current_version())

    def write_version_to_dashboard(self, target, version):
        """Persist *version* into the 'version_<target>' pseudometa file."""
        if self.dryrun or self.norelease:
            return
        self.api.pseudometa_file_ensure('version_%s' % target,
                                        version,
                                        comment='Update version')
# Exemplo n.º 10
# 0
class StagingWorkflow(object):
    def __init__(self, project=PROJECT):
        """
        Initialize the configuration: point osc at the test instance,
        reset caches and create the remote target project setup.
        """
        # the test oscrc fixture lives next to this file
        THIS_DIR = os.path.dirname(os.path.abspath(__file__))
        oscrc = os.path.join(THIS_DIR, 'test.oscrc')

        self.apiurl = APIURL
        logging.basicConfig()

        # clear cache from other tests - otherwise the VCR is replayed depending
        # on test order, which can be harmful
        memoize_session_reset()

        osc.core.conf.get_config(override_conffile=oscrc,
                                 override_no_keyring=True,
                                 override_no_gnome_keyring=True)
        if os.environ.get('OSC_DEBUG'):
            osc.core.conf.config['debug'] = 1
        self.project = project
        # bookkeeping of everything created here, torn down by remove()
        self.projects = {}
        self.requests = []
        self.groups = []
        self.users = []
        CacheManager.test = True
        # disable caching, the TTLs break any reproduciblity
        Cache.CACHE_DIR = None
        Cache.PATTERNS = {}
        Cache.init()
        self.setup_remote_config()
        self.load_config()
        self.api = StagingAPI(APIURL, project)

    def load_config(self, project=None):
        """Instantiate a Config for *project* (defaults to the workflow's own)."""
        target = self.project if project is None else project
        self.config = Config(APIURL, target)

    def create_attribute_type(self, namespace, name, values=None):
        """Declare an attribute namespace and attribute type on the test OBS."""
        ns_meta = """
        <namespace name='{}'>
            <modifiable_by user='******'/>
        </namespace>""".format(namespace)
        url = osc.core.makeurl(APIURL, ['attribute', namespace, '_meta'])
        osc.core.http_PUT(url, data=ns_meta)

        parts = ["<definition name='{}' namespace='{}'><description/>".format(name, namespace)]
        if values:
            parts.append("<count>{}</count>".format(values))
        parts.append("<modifiable_by role='maintainer'/></definition>")
        url = osc.core.makeurl(APIURL, ['attribute', namespace, name, '_meta'])
        osc.core.http_PUT(url, data=''.join(parts))

    def setup_remote_config(self):
        """Seed the remote OSRT:Config attribute used by the Config tests."""
        self.create_target()
        self.create_attribute_type('OSRT', 'Config', 1)
        remote_config = ('overridden-by-local = remote-nope\n'
                        'remote-only = remote-indeed\n')
        attribute_value_save(APIURL, self.project, 'Config', remote_config)

    def create_group(self, name, users=None):
        """Create (or update) an OBS group, optionally with member users.

        The default used to be a shared mutable list (users=[]); a None
        sentinel avoids reusing one list across calls.
        """
        users = users if users is not None else []

        meta = """
        <group>
          <title>{}</title>
        </group>
        """.format(name)

        if len(users):
            root = ET.fromstring(meta)
            persons = ET.SubElement(root, 'person')
            for user in users:
                ET.SubElement(persons, 'person', { 'userid': user } )
            meta = ET.tostring(root)

        # remember for cleanup in remove()
        if name not in self.groups:
            self.groups.append(name)
        url = osc.core.makeurl(APIURL, ['group', name])
        osc.core.http_PUT(url, data=meta)

    def create_user(self, name):
        """Create a confirmed user with a password and a home project (idempotent)."""
        if name in self.users:
            return
        meta = """
        <person>
          <login>{}</login>
          <email>{}@example.com</email>
          <state>confirmed</state>
        </person>
        """.format(name, name)
        self.users.append(name)
        osc.core.http_PUT(osc.core.makeurl(APIURL, ['person', name]), data=meta)
        password_url = osc.core.makeurl(APIURL, ['person', name], {'cmd': 'change_password'})
        osc.core.http_POST(password_url, data='opensuse')
        home_project = 'home:' + name
        self.projects[home_project] = Project(home_project, create=False)

    def create_target(self):
        """Create the target project with its staging review group (runs once)."""
        if self.projects.get('target'):
            return
        self.create_group('factory-staging')
        reviewer = {'groups': ['factory-staging']}
        self.projects['target'] = Project(name=self.project, reviewer=reviewer)

    def setup_rings(self):
        """Create the ring projects and link a 'wine' package into ring 1."""
        self.create_target()
        for key, suffix in (('ring0', ':Rings:0-Bootstrap'),
                            ('ring1', ':Rings:1-MinimalX')):
            self.projects[key] = Project(name=self.project + suffix)
        target_wine = Package(name='wine', project=self.projects['target'])
        target_wine.create_commit()
        self.create_link(target_wine, self.projects['ring1'])

    def create_package(self, project, package):
        """Return a Package in *project*, creating the project if needed."""
        owning_project = self.create_project(project)
        return Package(name=package, project=owning_project)

    def create_link(self, source_package, target_project, target_package=None):
        """Create a _link in *target_project* pointing at *source_package*."""
        link_name = target_package or source_package.name
        target = Package(name=link_name, project=target_project)
        url = self.api.makeurl(['source', target_project.name, target.name, '_link'])
        link_xml = '<link project="{}" package="{}"/>'.format(
            source_package.project.name, source_package.name)
        osc.core.http_PUT(url, data=link_xml)
        return target

    def create_project(self, name, reviewer=None, maintainer=None, project_links=None):
        """Return the cached Project for *name*, creating it if unknown.

        *name* may already be a Project instance, which is returned as-is.
        The defaults used to be shared mutable objects ({} / []); None
        sentinels avoid reusing them across calls.
        """
        reviewer = {} if reviewer is None else reviewer
        maintainer = {} if maintainer is None else maintainer
        project_links = [] if project_links is None else project_links
        if isinstance(name, Project):
            return name
        if name in self.projects:
            return self.projects[name]
        self.projects[name] = Project(name, reviewer=reviewer,
                                      maintainer=maintainer,
                                      project_links=project_links)
        return self.projects[name]

    def submit_package(self, package=None):
        """File a submit request for *package* against the target project."""
        submit_request = Request(source_package=package, target_project=self.project)
        self.requests.append(submit_request)
        return submit_request

    def create_submit_request(self, project, package, text=None):
        """Commit a new revision of *package* and submit it to the target."""
        source_project = self.create_project(project)
        source_package = Package(name=package, project=source_project)
        source_package.create_commit(text=text)
        return self.submit_package(source_package)

    def create_staging(self, suffix, freeze=False, rings=None):
        """Create a staging subproject, optionally frozen and linked to rings."""
        ring_links = []
        if rings in (0, 1):
            ring_links.append(self.project + ":Rings:1-MinimalX")
        if rings == 0:
            # ring 0 stagings link both rings, bootstrap first
            ring_links.insert(0, self.project + ":Rings:0-Bootstrap")
        staging_name = self.project + ':Staging:' + suffix
        staging = Project(staging_name, project_links=ring_links)
        if freeze:
            FreezeCommand(self.api).perform(staging.name)
        self.projects['staging:{}'.format(suffix)] = staging
        return staging

    def __del__(self):
        """Best-effort cleanup of everything the workflow created."""
        try:
            self.remove()
        except Exception:
            # normally exceptions in destructors are ignored but a info
            # message is displayed. Make this a little more useful by
            # printing it into the capture log
            # (was a bare 'except:', which would also have swallowed
            # SystemExit/KeyboardInterrupt)
            traceback.print_exc(None, sys.stdout)

    def remove(self):
        """Tear down all projects, requests and groups created by this workflow."""
        print('deleting staging workflow')
        for created_project in self.projects.values():
            created_project.remove()
        for pending_request in self.requests:
            pending_request.revoke()
        for group_name in self.groups:
            group_url = osc.core.makeurl(APIURL, ['group', group_name])
            try:
                osc.core.http_DELETE(group_url)
            except HTTPError:
                # group may already be gone; cleanup is best effort
                pass
        print('done')
        if hasattr(self.api, '_invalidate_all'):
            self.api._invalidate_all()