예제 #1
0
def clone_do(apiurl_source, apiurl_target, project):
    """Clone a project's staging entities from one OBS instance to another."""
    print('clone {} from {} to {}'.format(project, apiurl_source, apiurl_target))

    try:
        # TODO Decide how to choose what to clone via args.

        # Starting from the Rings leaf walks the self-referencing project
        # chain without needing a proper solver. Disable while iterating
        # during development: the projects cannot be cleanly re-created
        # without more work.
        entity_clone(apiurl_source, apiurl_target,
                     ['source', project + ':Rings:1-MinimalX', '_meta'],
                     clone=project_clone)

        pseudometa_project, pseudometa_package = project_pseudometa_package(
            apiurl_source, project)
        entity_clone(apiurl_source, apiurl_target,
                     ['source', pseudometa_project, pseudometa_package, '_meta'],
                     clone=package_clone, after=package_clone_after)

        entity_clone(apiurl_source, apiurl_target,
                     ['source', project, 'drush', '_meta'],
                     clone=package_clone, after=package_clone_after)

        entity_clone(apiurl_source, apiurl_target,
                     ['group', 'opensuse-review-team'],
                     clone=group_clone)
    except HTTPError as error:
        # Dump the full response body since HTTP error messages can be cryptic.
        print(error.read())
        return 1
예제 #2
0
    def install_check(self, target_project_pair, arch, directories,
                      ignore=None, whitelist=None, parse=False, no_filter=False):
        """Run repo_checker.pl over the given repo directories for one arch.

        target_project_pair: (project, subproject) pair; used to group parsed
            results and to build the pseudometa file link in the comment.
        arch: architecture string passed straight to repo_checker.pl.
        directories: iterable of repository directories (joined with ',').
        ignore: optional iterable of rpm names written to a temp ignore file.
        whitelist: optional iterable of whitelisted names (joined with ',').
        parse: when True, failures are parsed into sections for commenting.
        no_filter: when True, pass --no-filter to the script.

        Returns a CheckResult: (True, None) on success, otherwise
        (False, markdown-formatted script output).
        """
        # Fix: the previous default of whitelist=[] was a shared mutable
        # default argument. It was only read here, but it is a latent bug
        # and an anti-pattern; use the None sentinel instead.
        if whitelist is None:
            whitelist = []
        self.logger.info('install check: start (ignore:{}, whitelist:{}, parse:{}, no_filter:{})'.format(
            bool(ignore), len(whitelist), parse, no_filter))

        with tempfile.NamedTemporaryFile() as ignore_file:
            # Print ignored rpms on separate lines in ignore file.
            # NOTE(review): NamedTemporaryFile defaults to binary mode, so
            # this write expects bytes items on Python 3 — confirm callers.
            if ignore:
                for item in ignore:
                    ignore_file.write(item + '\n')
                ignore_file.flush()

            # Invoke repo_checker.pl to perform an install check.
            script = os.path.join(SCRIPT_PATH, 'repo_checker.pl')
            parts = ['LC_ALL=C', 'perl', script, arch, ','.join(directories),
                     '-f', ignore_file.name, '-w', ','.join(whitelist)]
            if no_filter:
                parts.append('--no-filter')

            # Quote each part because the command line goes through a shell.
            parts = [pipes.quote(part) for part in parts]
            p = subprocess.Popen(' '.join(parts), shell=True,
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE, close_fds=True)
            stdout, stderr = p.communicate()

        if p.returncode:
            self.logger.info('install check: failed')
            if p.returncode == 126:
                # Exit code 126 is the script's signal for a corrupt mirror.
                self.logger.warning('mirror cache reset due to corruption')
                self._invalidate_all()
            elif parse:
                # Parse output for later consumption for posting comments.
                sections = self.install_check_parse(stdout)
                self.install_check_sections_group(
                    target_project_pair[0], target_project_pair[1], arch, sections)

            # Format output as markdown comment.
            parts = []

            stdout = stdout.strip().decode('utf-8')
            if stdout:
                parts.append('<pre>\n' + stdout + '\n' + '</pre>\n')
            stderr = stderr.strip().decode('utf-8')
            if stderr:
                parts.append('<pre>\n' + stderr + '\n' + '</pre>\n')

            pseudometa_project, pseudometa_package = project_pseudometa_package(
                self.apiurl, target_project_pair[0])
            filename = self.project_pseudometa_file_name(target_project_pair[0], target_project_pair[1])
            path = ['package', 'view_file', pseudometa_project, pseudometa_package, filename]
            header = '### [install check & file conflicts](/{})\n\n'.format('/'.join(path))
            return CheckResult(False, header + ('\n' + ('-' * 80) + '\n\n').join(parts))

        self.logger.info('install check: passed')
        return CheckResult(True, None)
    def install_check(self, target_project_pair, arch, directories,
                      ignore=None, whitelist=None, parse=False, no_filter=False):
        """Run repo_checker.pl over the given repo directories for one arch.

        target_project_pair: (project, subproject) pair; used to group parsed
            results and to build the pseudometa file link in the comment.
        arch: architecture string passed straight to repo_checker.pl.
        directories: iterable of repository directories (joined with ',').
        ignore: optional iterable of rpm names written to a temp ignore file.
        whitelist: optional iterable of whitelisted names (joined with ',').
        parse: when True, failures are parsed into sections for commenting.
        no_filter: when True, pass --no-filter to the script.

        Returns a CheckResult: (True, None) on success, otherwise
        (False, markdown-formatted script output).
        """
        # Fix: the previous default of whitelist=[] was a shared mutable
        # default argument. It was only read here, but it is a latent bug
        # and an anti-pattern; use the None sentinel instead.
        if whitelist is None:
            whitelist = []
        self.logger.info('install check: start (ignore:{}, whitelist:{}, parse:{}, no_filter:{})'.format(
            bool(ignore), len(whitelist), parse, no_filter))

        with tempfile.NamedTemporaryFile() as ignore_file:
            # Print ignored rpms on separate lines in ignore file.
            if ignore:
                for item in ignore:
                    ignore_file.write(item + '\n')
                ignore_file.flush()

            # Invoke repo_checker.pl to perform an install check.
            script = os.path.join(SCRIPT_PATH, 'repo_checker.pl')
            parts = ['LC_ALL=C', 'perl', script, arch, ','.join(directories),
                     '-f', ignore_file.name, '-w', ','.join(whitelist)]
            if no_filter:
                parts.append('--no-filter')

            # Quote each part because the command line goes through a shell.
            parts = [pipes.quote(part) for part in parts]
            p = subprocess.Popen(' '.join(parts), shell=True,
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE, close_fds=True)
            stdout, stderr = p.communicate()

        if p.returncode:
            self.logger.info('install check: failed')
            if p.returncode == 126:
                # Fix: Logger.warn is a deprecated alias; use warning().
                self.logger.warning('mirror cache reset due to corruption')
                self._invalidate_all()
            elif parse:
                # Parse output for later consumption for posting comments.
                sections = self.install_check_parse(stdout)
                self.install_check_sections_group(
                    target_project_pair[0], target_project_pair[1], arch, sections)

            # Format output as markdown comment.
            parts = []

            stdout = stdout.strip()
            if stdout:
                parts.append('<pre>\n' + stdout + '\n' + '</pre>\n')
            stderr = stderr.strip()
            if stderr:
                parts.append('<pre>\n' + stderr + '\n' + '</pre>\n')

            pseudometa_project, pseudometa_package = project_pseudometa_package(
                self.apiurl, target_project_pair[0])
            filename = self.project_pseudometa_file_name(target_project_pair[0], target_project_pair[1])
            path = ['package', 'view_file', pseudometa_project, pseudometa_package, filename]
            header = '### [install check & file conflicts](/{})\n\n'.format('/'.join(path))
            return CheckResult(False, header + ('\n' + ('-' * 80) + '\n\n').join(parts))

        self.logger.info('install check: passed')
        return CheckResult(True, None)
예제 #4
0
def revision_index(api):
    """Return a {commit date: revision} mapping for the pseudometa package.

    The mapping is built once from the package's commit log and memoized on
    the function object; subsequent calls return the cached dict. If fetching
    the log fails with HTTPError, the (possibly empty) cache is returned.
    """
    if not hasattr(revision_index, 'index'):
        revision_index.index = {}

        project, package = project_pseudometa_package(api.apiurl, api.project)
        try:
            root = ET.fromstringlist(
                get_commitlog(api.apiurl, project, package, None, format='xml'))
        # Fix: the exception object was bound as `e` but never used.
        except HTTPError:
            return revision_index.index

        for logentry in root.findall('logentry'):
            date = date_parse(logentry.find('date').text)
            revision_index.index[date] = logentry.get('revision')

    return revision_index.index
예제 #5
0
def revision_index(api):
    """Return a {commit date: revision} mapping for the pseudometa package.

    Built lazily from the package commit log and cached on the function
    object; a failed log fetch leaves (and returns) an empty cache.
    """
    if hasattr(revision_index, 'index'):
        return revision_index.index

    revision_index.index = index = {}
    project, package = project_pseudometa_package(api.apiurl, api.project)
    try:
        log_lines = get_commitlog(api.apiurl, project, package, None, format='xml')
        root = ET.fromstringlist(log_lines)
    except HTTPError:
        return index

    for entry in root.findall('logentry'):
        when = date_parse(entry.find('date').text)
        index[when] = entry.get('revision')

    return index
예제 #6
0
def main(args):
    """Ingest OBS metrics into InfluxDB according to the parsed CLI args."""
    global client
    client = InfluxDBClient(
        args.host, args.port, args.user, args.password, args.project)

    osc.conf.get_config(override_apiurl=args.apiurl)
    apiurl = osc.conf.config['apiurl']
    osc.conf.config['debug'] = args.debug

    # Make sure the target database exists before any points are written.
    client.create_database(client._database)

    metrics_release.ingest(client)
    if args.release_only:
        return

    # A dedicated persistent cache namespace keeps metrics runs isolated.
    _, package = project_pseudometa_package(apiurl, args.project)
    if args.wipe_cache:
        Cache.delete_all()
    if args.heavy_cache:
        Cache.PATTERNS[r'/search/request'] = sys.maxsize
        Cache.PATTERNS[r'/source/[^/]+/{}/_history'.format(package)] = sys.maxsize
    Cache.PATTERNS[r'/source/[^/]+/{}/[^/]+\?rev=.*'.format(package)] = sys.maxsize
    Cache.init('metrics')

    Config(apiurl, args.project)
    api = StagingAPI(apiurl, args.project)

    print('dashboard: wrote {:,} points'.format(ingest_dashboard(api)))

    # Reset the workaround counters before ingesting requests.
    global who_workaround_swap, who_workaround_miss
    who_workaround_swap = who_workaround_miss = 0

    request_points = ingest_requests(api, args.project)
    schedule_points = ingest_release_schedule(args.project)

    print('who_workaround_swap', who_workaround_swap)
    print('who_workaround_miss', who_workaround_miss)

    print('wrote {:,} points and {:,} annotation points to db'.format(
        request_points, schedule_points))
예제 #7
0
def main(args):
    """Ingest OBS metrics into InfluxDB according to the parsed CLI args."""
    global client
    client = InfluxDBClient(args.host, args.port, args.user, args.password, args.project)

    osc.conf.get_config(override_apiurl=args.apiurl)
    apiurl = osc.conf.config['apiurl']
    osc.conf.config['debug'] = args.debug

    # Ensure database exists.
    client.create_database(client._database)

    metrics_release.ingest(client)
    if args.release_only:
        return

    # Use separate cache since it is persistent.
    _, package = project_pseudometa_package(apiurl, args.project)
    if args.wipe_cache:
        Cache.delete_all()
    if args.heavy_cache:
        # Fix: sys.maxint was removed in Python 3. sys.maxsize exists on
        # both Python 2.6+ and 3 and serves the same "cache forever" intent.
        Cache.PATTERNS[r'/search/request'] = sys.maxsize
        Cache.PATTERNS[r'/source/[^/]+/{}/_history'.format(package)] = sys.maxsize
    Cache.PATTERNS[r'/source/[^/]+/{}/[^/]+\?rev=.*'.format(package)] = sys.maxsize
    Cache.init('metrics')

    Config(apiurl, args.project)
    api = StagingAPI(apiurl, args.project)

    print('dashboard: wrote {:,} points'.format(ingest_dashboard(api)))

    # Reset the workaround counters before ingesting requests.
    global who_workaround_swap, who_workaround_miss
    who_workaround_swap = who_workaround_miss = 0

    points_requests = ingest_requests(api, args.project)
    points_schedule = ingest_release_schedule(args.project)

    print('who_workaround_swap', who_workaround_swap)
    print('who_workaround_miss', who_workaround_miss)

    print('wrote {:,} points and {:,} annotation points to db'.format(
        points_requests, points_schedule))
예제 #8
0
def clone_do(apiurl_source, apiurl_target, project):
    """Clone a project's staging entities from one OBS instance to another."""
    print('clone {} from {} to {}'.format(project, apiurl_source,
                                          apiurl_target))

    try:
        # TODO Decide how to choose what to clone via args.

        # Rather than solve the self-referencing project graph properly, the
        # leaf is simply used to start the chain. Disable this when running
        # clone repeatedly during development since the projects cannot be
        # cleanly re-created without more work.
        entity_clone(apiurl_source, apiurl_target,
                     ['source', project + ':Rings:1-MinimalX', '_meta'],
                     clone=project_clone)

        pseudometa_project, pseudometa_package = project_pseudometa_package(apiurl_source, project)
        entity_clone(apiurl_source, apiurl_target,
                     ['source', pseudometa_project, pseudometa_package, '_meta'],
                     clone=package_clone, after=package_clone_after)

        entity_clone(apiurl_source, apiurl_target,
                     ['source', project, 'drush', '_meta'],
                     clone=package_clone, after=package_clone_after)

        entity_clone(apiurl_source, apiurl_target,
                     ['group', 'opensuse-review-team'],
                     clone=group_clone)
    except HTTPError as err:
        # Show the full response body; HTTP error messages can be cryptic.
        print(err.read())
        return 1
예제 #9
0
    def crawl(self):
        """Submit new build-succeeded packages from from_prj to to_prj.

        Picks up to submit_limit randomly-ordered packages that built
        successfully in self.from_prj and creates submit requests against
        self.to_prj. A package is skipped when it was deleted downstream,
        originates from the SLE base, is not new in the target, is a
        multi-spec sub-package, already has a request, or matches the
        devel-project / package skip lists. Multi-spec packages encountered
        along the way are printed at the end.
        """
        succeeded_packages = []
        succeeded_packages = self.get_build_succeeded_packages(self.from_prj)
        if not len(succeeded_packages) > 0:
            logging.info('No build succeeded package in %s' % self.from_prj)
            return

        # randomize the list
        random.shuffle(succeeded_packages)
        # get source packages from target
        target_packages = self.get_source_packages(self.to_prj)
        deleted_packages = self.get_deleted_packages(self.to_prj)
        # For openSUSE targets also honor deletions in the released versions
        # so dropped packages do not get re-submitted.
        if self.to_prj.startswith("openSUSE:"):
            for prd in OPENSUSE_RELEASED_VERSION:
                deleted_packages = deleted_packages + self.get_deleted_packages(prd)

        # The package skip list is stored in the Factory pseudometa package.
        pseudometa_project, pseudometa_package = project_pseudometa_package(self.apiurl, 'openSUSE:Factory')
        skip_pkgs_list = self.load_skip_pkgs_list(pseudometa_project, pseudometa_package).splitlines()

        ms_packages = []  # collect multi specs packages

        for i in range(0, min(int(self.submit_limit), len(succeeded_packages))):
            package = succeeded_packages[i]
            submit_ok = True

            if package in deleted_packages:
                logging.info('%s has been dropped from %s, ignore it!' % (package, self.to_prj))
                submit_ok = False

            if self.is_sle_base_pkgs(package) is True:
                logging.info('%s origin from SLE base, skip for now!' % package)
                submit_ok = False

            # make sure it is new package
            new_pkg = self.is_new_package(self.to_prj, package)
            if new_pkg is not True:
                logging.info('%s is not a new package, do not submit.' % package)
                submit_ok = False

            multi_specs = self.check_multiple_specfiles(self.factory, package)
            # None means the package does not exist in Factory at all.
            if multi_specs is None:
                logging.info('%s does not exist in %s' % (package, 'openSUSE:Factory'))
                submit_ok = False

            if multi_specs:
                if multi_specs['linkinfo']:
                    logging.info('%s in %s is sub-package of %s, skip it!' %
                                 (package, 'openSUSE:Factory', multi_specs['linkinfo']))
                    ms_packages.append(package)
                    submit_ok = False

                # Only submit a multi-spec package when every sibling spec
                # also built successfully.
                for spec in multi_specs['specs']:
                    if spec not in succeeded_packages:
                        logging.info('%s is sub-pacakge of %s but build failed, skip it!' % (spec, package))
                        submit_ok = False

            if not submit_ok:
                continue

            # make sure the package non-exist in target yet ie. expand=False
            if package not in target_packages:
                # make sure there is no request against same package
                request = self.get_request_list(package)
                if request:
                    logging.debug("There is a request to %s / %s already or it has been declined/revoked, skip!" %
                                  (package, self.to_prj))
                else:
                    logging.info("%d - Preparing submit %s to %s" % (i, package, self.to_prj))
                    # get devel project
                    devel_prj, devel_pkg = devel_project_get(self.apiurl, self.factory, package)
                    # check devel project does not in the skip list
                    if devel_prj in self.skip_devel_project_list:
                        # check the except packages list; an exception match
                        # overrides the devel-project skip.
                        match = None
                        for elem in self.except_pkgs_list:
                            m = re.search(elem, package)
                            if m is not None:
                                match = True

                        if match is not True:
                            logging.info('%s/%s is in the skip list, do not submit.' % (devel_prj, package))
                            continue
                        else:
                            pass

                    # check package does not in the skip list
                    match = None
                    for elem in skip_pkgs_list:
                        m = re.search(str(elem), package)
                        if m is not None:
                            match = True

                    if match is True:
                        logging.info('%s is in the skip list, do not submit.' % package)
                        continue
                    else:
                        pass

                    res = self.create_submitrequest(package)
                    if res and res is not None:
                        logging.info('Created request %s for %s' % (res, package))
                    else:
                        logging.error('Error occurred when creating submit request')
            else:
                logging.debug('%s is exist in %s, skip!' % (package, self.to_prj))
            # Throttle between iterations to avoid hammering the API.
            time.sleep(5)

        # dump multi specs packages
        print("Multi-specfile packages:")
        if ms_packages:
            for pkg in ms_packages:
                print(pkg)
        else:
            print('None')
    def crawl(self):
        """Submit new build-succeeded packages from from_prj to to_prj.

        Picks up to submit_limit randomly-ordered packages that built
        successfully in self.from_prj and creates submit requests against
        self.to_prj. A package is skipped when it was deleted downstream,
        originates from the SLE base, is not new in the target, is a
        multi-spec sub-package, already has a request, or matches the
        devel-project / package skip lists. Multi-spec packages encountered
        along the way are printed at the end.
        """
        succeeded_packages = []
        succeeded_packages = self.get_build_succeeded_packages(self.from_prj)
        if not len(succeeded_packages) > 0:
            logging.info('No build succeeded package in %s'%self.from_prj)
            return

        # randomize the list
        random.shuffle(succeeded_packages)
        # get source packages from target
        target_packages = self.get_source_packages(self.to_prj)
        deleted_packages = self.get_deleted_packages(self.to_prj)
        # Also honor deletions in the previous openSUSE release so dropped
        # packages do not get re-submitted.
        if self.to_prj.startswith("openSUSE:"):
            deleted_packages = deleted_packages + self.get_deleted_packages(OPENSUSE_PREVERSION)

        # The package skip list is stored in the Factory pseudometa package.
        pseudometa_project, pseudometa_package = project_pseudometa_package(self.apiurl, 'openSUSE:Factory')
        skip_pkgs_list = self.load_skip_pkgs_list(pseudometa_project, pseudometa_package).splitlines()

        ms_packages = [] # collect multi specs packages

        for i in range(0, min(int(self.submit_limit), len(succeeded_packages))):
            package = succeeded_packages[i]
            submit_ok = True

            if package in deleted_packages:
                logging.info('%s has been dropped from %s, ignore it!'%(package, self.to_prj))
                submit_ok = False

            if self.is_sle_base_pkgs(package) is True:
                logging.info('%s origin from SLE base, skip for now!'%package)
                submit_ok = False

            # make sure it is new package
            new_pkg = self.is_new_package(self.to_prj, package)
            if new_pkg is not True:
                logging.info('%s is not a new package, do not submit.' % package)
                submit_ok = False

            multi_specs = self.check_multiple_specfiles(self.factory, package)
            # None means the package does not exist in Factory at all.
            if multi_specs is None:
                logging.info('%s does not exist in %s'%(package, 'openSUSE:Factory'))
                submit_ok = False

            if multi_specs:
                if multi_specs['linkinfo']:
                    logging.info('%s in %s is sub-package of %s, skip it!'%(package, 'openSUSE:Factory', multi_specs['linkinfo']))
                    ms_packages.append(package)
                    submit_ok = False

                # Only submit a multi-spec package when every sibling spec
                # also built successfully.
                for spec in multi_specs['specs']:
                    if spec not in succeeded_packages:
                        logging.info('%s is sub-pacakge of %s but build failed, skip it!'%(spec, package))
                        submit_ok = False

            if not submit_ok:
                continue

            # make sure the package non-exist in target yet ie. expand=False
            if package not in target_packages:
                # make sure there is no request against same package
                request = self.get_request_list(package)
                if request:
                    logging.debug("There is a request to %s / %s already or it has been declined/revoked, skip!"%(package, self.to_prj))
                else:
                    logging.info("%d - Preparing submit %s to %s"%(i, package, self.to_prj))
                    # get devel project
                    devel_prj, devel_pkg = devel_project_get(self.apiurl, self.factory, package)
                    # check devel project does not in the skip list
                    if devel_prj in self.skip_devel_project_list:
                        # check the except packages list; an exception match
                        # overrides the devel-project skip.
                        match = None
                        for elem in self.except_pkgs_list:
                            m = re.search(elem, package)
                            if m is not None:
                                match = True

                        if match is not True:
                            logging.info('%s/%s is in the skip list, do not submit.' % (devel_prj, package))
                            continue
                        else:
                            pass

                    # check package does not in the skip list
                    match = None
                    for elem in skip_pkgs_list:
                        m = re.search(elem, package)
                        if m is not None:
                            match = True

                    if match is True:
                        logging.info('%s is in the skip list, do not submit.' % package)
                        continue
                    else:
                        pass

                    res = self.create_submitrequest(package)
                    if res and res is not None:
                        logging.info('Created request %s for %s' % (res, package))
                    else:
                        logging.error('Error occurred when creating submit request')
            else:
                logging.debug('%s is exist in %s, skip!'%(package, self.to_prj))

        # dump multi specs packages
        print("Multi-specfile packages:")
        if ms_packages:
            for pkg in ms_packages:
                print pkg
        else:
            print 'None'