Code example #1
def osrt_origin_lookup_file(project, previous=False):
    parts = [project, 'yaml']
    if previous:
        parts.insert(1, 'previous')
    lookup_name = '.'.join(parts)
    cache_dir = CacheManager.directory('origin-manager')
    return os.path.join(cache_dir, lookup_name)
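
Every example on this page obtains its on-disk location from CacheManager.directory. A minimal sketch of the presumed contract (the cache root path shown here is an assumption, not the verified osclib implementation):

import os


class CacheManager:
    """Sketch of the assumed contract of osclib.cache_manager.CacheManager."""

    @staticmethod
    def directory(*parts):
        # Join the caller-supplied parts under a per-user cache root and
        # make sure the directory exists before handing the path back.
        root = os.path.expanduser('~/.cache/openSUSE-release-tools')
        path = os.path.join(root, *parts)
        os.makedirs(path, exist_ok=True)
        return path
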
Code example #2
    def update_project(self, project):
        yaml_path = os.path.join(CacheManager.directory('legal-auto'), '{}.yaml'.format(project))
        try:
            with open(yaml_path, 'r') as file:
                self.pkg_cache = yaml.load(file, Loader=yaml.SafeLoader)
        except (IOError, EOFError):
            self.pkg_cache = {}

        self.packages = []
        self._query_sources(project)
        with open(yaml_path, 'w') as file:
            yaml.dump(self.pkg_cache, file)
        url = osc.core.makeurl(self.legaldb, ['products', project])
        request = REQ.patch(url, headers=self.legaldb_headers, data={'id': self.packages}).json()
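
The load-or-default pattern above (read a YAML cache, fall back to an empty dict on a missing or truncated file, write it back afterwards) recurs in several of these bots. A generic helper distilled from it, with hypothetical names:

import yaml


def load_yaml_cache(path):
    # yaml.safe_load returns None for an empty file, hence the `or {}`.
    try:
        with open(path) as f:
            return yaml.safe_load(f) or {}
    except (IOError, EOFError):
        return {}


def save_yaml_cache(path, data):
    with open(path, 'w') as f:
        yaml.dump(data, f)
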
Code example #3
    def init(directory='main'):
        if Cache.CACHE_DIR:
            # Stick with the first initialization to allow for StagingAPI to
            # ensure always enabled, but allow parent to change directory.
            return

        Cache.CACHE_DIR = CacheManager.directory('request', directory)

        Cache.patterns = []
        for pattern in Cache.PATTERNS:
            Cache.patterns.append(re.compile(pattern))

        # Replace http_request with wrapper function which needs a stored
        # version of the original function to call.
        if not hasattr(osc.core, '_http_request'):
            osc.core._http_request = osc.core.http_request
            osc.core.http_request = http_request
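
The hasattr guard at the end is what makes init safe to call repeatedly: the original osc.core.http_request is stashed exactly once, so the caching wrapper never ends up wrapping itself. The same idempotent monkey-patch pattern in isolation (hypothetical helper name):

def patch_once(obj, name, make_wrapper):
    # Stash the original under a private attribute the first time only,
    # so repeated calls do not stack wrappers on top of wrappers.
    backup = '_' + name
    if not hasattr(obj, backup):
        original = getattr(obj, name)
        setattr(obj, backup, original)
        setattr(obj, name, make_wrapper(original))

# e.g. patch_once(osc.core, 'http_request', lambda original: wrapper)
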
Code example #4
    def update_project(self, project):
        yaml_path = os.path.join(CacheManager.directory('legal-auto'),
                                 '{}.yaml'.format(project))
        try:
            with open(yaml_path, 'r') as file:
                self.pkg_cache = yaml.load(file, Loader=yaml.SafeLoader)
        except (IOError, EOFError):
            self.pkg_cache = {}

        self.packages = []
        self._query_sources_for_product_import(project)
        with open(yaml_path, 'w') as file:
            yaml.dump(self.pkg_cache, file)
        url = osc.core.makeurl(self.legaldb, ['products', project])
        REQ.patch(url,
                  headers=self.legaldb_headers,
                  data={'id': self.packages})
Code example #5
    def init(directory='main'):
        if Cache.CACHE_DIR:
            # Stick with the first initialization to allow for StagingAPI to
            # ensure always enabled, but allow parent to change directory.
            return

        Cache.CACHE_DIR = CacheManager.directory('request', directory)

        Cache.patterns = []

        if str2bool(os.environ.get('OSRT_DISABLE_CACHE', '')):
            if conf.config['debug']:
                print('CACHE_DISABLE via $OSRT_DISABLE_CACHE', file=sys.stderr)
            return

        for pattern in Cache.PATTERNS:
            Cache.patterns.append(re.compile(pattern))

        # Replace http_request with wrapper function which needs a stored
        # version of the original function to call.
        if not hasattr(osc.core, '_http_request'):
            osc.core._http_request = osc.core.http_request
            osc.core.http_request = http_request
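
This later revision adds an opt-out: when $OSRT_DISABLE_CACHE is set, Cache.patterns stays empty, so no request is ever cached. str2bool comes from osclib.conf; a plausible reading of its semantics (an assumption, not the verified implementation) is:

def str2bool(value):
    # The default of '' (variable unset) maps to False.
    return (value or '').strip().lower() in ('1', 'yes', 'true', 'on')
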
Code example #6
def update_project(apiurl, project):
    # Cache dir specific to hostname and project.
    host = urlparse(apiurl).hostname
    cache_dir = CacheManager.directory('update_repo_handler', host, project)
    repo_dir = os.path.join(cache_dir, '000update-repos')

    # development aid
    checkout = True
    if checkout:
        if os.path.exists(cache_dir):
            shutil.rmtree(cache_dir)
        os.makedirs(cache_dir)

        osc.core.checkout_package(apiurl, project, '000update-repos', expand_link=True, prj_dir=cache_dir)

    root = yaml.safe_load(open(os.path.join(repo_dir, 'config.yml')))
    for item in root:
        key = list(item)[0]
        opts = item[key]
        # cast 15.1 to string :)
        key = str(key)
        if not opts['url'].endswith('/'):
            opts['url'] += '/'

        if opts.get('refresh', False):
            opts['build'] = dump_solv_build(opts['url'])
            path = '{}_{}.packages'.format(key, opts['build'])
        else:
            path = key + '.packages'
        packages_file = os.path.join(repo_dir, path)

        if os.path.exists(packages_file + '.xz'):
            print(path, 'already exists')
            continue

        solv_file = packages_file + '.solv'
        dump_solv(solv_file, opts['url'])

        pool = solv.Pool()
        pool.setarch()

        if opts.get('refresh', False):
            for file in glob.glob(os.path.join(repo_dir, '{}_*.packages.xz'.format(key))):
                repo = pool.add_repo(file)
                defvendorid = repo.meta.lookup_id(solv.SUSETAGS_DEFAULTVENDOR)
                f = tempfile.TemporaryFile()
                # FIXME: port to lzma module with python3
                st = subprocess.call(['xz', '-cd', file], stdout=f.fileno())
                os.lseek(f.fileno(), 0, os.SEEK_SET)
                repo.add_susetags(solv.xfopen_fd(None, f.fileno()), defvendorid, None, solv.Repo.REPO_NO_INTERNALIZE|solv.Repo.SUSETAGS_RECORD_SHARES)

        repo1 = pool.add_repo(''.join(random.choice(string.ascii_letters) for _ in range(5)))
        repo1.add_solv(solv_file)

        print_repo_delta(pool, repo1, open(packages_file, 'w'))
        subprocess.call(['xz', '-9', packages_file])
        os.unlink(solv_file)

        url = osc.core.makeurl(apiurl, ['source', project, '000update-repos', path + '.xz'])
        osc.core.http_PUT(url, data=open(packages_file + '.xz', 'rb').read())

        del pool
Code example #7
import os
from os import path
from osclib.cache_manager import CacheManager
import subprocess

# Git will not be happy if pruned, but not used enough to be worth excluding.
CACHE_DIR = CacheManager.directory('git')


def clone(url, directory):
    return_code = subprocess.call(['git', 'clone', url, directory])
    if return_code != 0:
        raise Exception('Failed to clone {}'.format(url))


def sync(cache_dir, repo_url, message=None):
    cwd = os.getcwd()
    devnull = open(os.devnull, 'wb')

    # Ensure git-sync tool is available.
    git_sync_dir = path.join(cache_dir, 'git-sync')
    git_sync_exec = path.join(git_sync_dir, 'git-sync')
    if not path.exists(git_sync_dir):
        os.makedirs(git_sync_dir)
        clone('https://github.com/simonthum/git-sync.git', git_sync_dir)
    else:
        os.chdir(git_sync_dir)
        subprocess.call(['git', 'pull', 'origin', 'master'],
                        stdout=devnull,
                        stderr=devnull)
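
sync saves os.getcwd() up front because os.chdir mutates process-global state, presumably so it can be restored later. An alternative that avoids the chdir dance entirely is to hand cwd= to subprocess.call; a sketch of the same pull step written that way:

import os
import subprocess


def pull_git_sync(git_sync_dir):
    # Run the pull inside git_sync_dir without touching the process cwd.
    with open(os.devnull, 'wb') as devnull:
        subprocess.call(['git', 'pull', 'origin', 'master'],
                        cwd=git_sync_dir, stdout=devnull, stderr=devnull)
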
Code example #8
#!/usr/bin/python3

import argparse
import os
from osclib.cache_manager import CacheManager
import subprocess
import sys

CACHE_DIR = CacheManager.directory('k8s-secret')
SCRIPT_PATH = os.path.dirname(os.path.realpath(__file__))


def secret_create(cache_file):
    environment = {'OSCRC': cache_file}

    print('Username: ', end='')
    environment['OBS_USER'] = input()

    print('Password: ', end='')
    environment['OBS_PASS'] = input()

    osc_init = os.path.join(SCRIPT_PATH, 'dist/ci/osc-init')
    subprocess.Popen([osc_init], env=environment).wait()

def secret_apply(prefix, cache_file):
    print(subprocess.check_output([
        'kubectl', 'create', 'secret', 'generic',
        '{}-oscrc'.format(prefix), '--from-file={}={}'.format('.oscrc', cache_file)]))

def main(args):
    cache_file = os.path.join(CACHE_DIR, args.prefix)
Code example #9
import logging
import os
from collections import namedtuple

from osc import conf
from osclib.cache_manager import CacheManager
from osclib.core import project_pseudometa_file_load
from osclib.core import project_pseudometa_package
from osclib.core import repository_path_search
from osclib.core import repository_path_expand
from osclib.core import repositories_states
from osclib.core import repository_arch_state
from osclib.core import repositories_published
from osclib.core import target_archs
from osclib.comments import CommentAPI
from osclib.memoize import memoize
from osclib.util import sha1_short
from osclib.stagingapi import StagingAPI

import ReviewBot

CACHEDIR = CacheManager.directory('repository-meta')
SCRIPT_PATH = os.path.dirname(os.path.realpath(__file__))
CheckResult = namedtuple('CheckResult', ('success', 'comment'))
INSTALL_REGEX = r"^(?:can't install (.*?)|found conflict of (.*?) with (.*?)):$"
InstallSection = namedtuple('InstallSection', ('binaries', 'text'))

ERROR_REPO_SPECIFIED = 'a repository must be specified via OSRT:Config main-repo for {}'

class InstallChecker(object):
    def __init__(self, api, config):
        self.api = api
        self.config = conf.config[api.project]
        self.logger = logging.getLogger('InstallChecker')
        self.commentapi = CommentAPI(api.apiurl)

        self.arch_whitelist = self.config.get('repo_checker-arch-whitelist')
Code example #10
File: tool.py Project: openSUSE/osc-plugin-factory
    def update_and_solve_target(self, api, target_project, target_config, main_repo,
                                project, scope, force, no_checkout,
                                only_release_packages, stop_after_solve):
        self.all_architectures = target_config.get('pkglistgen-archs').split(' ')
        self.repos = self.expand_repos(project, main_repo)
        print('[{}] {}/{}: update and solve'.format(scope, project, main_repo))

        group = target_config.get('pkglistgen-group', '000package-groups')
        product = target_config.get('pkglistgen-product', '000product')
        release = target_config.get('pkglistgen-release', '000release-packages')
        oldrepos = target_config.get('pkglistgen-repos', '000update-repos')

        url = api.makeurl(['source', project])
        packages = ET.parse(http_GET(url)).getroot()
        if packages.find('entry[@name="{}"]'.format(product)) is None:
            if not self.dry_run:
                undelete_package(api.apiurl, project, product, 'revive')
            # TODO disable build.
            print('{} undeleted, skip dvd until next cycle'.format(product))
            return
        elif not force:
            root = ET.fromstringlist(show_results_meta(api.apiurl, project, product,
                                                       repository=[main_repo], multibuild=True))
            if len(root.xpath('result[@state="building"]')) or len(root.xpath('result[@state="dirty"]')):
                print('{}/{} build in progress'.format(project, product))
                return

        drop_list = api.item_exists(project, oldrepos)
        checkout_list = [group, product, release]
        if drop_list and not only_release_packages:
            checkout_list.append(oldrepos)

        if packages.find('entry[@name="{}"]'.format(release)) is None:
            if not self.dry_run:
                undelete_package(api.apiurl, project, release, 'revive')
            print('{} undeleted, skip dvd until next cycle'.format(release))
            return

        # Cache dir specific to hostname and project.
        host = urlparse(api.apiurl).hostname
        cache_dir = CacheManager.directory('pkglistgen', host, project)

        if not no_checkout:
            if os.path.exists(cache_dir):
                shutil.rmtree(cache_dir)
            os.makedirs(cache_dir)

        group_dir = os.path.join(cache_dir, group)
        product_dir = os.path.join(cache_dir, product)
        release_dir = os.path.join(cache_dir, release)
        oldrepos_dir = os.path.join(cache_dir, oldrepos)

        for package in checkout_list:
            if no_checkout:
                print('Skipping checkout of {}/{}'.format(project, package))
                continue
            checkout_package(api.apiurl, project, package, expand_link=True, prj_dir=cache_dir)

        file_utils.unlink_all_except(release_dir, ['weakremovers.inc'])
        if not only_release_packages:
            file_utils.unlink_all_except(product_dir)
        file_utils.copy_directory_contents(group_dir, product_dir,
                                           ['supportstatus.txt', 'groups.yml',
                                            'reference-unsorted.yml', 'reference-summary.yml',
                                            'package-groups.changes'])
        file_utils.change_extension(product_dir, '.spec.in', '.spec')
        file_utils.change_extension(product_dir, '.product.in', '.product')

        self.input_dir = group_dir
        self.output_dir = product_dir

        print('-> do_update')
        # make sure we only calculate existing architectures
        self.filter_architectures(target_archs(api.apiurl, project, main_repo))
        self.update_repos(self.filtered_architectures)

        if only_release_packages:
            self.load_all_groups()
            self.write_group_stubs()
        else:
            summary = self.solve_project(ignore_unresolvable=str2bool(target_config.get('pkglistgen-ignore-unresolvable')),
                                         ignore_recommended=str2bool(target_config.get('pkglistgen-ignore-recommended')),
                                         locale=target_config.get('pkglistgen-local'),
                                         locales_from=target_config.get('pkglistgen-locales-from'))

        if stop_after_solve:
            return

        if drop_list:
            weakremovers_file = os.path.join(release_dir, 'weakremovers.inc')
            self.create_weakremovers(project, target_config, oldrepos_dir, output=open(weakremovers_file, 'w'))

        delete_products = target_config.get('pkglistgen-delete-products', '').split(' ')
        file_utils.unlink_list(product_dir, delete_products)

        print('-> product service')
        product_version = attribute_value_load(api.apiurl, project, 'ProductVersion')
        if not product_version:
            # for stagings the product version doesn't matter (I hope)
            product_version = '1'
        for product_file in glob.glob(os.path.join(product_dir, '*.product')):
            self.replace_product_version(product_file, product_version)
            print(subprocess.check_output(
                [PRODUCT_SERVICE, product_file, product_dir, project], encoding='utf-8'))

        for delete_kiwi in target_config.get('pkglistgen-delete-kiwis-{}'.format(scope), '').split(' '):
            delete_kiwis = glob.glob(os.path.join(product_dir, delete_kiwi))
            file_utils.unlink_list(product_dir, delete_kiwis)
        if scope == 'staging':
            self.strip_medium_from_staging(product_dir)

        spec_files = glob.glob(os.path.join(product_dir, '*.spec'))
        file_utils.move_list(spec_files, release_dir)
        inc_files = glob.glob(os.path.join(group_dir, '*.inc'))
        # filter special inc file
        inc_files = filter(lambda file: file.endswith('weakremovers.inc'), inc_files)
        file_utils.move_list(inc_files, release_dir)

        # do not overwrite weakremovers.inc if it exists
        # we will commit there afterwards if needed
        if not os.path.exists(os.path.join(release_dir, 'weakremovers.inc')):
            file_utils.move_list([os.path.join(group_dir, 'weakremovers.inc')], release_dir)

        file_utils.multibuild_from_glob(release_dir, '*.spec')
        self.build_stub(release_dir, 'spec')
        self.commit_package(release_dir)

        if only_release_packages:
            return

        file_utils.multibuild_from_glob(product_dir, '*.kiwi')
        self.build_stub(product_dir, 'kiwi')
        self.commit_package(product_dir)

        error_output = b''
        reference_summary = os.path.join(group_dir, 'reference-summary.yml')
        if os.path.isfile(reference_summary):
            summary_file = os.path.join(product_dir, 'summary.yml')
            with open(summary_file, 'w') as f:
                f.write('# Summary of packages in groups')
                for group in sorted(summary):
                    # the unsorted group should appear filtered by
                    # unneeded.yml - so we need the content of unsorted.yml
                    # not unsorted.group (this grew a little unnaturally)
                    if group == 'unsorted':
                        continue
                    f.write('\n' + group + ':\n')
                    for package in sorted(summary[group]):
                        f.write('  - ' + package + '\n')

            try:
                error_output += subprocess.check_output(['diff', '-u', reference_summary, summary_file])
            except subprocess.CalledProcessError as e:
                error_output += e.output
            reference_unsorted = os.path.join(group_dir, 'reference-unsorted.yml')
            unsorted_file = os.path.join(product_dir, 'unsorted.yml')
            try:
                error_output += subprocess.check_output(['diff', '-u', reference_unsorted, unsorted_file])
            except subprocess.CalledProcessError as e:
                error_output += e.output

        if len(error_output) > 0:
            self.logger.error('Difference in yml:\n' + error_output.decode('utf-8'))
            return True
Code example #11
import logging
import os

import ToolBase

from osclib.conf import str2bool
from osclib.core import repository_path_expand
from osclib.core import repository_arch_state
from osclib.cache_manager import CacheManager

from urllib.parse import urlparse

from pkglistgen import file_utils
from pkglistgen.group import Group

SCRIPT_PATH = os.path.dirname(os.path.realpath(__file__))

PRODUCT_SERVICE = '/usr/lib/obs/service/create_single_product'

# share header cache with repochecker
CACHEDIR = CacheManager.directory('repository-meta')

class PkgListGen(ToolBase.ToolBase):

    def __init__(self):
        ToolBase.ToolBase.__init__(self)
        self.logger = logging.getLogger(__name__)
        self.reset()

    def reset(self):
        # package -> supportstatus
        self.packages = dict()
        self.groups = dict()
        self._supportstatus = None
        self.input_dir = '.'
        self.output_dir = '.'
Code example #12
from datetime import datetime
import fcntl
from functools import wraps
import os
from osclib.cache_manager import CacheManager
import shelve
try:
    import cPickle as pickle
except ImportError:
    import pickle

# Where the cache files are stored
CACHEDIR = CacheManager.directory('memoize')


def memoize(ttl=None, session=False, add_invalidate=False):
    """Decorator function to implement a persistent cache.

    >>> @memoize()
    ... def test_func(a):
    ...     return a

    Internally, the memoized function has a cache:

    >>> cache = [c.cell_contents for c in test_func.func_closure if 'sync' in dir(c.cell_contents)][0]
    >>> 'sync' in dir(cache)
    True

    There is a limit of the size of the cache

    >>> for k in cache:
Code example #13
File: git.py Project: dirkmueller/osc-plugin-factory
import os
from os import path
from osclib.cache_manager import CacheManager
import subprocess

# Git will not be happy if pruned, but not used enough to be worth excluding.
CACHE_DIR = CacheManager.directory('git')

def clone(url, directory):
    return_code = subprocess.call(['git', 'clone', url, directory])
    if return_code != 0:
        raise Exception('Failed to clone {}'.format(url))

def sync(cache_dir, repo_url, message=None):
    cwd = os.getcwd()
    devnull = open(os.devnull, 'wb')

    # Ensure git-sync tool is available.
    git_sync_dir = path.join(cache_dir, 'git-sync')
    git_sync_exec = path.join(git_sync_dir, 'git-sync')
    if not path.exists(git_sync_dir):
        os.makedirs(git_sync_dir)
        clone('https://github.com/simonthum/git-sync.git', git_sync_dir)
    else:
        os.chdir(git_sync_dir)
        subprocess.call(['git', 'pull', 'origin', 'master'], stdout=devnull, stderr=devnull)

    repo_name = path.basename(path.normpath(repo_url))
    repo_dir = path.join(cache_dir, repo_name)
    if not path.exists(repo_dir):
        os.makedirs(repo_dir)
Code example #14
File: tool.py Project: gyr/openSUSE-release-tools
    def update_and_solve_target(self, api, target_project, target_config,
                                main_repo, project, scope, force, no_checkout,
                                only_release_packages, stop_after_solve):
        self.all_architectures = target_config.get('pkglistgen-archs').split(
            ' ')
        self.use_newest_version = str2bool(
            target_config.get('pkglistgen-use-newest-version', 'False'))
        self.repos = self.expand_repos(project, main_repo)
        logging.debug('[{}] {}/{}: update and solve'.format(
            scope, project, main_repo))

        group = target_config.get('pkglistgen-group', '000package-groups')
        product = target_config.get('pkglistgen-product', '000product')
        release = target_config.get('pkglistgen-release',
                                    '000release-packages')
        oldrepos = target_config.get('pkglistgen-repos', '000update-repos')

        url = api.makeurl(['source', project])
        packages = ET.parse(http_GET(url)).getroot()
        if packages.find('entry[@name="{}"]'.format(product)) is None:
            if not self.dry_run:
                undelete_package(api.apiurl, project, product, 'revive')
            # TODO disable build.
            logging.info(
                '{} undeleted, skip dvd until next cycle'.format(product))
            return
        elif not force:
            root = ET.fromstringlist(
                show_results_meta(api.apiurl,
                                  project,
                                  product,
                                  repository=[main_repo],
                                  multibuild=True))
            if len(root.xpath('result[@state="building"]')) or len(
                    root.xpath('result[@state="dirty"]')):
                logging.info('{}/{} build in progress'.format(
                    project, product))
                return

        drop_list = api.item_exists(project, oldrepos)
        checkout_list = [group, product, release]
        if drop_list and not only_release_packages:
            checkout_list.append(oldrepos)

        if packages.find('entry[@name="{}"]'.format(release)) is None:
            if not self.dry_run:
                undelete_package(api.apiurl, project, release, 'revive')
            logging.info(
                '{} undeleted, skip dvd until next cycle'.format(release))
            return

        # Cache dir specific to hostname and project.
        host = urlparse(api.apiurl).hostname
        cache_dir = CacheManager.directory('pkglistgen', host, project)

        if not no_checkout:
            if os.path.exists(cache_dir):
                shutil.rmtree(cache_dir)
            os.makedirs(cache_dir)

        group_dir = os.path.join(cache_dir, group)
        product_dir = os.path.join(cache_dir, product)
        release_dir = os.path.join(cache_dir, release)
        oldrepos_dir = os.path.join(cache_dir, oldrepos)

        self.input_dir = group_dir
        self.output_dir = product_dir

        for package in checkout_list:
            if no_checkout:
                logging.debug('Skipping checkout of {}/{}'.format(
                    project, package))
                continue
            checkout_package(api.apiurl,
                             project,
                             package,
                             expand_link=True,
                             prj_dir=cache_dir,
                             outdir=os.path.join(cache_dir, package))

        file_utils.unlink_all_except(release_dir, ['weakremovers.inc'])
        if not only_release_packages:
            file_utils.unlink_all_except(product_dir)
        ignore_list = [
            'supportstatus.txt', 'summary-staging.txt',
            'package-groups.changes'
        ]
        ignore_list += self.group_input_files()
        file_utils.copy_directory_contents(group_dir, product_dir, ignore_list)
        file_utils.change_extension(product_dir, '.spec.in', '.spec')
        file_utils.change_extension(product_dir, '.product.in', '.product')

        logging.debug('-> do_update')
        # make sure we only calculate existing architectures
        self.filter_architectures(target_archs(api.apiurl, project, main_repo))
        self.update_repos(self.filtered_architectures)

        if only_release_packages:
            self.load_all_groups()
            self.write_group_stubs()
        else:
            summary = self.solve_project(
                ignore_unresolvable=str2bool(
                    target_config.get('pkglistgen-ignore-unresolvable')),
                ignore_recommended=str2bool(
                    target_config.get('pkglistgen-ignore-recommended')),
                locale=target_config.get('pkglistgen-locale'),
                locales_from=target_config.get('pkglistgen-locales-from'))

        if stop_after_solve:
            return

        if drop_list and not only_release_packages:
            weakremovers_file = os.path.join(release_dir, 'weakremovers.inc')
            try:
                self.create_weakremovers(project,
                                         target_config,
                                         oldrepos_dir,
                                         output=open(weakremovers_file, 'w'))
            except MismatchedRepoException:
                logging.error(
                    "Failed to create weakremovers.inc due to mismatch in repos - project most likey started building again."
                )
                return

        delete_products = target_config.get('pkglistgen-delete-products',
                                            '').split(' ')
        file_utils.unlink_list(product_dir, delete_products)

        logging.debug('-> product service')
        product_version = attribute_value_load(api.apiurl, project,
                                               'ProductVersion')
        if not product_version:
            # for stagings the product version doesn't matter (I hope)
            product_version = '1'
        for product_file in glob.glob(os.path.join(product_dir, '*.product')):
            self.replace_product_version(product_file, product_version)
            logging.debug(
                subprocess.check_output(
                    [PRODUCT_SERVICE, product_file, product_dir, project],
                    encoding='utf-8'))

        for delete_kiwi in target_config.get(
                'pkglistgen-delete-kiwis-{}'.format(scope), '').split(' '):
            delete_kiwis = glob.glob(os.path.join(product_dir, delete_kiwi))
            file_utils.unlink_list(product_dir, delete_kiwis)
        if scope == 'staging':
            self.strip_medium_from_staging(product_dir)

        spec_files = glob.glob(os.path.join(product_dir, '*.spec'))
        file_utils.move_list(spec_files, release_dir)
        inc_files = glob.glob(os.path.join(group_dir, '*.inc'))
        # filter special inc file
        inc_files = filter(lambda file: file.endswith('weakremovers.inc'),
                           inc_files)
        file_utils.move_list(inc_files, release_dir)

        # do not overwrite weakremovers.inc if it exists
        # we will commit there afterwards if needed
        if os.path.exists(os.path.join(group_dir, 'weakremovers.inc')) and \
           not os.path.exists(os.path.join(release_dir, 'weakremovers.inc')):
            file_utils.move_list([os.path.join(group_dir, 'weakremovers.inc')],
                                 release_dir)

        file_utils.multibuild_from_glob(release_dir, '*.spec')
        self.build_stub(release_dir, 'spec')
        self.commit_package(release_dir)

        if only_release_packages:
            return

        file_utils.multibuild_from_glob(product_dir, '*.kiwi')
        self.build_stub(product_dir, 'kiwi')

        reference_summary = os.path.join(group_dir, f'summary-{scope}.txt')
        if os.path.isfile(reference_summary):
            summary_file = os.path.join(product_dir, f'summary-{scope}.txt')
            output = []
            for group in summary:
                for package in sorted(summary[group]):
                    output.append(f'{package}:{group}')

            with open(summary_file, 'w') as f:
                for line in sorted(output):
                    f.write(line + '\n')

        self.commit_package(product_dir)

        if os.path.isfile(reference_summary):
            return self.comment.handle_package_diff(project, reference_summary,
                                                    summary_file)
Code example #15
    def _pkl_path(self):
        return CacheManager.directory('legal-auto')
Code example #16
def update_project(apiurl, project, fixate=None):
    # Cache dir specific to hostname and project.
    host = urlparse(apiurl).hostname
    cache_dir = CacheManager.directory('update_repo_handler', host, project)
    repo_dir = os.path.join(cache_dir, '000update-repos')

    # development aid
    checkout = True
    if checkout:
        if os.path.exists(cache_dir):
            shutil.rmtree(cache_dir)
        os.makedirs(cache_dir)

        osc.core.checkout_package(apiurl,
                                  project,
                                  '000update-repos',
                                  expand_link=True,
                                  prj_dir=cache_dir)

    package = osc.core.Package(repo_dir)

    root = yaml.safe_load(open(os.path.join(repo_dir, 'config.yml')))
    if fixate:
        return fixate_target(root, package, fixate)

    for item in root:
        key = list(item)[0]
        opts = item[key]
        # cast 15.1 to string :)
        key = str(key)
        if not opts['url'].endswith('/'):
            opts['url'] += '/'

        if opts.get('refresh', False):
            opts['build'] = dump_solv_build(opts['url'])
            path = '{}_{}.packages'.format(key, opts['build'])
        else:
            path = key + '.packages'
        packages_file = os.path.join(repo_dir, path)

        if opts.get('refresh', False):
            oldfiles = target_files(repo_dir, key)
            if len(oldfiles) > 10:
                oldest = oldfiles[-1]
                if oldest.count('and_before') > 1:
                    raise Exception('The oldest is already a compacted file')
                oldest = oldest.replace('.packages.xz', '_and_before.packages')
                oldest = oldest.replace('.packages.zst',
                                        '_and_before.packages')
                merge_susetags(oldest, oldfiles)
                for file in oldfiles:
                    os.unlink(file)
                    package.delete_file(os.path.basename(file))
                subprocess.check_call(['zstd', '-19', '--rm', oldest])
                package.addfile(os.path.basename(oldest) + ".zst")

        if os.path.exists(packages_file +
                          '.zst') or os.path.exists(packages_file + '.xz'):
            print(path, 'already exists')
            continue

        solv_file = packages_file + '.solv'
        dump_solv(solv_file, opts['url'])

        pool = solv.Pool()
        pool.setarch()

        if opts.get('refresh', False):
            for file in target_files(repo_dir, key):
                file_utils.add_susetags(pool, file)

        repo1 = pool.add_repo(''.join(
            random.choice(string.ascii_letters) for _ in range(5)))
        repo1.add_solv(solv_file)

        print_repo_delta(pool, repo1, open(packages_file, 'w'))
        subprocess.call(['zstd', '-19', '--rm', packages_file])
        os.unlink(solv_file)

        package.addfile(os.path.basename(path + '.zst'))
        del pool

    package.commit('Automatic update')
Code example #17
    def update_and_solve_target(self, api, target_project, target_config, main_repo,
                                project, scope, force, no_checkout,
                                only_release_packages, stop_after_solve):
        self.all_architectures = target_config.get('pkglistgen-archs').split(' ')
        ignore_repos = set(target_config.get('pkglistgen-ignore_repos', '').split(' '))
        self.repos = [r for r in self.expand_repos(project, main_repo) if r[0] != project or r[1] not in ignore_repos]
        print('[{}] {}/{}: update and solve'.format(scope, project, main_repo))

        group = target_config.get('pkglistgen-group', '000package-groups')
        product = target_config.get('pkglistgen-product', '000product')
        release = target_config.get('pkglistgen-release', '000release-packages')
        oldrepos = target_config.get('pkglistgen-repos', '000update-repos')

        url = api.makeurl(['source', project])
        packages = ET.parse(http_GET(url)).getroot()
        if packages.find('entry[@name="{}"]'.format(product)) is None:
            if not self.dry_run:
                undelete_package(api.apiurl, project, product, 'revive')
            # TODO disable build.
            print('{} undeleted, skip dvd until next cycle'.format(product))
            return
        elif not force:
            root = ET.fromstringlist(show_results_meta(api.apiurl, project, product,
                                                       repository=[main_repo], multibuild=True))
            if len(root.xpath('result[@state="building"]')) or len(root.xpath('result[@state="dirty"]')):
                print('{}/{} build in progress'.format(project, product))
                return

        drop_list = api.item_exists(project, oldrepos)
        checkout_list = [group, product, release]
        if drop_list and not only_release_packages:
            checkout_list.append(oldrepos)

        if packages.find('entry[@name="{}"]'.format(release)) is None:
            if not self.dry_run:
                undelete_package(api.apiurl, project, release, 'revive')
            print('{} undeleted, skip dvd until next cycle'.format(release))
            return

        # Cache dir specific to hostname and project.
        host = urlparse(api.apiurl).hostname
        cache_dir = CacheManager.directory('pkglistgen', host, project)

        if not no_checkout:
            if os.path.exists(cache_dir):
                shutil.rmtree(cache_dir)
            os.makedirs(cache_dir)

        group_dir = os.path.join(cache_dir, group)
        product_dir = os.path.join(cache_dir, product)
        release_dir = os.path.join(cache_dir, release)
        oldrepos_dir = os.path.join(cache_dir, oldrepos)

        for package in checkout_list:
            if no_checkout:
                print('Skipping checkout of {}/{}'.format(project, package))
                continue
            checkout_package(api.apiurl, project, package, expand_link=True, prj_dir=cache_dir)

        file_utils.unlink_all_except(release_dir, ['weakremovers.inc'])
        if not only_release_packages:
            file_utils.unlink_all_except(product_dir)
        file_utils.copy_directory_contents(group_dir, product_dir,
                                           ['supportstatus.txt', 'groups.yml',
                                            'reference-unsorted.yml', 'reference-summary.yml',
                                            'package-groups.changes'])
        file_utils.change_extension(product_dir, '.spec.in', '.spec')
        file_utils.change_extension(product_dir, '.product.in', '.product')

        self.input_dir = group_dir
        self.output_dir = product_dir

        print('-> do_update')
        # make sure we only calculate existing architectures
        self.filter_architectures(target_archs(api.apiurl, project, main_repo))
        self.update_repos(self.filtered_architectures)

        if only_release_packages:
            self.load_all_groups()
            self.write_group_stubs()
        else:
            summary = self.solve_project(ignore_unresolvable=str2bool(target_config.get('pkglistgen-ignore-unresolvable')),
                                         ignore_recommended=str2bool(target_config.get('pkglistgen-ignore-recommended')),
                                         locale=target_config.get('pkglistgen-locale'),
                                         locales_from=target_config.get('pkglistgen-locales-from'))

        if stop_after_solve:
            return

        if drop_list:
            weakremovers_file = os.path.join(release_dir, 'weakremovers.inc')
            self.create_weakremovers(project, target_config, oldrepos_dir, output=open(weakremovers_file, 'w'))

        delete_products = target_config.get('pkglistgen-delete-products', '').split(' ')
        file_utils.unlink_list(product_dir, delete_products)

        print('-> product service')
        product_version = attribute_value_load(api.apiurl, project, 'ProductVersion')
        if not product_version:
            # for stagings the product version doesn't matter (I hope)
            product_version = '1'
        for product_file in glob.glob(os.path.join(product_dir, '*.product')):
            self.replace_product_version(product_file, product_version)
            print(subprocess.check_output(
                [PRODUCT_SERVICE, product_file, product_dir, project], encoding='utf-8'))

        for delete_kiwi in target_config.get('pkglistgen-delete-kiwis-{}'.format(scope), '').split(' '):
            delete_kiwis = glob.glob(os.path.join(product_dir, delete_kiwi))
            file_utils.unlink_list(product_dir, delete_kiwis)
        if scope == 'staging':
            self.strip_medium_from_staging(product_dir)

        spec_files = glob.glob(os.path.join(product_dir, '*.spec'))
        file_utils.move_list(spec_files, release_dir)
        inc_files = glob.glob(os.path.join(group_dir, '*.inc'))
        # filter special inc file
        inc_files = filter(lambda file: file.endswith('weakremovers.inc'), inc_files)
        file_utils.move_list(inc_files, release_dir)

        # do not overwrite weakremovers.inc if it exists
        # we will commit there afterwards if needed
        if os.path.exists(os.path.join(group_dir, 'weakremovers.inc')) and \
           not os.path.exists(os.path.join(release_dir, 'weakremovers.inc')):
            file_utils.move_list([os.path.join(group_dir, 'weakremovers.inc')], release_dir)

        file_utils.multibuild_from_glob(release_dir, '*.spec')
        self.build_stub(release_dir, 'spec')
        self.commit_package(release_dir)

        if only_release_packages:
            return

        file_utils.multibuild_from_glob(product_dir, '*.kiwi')
        self.build_stub(product_dir, 'kiwi')
        self.commit_package(product_dir)

        error_output = b''
        reference_summary = os.path.join(group_dir, 'reference-summary.yml')
        if os.path.isfile(reference_summary):
            summary_file = os.path.join(product_dir, 'summary.yml')
            with open(summary_file, 'w') as f:
                f.write('# Summary of packages in groups')
                for group in sorted(summary):
                    # the unsorted group should appear filtered by
                    # unneeded.yml - so we need the content of unsorted.yml
                    # not unsorted.group (this grew a little unnaturally)
                    if group == 'unsorted':
                        continue
                    f.write('\n' + group + ':\n')
                    for package in sorted(summary[group]):
                        f.write('  - ' + package + '\n')

            try:
                error_output += subprocess.check_output(['diff', '-u', reference_summary, summary_file])
            except subprocess.CalledProcessError as e:
                error_output += e.output
            reference_unsorted = os.path.join(group_dir, 'reference-unsorted.yml')
            unsorted_file = os.path.join(product_dir, 'unsorted.yml')
            try:
                error_output += subprocess.check_output(['diff', '-u', reference_unsorted, unsorted_file])
            except subprocess.CalledProcessError as e:
                error_output += e.output

        if len(error_output) > 0:
            self.logger.error('Difference in yml:\n' + error_output.decode('utf-8'))
            return True
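
The doubled try/except around diff works because GNU diff exits with status 1 when its inputs differ, which makes check_output raise CalledProcessError; the diff text is still available on e.output. Factored into a helper (hypothetical name):

import subprocess


def unified_diff(reference, current):
    # diff exits 0 for identical files and 1 for differing ones; only
    # the latter raises, and e.output then carries the hunks as bytes.
    try:
        return subprocess.check_output(['diff', '-u', reference, current])
    except subprocess.CalledProcessError as e:
        return e.output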