Example #1
def connection(app, request):
    """Session-wide test database."""
    # Connect and create the temporary database
    print("=" * 80)
    print("CREATING TEMPORARY DATABASE FOR TESTS")
    print("=" * 80)
    subprocess.call(['dropdb', DBNAME])
    subprocess.call(['createdb', DBNAME])

    # Bind and create the database tables
    _db.clear()
    engine_url = '%s/%s' % (BIND, DBNAME)

    db_engine = create_engine(engine_url, encoding='utf-8', poolclass=NullPool)

    # AKA models.start()
    _db.Session.bind = db_engine
    _db.metadata.bind = _db.Session.bind

    _db.Base.metadata.create_all(db_engine)
    _db.commit()
    _db.clear()

    #connection = db_engine.connect()

    def teardown():
        _db.Base.metadata.drop_all(db_engine)

    request.addfinalizer(teardown)

    # Slap our test app on it
    _db.app = app
    return _db
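
The connection function above reads as a session-wide pytest fixture: it accepts the pytest request object and registers a finalizer that drops the tables. A minimal sketch of a per-test fixture that could sit on top of it, assuming connection is decorated with pytest.fixture(scope='session') and that the object it returns exposes the same rollback() and clear() calls used in the other examples here:

import pytest


@pytest.fixture
def session(connection, request):
    """Hypothetical per-test fixture reusing the session-wide database."""
    def teardown():
        connection.rollback()  # discard anything the test wrote
        connection.clear()     # reset the scoped session for the next test
    request.addfinalizer(teardown)
    return connection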
Example #2
    def test_filter_binaries_by_sha1(self, session):
        models.Binary(
            'ceph-1.0.deb',
            self.p,
            ref='firefly',
            distro='ubuntu',
            distro_version='precise',
            arch='all',
            sha1="sha1",
            )
        models.Binary(
            'ceph-1.0.deb',
            self.p,
            ref='firefly',
            distro='ubuntu',
            distro_version='trusty',
            arch='all',
            sha1="head",
            )

        models.commit()
        result = util.get_extra_binaries(
            'ceph',
            'ubuntu',
            'trusty',
            distro_versions=['precise', 'trusty'],
            ref='firefly',
            sha1="sha1")
        assert len(result) == 1
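
This test, like the others below, drives util.get_extra_binaries with combinations of project name, distro, distro version(s), ref and sha1. A hypothetical sketch of that style of lookup, assuming a SQLAlchemy Binary model with a project relationship; the field names are inferred from the keyword arguments in these tests, not copied from the project:

def get_extra_binaries_sketch(project_name, distro, distro_version,
                              distro_versions=None, ref=None, sha1=None):
    # assumes the same models module imported by the examples on this page
    query = models.Binary.query.filter(
        models.Binary.project.has(name=project_name))
    # each optional argument narrows the result set
    if distro is not None:
        query = query.filter_by(distro=distro)
    if distro_versions:
        query = query.filter(models.Binary.distro_version.in_(distro_versions))
    elif distro_version is not None:
        query = query.filter_by(distro_version=distro_version)
    if ref is not None:
        query = query.filter_by(ref=ref)
    if sha1 is not None:
        query = query.filter_by(sha1=sha1)
    return query.all()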
Example #3
def delete_repositories(repo_objects, lifespan, keep_minimum):
    logger.info('processing deletion for repos %s days and older', lifespan)
    if keep_minimum:
        logger.info('will keep at most %s repositories after purging', keep_minimum)
    else:
        logger.info('will not keep any repositories after purging is completed')

    for r in repo_objects:
        logger.info('repo %s is being processed for removal', r)
        for b in r.binaries:
            try:
                os.remove(b.path)
            except OSError as err:
                # no such file, ignore
                if err.errno == errno.ENOENT:
                    pass
            b.delete()
            models.flush()
        try:
            if r.path:
                shutil.rmtree(r.path)
        except OSError as err:
            # no such file, ignore
            if err.errno == errno.ENOENT:
                pass
            else:
                raise
        post_deleted(r)
        r.delete()
        models.commit()
Example #4
def delete_repositories(repo_objects, lifespan, keep_minimum):
    logger.info('processing deletion for repos %s days and older', lifespan)
    if keep_minimum:
        logger.info('will keep at most %s repositories after purging',
                    keep_minimum)
    else:
        logger.info(
            'will not keep any repositories after purging is completed')

    for r in repo_objects:
        logger.info('repo %s is being processed for removal', r)
        for b in r.binaries:
            try:
                os.remove(b.path)
            except OSError as err:
                # no such file, ignore
                if err.errno == errno.ENOENT:
                    pass
            b.delete()
            models.flush()
        try:
            if r.path:
                shutil.rmtree(r.path)
        except OSError as err:
            # no such file, ignore
            if err.errno == errno.ENOENT:
                pass
            else:
                raise
        post_deleted(r)
        r.delete()
        models.commit()
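
Note that the os.remove handler above checks for ENOENT but never re-raises, so every other OSError is silently ignored as well; the shutil.rmtree handler right below it shows the stricter pattern. A small standalone sketch of the remove step written in that stricter style:

import errno
import os


def remove_if_exists(path):
    """Delete a file, ignoring only the 'no such file' case."""
    try:
        os.remove(path)
    except OSError as err:
        if err.errno != errno.ENOENT:
            raise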
Example #5
def connection(app, request):
    """Session-wide test database."""
    # Connect and create the temporary database
    print("=" * 80)
    print("CREATING TEMPORARY DATABASE FOR TESTS")
    print("=" * 80)
    subprocess.call(['dropdb', DBNAME])
    subprocess.call(['createdb', DBNAME])

    # Bind and create the database tables
    _db.clear()
    engine_url = '%s/%s' % (BIND, DBNAME)

    db_engine = create_engine(
        engine_url,
        encoding='utf-8',
        poolclass=NullPool)

    # AKA models.start()
    _db.Session.bind = db_engine
    _db.metadata.bind = _db.Session.bind

    _db.Base.metadata.create_all(db_engine)
    _db.commit()
    _db.clear()

    def teardown():
        _db.Base.metadata.drop_all(db_engine)

    request.addfinalizer(teardown)

    # Slap our test app on it
    _db.app = app
    return _db
Example #6
    def test_ref_matches_binaries_from_distro_versions(self, session):
        models.Binary(
            'ceph-1.0.deb',
            self.p,
            ref='firefly',
            distro='ubuntu',
            distro_version='precise',
            arch='all',
            )
        models.Binary(
            'ceph-1.0.deb',
            self.p,
            ref='firefly',
            distro='ubuntu',
            distro_version='trusty',
            arch='all',
            )

        models.commit()
        result = util.get_extra_binaries(
            'ceph',
            'ubuntu',
            'trusty',
            distro_versions=['precise', 'trusty'],
            ref='firefly')
        assert len(result) == 2
Example #7
 def test_no_matching_ref_with_specific_ref(self, session):
     models.commit()
     result = util.get_extra_binaries('ceph',
                                      'ubuntu',
                                      'precise',
                                      ref='master')
     assert result == []
Example #8
def poll_repos():
    """
    Poll the repository objects that need to be updated and call the tasks
    that can create (or update) repositories with that information

    """
    logger.info('polling repos....')
    for r in models.Repo.query.filter_by(needs_update=True).all():
        # this repo is being processed, do not pile up and try to get it
        # processed again until it is done doing work
        if r.is_updating:
            continue
        if r.needs_update:
            logger.info("repo %s needs to be updated/created", r)
            if r.type == 'rpm':
                create_rpm_repo.apply_async(
                    (r.id,),
                    countdown=pecan.conf.quiet_time)
            elif r.type == 'deb':
                create_deb_repo.apply_async(
                    (r.id,),
                    countdown=pecan.conf.quiet_time)
            else:
                _type = r.infer_type()
                if _type is None:
                    logger.warning('failed to infer repository type')
                    logger.warning('got a repository with an unknown type: %s', r)
                else:
                    logger.warning('inferred repo type as: %s', _type)
                    r.type = _type
                    models.commit()

    logger.info('completed repo polling')
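
poll_repos dispatches Celery tasks through apply_async with a countdown, so the polling itself is presumably run on a schedule. A minimal sketch of how such a poller could be wired up with Celery beat; the app name, broker URL, task path and interval below are assumptions, not values taken from the project:

from celery import Celery

app = Celery('repos', broker='redis://localhost:6379/0')  # placeholder broker URL

app.conf.beat_schedule = {
    'poll-repos': {
        'task': 'async.poll_repos',  # hypothetical dotted path to the task above
        'schedule': 60.0,            # poll once a minute
    },
}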
Example #9
def create_rpm_repo(repo_id):
    """
    Go create or update repositories with specific IDs.
    """
    directories = ['SRPMS', 'noarch', 'x86_64']
    # get the root path for storing repos
    # TODO: Is it possible we can get an ID that doesn't exist anymore?
    repo = models.Repo.get(repo_id)
    logger.info("processing repository: %s", repo)

    # Determine paths for this repository
    paths = util.repo_paths(repo)
    repo_dirs = [os.path.join(paths['absolute'], d) for d in directories]

    # Before doing work that might take very long to complete, set the repo
    # path in the object and mark needs_update as False
    repo.path = paths['absolute']
    repo.needs_update = False
    models.commit()

    # this is safe to do, behind the scenes it is just trying to create them if
    # they don't exist and it will include the 'absolute' path
    for d in repo_dirs:
        util.makedirs(d)

    # now that structure is done, we need to symlink the RPMs that belong
    # to this repo so that we can create the metadata.
    conf_extra_repos = util.get_extra_repos(repo.project.name, repo.ref)
    extra_binaries = []
    for project_name, project_refs in conf_extra_repos.items():
        for ref in project_refs:
            extra_binaries += util.get_extra_binaries(
                project_name,
                repo.distro,
                repo.distro_version,
                ref=ref if ref != 'all' else None
            )

    all_binaries = extra_binaries + [b for b in repo.binaries]
    for binary in all_binaries:
        source = binary.path
        arch_directory = util.infer_arch_directory(binary.name)
        destination_dir = os.path.join(paths['absolute'], arch_directory)
        destination = os.path.join(destination_dir, binary.name)
        try:
            if not os.path.exists(destination):
                os.symlink(source, destination)
        except OSError:
            logger.exception('could not symlink')

    for d in repo_dirs:
        subprocess.check_call(['createrepo', d])
Example #10
 def run(self, args):
     super(PopulateCommand, self).run(args)
     out("LOADING ENVIRONMENT")
     self.load_app()
     out("BUILDING SCHEMA")
     try:
         out("STARTING A TRANSACTION...")
         models.start()
         models.Base.metadata.create_all(conf.sqlalchemy.engine)
     except:
         models.rollback()
         out("ROLLING BACK... ")
         raise
     else:
         out("COMMITTING... ")
         models.commit()
Example #11
    def test_no_ref_matches_binaries(self, session):
        models.Binary(
            'ceph-1.1.deb',
            self.p,
            distro='ubuntu',
            distro_version='trusty',
            arch='all',
            )
        models.Binary(
            'ceph-1.0.deb',
            self.p,
            distro='ubuntu',
            distro_version='trusty',
            arch='all',
            )

        models.commit()
        result = util.get_extra_binaries('ceph', 'ubuntu', 'trusty')
        assert len(result) == 2
Example #12
 def run(self, args):
     super(PopulateCommand, self).run(args)
     out("LOADING ENVIRONMENT")
     self.load_app()
     out("BUILDING SCHEMA")
     try:
         out("STARTING A TRANSACTION...")
         models.start()
         models.Base.metadata.create_all(conf.sqlalchemy.engine)
     except:
         models.rollback()
         out("ROLLING BACK... ")
         raise
     else:
         out("COMMITTING... ")
         models.commit()
         out("STAMPING INITIAL STATE WITH ALEMBIC... ")
         alembic_cfg = Config(get_alembic_config())
         command.stamp(alembic_cfg, "head")
Example #13
 def run(self, args):
     super(PopulateCommand, self).run(args)
     out("LOADING ENVIRONMENT")
     self.load_app()
     out("BUILDING SCHEMA")
     try:
         out("STARTING A TRANSACTION...")
         models.start()
         models.Base.metadata.create_all(conf.sqlalchemy.engine)
     except:
         models.rollback()
         out("ROLLING BACK... ")
         raise
     else:
         out("COMMITTING... ")
         models.commit()
         out("STAMPING INITIAL STATE WITH ALEMBIC... ")
         alembic_cfg = Config(get_alembic_config())
         command.stamp(alembic_cfg, "head")
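
The stamp call at the end records the latest Alembic revision in the database's version table without running any migrations, which makes sense right after create_all has built the schema from scratch; it is the programmatic equivalent of the alembic stamp head command. A minimal standalone sketch, assuming an alembic.ini next to the application (the path is a placeholder):

from alembic import command
from alembic.config import Config

alembic_cfg = Config('alembic.ini')  # placeholder path to the Alembic config file
command.stamp(alembic_cfg, 'head')   # mark the freshly created schema as up to date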
Example #14
def poll_repos():
    """
    Poll the repository objects that need to be updated and call the tasks
    that can create (or update) repositories with that information

    """
    logger.info('polling repos....')
    for r in models.Repo.query.filter_by(needs_update=True,
                                         is_queued=False).all():
        # this repo is being processed, do not pile up and try to get it
        # processed again until it is done doing work
        if r.is_updating:
            continue
        if r.needs_update:
            logger.info("repo %s needs to be updated/created", r)
            r.is_queued = True
            post_queued(r)
            if r.type == 'rpm':
                rpm.create_rpm_repo.apply_async(
                    (r.id, ),
                    countdown=pecan.conf.quiet_time,
                    queue='build_repos',
                )
            elif r.type == 'deb':
                debian.create_deb_repo.apply_async(
                    (r.id, ),
                    countdown=pecan.conf.quiet_time,
                    queue='build_repos',
                )
            else:
                _type = r.infer_type()
                if _type is None:
                    logger.warning('failed to infer repository type')
                    logger.warning('got a repository with an unknown type: %s',
                                   r)
                else:
                    logger.warning('inferred repo type as: %s', _type)
                    r.type = _type

            models.commit()

    logger.info('completed repo polling')
Example #15
def create_deb_repo(repo_id):
    """
    Go create or update repositories with specific IDs.
    """
    # get the root path for storing repos
    # TODO: Is it possible we can get an ID that doesn't exist anymore?
    repo = models.Repo.get(repo_id)
    timer = Timer(__name__, suffix="create.deb.%s" % repo.metric_name)
    counter = Counter(__name__, suffix="create.deb.%s" % repo.metric_name)
    timer.start()
    post_building(repo)
    logger.info("processing repository: %s", repo)
    if util.repository_is_disabled(repo.project.name):
        logger.info("will not process repository: %s", repo)
        repo.needs_update = False
        repo.is_queued = False
        models.commit()
        return

    # Determine paths for this repository
    paths = util.repo_paths(repo)

    # Before doing work that might take very long to complete, set the repo
    # path in the object, mark needs_update as False, and mark it as being
    # updated so we prevent piling up if other binaries are being posted
    repo.path = paths['absolute']
    repo.is_updating = True
    repo.is_queued = False
    repo.needs_update = False
    models.commit()

    # determine if other repositories might need to be queried to add extra
    # binaries (repos are tied to binaries, which are all related via refs,
    # archs, distros, and distro versions)
    conf_extra_repos = util.get_extra_repos(repo.project.name, repo.ref)
    combined_versions = util.get_combined_repos(repo.project.name)
    extra_binaries = []

    # See if there are any generic/universal binaries so that they can be
    # automatically added from the current project
    for binary in util.get_extra_binaries(
            repo.project.name,
            repo.distro,
            None,
            distro_versions=['generic', 'universal', 'any'],
            ref=repo.ref,
            sha1=repo.sha1):
        extra_binaries.append(binary)

    for project_name, project_refs in conf_extra_repos.items():
        for ref in project_refs:
            logger.info('fetching binaries for project: %s, ref: %s',
                        project_name, ref)
            found_binaries = util.get_extra_binaries(
                project_name,
                None,
                repo.distro_version,
                distro_versions=combined_versions,
                ref=ref if ref != 'all' else None)
            extra_binaries += found_binaries

            # See if there are any generic/universal binaries so that they can be
            # automatically added from projects coming from extra repos
            for binary in util.get_extra_binaries(
                    project_name,
                    repo.distro,
                    None,
                    distro_versions=['generic', 'universal', 'any'],
                    ref=ref if ref != 'all' else None):
                extra_binaries.append(binary)

    # check for the option to 'combine' repositories with different
    # debian/ubuntu versions
    for distro_version in combined_versions:
        logger.info('fetching distro_version %s for project: %s',
                    distro_version, repo.project.name)
        # When combining distro_versions we cannot filter by distribution as
        # well, otherwise it will be an impossible query. E.g. "get wheezy,
        # precise and trusty but only for the Ubuntu distro"
        extra_binaries += util.get_extra_binaries(repo.project.name,
                                                  None,
                                                  distro_version,
                                                  ref=repo.ref,
                                                  sha1=repo.sha1)

    # try to create the absolute path to the repository if it doesn't exist
    util.makedirs(paths['absolute'])

    all_binaries = extra_binaries + [b for b in repo.binaries]
    timer.intermediate('collection')

    for binary in set(all_binaries):
        # XXX This is really not a good alternative but we are not going to be
        # using .changes for now although we can store it.
        if binary.extension == 'changes':
            continue
        try:
            commands = util.reprepro_commands(
                paths['absolute'],
                binary,
                distro_versions=combined_versions,
                fallback_version=repo.distro_version)
        except KeyError:  # probably a tar.gz or similar file that should not be added directly
            continue
        for command in commands:
            logger.info('running command: %s', ' '.join(command))
            result = subprocess.Popen(command,
                                      stderr=subprocess.PIPE,
                                      stdout=subprocess.PIPE)
            stdout, stderr = result.communicate()
            # returncode is only set once the process has finished, so check
            # it after communicate() has returned
            if result.returncode > 0:
                logger.error('failed to add binary %s', binary.name)
            for line in stdout.split('\n'):
                logger.info(line)
            for line in stderr.split('\n'):
                logger.warning(line)

    logger.info("finished processing repository: %s", repo)
    repo.is_updating = False
    models.commit()
    timer.stop()
    counter += 1
    post_ready(repo)
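
util.reprepro_commands returns one reprepro invocation per distro version for each binary, and those argument lists are what the subprocess.Popen call above executes. Purely as an illustration of what such an invocation typically looks like for a Debian repository managed with reprepro; every path and codename below is made up:

# Illustrative only; the real arguments come from util.reprepro_commands.
command = [
    'reprepro',
    '-b', '/opt/repos/ceph/firefly/ubuntu/trusty',  # hypothetical repository root
    'includedeb', 'trusty',                         # target codename
    '/opt/binaries/ceph-1.0.deb',                   # hypothetical path to the .deb
]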
Example #16
 def test_no_matching_ref_with_specific_ref(self, session):
     models.commit()
     result = util.get_extra_binaries(
         'ceph', 'ubuntu', 'precise', ref='master')
     assert result == []
Example #17
def create_rpm_repo(repo_id):
    """
    Go create or update repositories with specific IDs.
    """
    directories = ['SRPMS', 'noarch', 'x86_64', 'aarch64']
    # get the root path for storing repos
    # TODO: Is it possible we can get an ID that doesn't exist anymore?
    repo = models.Repo.get(repo_id)
    post_building(repo)
    timer = Timer(__name__, suffix="create.rpm.%s" % repo.metric_name)
    counter = Counter(__name__, suffix="create.rpm.%s" % repo.metric_name)
    timer.start()
    logger.info("processing repository: %s", repo)
    if util.repository_is_disabled(repo.project.name):
        logger.info("will not process repository: %s", repo)
        repo.needs_update = False
        repo.is_queued = False
        # persist the flag changes before bailing out (as the deb task does)
        models.commit()
        return

    # Determine paths for this repository
    paths = util.repo_paths(repo)
    repo_dirs = [os.path.join(paths['absolute'], d) for d in directories]

    # Before doing work that might take very long to complete, set the repo
    # path in the object and mark needs_update as False
    repo.path = paths['absolute']
    repo.is_updating = True
    repo.is_queued = False
    repo.needs_update = False
    models.commit()

    # this is safe to do, behind the scenes it is just trying to create them if
    # they don't exist and it will include the 'absolute' path
    for d in repo_dirs:
        util.makedirs(d)

    # now that structure is done, we need to symlink the RPMs that belong
    # to this repo so that we can create the metadata.
    conf_extra_repos = util.get_extra_repos(repo.project.name, repo.ref)
    extra_binaries = []
    for project_name, project_refs in conf_extra_repos.items():
        for ref in project_refs:
            extra_binaries += util.get_extra_binaries(
                project_name,
                repo.distro,
                repo.distro_version,
                ref=ref if ref != 'all' else None)

    all_binaries = extra_binaries + [b for b in repo.binaries]
    timer.intermediate('collection')
    for binary in all_binaries:
        source = binary.path
        arch_directory = util.infer_arch_directory(binary.name)
        destination_dir = os.path.join(paths['absolute'], arch_directory)
        destination = os.path.join(destination_dir, binary.name)
        try:
            if not os.path.exists(destination):
                os.symlink(source, destination)
        except OSError:
            logger.exception('could not symlink')

    for d in repo_dirs:
        subprocess.check_call(['createrepo', d])

    logger.info("finished processing repository: %s", repo)
    repo.is_updating = False
    models.commit()
    timer.stop()
    counter += 1
    post_ready(repo)
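
util.infer_arch_directory decides which of the SRPMS/noarch/x86_64/aarch64 directories a binary is symlinked into. The real logic lives in that helper; a purely hypothetical sketch of the kind of mapping it would have to perform, based only on the directory names above and standard RPM naming:

def infer_arch_directory_sketch(binary_name):
    # source RPMs conventionally end in .src.rpm and live under SRPMS
    if binary_name.endswith('.src.rpm'):
        return 'SRPMS'
    for arch in ('noarch', 'x86_64', 'aarch64'):
        if arch in binary_name:
            return arch
    return 'noarch'  # assumed fallback; the real helper may behave differently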
Example #18
def create_deb_repo(repo_id):
    """
    Go create or update repositories with specific IDs.
    """
    # get the root path for storing repos
    # TODO: Is it possible we can get an ID that doesn't exist anymore?
    repo = models.Repo.get(repo_id)
    timer = Timer(__name__, suffix="create.deb.%s" % repo.metric_name)
    counter = Counter(__name__, suffix="create.deb.%s" % repo.metric_name)
    timer.start()
    post_building(repo)
    logger.info("processing repository: %s", repo)
    if util.repository_is_disabled(repo.project.name):
        logger.info("will not process repository: %s", repo)
        repo.needs_update = False
        repo.is_queued = False
        models.commit()
        return

    # Determine paths for this repository
    paths = util.repo_paths(repo)

    # Before doing work that might take very long to complete, set the repo
    # path in the object, mark needs_update as False, and mark it as being
    # updated so we prevent piling up if other binaries are being posted
    repo.path = paths['absolute']
    repo.is_updating = True
    repo.is_queued = False
    repo.needs_update = False
    models.commit()

    # determine if other repositories might need to be queried to add extra
    # binaries (repos are tied to binaries, which are all related via refs,
    # archs, distros, and distro versions)
    conf_extra_repos = util.get_extra_repos(repo.project.name, repo.ref)
    combined_versions = util.get_combined_repos(repo.project.name)
    extra_binaries = []

    # See if there are any generic/universal binaries so that they can be
    # automatically added from the current project
    for binary in util.get_extra_binaries(
            repo.project.name,
            repo.distro,
            None,
            distro_versions=['generic', 'universal', 'any'],
            ref=repo.ref,
            sha1=repo.sha1):
        extra_binaries.append(binary)

    for project_name, project_refs in conf_extra_repos.items():
        for ref in project_refs:
            logger.info('fetching binaries for project: %s, ref: %s', project_name, ref)
            found_binaries = util.get_extra_binaries(
                project_name,
                None,
                repo.distro_version,
                distro_versions=combined_versions,
                ref=ref if ref != 'all' else None
            )
            extra_binaries += found_binaries

            # See if there are any generic/universal binaries so that they can be
            # automatically added from projects coming from extra repos
            for binary in util.get_extra_binaries(
                    project_name,
                    repo.distro,
                    None,
                    distro_versions=['generic', 'universal', 'any'],
                    ref=ref if ref != 'all' else None):
                extra_binaries.append(binary)

    # check for the option to 'combine' repositories with different
    # debian/ubuntu versions
    for distro_version in combined_versions:
        logger.info(
            'fetching distro_version %s for project: %s',
            distro_version,
            repo.project.name
        )
        # When combining distro_versions we cannot filter by distribution as
        # well, otherwise it will be an impossible query. E.g. "get wheezy,
        # precise and trusty but only for the Ubuntu distro"
        extra_binaries += util.get_extra_binaries(
            repo.project.name,
            None,
            distro_version,
            ref=repo.ref,
            sha1=repo.sha1
        )

    # try to create the absolute path to the repository if it doesn't exist
    util.makedirs(paths['absolute'])

    all_binaries = extra_binaries + [b for b in repo.binaries]
    timer.intermediate('collection')

    for binary in set(all_binaries):
        # XXX This is really not a good alternative but we are not going to be
        # using .changes for now although we can store it.
        if binary.extension == 'changes':
            continue
        try:
            commands = util.reprepro_commands(
                paths['absolute'],
                binary,
                distro_versions=combined_versions,
                fallback_version=repo.distro_version
            )
        except KeyError:  # probably a tar.gz or similar file that should not be added directly
            continue
        for command in commands:
            logger.info('running command: %s', ' '.join(command))
            result = subprocess.Popen(command, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
            stdout, stderr = result.communicate()
            # returncode is only set once the process has finished, so check
            # it after communicate() has returned
            if result.returncode > 0:
                logger.error('failed to add binary %s', binary.name)
            for line in stdout.split('\n'):
                logger.info(line)
            for line in stderr.split('\n'):
                logger.warning(line)

    logger.info("finished processing repository: %s", repo)
    repo.is_updating = False
    models.commit()
    timer.stop()
    counter += 1
    post_ready(repo)
Example #19
def create_deb_repo(repo_id):
    """
    Go create or update repositories with specific IDs.
    """
    # get the root path for storing repos
    # TODO: Is it possible we can get an ID that doesn't exist anymore?
    repo = models.Repo.get(repo_id)
    logger.info("processing repository: %s", repo)

    # Determine paths for this repository
    paths = util.repo_paths(repo)

    # determine if other repositories might need to be queried to add extra
    # binaries (repos are tied to binaries, which are all related via refs,
    # archs, distros, and distro versions)
    conf_extra_repos = util.get_extra_repos(repo.project.name, repo.ref)
    combined_versions = util.get_combined_repos(repo.project.name)
    extra_binaries = []
    for project_name, project_refs in conf_extra_repos.items():
        for ref in project_refs:
            logger.info('fetching binaries for project: %s, ref: %s', project_name, ref)
            found_binaries = util.get_extra_binaries(
                project_name,
                None,
                repo.distro_version,
                distro_versions=combined_versions,
                ref=ref if ref != 'all' else None
            )
            extra_binaries += found_binaries

    # check for the option to 'combine' repositories with different
    # debian/ubuntu versions
    for distro_version in combined_versions:
        logger.info(
            'fetching distro_version %s for project: %s',
            distro_version,
            repo.project.name
        )
        # When combining distro_versions we cannot filter by distribution as
        # well, otherwise it will be an impossible query. E.g. "get wheezy,
        # precise and trusty but only for the Ubuntu distro"
        extra_binaries += util.get_extra_binaries(
            repo.project.name,
            None,
            distro_version,
            ref=repo.ref
        )

    # try to create the absolute path to the repository if it doesn't exist
    util.makedirs(paths['absolute'])

    all_binaries = extra_binaries + [b for b in repo.binaries]

    for binary in all_binaries:
        # XXX This is really not a good alternative but we are not going to be
        # using .changes for now although we can store it.
        if binary.extension == 'changes':
            continue
        try:
            command = util.reprepro_command(paths['absolute'], binary)
        except KeyError:  # probably a tar.gz or similar file that should not be added directly
            continue
        try:
            logger.info('running command: %s', ' '.join(command))
        except TypeError:
            logger.exception('was not able to add binary: %s', binary)
            continue
        else:
            try:
                subprocess.check_call(command)
            except subprocess.CalledProcessError:
                logger.exception('failed to add binary %s', binary.name)

    # Finally, set the repo path in the object and mark needs_update as False
    repo.path = paths['absolute']
    repo.needs_update = False
    models.commit()