def test_ref_matches_binaries_from_distro_versions(self, session):
    models.Binary(
        'ceph-1.0.deb',
        self.p,
        ref='firefly',
        distro='ubuntu',
        distro_version='precise',
        arch='all',
    )
    models.Binary(
        'ceph-1.0.deb',
        self.p,
        ref='firefly',
        distro='ubuntu',
        distro_version='trusty',
        arch='all',
    )
    models.commit()
    result = util.get_extra_binaries(
        'ceph',
        'ubuntu',
        'trusty',
        distro_versions=['precise', 'trusty'],
        ref='firefly')
    assert len(result) == 2
def test_filter_binaries_by_sha1(self, session):
    models.Binary(
        'ceph-1.0.deb',
        self.p,
        ref='firefly',
        distro='ubuntu',
        distro_version='precise',
        arch='all',
        sha1="sha1",
    )
    models.Binary(
        'ceph-1.0.deb',
        self.p,
        ref='firefly',
        distro='ubuntu',
        distro_version='trusty',
        arch='all',
        sha1="head",
    )
    models.commit()
    result = util.get_extra_binaries(
        'ceph',
        'ubuntu',
        'trusty',
        distro_versions=['precise', 'trusty'],
        ref='firefly',
        sha1="sha1")
    assert len(result) == 1
def create_rpm_repo(repo_id):
    """
    Go create or update repositories with specific IDs.
    """
    directories = ['SRPMS', 'noarch', 'x86_64']
    # get the root path for storing repos
    # TODO: Is it possible we can get an ID that doesn't exist anymore?
    repo = models.Repo.get(repo_id)
    logger.info("processing repository: %s", repo)

    # Determine paths for this repository
    paths = util.repo_paths(repo)
    repo_dirs = [os.path.join(paths['absolute'], d) for d in directories]

    # Before doing work that might take very long to complete, set the repo
    # path in the object and mark needs_update as False
    repo.path = paths['absolute']
    repo.needs_update = False
    models.commit()

    # this is safe to do, behind the scenes it is just trying to create them
    # if they don't exist and it will include the 'absolute' path
    for d in repo_dirs:
        util.makedirs(d)

    # now that structure is done, we need to symlink the RPMs that belong
    # to this repo so that we can create the metadata.
    conf_extra_repos = util.get_extra_repos(repo.project.name, repo.ref)
    extra_binaries = []
    for project_name, project_refs in conf_extra_repos.items():
        for ref in project_refs:
            extra_binaries += util.get_extra_binaries(
                project_name,
                repo.distro,
                repo.distro_version,
                ref=ref if ref != 'all' else None
            )

    all_binaries = extra_binaries + [b for b in repo.binaries]
    for binary in all_binaries:
        source = binary.path
        arch_directory = util.infer_arch_directory(binary.name)
        destination_dir = os.path.join(paths['absolute'], arch_directory)
        destination = os.path.join(destination_dir, binary.name)
        try:
            if not os.path.exists(destination):
                os.symlink(source, destination)
        except OSError:
            logger.exception('could not symlink')

    for d in repo_dirs:
        subprocess.check_call(['createrepo', d])
def test_no_ref_matches_binaries(self, session):
    models.Binary(
        'ceph-1.1.deb',
        self.p,
        distro='ubuntu',
        distro_version='trusty',
        arch='all',
    )
    models.Binary(
        'ceph-1.0.deb',
        self.p,
        distro='ubuntu',
        distro_version='trusty',
        arch='all',
    )
    models.commit()
    result = util.get_extra_binaries('ceph', 'ubuntu', 'trusty')
    assert len(result) == 2
def test_ref_matches_binaries(self, session):
    models.Binary(
        'ceph-1.1.deb',
        self.p,
        ref='firefly',
        distro='ubuntu',
        distro_version='trusty',
        arch='all',
    )
    models.Binary(
        'ceph-1.0.deb',
        self.p,
        ref='master',
        distro='ubuntu',
        distro_version='trusty',
        arch='all',
    )
    models.commit()
    result = util.get_extra_binaries('ceph', 'ubuntu', 'trusty', ref='master')
    assert len(result) == 1
def test_no_matching_ref_with_specific_ref(self, session):
    models.commit()
    result = util.get_extra_binaries(
        'ceph', 'ubuntu', 'precise', ref='master')
    assert result == []
def test_no_project(self, session):
    result = util.get_extra_binaries('f', 'ubuntu', 'precise')
    assert result == []
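# For reference, a rough sketch of the query shape the tests above exercise.
# This is hypothetical, inferred only from how the tests call it; the real
# helper lives in util and its model/query API may differ. The behavior the
# tests pin down: return [] when the project does not exist, filter by a
# distro_versions list (which takes precedence over the single
# distro_version), and optionally narrow by ref and sha1.
def get_extra_binaries_sketch(project_name, distro, distro_version,
                              distro_versions=None, ref=None, sha1=None):
    project = models.Project.query.filter_by(name=project_name).first()
    if project is None:
        return []
    query = models.Binary.query.filter_by(project=project)
    if distro is not None:
        query = query.filter_by(distro=distro)
    if distro_versions:
        # a list of versions takes precedence over a single distro_version
        query = query.filter(
            models.Binary.distro_version.in_(distro_versions))
    elif distro_version is not None:
        query = query.filter_by(distro_version=distro_version)
    if ref is not None:
        query = query.filter_by(ref=ref)
    if sha1 is not None:
        query = query.filter_by(sha1=sha1)
    return query.all()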
def create_rpm_repo(repo_id):
    """
    Go create or update repositories with specific IDs.
    """
    directories = ['SRPMS', 'noarch', 'x86_64', 'aarch64']
    # get the root path for storing repos
    # TODO: Is it possible we can get an ID that doesn't exist anymore?
    repo = models.Repo.get(repo_id)
    post_building(repo)

    timer = Timer(__name__, suffix="create.rpm.%s" % repo.metric_name)
    counter = Counter(__name__, suffix="create.rpm.%s" % repo.metric_name)
    timer.start()

    logger.info("processing repository: %s", repo)

    if util.repository_is_disabled(repo.project.name):
        logger.info("will not process repository: %s", repo)
        repo.needs_update = False
        repo.is_queued = False
        models.commit()
        return

    # Determine paths for this repository
    paths = util.repo_paths(repo)
    repo_dirs = [os.path.join(paths['absolute'], d) for d in directories]

    # Before doing work that might take very long to complete, set the repo
    # path in the object and mark needs_update as False
    repo.path = paths['absolute']
    repo.is_updating = True
    repo.is_queued = False
    repo.needs_update = False
    models.commit()

    # this is safe to do, behind the scenes it is just trying to create them
    # if they don't exist and it will include the 'absolute' path
    for d in repo_dirs:
        util.makedirs(d)

    # now that structure is done, we need to symlink the RPMs that belong
    # to this repo so that we can create the metadata.
    conf_extra_repos = util.get_extra_repos(repo.project.name, repo.ref)
    extra_binaries = []
    for project_name, project_refs in conf_extra_repos.items():
        for ref in project_refs:
            extra_binaries += util.get_extra_binaries(
                project_name,
                repo.distro,
                repo.distro_version,
                ref=ref if ref != 'all' else None)

    all_binaries = extra_binaries + [b for b in repo.binaries]
    timer.intermediate('collection')
    for binary in all_binaries:
        source = binary.path
        arch_directory = util.infer_arch_directory(binary.name)
        destination_dir = os.path.join(paths['absolute'], arch_directory)
        destination = os.path.join(destination_dir, binary.name)
        try:
            if not os.path.exists(destination):
                os.symlink(source, destination)
        except OSError:
            logger.exception('could not symlink')

    for d in repo_dirs:
        subprocess.check_call(['createrepo', d])

    logger.info("finished processing repository: %s", repo)
    repo.is_updating = False
    models.commit()
    timer.stop()
    counter += 1
    post_ready(repo)
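# A minimal sketch of the arch mapping assumed above: infer_arch_directory
# decides which of the repo_dirs created earlier ('SRPMS', 'noarch',
# 'x86_64', 'aarch64') a binary gets symlinked into. This is hypothetical,
# inferred from the filenames RPMs usually carry; the real helper lives in
# util and may behave differently.
def infer_arch_directory_sketch(binary_name):
    # source RPMs carry a distinctive suffix
    if binary_name.endswith('.src.rpm'):
        return 'SRPMS'
    # otherwise look for an arch token embedded in the filename,
    # e.g. ceph-1.0-0.el7.x86_64.rpm -> 'x86_64'
    for arch in ('x86_64', 'aarch64', 'noarch'):
        if arch in binary_name:
            return arch
    # fall back to noarch when the filename carries no arch hint
    return 'noarch'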
def create_deb_repo(repo_id):
    """
    Go create or update repositories with specific IDs.
    """
    # get the root path for storing repos
    # TODO: Is it possible we can get an ID that doesn't exist anymore?
    repo = models.Repo.get(repo_id)
    timer = Timer(__name__, suffix="create.deb.%s" % repo.metric_name)
    counter = Counter(__name__, suffix="create.deb.%s" % repo.metric_name)
    timer.start()
    post_building(repo)
    logger.info("processing repository: %s", repo)

    if util.repository_is_disabled(repo.project.name):
        logger.info("will not process repository: %s", repo)
        repo.needs_update = False
        repo.is_queued = False
        models.commit()
        return

    # Determine paths for this repository
    paths = util.repo_paths(repo)

    # Before doing work that might take very long to complete, set the repo
    # path in the object, mark needs_update as False, and mark it as being
    # updated so we prevent piling up if other binaries are being posted
    repo.path = paths['absolute']
    repo.is_updating = True
    repo.is_queued = False
    repo.needs_update = False
    models.commit()

    # determine if other repositories might need to be queried to add extra
    # binaries (repos are tied to binaries which are all related with refs,
    # archs, distros, and distro versions)
    conf_extra_repos = util.get_extra_repos(repo.project.name, repo.ref)
    combined_versions = util.get_combined_repos(repo.project.name)
    extra_binaries = []

    # See if there are any generic/universal binaries so that they can be
    # automatically added from the current project
    for binary in util.get_extra_binaries(
            repo.project.name,
            repo.distro,
            None,
            distro_versions=['generic', 'universal', 'any'],
            ref=repo.ref,
            sha1=repo.sha1):
        extra_binaries.append(binary)

    for project_name, project_refs in conf_extra_repos.items():
        for ref in project_refs:
            logger.info('fetching binaries for project: %s, ref: %s',
                        project_name, ref)
            found_binaries = util.get_extra_binaries(
                project_name,
                None,
                repo.distro_version,
                distro_versions=combined_versions,
                ref=ref if ref != 'all' else None
            )
            extra_binaries += found_binaries

            # See if there are any generic/universal binaries so that they
            # can be automatically added from projects coming from extra repos
            for binary in util.get_extra_binaries(
                    project_name,
                    repo.distro,
                    None,
                    distro_versions=['generic', 'universal', 'any'],
                    ref=ref if ref != 'all' else None):
                extra_binaries.append(binary)

    # check for the option to 'combine' repositories with different
    # debian/ubuntu versions
    for distro_version in combined_versions:
        logger.info(
            'fetching distro_version %s for project: %s',
            distro_version,
            repo.project.name
        )
        # When combining distro_versions we cannot filter by distribution as
        # well, otherwise it will be an impossible query. E.g. "get wheezy,
        # precise and trusty but only for the Ubuntu distro"
        extra_binaries += util.get_extra_binaries(
            repo.project.name,
            None,
            distro_version,
            ref=repo.ref,
            sha1=repo.sha1
        )

    # try to create the absolute path to the repository if it doesn't exist
    util.makedirs(paths['absolute'])

    all_binaries = extra_binaries + [b for b in repo.binaries]
    timer.intermediate('collection')
    for binary in set(all_binaries):
        # XXX This is really not a good alternative but we are not going to
        # be using .changes for now although we can store it.
        if binary.extension == 'changes':
            continue
        try:
            commands = util.reprepro_commands(
                paths['absolute'],
                binary,
                distro_versions=combined_versions,
                fallback_version=repo.distro_version
            )
        except KeyError:
            # probably a tar.gz or similar file that should not be added
            # directly
            continue
        for command in commands:
            logger.info('running command: %s', ' '.join(command))
            result = subprocess.Popen(
                command,
                stderr=subprocess.PIPE,
                stdout=subprocess.PIPE)
            # communicate() waits for the process to exit; returncode stays
            # None until then, so it must be called before the check below
            stdout, stderr = result.communicate()
            if result.returncode > 0:
                logger.error('failed to add binary %s', binary.name)
                for line in stdout.split('\n'):
                    logger.info(line)
                for line in stderr.split('\n'):
                    logger.warning(line)

    logger.info("finished processing repository: %s", repo)
    repo.is_updating = False
    models.commit()
    timer.stop()
    counter += 1
    post_ready(repo)
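# A representative shape for the commands util.reprepro_commands returns
# (an assumption based on how they are fed to subprocess.Popen above; the
# paths and codename here are illustrative, not real):
EXAMPLE_REPREPRO_COMMAND = [
    'reprepro', '-b', '/opt/repos/ceph/firefly/ubuntu',
    'includedeb', 'trusty', '/opt/binaries/ceph_1.0_amd64.deb',
]
# reprepro's 'includedeb <codename> <file>' adds one .deb to that codename's
# index; one command would be generated per distro version (including any
# combined versions), falling back to repo.distro_version when none applies,
# which is why a KeyError (an unmappable file type) skips the binary before
# any command runs.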
def create_deb_repo(repo_id):
    """
    Go create or update repositories with specific IDs.
    """
    # get the root path for storing repos
    # TODO: Is it possible we can get an ID that doesn't exist anymore?
    repo = models.Repo.get(repo_id)
    logger.info("processing repository: %s", repo)

    # Determine paths for this repository
    paths = util.repo_paths(repo)

    # determine if other repositories might need to be queried to add extra
    # binaries (repos are tied to binaries which are all related with refs,
    # archs, distros, and distro versions)
    conf_extra_repos = util.get_extra_repos(repo.project.name, repo.ref)
    combined_versions = util.get_combined_repos(repo.project.name)
    extra_binaries = []
    for project_name, project_refs in conf_extra_repos.items():
        for ref in project_refs:
            logger.info('fetching binaries for project: %s, ref: %s',
                        project_name, ref)
            found_binaries = util.get_extra_binaries(
                project_name,
                None,
                repo.distro_version,
                distro_versions=combined_versions,
                ref=ref if ref != 'all' else None
            )
            extra_binaries += found_binaries

    # check for the option to 'combine' repositories with different
    # debian/ubuntu versions
    for distro_version in combined_versions:
        logger.info(
            'fetching distro_version %s for project: %s',
            distro_version,
            repo.project.name
        )
        # When combining distro_versions we cannot filter by distribution as
        # well, otherwise it will be an impossible query. E.g. "get wheezy,
        # precise and trusty but only for the Ubuntu distro"
        extra_binaries += util.get_extra_binaries(
            repo.project.name,
            None,
            distro_version,
            ref=repo.ref
        )

    # try to create the absolute path to the repository if it doesn't exist
    util.makedirs(paths['absolute'])

    all_binaries = extra_binaries + [b for b in repo.binaries]
    for binary in all_binaries:
        # XXX This is really not a good alternative but we are not going to
        # be using .changes for now although we can store it.
        if binary.extension == 'changes':
            continue
        try:
            command = util.reprepro_command(paths['absolute'], binary)
        except KeyError:
            # probably a tar.gz or similar file that should not be added
            # directly
            continue
        try:
            logger.info('running command: %s', ' '.join(command))
        except TypeError:
            logger.exception('was not able to add binary: %s', binary)
            continue
        else:
            try:
                subprocess.check_call(command)
            except subprocess.CalledProcessError:
                logger.exception('failed to add binary %s', binary.name)

    # Finally, set the repo path in the object and mark needs_update as False
    repo.path = paths['absolute']
    repo.needs_update = False
    models.commit()