def test_unconfigured_with_repos_explicitly_disabled(self):
    pecan.conf.disable_unconfigured_repos = True
    pecan.conf.repos = {'bar': {'disabled': False}}
    assert util.repository_is_disabled('bar') is False

def test_repo_configured_but_not_project(self):
    pecan.conf.repos = {'foo': {'disabled': True}}
    assert util.repository_is_disabled('bar') is False

def test_project_explicitly_disabled(self):
    pecan.conf.repos = {'foo': {'disabled': True}}
    assert util.repository_is_disabled('foo') is True

def test_unconfigured_repo_with_disabled_repos(self):
    pecan.conf.disable_unconfigured_repos = True
    pecan.conf.repos = {'bar': {'disabled': True}}
    assert util.repository_is_disabled('foo') is True

def test_nothing_is_configured(self):
    assert util.repository_is_disabled('foo') is False
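
# The helper under test is util.repository_is_disabled. The sketch below is a
# hypothetical, minimal reimplementation of the rules the assertions above pin
# down; it is not chacra's actual code. It reads a plain dict instead of
# pecan.conf, but the key names mirror what the tests set:
# 'disable_unconfigured_repos' and repos[<project>]['disabled'].
def _reference_repository_is_disabled(project_name, conf):
    repos = conf.get('repos') or {}
    entry = repos.get(project_name)
    if entry is None:
        # an unconfigured project is only disabled when the global flag is set
        return bool(conf.get('disable_unconfigured_repos', False))
    # a configured project is disabled only when it is explicitly marked
    return bool(entry.get('disabled', False))

# For example, with conf = {'disable_unconfigured_repos': True,
# 'repos': {'bar': {'disabled': True}}}, calling
# _reference_repository_is_disabled('foo', conf) returns True, matching
# test_unconfigured_repo_with_disabled_repos above.
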
def create_rpm_repo(repo_id):
    """
    Go create or update repositories with specific IDs.
    """
    directories = ['SRPMS', 'noarch', 'x86_64', 'aarch64']

    # get the root path for storing repos
    # TODO: Is it possible we can get an ID that doesn't exist anymore?
    repo = models.Repo.get(repo_id)
    post_building(repo)
    timer = Timer(__name__, suffix="create.rpm.%s" % repo.metric_name)
    counter = Counter(__name__, suffix="create.rpm.%s" % repo.metric_name)
    timer.start()
    logger.info("processing repository: %s", repo)

    if util.repository_is_disabled(repo.project.name):
        logger.info("will not process repository: %s", repo)
        repo.needs_update = False
        repo.is_queued = False
        # persist the flags before bailing out, as the deb worker does
        models.commit()
        return

    # Determine paths for this repository
    paths = util.repo_paths(repo)
    repo_dirs = [os.path.join(paths['absolute'], d) for d in directories]

    # Before doing work that might take very long to complete, set the repo
    # path in the object and mark needs_update as False
    repo.path = paths['absolute']
    repo.is_updating = True
    repo.is_queued = False
    repo.needs_update = False
    models.commit()

    # this is safe to do, behind the scenes it is just trying to create them if
    # they don't exist and it will include the 'absolute' path
    for d in repo_dirs:
        util.makedirs(d)

    # now that the structure is done, we need to symlink the RPMs that belong
    # to this repo so that we can create the metadata.
    conf_extra_repos = util.get_extra_repos(repo.project.name, repo.ref)
    extra_binaries = []
    for project_name, project_refs in conf_extra_repos.items():
        for ref in project_refs:
            extra_binaries += util.get_extra_binaries(
                project_name,
                repo.distro,
                repo.distro_version,
                ref=ref if ref != 'all' else None
            )

    all_binaries = extra_binaries + [b for b in repo.binaries]
    timer.intermediate('collection')

    for binary in all_binaries:
        source = binary.path
        arch_directory = util.infer_arch_directory(binary.name)
        destination_dir = os.path.join(paths['absolute'], arch_directory)
        destination = os.path.join(destination_dir, binary.name)
        try:
            if not os.path.exists(destination):
                os.symlink(source, destination)
        except OSError:
            logger.exception('could not symlink')

    for d in repo_dirs:
        subprocess.check_call(['createrepo', d])

    logger.info("finished processing repository: %s", repo)
    repo.is_updating = False
    models.commit()
    timer.stop()
    counter += 1
    post_ready(repo)
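
# util.infer_arch_directory decides which of the directories created above a
# given RPM should be linked into. The sketch below is an assumption based on
# standard RPM file naming (name-version-release.<arch>.rpm, .src.rpm for
# source packages); it is not the actual chacra helper.
def _example_infer_arch_directory(binary_name):
    if binary_name.endswith('.src.rpm'):
        return 'SRPMS'
    for arch in ('noarch', 'x86_64', 'aarch64'):
        if binary_name.endswith('.%s.rpm' % arch):
            return arch
    # anything that does not advertise an architecture falls back to noarch
    return 'noarch'

# e.g. _example_infer_arch_directory('ceph-10.2.0-0.el7.x86_64.rpm') -> 'x86_64'
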
def create_deb_repo(repo_id):
    """
    Go create or update repositories with specific IDs.
    """
    # get the root path for storing repos
    # TODO: Is it possible we can get an ID that doesn't exist anymore?
    repo = models.Repo.get(repo_id)
    timer = Timer(__name__, suffix="create.deb.%s" % repo.metric_name)
    counter = Counter(__name__, suffix="create.deb.%s" % repo.metric_name)
    timer.start()
    post_building(repo)
    logger.info("processing repository: %s", repo)

    if util.repository_is_disabled(repo.project.name):
        logger.info("will not process repository: %s", repo)
        repo.needs_update = False
        repo.is_queued = False
        models.commit()
        return

    # Determine paths for this repository
    paths = util.repo_paths(repo)

    # Before doing work that might take very long to complete, set the repo
    # path in the object, mark needs_update as False, and mark it as being
    # updated so we prevent piling up if other binaries are being posted
    repo.path = paths['absolute']
    repo.is_updating = True
    repo.is_queued = False
    repo.needs_update = False
    models.commit()

    # determine if other repositories might need to be queried to add extra
    # binaries (repos are tied to binaries which are all related with refs,
    # archs, distros, and distro versions)
    conf_extra_repos = util.get_extra_repos(repo.project.name, repo.ref)
    combined_versions = util.get_combined_repos(repo.project.name)
    extra_binaries = []

    # See if there are any generic/universal binaries so that they can be
    # automatically added from the current project
    for binary in util.get_extra_binaries(
            repo.project.name,
            repo.distro,
            None,
            distro_versions=['generic', 'universal', 'any'],
            ref=repo.ref,
            sha1=repo.sha1):
        extra_binaries.append(binary)

    for project_name, project_refs in conf_extra_repos.items():
        for ref in project_refs:
            logger.info('fetching binaries for project: %s, ref: %s', project_name, ref)
            found_binaries = util.get_extra_binaries(
                project_name,
                None,
                repo.distro_version,
                distro_versions=combined_versions,
                ref=ref if ref != 'all' else None
            )
            extra_binaries += found_binaries

            # See if there are any generic/universal binaries so that they can
            # be automatically added from projects coming from extra repos
            for binary in util.get_extra_binaries(
                    project_name,
                    repo.distro,
                    None,
                    distro_versions=['generic', 'universal', 'any'],
                    ref=ref if ref != 'all' else None):
                extra_binaries.append(binary)

    # check for the option to 'combine' repositories with different
    # debian/ubuntu versions
    for distro_version in combined_versions:
        logger.info(
            'fetching distro_version %s for project: %s',
            distro_version,
            repo.project.name
        )
        # When combining distro_versions we cannot filter by distribution as
        # well, otherwise it will be an impossible query. E.g. "get wheezy,
        # precise and trusty but only for the Ubuntu distro"
        extra_binaries += util.get_extra_binaries(
            repo.project.name,
            None,
            distro_version,
            ref=repo.ref,
            sha1=repo.sha1
        )

    # try to create the absolute path to the repository if it doesn't exist
    util.makedirs(paths['absolute'])

    all_binaries = extra_binaries + [b for b in repo.binaries]
    timer.intermediate('collection')

    for binary in set(all_binaries):
        # XXX This is really not a good alternative but we are not going to be
        # using .changes for now although we can store it.
        if binary.extension == 'changes':
            continue
        try:
            commands = util.reprepro_commands(
                paths['absolute'],
                binary,
                distro_versions=combined_versions,
                fallback_version=repo.distro_version
            )
        except KeyError:
            # probably a tar.gz or similar file that should not be added directly
            continue
        for command in commands:
            logger.info('running command: %s', ' '.join(command))
            result = subprocess.Popen(
                command,
                stderr=subprocess.PIPE,
                stdout=subprocess.PIPE,
                # text output so the per-line logging below can split on '\n'
                universal_newlines=True,
            )
            # communicate() waits for the process to finish; returncode is not
            # populated until then, so only check it after collecting output
            stdout, stderr = result.communicate()
            if result.returncode > 0:
                logger.error('failed to add binary %s', binary.name)
            for line in stdout.split('\n'):
                logger.info(line)
            for line in stderr.split('\n'):
                logger.warning(line)

    logger.info("finished processing repository: %s", repo)
    repo.is_updating = False
    models.commit()
    timer.stop()
    counter += 1
    post_ready(repo)
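
# util.reprepro_commands is expected to return one command list per target
# codename. reprepro's real sub-commands for adding packages are 'includedeb'
# (for .deb files) and 'includedsc' (for .dsc files); the paths and codenames
# below are purely illustrative, and the exact flags chacra builds are an
# assumption.
example_commands = [
    ['reprepro', '-b', '/opt/repos/ceph/master/ubuntu/trusty',
     'includedeb', 'trusty', '/binaries/ceph_10.2.0-1_amd64.deb'],
    ['reprepro', '-b', '/opt/repos/ceph/master/ubuntu/xenial',
     'includedeb', 'xenial', '/binaries/ceph_10.2.0-1_amd64.deb'],
]
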
def create_rpm_repo(repo_id):
    """
    Go create or update repositories with specific IDs.
    """
    directories = ['SRPMS', 'noarch', 'x86_64']

    # get the root path for storing repos
    # TODO: Is it possible we can get an ID that doesn't exist anymore?
    repo = models.Repo.get(repo_id)
    logger.info("processing repository: %s", repo)

    if util.repository_is_disabled(repo.project.name):
        logger.info("will not process repository: %s", repo)
        repo.needs_update = False
        return

    # Determine paths for this repository
    paths = util.repo_paths(repo)
    repo_dirs = [os.path.join(paths['absolute'], d) for d in directories]

    # Before doing work that might take very long to complete, set the repo
    # path in the object and mark needs_update as False
    repo.path = paths['absolute']
    repo.is_updating = True
    repo.needs_update = False
    models.commit()

    # this is safe to do, behind the scenes it is just trying to create them if
    # they don't exist and it will include the 'absolute' path
    for d in repo_dirs:
        util.makedirs(d)

    # now that structure is done, we need to symlink the RPMs that belong
    # to this repo so that we can create the metadata.
    conf_extra_repos = util.get_extra_repos(repo.project.name, repo.ref)
    extra_binaries = []
    for project_name, project_refs in conf_extra_repos.items():
        for ref in project_refs:
            extra_binaries += util.get_extra_binaries(
                project_name,
                repo.distro,
                repo.distro_version,
                ref=ref if ref != 'all' else None
            )

    all_binaries = extra_binaries + [b for b in repo.binaries]

    for binary in all_binaries:
        source = binary.path
        arch_directory = util.infer_arch_directory(binary.name)
        destination_dir = os.path.join(paths['absolute'], arch_directory)
        destination = os.path.join(destination_dir, binary.name)
        try:
            if not os.path.exists(destination):
                os.symlink(source, destination)
        except OSError:
            logger.exception('could not symlink')

    for d in repo_dirs:
        subprocess.check_call(['createrepo', d])

    logger.info("finished processing repository: %s", repo)
    repo.is_updating = False
    models.commit()
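
# The repo workers above gate on util.repository_is_disabled, which the tests
# at the top drive through pecan.conf. A hedged illustration of those two knobs
# in a pecan config module (project names are made up; other per-project keys
# that chacra supports are omitted here):
disable_unconfigured_repos = False
repos = {
    'ceph': {'disabled': False},
    'abandoned-project': {'disabled': True},
}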