def update(self):
    if request.method == 'HEAD':
        return {}
    if request.method != 'POST':
        error('/errors/not_allowed', 'only POST requests are accepted for this url')
    if self.repo_obj.type == 'raw':
        # raw repos need no asynch construction. Create the paths,
        # symlink the binaries, and mark them ready.
        self.repo_obj.path = util.repo_paths(self.repo_obj)['absolute']
        util.makedirs(self.repo_obj.path)
        for binary in self.repo_obj.binaries:
            src = binary.path
            dest = os.path.join(self.repo_obj.path, binary.arch, binary.name)
            try:
                if not os.path.exists(dest):
                    os.symlink(src, dest)
            except OSError:
                logger.exception(
                    f'could not symlink raw binary {src} -> {dest}')
        self.repo_obj.needs_update = False
        asynch.post_ready(self.repo_obj)
    else:
        # Just mark the repo so that celery picks it up
        self.repo_obj.needs_update = True
        self.repo_obj.is_updating = False
        self.repo_obj.is_queued = False
        asynch.post_requested(self.repo_obj)
    return self.repo_obj
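# The raw branch above leans on util.makedirs being idempotent (the rpm task
# below notes it "is just trying to create them if they don't exist"). A
# minimal sketch of such a helper, assuming it simply wraps os.makedirs;
# this is inferred from usage, not the actual implementation:
def makedirs_sketch(path):
    if not os.path.isdir(path):
        os.makedirs(path)  # creates intermediate directories as needed
    return path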
def test_custom_flavor(self):
    self.repo = models.Repo(
        models.Project('ceph'),
        'master',
        'centos',
        'el7',
        flavor='wakawaka',
    )
    pecan.conf.repos_root = '/tmp/repos'
    result = util.repo_paths(self.repo)['absolute']
    assert result == '/tmp/repos/ceph/master/head/centos/el7/flavors/wakawaka'
def create_rpm_repo(repo_id):
    """
    Go create or update repositories with specific IDs.
    """
    directories = ['SRPMS', 'noarch', 'x86_64']

    # get the root path for storing repos
    # TODO: Is it possible we can get an ID that doesn't exist anymore?
    repo = models.Repo.get(repo_id)
    logger.info("processing repository: %s", repo)

    # Determine paths for this repository
    paths = util.repo_paths(repo)
    repo_dirs = [os.path.join(paths['absolute'], d) for d in directories]

    # Before doing work that might take very long to complete, set the repo
    # path in the object and mark needs_update as False
    repo.path = paths['absolute']
    repo.needs_update = False
    models.commit()

    # this is safe to do, behind the scenes it is just trying to create them
    # if they don't exist and it will include the 'absolute' path
    for d in repo_dirs:
        util.makedirs(d)

    # now that the structure is done, we need to symlink the RPMs that belong
    # to this repo so that we can create the metadata.
    conf_extra_repos = util.get_extra_repos(repo.project.name, repo.ref)
    extra_binaries = []
    for project_name, project_refs in conf_extra_repos.items():
        for ref in project_refs:
            extra_binaries += util.get_extra_binaries(
                project_name,
                repo.distro,
                repo.distro_version,
                ref=ref if ref != 'all' else None
            )

    all_binaries = extra_binaries + [b for b in repo.binaries]
    for binary in all_binaries:
        source = binary.path
        arch_directory = util.infer_arch_directory(binary.name)
        destination_dir = os.path.join(paths['absolute'], arch_directory)
        destination = os.path.join(destination_dir, binary.name)
        try:
            if not os.path.exists(destination):
                os.symlink(source, destination)
        except OSError:
            logger.exception('could not symlink %s -> %s', source, destination)

    for d in repo_dirs:
        subprocess.check_call(['createrepo', d])
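# util.infer_arch_directory picks which of the buckets above a binary lands
# in. A plausible sketch based on conventional RPM filename suffixes (an
# assumption; the real helper may consider more than the name):
def infer_arch_directory_sketch(rpm_name):
    if rpm_name.endswith('.src.rpm'):
        return 'SRPMS'
    if rpm_name.endswith('.x86_64.rpm'):
        return 'x86_64'
    return 'noarch'  # assumed fallback for noarch and unrecognized names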
def test_root(self):
    pecan.conf.repos_root = '/tmp/repos'
    result = util.repo_paths(self.repo)
    assert result['root'] == '/tmp/repos/ceph-deploy'

def test_relative(self):
    pecan.conf.repos_root = '/tmp/repos'
    result = util.repo_paths(self.repo)
    assert result['relative'] == 'master/head/centos/el7/flavors/default'

def test_absolute(self):
    pecan.conf.repos_root = '/tmp/repos'
    result = util.repo_paths(self.repo)['absolute']
    assert result == '/tmp/repos/ceph-deploy/master/head/centos/el7/flavors/default'
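# The assertions above pin down the shape of util.repo_paths. For reference,
# here is a minimal sketch of a function that would satisfy them. This is
# inferred from the tests, not the real implementation; in particular, the
# 'head' path component is assumed to come from a sha1 attribute that
# defaults to 'head'.
def repo_paths_sketch(repo):
    root = os.path.join(pecan.conf.repos_root, repo.project.name)
    relative = os.path.join(
        repo.ref,                                # e.g. 'master'
        getattr(repo, 'sha1', None) or 'head',   # assumed default
        repo.distro,                             # e.g. 'centos'
        repo.distro_version,                     # e.g. 'el7'
        'flavors',
        getattr(repo, 'flavor', None) or 'default',
    )
    return {
        'root': root,
        'relative': relative,
        'absolute': os.path.join(root, relative),
    }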
def create_rpm_repo(repo_id):
    """
    Go create or update repositories with specific IDs.
    """
    directories = ['SRPMS', 'noarch', 'x86_64', 'aarch64']

    # get the root path for storing repos
    # TODO: Is it possible we can get an ID that doesn't exist anymore?
    repo = models.Repo.get(repo_id)
    post_building(repo)
    timer = Timer(__name__, suffix="create.rpm.%s" % repo.metric_name)
    counter = Counter(__name__, suffix="create.rpm.%s" % repo.metric_name)
    timer.start()
    logger.info("processing repository: %s", repo)

    if util.repository_is_disabled(repo.project.name):
        logger.info("will not process repository: %s", repo)
        repo.needs_update = False
        repo.is_queued = False
        models.commit()
        return

    # Determine paths for this repository
    paths = util.repo_paths(repo)
    repo_dirs = [os.path.join(paths['absolute'], d) for d in directories]

    # Before doing work that might take very long to complete, set the repo
    # path in the object and mark needs_update as False
    repo.path = paths['absolute']
    repo.is_updating = True
    repo.is_queued = False
    repo.needs_update = False
    models.commit()

    # this is safe to do, behind the scenes it is just trying to create them
    # if they don't exist and it will include the 'absolute' path
    for d in repo_dirs:
        util.makedirs(d)

    # now that the structure is done, we need to symlink the RPMs that belong
    # to this repo so that we can create the metadata.
    conf_extra_repos = util.get_extra_repos(repo.project.name, repo.ref)
    extra_binaries = []
    for project_name, project_refs in conf_extra_repos.items():
        for ref in project_refs:
            extra_binaries += util.get_extra_binaries(
                project_name,
                repo.distro,
                repo.distro_version,
                ref=ref if ref != 'all' else None
            )

    all_binaries = extra_binaries + [b for b in repo.binaries]
    timer.intermediate('collection')

    for binary in all_binaries:
        source = binary.path
        arch_directory = util.infer_arch_directory(binary.name)
        destination_dir = os.path.join(paths['absolute'], arch_directory)
        destination = os.path.join(destination_dir, binary.name)
        try:
            if not os.path.exists(destination):
                os.symlink(source, destination)
        except OSError:
            logger.exception('could not symlink %s -> %s', source, destination)

    for d in repo_dirs:
        subprocess.check_call(['createrepo', d])

    logger.info("finished processing repository: %s", repo)
    repo.is_updating = False
    models.commit()
    timer.stop()
    counter += 1
    post_ready(repo)
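# Note on the final createrepo step above: createrepo scans a directory of
# RPMs and writes the repodata/ metadata (repomd.xml, primary, filelists)
# alongside them, so running it once per bucket yields one yum-consumable
# repository per architecture. The shell equivalent of one iteration, with
# an illustrative path that follows the layout asserted in the tests:
#
#   createrepo /tmp/repos/ceph/master/head/centos/el7/flavors/default/x86_64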
def create_deb_repo(repo_id):
    """
    Go create or update repositories with specific IDs.
    """
    # get the root path for storing repos
    # TODO: Is it possible we can get an ID that doesn't exist anymore?
    repo = models.Repo.get(repo_id)
    timer = Timer(__name__, suffix="create.deb.%s" % repo.metric_name)
    counter = Counter(__name__, suffix="create.deb.%s" % repo.metric_name)
    timer.start()
    post_building(repo)
    logger.info("processing repository: %s", repo)

    if util.repository_is_disabled(repo.project.name):
        logger.info("will not process repository: %s", repo)
        repo.needs_update = False
        repo.is_queued = False
        models.commit()
        return

    # Determine paths for this repository
    paths = util.repo_paths(repo)

    # Before doing work that might take very long to complete, set the repo
    # path in the object, mark needs_update as False, and mark it as being
    # updated so we prevent piling up if other binaries are being posted
    repo.path = paths['absolute']
    repo.is_updating = True
    repo.is_queued = False
    repo.needs_update = False
    models.commit()

    # determine if other repositories might need to be queried to add extra
    # binaries (repos are tied to binaries which are all related with refs,
    # archs, distros, and distro versions)
    conf_extra_repos = util.get_extra_repos(repo.project.name, repo.ref)
    combined_versions = util.get_combined_repos(repo.project.name)
    extra_binaries = []

    # See if there are any generic/universal binaries so that they can be
    # automatically added from the current project
    for binary in util.get_extra_binaries(
            repo.project.name,
            repo.distro,
            None,
            distro_versions=['generic', 'universal', 'any'],
            ref=repo.ref,
            sha1=repo.sha1):
        extra_binaries.append(binary)

    for project_name, project_refs in conf_extra_repos.items():
        for ref in project_refs:
            logger.info('fetching binaries for project: %s, ref: %s',
                        project_name, ref)
            found_binaries = util.get_extra_binaries(
                project_name,
                None,
                repo.distro_version,
                distro_versions=combined_versions,
                ref=ref if ref != 'all' else None
            )
            extra_binaries += found_binaries

            # See if there are any generic/universal binaries so that they
            # can be automatically added from projects coming from extra repos
            for binary in util.get_extra_binaries(
                    project_name,
                    repo.distro,
                    None,
                    distro_versions=['generic', 'universal', 'any'],
                    ref=ref if ref != 'all' else None):
                extra_binaries.append(binary)

    # check for the option to 'combine' repositories with different
    # debian/ubuntu versions
    for distro_version in combined_versions:
        logger.info(
            'fetching distro_version %s for project: %s',
            distro_version,
            repo.project.name
        )
        # When combining distro_versions we cannot filter by distribution as
        # well, otherwise it will be an impossible query. E.g. "get wheezy,
        # precise and trusty but only for the Ubuntu distro"
        extra_binaries += util.get_extra_binaries(
            repo.project.name,
            None,
            distro_version,
            ref=repo.ref,
            sha1=repo.sha1
        )

    # try to create the absolute path to the repository if it doesn't exist
    util.makedirs(paths['absolute'])

    all_binaries = extra_binaries + [b for b in repo.binaries]
    timer.intermediate('collection')

    for binary in set(all_binaries):
        # XXX This is really not a good alternative but we are not going to
        # be using .changes for now although we can store it.
        if binary.extension == 'changes':
            continue
        try:
            commands = util.reprepro_commands(
                paths['absolute'],
                binary,
                distro_versions=combined_versions,
                fallback_version=repo.distro_version
            )
        except KeyError:
            # probably a tar.gz or similar file that should not be added
            # directly
            continue
        for command in commands:
            logger.info('running command: %s', ' '.join(command))
            result = subprocess.Popen(
                command,
                stderr=subprocess.PIPE,
                stdout=subprocess.PIPE,
                universal_newlines=True,
            )
            # communicate() must run before checking returncode: the
            # attribute is None until the process has finished
            stdout, stderr = result.communicate()
            if result.returncode > 0:
                logger.error('failed to add binary %s', binary.name)
            for line in stdout.split('\n'):
                logger.info(line)
            for line in stderr.split('\n'):
                logger.warning(line)

    logger.info("finished processing repository: %s", repo)
    repo.is_updating = False
    models.commit()
    timer.stop()
    counter += 1
    post_ready(repo)
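# util.reprepro_commands produces one reprepro invocation per target distro
# version. For reference, a minimal sketch of what a single command might
# look like; 'includedeb' and 'includedsc' are real reprepro subcommands,
# but the exact flags and the extension mapping here are assumptions, and
# the dict lookup raises the KeyError that the loop above skips over:
def reprepro_command_sketch(repo_path, binary, codename):
    subcommand = {
        'deb': 'includedeb',   # add a binary package
        'dsc': 'includedsc',   # add a source package
    }[binary.extension]        # KeyError for tar.gz and similar files
    return ['reprepro', '-b', repo_path, subcommand, codename, binary.path]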
def create_deb_repo(repo_id):
    """
    Go create or update repositories with specific IDs.
    """
    # get the root path for storing repos
    # TODO: Is it possible we can get an ID that doesn't exist anymore?
    repo = models.Repo.get(repo_id)
    logger.info("processing repository: %s", repo)

    # Determine paths for this repository
    paths = util.repo_paths(repo)

    # determine if other repositories might need to be queried to add extra
    # binaries (repos are tied to binaries which are all related with refs,
    # archs, distros, and distro versions)
    conf_extra_repos = util.get_extra_repos(repo.project.name, repo.ref)
    combined_versions = util.get_combined_repos(repo.project.name)
    extra_binaries = []
    for project_name, project_refs in conf_extra_repos.items():
        for ref in project_refs:
            logger.info('fetching binaries for project: %s, ref: %s',
                        project_name, ref)
            found_binaries = util.get_extra_binaries(
                project_name,
                None,
                repo.distro_version,
                distro_versions=combined_versions,
                ref=ref if ref != 'all' else None
            )
            extra_binaries += found_binaries

    # check for the option to 'combine' repositories with different
    # debian/ubuntu versions
    for distro_version in combined_versions:
        logger.info(
            'fetching distro_version %s for project: %s',
            distro_version,
            repo.project.name
        )
        # When combining distro_versions we cannot filter by distribution as
        # well, otherwise it will be an impossible query. E.g. "get wheezy,
        # precise and trusty but only for the Ubuntu distro"
        extra_binaries += util.get_extra_binaries(
            repo.project.name,
            None,
            distro_version,
            ref=repo.ref
        )

    # try to create the absolute path to the repository if it doesn't exist
    util.makedirs(paths['absolute'])

    all_binaries = extra_binaries + [b for b in repo.binaries]
    for binary in all_binaries:
        # XXX This is really not a good alternative but we are not going to
        # be using .changes for now although we can store it.
        if binary.extension == 'changes':
            continue
        try:
            command = util.reprepro_command(paths['absolute'], binary)
        except KeyError:
            # probably a tar.gz or similar file that should not be added
            # directly
            continue
        try:
            logger.info('running command: %s', ' '.join(command))
        except TypeError:
            logger.exception('was not able to add binary: %s', binary)
            continue
        else:
            try:
                subprocess.check_call(command)
            except subprocess.CalledProcessError:
                logger.exception('failed to add binary %s', binary.name)

    # Finally, set the repo path in the object and mark needs_update as False
    repo.path = paths['absolute']
    repo.needs_update = False
    models.commit()