def main(self):
    """Deploy to a single remote host by shelling out to ansible-playbook.

    The target host is the last positional argument; ``--branch`` and
    ``--user`` are optional flags with defaults.
    """
    cli = Transport(self.arguments, options=self.options, check_help=True)
    cli.catch_help = self._help
    cli.parse_args()
    cli.catches_help()

    deploy_branch = cli.get('--branch', 'master')
    ssh_user = cli.get('--user', 'vagrant')
    verbosity = '-vvvv' if cli.has('-vvvv') else '-v'

    if not cli.unknown_commands:
        log.error("it is required to pass a host to deploy to, but none was provided")
        raise SystemExit(1)

    target_host = cli.unknown_commands[-1]
    # "%s," — trailing comma appears to make ansible treat the value as an
    # inline inventory entry rather than an inventory file (confirm).
    ansible_cmd = [
        "ansible-playbook",
        "-i", "%s," % target_host,
        verbosity,
        "-u", ssh_user,
        "--extra-vars", 'branch=%s' % deploy_branch,
        "deploy.yml",
    ]
    log.debug("Running command: %s" % ' '.join(ansible_cmd))
    out, err, code = process.run(ansible_cmd, cwd=playbook_path)
    # stderr is always logged at error level, stdout at debug level.
    log.error(err)
    log.debug(out)
def main(self):
    """Report on a task given by its identifier.

    Requires a positional identifier argument. ``--poll`` delegates to
    :meth:`poll`; any recognized field flag (stdout, stderr, command,
    ended, started, succeeded, exit_code) fetches just that field;
    otherwise a full summary is printed.
    """
    parser = Transport(self.arguments, options=self.options, check_help=True)
    parser.catch_help = self._help
    parser.parse_args()
    parser.catches_help()
    if not parser.unknown_commands:
        # Fix: previous message misspelled "identifier" as "identifer".
        log.error("it is required to pass an identifier, but none was provided")
        raise SystemExit(1)
    self.identifier = parser.unknown_commands[-1]
    if parser.has('--poll'):
        return self.poll()
    fields = [
        'stdout', 'stderr', 'command', 'ended',
        'started', 'succeeded', 'exit_code',
    ]
    for key in fields:
        if parser.has(key):
            return self.get(key)
    # if nothing else matches, just try to give a generic, full summary
    self.summary()
def parse_args(self):
    """Dispatch to the first registered action found in argv, or show help."""
    cli = Transport(self.argv, check_help=False)
    cli.catch_help = self.__doc__
    # Bare invocation: nothing to dispatch on, show usage.
    if len(self.argv) <= 1:
        cli.print_help()
    cli.parse_args()
    for name in self.actions:
        if not cli.has(name):
            continue
        return self.actions.get(name)()
    # No action matched; fall back to the help text.
    cli.print_help()
def parse_args(self):
    """Route the subcommand (create/update/generate/remove/get).

    The first matching subcommand wins; ``update`` and ``get`` also
    forward any recognized optional modifier arguments.
    """
    subcommands = ['create', 'update', 'generate', 'remove', 'get']
    cli = Transport(self.argv, options=subcommands)
    cli.catch_help = self._help
    cli.parse_args()

    if cli.has('create'):
        return self.create(cli.get('create'))

    if cli.has('update'):
        # Keep only the recognized modifiers, preserving CLI order.
        recognized = ('key', 'step', 'secret', 'b32')
        extras = [arg for arg in cli.arguments if arg in recognized]
        return self.update(cli.get('update'), extras)

    if cli.has('generate'):
        return self.generate()

    if cli.has('remove'):
        return self.remove(cli.get('remove'))

    if cli.has('get'):
        extras = [arg for arg in cli.arguments if arg in ['pin']]
        return self.get(cli.get('get'), extras)
def main(self):
    """Report on a task given by its identifier.

    A positional identifier is required; ``--poll`` delegates to
    :meth:`poll`, a recognized field flag fetches that single field, and
    anything else produces the full summary.
    """
    parser = Transport(self.arguments, options=self.options, check_help=True)
    parser.catch_help = self._help
    parser.parse_args()
    parser.catches_help()
    if not parser.unknown_commands:
        # Fix: corrected the misspelling "identifer" in the error message.
        log.error(
            "it is required to pass an identifier, but none was provided")
        raise SystemExit(1)
    self.identifier = parser.unknown_commands[-1]
    if parser.has('--poll'):
        return self.poll()
    for key in [
            'stdout', 'stderr', 'command', 'ended',
            'started', 'succeeded', 'exit_code']:
        if parser.has(key):
            return self.get(key)
    # if nothing else matches, just try to give a generic, full summary
    self.summary()
def main(self, argv):
    """Top-level entry point: record global config, then dispatch.

    Help and version handling are deferred (check_help/check_version
    False) so global flags are consumed before subcommand dispatch.
    """
    flag_aliases = [['--log', '--logging']]
    cli = Transport(argv, mapper=self.mapper, options=flag_aliases,
                    check_help=False, check_version=False)
    cli.parse_args()
    # Persist global settings before any subcommand runs.
    merfi.config['verbosity'] = cli.get('--log', 'info')
    merfi.config['check'] = cli.has('--check')
    cli.catch_help = self.help()
    cli.catch_version = merfi.__version__
    cli.mapper = self.mapper
    if len(argv) <= 1:
        # Nothing besides the program name: show usage and stop.
        return cli.print_help()
    cli.dispatch()
    cli.catches_help()
    cli.catches_version()
def main(self):
    """Deploy to a single host by invoking ansible-playbook.

    The host is the last positional argument; ``--branch`` (default
    'master') and ``--user`` (default 'vagrant') are optional flags.
    """
    parser = Transport(self.arguments, options=self.options, check_help=True)
    parser.catch_help = self._help
    parser.parse_args()
    parser.catches_help()
    # Optional flags, with defaults.
    branch = parser.get('--branch', 'master')
    user = parser.get('--user', 'vagrant')
    high_verbosity = '-vvvv' if parser.has('-vvvv') else '-v'
    # The target host arrives as a positional (unknown) argument.
    if not parser.unknown_commands:
        log.error("it is required to pass a host to deploy to, but none was provided")
        raise SystemExit(1)
    command = [
        "ansible-playbook",
        # "%s," — presumably the trailing comma makes ansible treat this as
        # an inline inventory entry rather than a file path (confirm).
        "-i", "%s," % parser.unknown_commands[-1],
        high_verbosity,
        "-u", user,
        "--extra-vars", 'branch=%s' % branch,
        "deploy.yml",
    ]
    log.debug("Running command: %s" % ' '.join(command))
    out, err, code = process.run(command, cwd=playbook_path)
    # NOTE(review): stderr is logged at error level even when the run
    # succeeded; stdout only at debug.
    log.error(err)
    log.debug(out)
class Localbuild(object):
    # One-line summary shown by the parent CLI's help listing.
    help_menu = 'build a package on the local system'
    _help = """
Build a package on the local system, using pbuilder.

Options:

--dist    "xenial" or "trusty". Defaults to "trusty".
"""
    name = 'localbuild'

    def __init__(self, argv):
        # argv: raw CLI arguments for this subcommand.
        self.argv = argv
        self.options = ('--dist',)

    def main(self):
        """Parse CLI flags, resolve the target distro, and run the build."""
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        # FIXME: stop hardcoding trusty. Use the git branch name instead,
        # translating "-ubuntu" into this local computer's own distro.
        distro = 'trusty'
        # Allow user to override the distro.
        if self.parser.has('--dist'):
            if self.parser.get('--dist') is None:
                # --dist was passed without a value.
                raise SystemExit('Specify a distro to --dist')
            distro = self.parser.get('--dist')
        self._run(distro)

    def help(self):
        # Returns the raw help text used by Transport's catch_help.
        return self._help

    def _run(self, distro):
        """ Build a package on the local system, using pbuilder. """
        pkg_name = util.package_name()
        # gbp's --git-pbuilder honors the BUILDER environment variable.
        os.environ['BUILDER'] = 'pbuilder'
        j_arg = self._get_j_arg(cpu_count())
        pbuilder_cache = '/var/cache/pbuilder/base-%s-amd64.tgz' % distro
        if not os.path.isfile(pbuilder_cache):
            # First build for this distro: create the pbuilder base tarball.
            cmd = ['sudo', 'pbuilder', 'create',
                   '--debootstrapopts', '--variant=buildd',
                   '--basetgz', pbuilder_cache,
                   '--distribution', distro]
            log.info('initializing pbuilder cache %s', pbuilder_cache)
            subprocess.check_call(cmd)
        # TODO: we should also probably check parent dir for leftovers and warn
        # the user to delete them (or delete them ourselves?)
        cmd = ['gbp', 'buildpackage',
               '--git-dist=%s' % distro,
               '--git-arch=amd64',
               '--git-verbose',
               '--git-pbuilder', j_arg, '-us', '-uc']
        log.info('building %s with pbuilder', pkg_name)
        subprocess.check_call(cmd)

    def _get_j_arg(self, cpus, total_ram_gb=None):
        """
        Returns a string like "-j4" or "-j8". j is the number of processors,
        with a maximum of x, where x = TOTAL_RAM_GB / 4.

        We want to use all our processors (a high "j" value), but the build
        process will fail with an "out of memory" error out if this j value is
        too high.

        An 8 GB system would have a maximum of -j2
        A 16 GB system would have a maximum of -j4
        A 32 GB system would have a maximum of -j8

        :param cpus: number of processors to consider.
        :param total_ram_gb: override RAM detection (used in tests);
                             when None, read it from the OS.
        """
        if total_ram_gb is None:
            page_size = os.sysconf('SC_PAGE_SIZE')
            mem_bytes = page_size * os.sysconf('SC_PHYS_PAGES')
            # mem_gib is a decimal, eg. 7.707 on 8GB system
            mem_gib = mem_bytes / (1024. ** 3)
            # Round up to the nearest GB for our purposes.
            total_ram_gb = math.ceil(mem_gib)
        number = min(cpus, total_ram_gb / 4)
        # Never go below -j1; %d truncates any fractional quotient.
        return '-j%d' % max(number, 1)
class MergePatches(object):
    help_menu = 'Merge patches from RHEL -patches branch to patch-queue branch'
    _help = """
Fetch the latest patches branch that rdopkg uses, and then fast-forward merge
that into our local patch-queue branch, so that both branches align.

This command helps to align the patch series between our RHEL packages and
our Ubuntu packages.

Options:
--force    Do a hard reset, rather than restricting to fast-forward merges
           only. Use this option if the RHEL patches branch was amended or
           rebased for some reason.
"""
    name = 'merge-patches'

    def __init__(self, argv):
        # argv: raw CLI arguments for this subcommand.
        self.argv = argv
        self.options = ['--force', '--hard-reset']

    def main(self):
        """Parse flags, reject stray positionals, then run the merge."""
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        force = False
        # Either spelling enables the non-fast-forward behavior.
        if self.parser.has(['--force', '--hard-reset']):
            force = True
        if self.parser.unknown_commands:
            log.error('unknown option %s',
                      ' '.join(self.parser.unknown_commands))
            return self.parser.print_help()
        self._run(force)

    def help(self):
        return self._help

    def _run(self, force=False):
        """Align the local patch-queue branch with the RHEL -patches branch.

        :param force: when True, hard-reset / force-update instead of
                      restricting to fast-forward merges.
        """
        # Determine the names of the relevant branches
        current_branch = util.current_branch()
        debian_branch = util.current_debian_branch()
        patch_queue_branch = util.current_patch_queue_branch()
        rhel_patches_branch = self.get_rhel_patches_branch(debian_branch)
        # Do the merge
        if current_branch == patch_queue_branch:
            # HEAD is our patch-queue branch. Use "git pull" directly.
            # For example: "git pull --ff-only patches/ceph-2-rhel-patches"
            cmd = ['git', 'pull', '--ff-only',
                   'patches/' + rhel_patches_branch]
            if force:
                # Do a hard reset on HEAD instead.
                cmd = ['git', 'reset', '--hard',
                       'patches/' + rhel_patches_branch]
        else:
            # HEAD is our debian branch. Use "git fetch" to update the
            # patch-queue ref. For example:
            # "git fetch . \
            #  patches/ceph-2-rhel-patches:patch-queue/ceph-2-ubuntu"
            util.ensure_patch_queue_branch()
            cmd = ['git', 'fetch', '.',
                   'patches/%s:%s' % (rhel_patches_branch,
                                      patch_queue_branch)]
            if force:
                # Do a hard push (with "+") instead.
                cmd = ['git', 'push', '.',
                       '+patches/%s:%s' % (rhel_patches_branch,
                                           patch_queue_branch)]
        log.info(' '.join(cmd))
        subprocess.check_call(cmd)

    def get_rhel_patches_branch(self, debian_branch):
        """
        Get the RHEL -patches branch corresponding to this debian branch.
        Examples:
        ceph-2-ubuntu -> ceph-2-rhel-patches
        ceph-2-trusty -> ceph-2-rhel-patches
        ceph-2-xenial -> ceph-2-rhel-patches
        ceph-1.3-ubuntu -> ceph-1.3-rhel-patches
        ceph-2-ubuntu-hotfix-bz123 -> ceph-2-rhel-patches-hotfix-bz123

        :param debian_branch: branch name shaped "product-version-distro[-suffix]".
        :return: the corresponding "-rhel-patches" branch name.
        """
        (product, version, distro) = debian_branch.split('-', 2)
        suffix = None
        # Anything after the distro (e.g. "-hotfix-bz123") is carried over.
        if '-' in distro:
            (distro, suffix) = distro.split('-', 1)
        rhel = '%s-%s-rhel-patches' % (product, version)
        if suffix is not None:
            rhel = '%s-%s' % (rhel, suffix)
        return rhel
class Binary(object):
    _help = dedent("""
    Operate binaries on a remote chacra instance.

    Creating a new binary::

        chacractl binary create project/ref/distro/distro_version/arch /path/to/binary

    Options:

    create        Creates a new binary at a given distro version architecture
    delete        Deletes an existing binary from chacra
    --force       If the resource exists, force the upload
    """)
    help_menu = "create, update metadata, or delete binaries"
    options = ['create', '--force', 'delete']

    def __init__(self, argv):
        # argv: raw CLI arguments for this subcommand.
        self.argv = argv

    @property
    def base_url(self):
        # Root endpoint for all binary resources on the chacra server.
        return os.path.join(
            chacractl.config['url'], 'binaries'
        )

    def sanitize_filename(self, line):
        """
        lines may come with newlines and leading slashes make sure
        they are clean so that they can be processed

        Returns the absolute path, or (implicitly) None when the path
        does not point at an existing file.
        """
        line = line.strip('\n')
        if os.path.isfile(line):
            return os.path.abspath(line)

    def sanitize_url(self, url_part):
        # get rid of the leading slash to prevent issues when joining
        url = url_part.lstrip('/')
        # and add a trailing slash so that the request is done at the correct
        # canonical url
        if not url.endswith('/'):
            url = "%s/" % url
        return url

    def post(self, url, filepath):
        """Upload a new binary; fall back to PUT when it already exists
        and --force was given."""
        filename = os.path.basename(filepath)
        file_url = os.path.join(url, filename) + '/'
        # HEAD first to learn whether the resource already exists.
        exists = requests.head(file_url,
                               verify=chacractl.config['ssl_verify'])
        if exists.status_code == 200:
            if not self.force:
                logger.warning(
                    'resource exists and --force was not used, will not upload'
                )
                logger.warning('SKIP %s', file_url)
                return
            return self.put(file_url, filepath)
        elif exists.status_code == 404:
            logger.info('POSTing file: %s', filepath)
            with open(filepath, 'rb') as binary:
                response = requests.post(
                    url,
                    files={'file': binary},
                    auth=chacractl.config['credentials'],
                    verify=chacractl.config['ssl_verify'])
            if response.status_code > 201:
                logger.warning("%s -> %s", response.status_code,
                               response.text)
                response.raise_for_status()

    def put(self, url, filepath):
        """Re-upload (overwrite) an existing binary resource."""
        logger.info('resource exists and --force was used, will re-upload')
        logger.info('PUTing file: %s', filepath)
        with open(filepath, 'rb') as binary:
            response = requests.put(
                url,
                files={'file': binary},
                auth=chacractl.config['credentials'],
                verify=chacractl.config['ssl_verify'])
        # NOTE(review): unlike post(), errors here are only warned about,
        # never raised.
        if response.status_code > 201:
            logger.warning("%s -> %s", response.status_code, response.text)

    def delete(self, url):
        """Delete a binary resource; a 404 is treated as already-deleted."""
        exists = requests.head(url, verify=chacractl.config['ssl_verify'])
        if exists.status_code == 404:
            logger.warning('resource already deleted')
            logger.warning('SKIP %s', url)
            return
        logger.info('DELETE file: %s', url)
        response = requests.delete(
            url,
            auth=chacractl.config['credentials'],
            verify=chacractl.config['ssl_verify'])
        # NOTE(review): this misses sub-200 codes; a sibling version of this
        # class checks "< 200 or > 299" instead — confirm intended behavior.
        if response.status_code > 201:
            logger.warning("%s -> %s", response.status_code, response.text)

    def main(self):
        """CLI entry point: dispatch create/delete, reading paths from
        stdin when input is piped."""
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self._help
        self.parser.parse_args()
        self.force = self.parser.has('--force')
        # handle posting binaries:
        if self.parser.has('create'):
            url_part = self.sanitize_url(self.parser.get('create'))
            if not sys.stdin.isatty():
                # read from stdin
                logger.info('reading input from stdin')
                for line in sys.stdin.readlines():
                    filename = self.sanitize_filename(line)
                    if not filename:
                        # skip blank/nonexistent paths silently
                        continue
                    url = os.path.join(self.base_url, url_part)
                    self.post(url, filename)
            else:
                # interactive: the binary path is the final CLI argument
                filepath = self.sanitize_filename(self.argv[-1])
                if not filepath:
                    logger.warning(
                        'provided path does not exist: %s', self.argv[-1]
                    )
                    return
                url = os.path.join(self.base_url, url_part)
                self.post(url, filepath)
        elif self.parser.has('delete'):
            url_part = self.sanitize_url(self.parser.get('delete'))
            url = os.path.join(self.base_url, url_part)
            self.delete(url)
class Binary(object):
    _help = dedent("""
    Operate binaries on a remote chacra instance.

    Creating a new binary::

        chacractl binary create project/ref/distro/distro_version/arch /path/to/binary

    Options:

    create        Creates a new binary at a given distro version architecture
    delete        Deletes an existing binary from chacra
    --force       If the resource exists, force the upload
    """)
    help_menu = "create, update metadata, or delete binaries"
    options = ['create', '--force', 'delete']

    def __init__(self, argv):
        # argv: raw CLI arguments for this subcommand.
        self.argv = argv

    @property
    def base_url(self):
        # Root endpoint for all binary resources on the chacra server.
        return os.path.join(
            chacractl.config['url'], 'binaries'
        )

    def sanitize_filename(self, line):
        """
        lines may come with newlines and leading slashes make sure
        they are clean so that they can be processed

        Returns the absolute path, or (implicitly) None when the path
        does not point at an existing file.
        """
        line = line.strip('\n')
        if os.path.isfile(line):
            return os.path.abspath(line)

    def sanitize_url(self, url_part):
        # get rid of the leading slash to prevent issues when joining
        url = url_part.lstrip('/')
        # and add a trailing slash so that the request is done at the correct
        # canonical url
        if not url.endswith('/'):
            url = "%s/" % url
        return url

    def load_file(self, filepath):
        """Open *filepath* and compute its sha512.

        The handle is rewound so the caller can stream it to the server;
        the caller is responsible for closing it.

        :return: (open binary file object, hex digest string)
        """
        chsum = sha512()
        binary = open(filepath, 'rb')
        for chunk in iter(lambda: binary.read(4096), b''):
            chsum.update(chunk)
        binary.seek(0)
        return binary, chsum.hexdigest()

    def upload_is_verified(self, arch_url, filename, digest):
        """Compare the server's recorded checksum with our local digest.

        :return: True when they match; logs details and returns False
                 otherwise.
        """
        r = requests.get(arch_url, verify=chacractl.config['ssl_verify'])
        r.raise_for_status()
        arch_data = r.json()
        remote_digest = arch_data[filename]['checksum']
        verified = remote_digest == digest
        if not verified:
            logging.error(
                'Checksum mismatch: server has wrong checksum for %s',
                filename)
            logging.error('local checksum: %s', digest)
            logging.error('remote checksum: %s', remote_digest)
        return verified

    def post(self, url, filepath):
        """Upload a new binary and verify its checksum; fall back to PUT
        when it already exists and --force was given."""
        filename = os.path.basename(filepath)
        file_url = os.path.join(url, filename) + '/'
        # HEAD first to learn whether the resource already exists.
        exists = requests.head(file_url,
                               verify=chacractl.config['ssl_verify'])
        if exists.status_code == 200:
            if not self.force:
                logger.warning(
                    'resource exists and --force was not used, will not upload'
                )
                logger.warning('SKIP %s', file_url)
                return
            return self.put(file_url, filepath)
        elif exists.status_code == 404:
            logger.info('POSTing file: %s', filepath)
            binary, digest = self.load_file(filepath)
            with binary:
                response = requests.post(
                    url,
                    files={'file': binary},
                    auth=chacractl.config['credentials'],
                    verify=chacractl.config['ssl_verify'])
            if response.status_code > 201:
                logger.warning("%s -> %s", response.status_code,
                               response.text)
                response.raise_for_status()
            if not self.upload_is_verified(url, filename, digest):
                # Since this is a new file, attempt to delete it
                logging.error('Deleting corrupted file from server...')
                self.delete(file_url)
                raise SystemExit(
                    'Checksum mismatch: remote server has wrong checksum for %s'
                    % filepath)

    def put(self, url, filepath):
        """Re-upload an existing binary and verify the server's checksum."""
        filename = os.path.basename(filepath)
        logger.info('resource exists and --force was used, will re-upload')
        logger.info('PUTing file: %s', filepath)
        binary, digest = self.load_file(filepath)
        with binary:
            response = requests.put(
                url,
                files={'file': binary},
                auth=chacractl.config['credentials'],
                verify=chacractl.config['ssl_verify'])
        if response.status_code > 201:
            logger.warning("%s -> %s", response.status_code, response.text)
        # trim off binary filename
        url = url.rsplit('/', 2)[0] + "/"
        if not self.upload_is_verified(url, filename, digest):
            # Maybe the old file with a different digest is still there, so
            # don't delete it
            raise SystemExit(
                'Checksum mismatch: server has wrong checksum for %s!'
                % filepath)

    def delete(self, url):
        """Delete a binary resource; a 404 is treated as already-deleted."""
        exists = requests.head(url, verify=chacractl.config['ssl_verify'])
        if exists.status_code == 404:
            logger.warning('resource already deleted')
            logger.warning('SKIP %s', url)
            return
        logger.info('DELETE file: %s', url)
        response = requests.delete(
            url,
            auth=chacractl.config['credentials'],
            verify=chacractl.config['ssl_verify'])
        # Warn on any non-2xx response (but do not raise).
        if response.status_code < 200 or response.status_code > 299:
            logger.warning("%s -> %s", response.status_code, response.text)

    def main(self):
        """CLI entry point: dispatch create/delete, reading paths from
        stdin when input is piped."""
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self._help
        self.parser.parse_args()
        self.force = self.parser.has('--force')
        # handle posting binaries:
        if self.parser.has('create'):
            url_part = self.sanitize_url(self.parser.get('create'))
            if not sys.stdin.isatty():
                # read from stdin
                logger.info('reading input from stdin')
                for line in sys.stdin.readlines():
                    filename = self.sanitize_filename(line)
                    if not filename:
                        # skip blank/nonexistent paths silently
                        continue
                    url = os.path.join(self.base_url, url_part)
                    self.post(url, filename)
            else:
                # interactive: the binary path is the final CLI argument
                filepath = self.sanitize_filename(self.argv[-1])
                if not filepath:
                    logger.warning(
                        'provided path does not exist: %s', self.argv[-1]
                    )
                    return
                url = os.path.join(self.base_url, url_part)
                self.post(url, filepath)
        elif self.parser.has('delete'):
            # "delete" with no value would otherwise crash in sanitize_url.
            if self.parser.get('delete') is None:
                raise SystemExit('Specify a URL to delete a binary.')
            url_part = self.sanitize_url(self.parser.get('delete'))
            url = os.path.join(self.base_url, url_part)
            self.delete(url)
class Binary(object):
    _help = dedent("""
    Operate binaries on a remote chacra instance.

    Creating a new binary::

        chacractl binary create project/ref/distro/distro_version/arch /path/to/binary

    Options:

    create        Creates a new binary at a given distro version architecture
    delete        Deletes an existing binary from chacra
    --force       If the resource exists, force the upload
    """)
    help_menu = "create, update metadata, or delete binaries"
    options = ['create', '--force', 'delete']

    def __init__(self, argv):
        # argv: raw CLI arguments for this subcommand.
        self.argv = argv

    @property
    def base_url(self):
        # Root endpoint for all binary resources on the chacra server.
        return os.path.join(chacractl.config['url'], 'binaries')

    def sanitize_filename(self, line):
        """
        lines may come with newlines and leading slashes make sure
        they are clean so that they can be processed

        Returns the absolute path, or (implicitly) None when the path
        does not point at an existing file.
        """
        line = line.strip('\n')
        if os.path.isfile(line):
            return os.path.abspath(line)

    def sanitize_url(self, url_part):
        # get rid of the leading slash to prevent issues when joining
        url = url_part.lstrip('/')
        # and add a trailing slash so that the request is done at the correct
        # canonical url
        if not url.endswith('/'):
            url = "%s/" % url
        return url

    def get_checksum(self, filepath):
        """Return the sha512 hex digest of *filepath*, read in 4 KiB chunks."""
        chsum = sha512()
        with open(filepath, 'rb') as binary:
            for chunk in iter(lambda: binary.read(4096), b''):
                chsum.update(chunk)
        return chsum.hexdigest()

    @retry()
    def upload_is_verified(self, arch_url, filename, digest):
        """Compare the server's recorded checksum with our local digest.

        :return: True when they match; logs details and returns False
                 otherwise.
        """
        r = requests.get(arch_url, verify=chacractl.config['ssl_verify'])
        r.raise_for_status()
        arch_data = r.json()
        remote_digest = arch_data[filename]['checksum']
        verified = remote_digest == digest
        if not verified:
            logging.error(
                'Checksum mismatch: server has wrong checksum for %s',
                filename)
            logging.error('local checksum: %s', digest)
            logging.error('remote checksum: %s', remote_digest)
        return verified

    @retry()
    def post(self, url, filepath):
        """Upload a new binary (streamed via MultipartEncoder) and verify
        its checksum; fall back to PUT when it exists and --force was given."""
        filename = os.path.basename(filepath)
        file_url = os.path.join(url, filename) + '/'
        # HEAD first to learn whether the resource already exists.
        exists = requests.head(file_url,
                               verify=chacractl.config['ssl_verify'])
        digest = self.get_checksum(filepath)
        if exists.status_code == 200:
            if not self.force:
                logger.warning(
                    'resource exists and --force was not used, will not upload'
                )
                logger.warning('SKIP %s', file_url)
                return
            return self.put(file_url, filepath)
        elif exists.status_code == 404:
            length = os.path.getsize(filepath)
            logger.info('POSTing file: %s', filepath)
            # NOTE(review): this handle is never explicitly closed; it is
            # left for garbage collection — confirm this is acceptable.
            mpart = MultipartEncoder(fields={
                'file': (filename, open(filepath, 'rb'), 'text/plain')
            })
            response = requests.post(
                url,
                data=mpart,
                headers={
                    'Content-Type': mpart.content_type,
                    'Content-Length': '%d' % length,
                },
                auth=chacractl.config['credentials'],
                verify=chacractl.config['ssl_verify'])
            if response.status_code > 201:
                logger.warning("%s -> %s", response.status_code,
                               response.text)
                response.raise_for_status()
            if not self.upload_is_verified(url, filename, digest):
                # Since this is a new file, attempt to delete it
                logging.error('Deleting corrupted file from server...')
                self.delete(file_url)
                raise SystemExit(
                    'Checksum mismatch: remote server has wrong checksum for %s'
                    % filepath)

    @retry()
    def put(self, url, filepath):
        """Re-upload an existing binary and verify the server's checksum."""
        filename = os.path.basename(filepath)
        logger.info('resource exists and --force was used, will re-upload')
        logger.info('PUTing file: %s', filepath)
        digest = self.get_checksum(filepath)
        length = os.path.getsize(filepath)
        # NOTE(review): handle left open for GC, as in post() — confirm.
        mpart = MultipartEncoder(
            fields={'file': (filename, open(filepath, 'rb'), 'text/plain')})
        response = requests.put(
            url,
            data=mpart,
            headers={
                'Content-Type': mpart.content_type,
                'Content-Length': '%d' % length,
            },
            auth=chacractl.config['credentials'],
            verify=chacractl.config['ssl_verify'])
        if response.status_code > 201:
            logger.warning("%s -> %s", response.status_code, response.text)
        # trim off binary filename
        url = url.rsplit('/', 2)[0] + "/"
        if not self.upload_is_verified(url, filename, digest):
            # Maybe the old file with a different digest is still there, so
            # don't delete it
            raise SystemExit(
                'Checksum mismatch: server has wrong checksum for %s!'
                % filepath)

    @retry()
    def delete(self, url):
        """Delete a binary resource; a 404 is treated as already-deleted."""
        exists = requests.head(url, verify=chacractl.config['ssl_verify'])
        if exists.status_code == 404:
            logger.warning('resource already deleted')
            logger.warning('SKIP %s', url)
            return
        logger.info('DELETE file: %s', url)
        response = requests.delete(
            url,
            auth=chacractl.config['credentials'],
            verify=chacractl.config['ssl_verify'])
        # Warn on any non-2xx response (but do not raise).
        if response.status_code < 200 or response.status_code > 299:
            logger.warning("%s -> %s", response.status_code, response.text)

    def main(self):
        """CLI entry point: dispatch create/delete, reading paths from
        stdin when input is piped."""
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self._help
        self.parser.parse_args()
        self.force = self.parser.has('--force')
        # handle posting binaries:
        if self.parser.has('create'):
            url_part = self.sanitize_url(self.parser.get('create'))
            if not sys.stdin.isatty():
                # read from stdin
                logger.info('reading input from stdin')
                for line in sys.stdin.readlines():
                    filename = self.sanitize_filename(line)
                    if not filename:
                        # skip blank/nonexistent paths silently
                        continue
                    url = os.path.join(self.base_url, url_part)
                    self.post(url, filename)
            else:
                # interactive: the binary path is the final CLI argument
                filepath = self.sanitize_filename(self.argv[-1])
                if not filepath:
                    logger.warning('provided path does not exist: %s',
                                   self.argv[-1])
                    return
                url = os.path.join(self.base_url, url_part)
                self.post(url, filepath)
        elif self.parser.has('delete'):
            # "delete" with no value would otherwise crash in sanitize_url.
            if self.parser.get('delete') is None:
                raise SystemExit('Specify a URL to delete a binary.')
            url_part = self.sanitize_url(self.parser.get('delete'))
            url = os.path.join(self.base_url, url_part)
            self.delete(url)
class Patch(object):
    help_menu = 'apply patches from patch-queue branch'
    _help = """
Generate patches from a patch-queue branch.

Options:
--nobz    Do not require "Resolves: rhbz#" for every patch. The default is to
          require them. Use this CLI option to override the default.
"""
    name = 'patch'

    def __init__(self, argv):
        # argv: raw CLI arguments for this subcommand.
        self.argv = argv
        self.options = ('--nobz', )

    def main(self):
        """Parse CLI flags and run the patch-export workflow."""
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        self._run()

    def help(self):
        return self._help

    def _run(self):
        """ Generate quilt patch series with gbp pq, and update d/rules """
        # Determine the names of the patch-queue branch and debian branch
        current_branch = util.current_branch()
        patch_queue_branch = util.current_patch_queue_branch()
        debian_branch = util.current_debian_branch()
        # TODO: default to fetching from upstream, the way rdopkg patch does.
        # Get the new sha1 to insert into the $COMMIT variable in d/rules
        cmd = ['git', 'rev-parse', patch_queue_branch]
        output = subprocess.check_output(cmd)
        patch_queue_sha1 = output.rstrip()
        if six.PY3:
            # check_output returns bytes on Python 3.
            patch_queue_sha1 = output.decode('utf-8').rstrip()
        # Switch to "debian" branch if necessary
        if current_branch != debian_branch:
            cmd = ['git', 'checkout', debian_branch]
            subprocess.check_call(cmd)
        # Get the original (old) patch series
        old_series = self.read_series_file('debian/patches/series')
        old_subjects = [patch.subject for patch in old_series]
        # Git-buildpackage pq operation
        cmd = ['gbp', 'pq', 'export']
        subprocess.check_call(cmd)
        # Add all patch files to Git's index
        cmd = ['git', 'add', '--all', 'debian/patches']
        subprocess.check_call(cmd)
        # Bail early if gbp pq did nothing.
        if not self.read_git_debian_patches_status():
            print('No new patches, quitting.')
            raise SystemExit(1)
        # Replace $COMMIT sha1 in d/rules
        old_sha1 = read_commit()
        if old_sha1:
            rules = read_rules_file()
            with open('debian/rules', 'w') as fileh:
                fileh.write(rules.replace(old_sha1, patch_queue_sha1))
        # Get the new patch series
        new_series = self.read_series_file('debian/patches/series')
        # Select only the ones that are new (according to commit subjects)
        new_series = [p for p in new_series
                      if p.subject not in old_subjects]
        if not new_series:
            # Maybe we rewrote some patch files in place?
            # Check Git itself for changed files:
            new_series = self.read_git_debian_patches()
        # Add patch entries to d/changelog
        changelog = self.generate_changelog(new_series)
        try:
            ensure_bzs(changelog)
        except BzNotFound:
            # --nobz waives the "Resolves: rhbz#" requirement.
            if not self.parser.has('--nobz'):
                raise
        util.bump_changelog(changelog)
        # Assemble a standard commit message string "clog".
        clog = "debian: %s\n" % util.get_deb_version()
        clog += "\n"
        clog += "Add patches from %s\n" % patch_queue_branch
        clog += "\n"
        clog += util.format_changelog(changelog)
        # Commit everything with the standard commit message.
        with tempfile.NamedTemporaryFile(mode='w+') as temp:
            temp.write(clog)
            temp.flush()
            cmd = ['git', 'commit',
                   'debian/changelog', 'debian/patches', 'debian/rules',
                   '-F', temp.name]
            subprocess.check_call(cmd)
        # Summarize this commit on STDOUT for the developer.
        # (This matches the behavior of "rdopkg patch".)
        cmd = ['git', '--no-pager', 'log', '--name-status', 'HEAD~..HEAD']
        subprocess.check_call(cmd)

    def generate_changelog(self, series):
        """
        Generate a list of changelog entries for this gbp Patch series.

        :param series: iterable of gbp Patch objects (optionally carrying a
                       ``.git_action`` attribute set by this class).
        :return: a list of strings
        """
        changelog = []
        for p in series:
            # If there was some in-place Git modification for this patch,
            # (.git_action attribute), include that in our log.
            action = getattr(p, 'git_action', 'A')
            # Make common actions human-readable:
            if action == 'M':
                action = 'Modified'
            if action == 'D':
                action = 'Deleted'
            if action == 'R':
                # We don't log .patch file renames
                continue
            change = '%s %s' % (action, p.path)
            if action == 'A':
                # This was a simple patch addition, so just log the patch's
                # subject.
                change = p.subject
            bzs = self.get_rhbzs(p)
            bzstr = ' '.join(map(lambda x: 'rhbz#%s' % x, bzs))
            if bzstr != '':
                change += ' (%s)' % bzstr
            changelog.append(change)
        return changelog

    def get_rhbzs(self, patch):
        """
        Return all RHBZ numbers from a Patch's subject and body.

        :param patch: gbp.patch_series.Patch``
        """
        bzs = re.findall(BZ_REGEX, patch.subject)
        body = patch.long_desc
        try:
            if patch.git_action == 'D':
                # patch.long_desc will be empty.
                # Read the deleted file's description from Git instead.
                body = self.read_deleted_patch_description(patch.path)
        except AttributeError:
            # This was a simple patch addition, so we'll just search this
            # patch's .long_desc.
            pass
        bzs.extend(re.findall(BZ_REGEX, body))
        return bzs

    def read_series_file(self, file_):
        # Thin wrapper over gbp's series-file parser.
        return gbp.patch_series.PatchSeries.read_series_file(file_)

    def read_git_debian_patches_status(self):
        """
        Return a list of all edited Debian patch files (from "git status").

        :return: a list of actions/filesname pairs. For example:
          [
            ['M', 'debian/patches/0001-foo.patch'],
            ['D', 'debian/patches/0002-bar.patch'],
          ]
        """
        cmd = ['git', 'status', '-s', 'debian/patches/']
        output = subprocess.check_output(cmd)
        if six.PY3:
            output = output.decode('utf-8')
        result = []
        for line in output.splitlines():
            if line.endswith('.patch'):
                # split into [status-letter, filename]
                result.append(line.split(None, 1))
        return result

    def read_git_debian_patches(self):
        """
        Load all edited Debian patches (from "git status") into Patch objects.

        The returned Patch objects have an extra ".git_action" attribute. Use
        this to determine what happened to the patch in Git.

        :return: a list of gbp.patch_series.Patch objects
        """
        patches = []
        for (action, filename) in self.read_git_debian_patches_status():
            patch = gbp.patch_series.Patch(filename)
            # Hack: record what happened to this patch file:
            patch.git_action = action
            patches.append(patch)
        return patches

    def read_deleted_patch_description(self, filename):
        """
        Parse a deleted .patch file with gbp.patch_series.Patch.

        For deleted .patch files, most of the gbp.patch_series.Patch
        attributes from read_git_debian_patches() are empty, because the file
        no longer exists. More hackery to recover the original .long_desc so
        we can recover the original RHBZ number.

        :returns: ``str``, the long_desc attribute.
        """
        with tempfile.NamedTemporaryFile(mode='w+') as temp:
            # Recover the deleted file's last committed content.
            cmd = ['git', 'show', 'HEAD:%s' % filename]
            subprocess.call(cmd, stdout=temp)
            temp.flush()
            temppatch = gbp.patch_series.Patch(temp.name)
            temppatch._read_info()  # XXX internal API here :(
            return temppatch.long_desc
class Project(object):
    _help = dedent("""
    Handle projects on a remote chacra instance.

    Creating a new project::

        chacractl project create project

    Options:

    create        Creates a new project
    """)
    help_menu = "create projects"
    options = ['create']

    def __init__(self, argv):
        # argv: raw CLI arguments for this subcommand.
        self.argv = argv

    @property
    def base_url(self):
        # NOTE(review): this joins 'binaries' even though the class manages
        # projects — confirm the intended endpoint.
        return os.path.join(chacractl.config['url'], 'binaries')

    def sanitize_url(self, url_part):
        # get rid of the leading slash to prevent issues when joining
        url = url_part.lstrip('/')
        # and add a trailing slash so that the request is done at the correct
        # canonical url
        if not url.endswith('/'):
            url = "%s/" % url
        return url

    @retry()
    def post(self, url):
        """Create a project resource, skipping it when it already exists."""
        exists = requests.head(url, verify=chacractl.config['ssl_verify'])
        if exists.status_code == 200:
            logger.warning('resource exists, will not upload')
            logger.warning('SKIP %s', url)
            return
        elif exists.status_code == 404:
            logger.info('POSTing to project: %s', url)
            response = requests.post(
                url,
                auth=chacractl.config['credentials'],
                verify=chacractl.config['ssl_verify'])
            if response.status_code > 201:
                logger.warning("%s -> %s", response.status_code,
                               response.text)
                response.raise_for_status()

    @retry()
    def delete(self, url):
        # XXX This exists here but it is not yet implemented, e.g. nothing
        # calls this method
        exists = requests.head(url, verify=chacractl.config['ssl_verify'])
        if exists.status_code == 404:
            logger.warning('project already deleted')
            logger.warning('SKIP %s', url)
            return
        logger.info('DELETE project: %s', url)
        response = requests.delete(
            url,
            auth=chacractl.config['credentials'],
            verify=chacractl.config['ssl_verify'])
        if response.status_code > 201:
            logger.warning("%s -> %s", response.status_code, response.text)

    def main(self):
        """CLI entry point: create projects, reading names from stdin when
        input is piped."""
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self._help
        self.parser.parse_args()
        # handle posting projects:
        if self.parser.has('create'):
            url_part = self.sanitize_url(self.parser.get('create'))
            if not sys.stdin.isatty():
                # read from stdin
                logger.info('reading input from stdin')
                # NOTE(review): the loop variable `line` is unused; the same
                # url_part is posted once per stdin line — confirm intent.
                for line in sys.stdin.readlines():
                    url = os.path.join(self.base_url, url_part)
                    self.post(url)
            else:
                url = os.path.join(self.base_url, url_part)
                self.post(url)
        # XXX this exists here but it not yet enabled from the CLI
        elif self.parser.has('delete'):
            url_part = self.sanitize_url(self.parser.get('delete'))
            url = os.path.join(self.base_url, url_part)
            self.delete(url)
class MergePatches(object):
    help_menu = 'Merge patches from RHEL -patches branch to patch-queue branch'
    _help = """
Fetch the latest patches branch that rdopkg uses, and then fast-forward merge
that into our local patch-queue branch, so that both branches align.

This command helps to align the patch series between our RHEL packages and
our Ubuntu packages.

Options:
--force         Do a hard reset, rather than restricting to fast-forward
                merges only. Use this option if the RHEL patches branch was
                amended or rebased for some reason.
"""
    name = 'merge-patches'

    def __init__(self, argv):
        self.argv = argv
        self.options = ['--force', '--hard-reset']

    def main(self):
        """Parse CLI arguments and run the merge, rejecting unknown options."""
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        force = False
        if self.parser.has(['--force', '--hard-reset']):
            force = True
        unknown = self.parser.unknown_commands
        if unknown:
            log.error('unknown option %s', ' '.join(unknown))
            return self.parser.print_help()
        self._run(force)

    def help(self):
        return self._help

    def _run(self, force=False):
        """Align the local patch-queue branch with the RHEL -patches branch.

        With ``force``, overwrite the local branch instead of restricting
        to fast-forward updates.
        """
        # Figure out which branches are involved.
        current_branch = util.current_branch()
        debian_branch = util.current_debian_branch()
        patches_branch = util.current_patches_branch()
        rhel_patches_branch = self.get_rhel_patches_branch(debian_branch)
        remote_ref = 'patches/' + rhel_patches_branch
        if current_branch == patches_branch:
            # HEAD is our patch-queue branch; update it directly.
            if force:
                # Hard reset onto the remote-tracking ref.
                cmd = ['git', 'reset', '--hard', remote_ref]
            else:
                # eg. "git pull --ff-only patches/ceph-2-rhel-patches"
                cmd = ['git', 'pull', '--ff-only', remote_ref]
        else:
            # HEAD is our debian branch; move the patch-queue ref via a
            # local fetch, eg.
            # "git fetch . patches/ceph-2-rhel-patches:patch-queue/ceph-2-ubuntu"
            refspec = 'patches/%s:%s' % (rhel_patches_branch, patches_branch)
            if force:
                # A "+" prefix on the refspec makes this a forced update.
                cmd = ['git', 'push', '.', '+' + refspec]
            else:
                cmd = ['git', 'fetch', '.', refspec]
        log.info(' '.join(cmd))
        subprocess.check_call(cmd)

    def get_rhel_patches_branch(self, debian_branch):
        """
        Get the RHEL -patches branch corresponding to this debian branch.

        Examples:
        ceph-2-ubuntu -> ceph-2-rhel-patches
        ceph-2-trusty -> ceph-2-rhel-patches
        ceph-2-xenial -> ceph-2-rhel-patches
        ceph-1.3-ubuntu -> ceph-1.3-rhel-patches
        ceph-2-ubuntu-hotfix-bz123 -> ceph-2-rhel-patches-hotfix-bz123
        """
        product, version, distro = debian_branch.split('-', 2)
        if '-' in distro:
            # The distro carries an extra suffix (eg. "-hotfix-bz123");
            # carry it over to the rhel-patches branch name.
            _, suffix = distro.split('-', 1)
            return '%s-%s-rhel-patches-%s' % (product, version, suffix)
        return '%s-%s-rhel-patches' % (product, version)
class Project(object):
    """Manage project resources on a remote chacra instance over HTTP."""

    _help = dedent(
        """
    Handle projects on a remote chacra instance.

    Creating a new project::

        chacractl project create project

    Options:

    create        Creates a new project
    """
    )
    help_menu = "create projects"
    options = ["create"]

    def __init__(self, argv):
        self.argv = argv

    @property
    def base_url(self):
        # Project resources live under the configured server's /binaries/
        # endpoint.
        return os.path.join(chacractl.config["url"], "binaries")

    def sanitize_url(self, url_part):
        """Normalize a user-supplied URL fragment.

        Strips any leading slash (which would otherwise reset
        ``os.path.join``) and guarantees a trailing slash so the request
        hits the canonical URL.
        """
        # get rid of the leading slash to prevent issues when joining
        url = url_part.lstrip("/")
        # and add a trailing slash so that the request is done at the correct
        # canonical url
        if not url.endswith("/"):
            url = "%s/" % url
        return url

    # NOTE(review): unlike the other Project implementation in this file,
    # post/delete here are not wrapped in @retry() — confirm whether
    # transient-failure retries are wanted for this copy as well.
    def post(self, url):
        """Create the project at ``url``, skipping it if it already exists.

        Raises for HTTP error statuses above 201 via ``raise_for_status``.
        """
        exists = requests.head(url, verify=chacractl.config["ssl_verify"])
        if exists.status_code == 200:
            logger.warning("resource exists, will not upload")
            logger.warning("SKIP %s", url)
            return
        elif exists.status_code == 404:
            logger.info("POSTing to project: %s", url)
            response = requests.post(
                url,
                auth=chacractl.config["credentials"],
                verify=chacractl.config["ssl_verify"])
            if response.status_code > 201:
                logger.warning("%s -> %s", response.status_code, response.text)
                response.raise_for_status()

    def delete(self, url):
        """Delete the project at ``url``, skipping it if already gone."""
        # XXX This exists here but it is not yet implemented, e.g. nothing
        # calls this method
        exists = requests.head(url, verify=chacractl.config["ssl_verify"])
        if exists.status_code == 404:
            logger.warning("project already deleted")
            logger.warning("SKIP %s", url)
            return
        logger.info("DELETE project: %s", url)
        response = requests.delete(
            url,
            auth=chacractl.config["credentials"],
            verify=chacractl.config["ssl_verify"])
        if response.status_code > 201:
            logger.warning("%s -> %s", response.status_code, response.text)

    def main(self):
        """Parse CLI arguments and dispatch to ``post`` (or ``delete``)."""
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self._help
        self.parser.parse_args()
        # handle posting projects:
        if self.parser.has("create"):
            url_part = self.sanitize_url(self.parser.get("create"))
            if not sys.stdin.isatty():
                # read from stdin
                logger.info("reading input from stdin")
                for line in sys.stdin.readlines():
                    # BUG FIX: the original ignored ``line`` and re-posted
                    # the same ``url_part`` URL once per stdin line; create
                    # one project per (sanitized) stdin line instead.
                    url = os.path.join(self.base_url,
                                       self.sanitize_url(line.strip()))
                    self.post(url)
            else:
                url = os.path.join(self.base_url, url_part)
                self.post(url)
        # XXX this exists here but it not yet enabled from the CLI
        elif self.parser.has("delete"):
            url_part = self.sanitize_url(self.parser.get("delete"))
            url = os.path.join(self.base_url, url_part)
            self.delete(url)
class Localbuild(object):
    help_menu = 'build a package on the local system'
    _help = """
Build a package on the local system, using pbuilder.

Options:
--dist    "xenial" or "trusty". If unspecified, rhcephpkg will choose one
          based on the current branch's name.

Rules for automatic distro selection:

1) If the branch suffix is an ubuntu distro name, use that.
   eg "ceph-3.0-xenial".
2) If a branch has a version number starting with "1.3", return "trusty".
   eg. "ceph-1.3-ubuntu"
3) If a branch has a version number starting with "2" return "xenial".
   eg. "ceph-2-ubuntu"
4) If a branch has a version number starting with "3" return "xenial".
   eg. "ceph-3.0-ubuntu"
5) Otherwise raise, because we need to add more rules.
"""
    name = 'localbuild'

    def __init__(self, argv):
        self.argv = argv
        self.options = ('--dist', )

    def main(self):
        """Parse CLI arguments, resolve the distro, and run the build."""
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        # Allow user to override the distro.
        if self.parser.has('--dist'):
            distro = self.parser.get('--dist')
            if distro is None:
                raise SystemExit('Specify a distro to --dist')
        else:
            distro = get_distro()
        unknown = self.parser.unknown_commands
        if unknown:
            log.error('unknown option %s', ' '.join(unknown))
            return self.parser.print_help()
        self._run(distro)

    def help(self):
        return self._help

    def _run(self, distro):
        """ Build a package on the local system, using pbuilder. """
        package = util.package_name()
        os.environ['BUILDER'] = 'pbuilder'
        parallel = self._get_j_arg(cpu_count())
        cache_path = '/var/cache/pbuilder/base-%s-amd64.tgz' % distro
        setup_pbuilder_cache(cache_path, distro)
        util.setup_pristine_tar_branch()
        # TODO: we should also probably check parent dir for leftovers and warn
        # the user to delete them (or delete them ourselves?)
        cmd = ['gbp', 'buildpackage',
               '--git-dist=%s' % distro,
               '--git-arch=amd64',
               '--git-verbose',
               '--git-pbuilder',
               parallel, '-us', '-uc']
        log.info('building %s with pbuilder', package)
        subprocess.check_call(cmd)

    def _get_j_arg(self, cpus, total_ram_gb=None):
        """ Returns a string like "-j4" or "-j8". j is the number of
        processors, with a maximum of x, where x = TOTAL_RAM_GB / 4.

        We want to use all our processors (a high "j" value), but the build
        process will fail with an "out of memory" error out if this j value is
        too high.

        An 8 GB system would have a maximum of -j2
        A 16 GB system would have a maximum of -j4
        A 32 GB system would have a maximum of -j8
        """
        if total_ram_gb is None:
            # Detect physical memory via sysconf; the GiB figure is a
            # decimal, eg. 7.707 on an 8GB system, so round up to the
            # nearest GB for our purposes.
            bytes_total = (os.sysconf('SC_PAGE_SIZE') *
                           os.sysconf('SC_PHYS_PAGES'))
            total_ram_gb = math.ceil(bytes_total / (1024. ** 3))
        # Cap the processor count by available memory, never dropping
        # below a single job.
        capped = min(cpus, total_ram_gb / 4)
        return '-j%d' % max(capped, 1)