def main(self):
    """Deploy to a remote host by shelling out to ansible-playbook."""
    transport = Transport(self.arguments, options=self.options, check_help=True)
    transport.catch_help = self._help
    transport.parse_args()
    transport.catches_help()

    if not transport.unknown_commands:
        log.error("it is required to pass a host to deploy to, but none was provided")
        raise SystemExit(1)

    # the last positional argument is treated as the target host
    host = transport.unknown_commands[-1]
    verbosity_flag = '-vvvv' if transport.has('-vvvv') else '-v'
    cmd = [
        "ansible-playbook",
        "-i",
        # the trailing comma is preserved from the original invocation
        # (ansible's inline single-host inventory form)
        "%s," % host,
        verbosity_flag,
        "-u",
        transport.get('--user', 'vagrant'),
        "--extra-vars",
        'branch=%s' % transport.get('--branch', 'master'),
        "deploy.yml",
    ]
    log.debug("Running command: %s" % ' '.join(cmd))
    out, err, code = process.run(cmd, cwd=playbook_path)
    log.error(err)
    log.debug(out)
class Repo(object):
    """CLI handler for repository actions against a remote chacra instance."""

    _help = dedent("""
    Operate on repositories on a remote chacra instance. Both `recreate` and
    `update` calls are not immediate. They rely on the async service managing
    repos which usually have a delay applied to them.

    Options:

    recreate    Mark a repository to be removed and created from scratch again.
    update      Repository will get updated by running the repo tools on it again.
    """)

    help_menu = "recreate, delete, or update repositories"
    options = ['recreate', 'update']

    def __init__(self, argv):
        self.argv = argv

    @property
    def base_url(self):
        # repository endpoints hang off of /repos/ on the remote instance
        return os.path.join(chacractl.config['url'], 'repos')

    @catches(requests.exceptions.HTTPError, handler=requests_errors)
    @retry()
    def post(self, url):
        """Verify the resource exists, POST to it, and log the JSON reply."""
        # a HEAD first so a missing repo surfaces as an HTTPError before POSTing
        head_response = requests.head(
            url,
            auth=chacractl.config['credentials'],
            verify=chacractl.config['ssl_verify'])
        head_response.raise_for_status()
        logger.info('POST: %s', url)
        post_response = requests.post(
            url,
            auth=chacractl.config['credentials'],
            verify=chacractl.config['ssl_verify'])
        post_response.raise_for_status()
        for field, value in post_response.json().items():
            logger.info("%s: %s", field, value)

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self._help
        self.parser.parse_args()
        # first matching action wins, mirroring an if/elif chain
        for action in ('recreate', 'update'):
            repo = self.parser.get(action)
            if repo:
                self.post(os.path.join(self.base_url, repo, action))
                break
class Repo(object):
    """CLI handler for repository actions against a remote chacra instance."""

    _help = dedent(
        """
        Operate on repositories on a remote chacra instance. Both `recreate` and
        `update` calls are not immediate. They rely on the async service managing
        repos which usually have a delay applied to them.

        Options:

        recreate    Mark a repository to be removed and created from scratch again.
        update      Repository will get updated by running the repo tools on it again.
        """
    )
    help_menu = "recreate, delete, or update repositories"
    options = ["recreate", "update"]

    def __init__(self, argv):
        self.argv = argv

    @property
    def base_url(self):
        # repository endpoints hang off of /repos/ on the remote instance
        return os.path.join(chacractl.config["url"], "repos")

    @catches(requests.exceptions.HTTPError, handler=requests_errors)
    def post(self, url):
        """Verify the resource exists, POST to it, and log the JSON reply."""
        # HEAD first: a missing repo raises HTTPError before we POST
        head_response = requests.head(
            url,
            auth=chacractl.config["credentials"],
            verify=chacractl.config["ssl_verify"],
        )
        head_response.raise_for_status()
        logger.info("POST: %s", url)
        post_response = requests.post(
            url,
            auth=chacractl.config["credentials"],
            verify=chacractl.config["ssl_verify"],
        )
        post_response.raise_for_status()
        for field, value in post_response.json().items():
            logger.info("%s: %s", field, value)

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self._help
        self.parser.parse_args()

        recreate_target = self.parser.get("recreate")
        update_target = self.parser.get("update")
        if recreate_target:
            self.post(os.path.join(self.base_url, recreate_target, "recreate"))
        elif update_target:
            self.post(os.path.join(self.base_url, update_target, "update"))
def main(self, argv):
    """Entry point: set up console logging, load credentials, and dispatch."""
    self._setup_console_logging()
    self.api_credentials()

    # TODO: Need to implement `--filename` and make it available
    options = [['--log', '--logging']]
    parser = Transport(
        argv,
        mapper=self.mapper,
        options=options,
        check_help=False,
        check_version=False,
    )
    parser.parse_args()
    chacractl.config['verbosity'] = parser.get('--log', 'info')

    parser.catch_help = self.help()
    parser.catch_version = chacractl.__version__
    parser.mapper = self.mapper

    # bare invocation (only the program name): show usage instead
    if len(argv) <= 1:
        return parser.print_help()
    parser.dispatch()
    parser.catches_help()
    parser.catches_version()

def _setup_console_logging(self):
    """Attach a DEBUG-level, color-formatted stream handler to the root logger."""
    handler = logging.StreamHandler()
    handler.setFormatter(log.color_format())
    handler.setLevel(logging.DEBUG)
    root_logger = logging.getLogger()
    root_logger.setLevel(logging.DEBUG)
    root_logger.addHandler(handler)
def main(self):
    """Collect node data, run every check, and report; exit 1 on findings."""
    options = ['--ignore']
    config_ignores = ceph_medic.config.file.get_list('check', '--ignore')
    parser = Transport(self.argv, options=options, check_version=False)
    parser.catch_help = self._help()
    parser.parse_args()

    # CLI-provided codes win; fallback to the configuration if nothing is
    # defined in the CLI
    ignored_codes = as_list(parser.get('--ignore', '')) or config_ignores

    if len(self.argv) < 1:
        return parser.print_help()

    # populate the nodes metadata with the configured nodes
    for daemon, nodes in ceph_medic.config.nodes.items():
        entries = []
        for node in nodes:
            entry = {'host': node['host']}
            if 'container' in node:
                entry['container'] = node['container']
            entries.append(entry)
        ceph_medic.metadata['nodes'][daemon] = entries

    collector.collect()
    check_runner = runner.Runner()
    check_runner.ignore = ignored_codes
    results = check_runner.run()
    runner.report(results)

    # XXX might want to make this configurable to not bark on warnings for
    # example, setting forcefully for now, but the results object doesn't
    # make a distinction between error and warning (!)
    if results.errors or results.warnings:
        sys.exit(1)
def main(self):
    """Build an inventory (mons/osds) by querying a remote monitor node.

    The last CLI argument is the monitor host (or a ceph.conf path).
    Raises SystemExit when no argument was given, when the report lacks a
    monmap, or when the report yields no hosts at all.
    """
    options = ['--stdout']
    parser = Transport(self.argv, options=options, check_version=False)
    parser.catch_help = self._help()
    parser.parse_args()
    if len(self.argv) == 1:
        raise SystemExit(
            "A monitor hostname or a ceph.conf file is required as an argument"
        )
    node = self.argv[-1]
    inventory = {}
    with get_connection(node) as conn:
        report = get_mon_report(conn)
        try:
            mons = report['monmap']['mons']
        except KeyError:
            # report is not the shape we expected; surface it whole for debugging
            raise SystemExit(report)
        inventory['mons'] = [i['name'] for i in mons]
        osds = report['osd_metadata']
        inventory['osds'] = [i['hostname'] for i in osds]
        # BUGFIX: both keys are always assigned above, so the previous
        # `if not inventory` check was dead code (a dict with keys is always
        # truthy). Check whether any hosts were actually collected instead.
        if not any(inventory.values()):
            raise SystemExit(
                'no hosts were found from remote monitor node: %s' % node)
        generate_inventory(inventory, to_stdout=parser.get('--stdout'))
        conn.exit()
    return
def parse_args(self):
    """Parse CLI flags, record the whitelist, and start the engine loop."""
    parser = Transport(self.argv, options=['--allowed'])
    parser.catch_help = self._help
    parser.parse_args()

    # default to an empty whitelist when --allowed was not given
    delgado.config['allowed'] = parser.get('--allowed') or []

    engine = Engine(connection=self.connection)
    engine.run_forever()
def parse_args(self):
    """Parse CLI flags and run the engine bound to a unix socket."""
    parser = Transport(self.argv, options=['--socket-location'])
    parser.catch_help = self._help
    parser.parse_args()

    socket_path = parser.get('--socket-location') or '/tmp/pytest.sock'
    # only py.test invocations are permitted through this engine
    delgado.config['allowed'] = ['py.test']

    engine = Engine(socket_location=socket_path)
    engine.run_forever()
def parse_args(self, argv=None):
    """
    pass argv during testing
    """
    argv = self.argv if argv is None else argv
    parser = Transport(argv, options=[['--output', '-o']])
    parser.catch_help = self.help()
    parser.parse_args()

    self.source = util.infer_path(parser.unknown_commands)
    # default the output name to "<source>-dvd.iso"
    self.output = parser.get('--output', self.source + '-dvd.iso')

    self.check_dependency()
    self.make_iso()
    self.make_sha256sum()
def main(self, argv):
    """Parse global flags, load config and hosts, then dispatch a sub-command.

    Ordering matters: the configuration must be loaded before logging can be
    set up, and logging before anything else that may emit messages.
    """
    options = [
        '--cluster',
        '--ssh-config',
        '--inventory',
        '--config',
    ]
    parser = Transport(argv, options=options,
                       check_help=False,
                       check_version=False)
    parser.parse_args()

    self.config_path = parser.get('--config', configuration.location())

    # load medic configuration
    loaded_config = configuration.load(
        path=parser.get('--config', self.config_path))

    # this is the earliest we can have enough config to setup logging
    log.setup(loaded_config)

    # update the module-wide configuration object
    ceph_medic.config.update(configuration.get_overrides(loaded_config))

    # SSH config
    ceph_medic.config['ssh_config'] = parser.get('--ssh-config')
    if ceph_medic.config['ssh_config']:
        ssh_config_path = ceph_medic.config['ssh_config']
        if not os.path.exists(ssh_config_path):
            terminal.error("the given ssh config path does not exist: %s" % ssh_config_path)
            sys.exit()

    ceph_medic.config['cluster_name'] = parser.get('--cluster')
    # NOTE(review): metadata name is hardcoded to 'ceph' even when --cluster
    # is given — confirm whether that is intentional
    ceph_medic.metadata['cluster_name'] = 'ceph'

    # Hosts file
    self.hosts_file = parser.get('--inventory', configuration.get_host_file())

    # find the hosts files, by the CLI first, fallback to the configuration
    # file, and lastly if none of those are found or defined, try to load
    # from well known locations (cwd, and /etc/ansible/)
    loaded_hosts = configuration.load_hosts(
        parser.get(
            '--inventory',
            ceph_medic.config.get('--inventory', self.hosts_file)))
    ceph_medic.config['nodes'] = loaded_hosts.nodes
    ceph_medic.config['hosts_file'] = loaded_hosts.filename
    self.hosts_file = loaded_hosts.filename

    parser.catch_version = ceph_medic.__version__
    parser.mapper = self.mapper
    parser.catch_help = self.help(parser.subhelp())
    # bare invocation: show help instead of dispatching
    if len(argv) <= 1:
        return parser.print_help()
    ceph_medic.config['config_path'] = self.config_path
    parser.dispatch()
    parser.catches_help()
    parser.catches_version()
def parse_args(self):
    """Route the parsed CLI to the matching sub-action, forwarding modifiers."""
    parser = Transport(
        self.argv,
        options=['create', 'update', 'generate', 'remove', 'get'])
    parser.catch_help = self._help
    parser.parse_args()

    if parser.has('create'):
        return self.create(parser.get('create'))
    if parser.has('update'):
        # forward only the recognized optional modifiers
        allowed = ('key', 'step', 'secret', 'b32')
        modifiers = [arg for arg in parser.arguments if arg in allowed]
        return self.update(parser.get('update'), modifiers)
    if parser.has('generate'):
        return self.generate()
    if parser.has('remove'):
        return self.remove(parser.get('remove'))
    if parser.has('get'):
        modifiers = [arg for arg in parser.arguments if arg in ['pin']]
        return self.get(parser.get('get'), modifiers)
def main(self):
    """Run the deploy playbook against a single host given on the CLI."""
    parser = Transport(self.arguments, options=self.options, check_help=True)
    parser.catch_help = self._help
    parser.parse_args()
    parser.catches_help()

    branch = parser.get('--branch', 'master')
    user = parser.get('--user', 'vagrant')
    # bump ansible verbosity only when explicitly requested
    if parser.has('-vvvv'):
        verbosity = '-vvvv'
    else:
        verbosity = '-v'

    if not parser.unknown_commands:
        log.error("it is required to pass a host to deploy to, but none was provided")
        raise SystemExit(1)

    target = parser.unknown_commands[-1]
    command = [
        "ansible-playbook",
        "-i",
        "%s," % target,
        verbosity,
        "-u",
        user,
        "--extra-vars",
        'branch=%s' % branch,
        "deploy.yml",
    ]
    log.debug("Running command: %s" % ' '.join(command))
    out, err, code = process.run(command, cwd=playbook_path)
    log.error(err)
    log.debug(out)
def main(self, argv):
    """Top-level dispatcher: record global flags, then route to sub-commands."""
    parser = Transport(
        argv,
        mapper=self.mapper,
        options=[['--log', '--logging']],
        check_help=False,
        check_version=False,
    )
    parser.parse_args()

    # global flags consumed before dispatching
    merfi.config['verbosity'] = parser.get('--log', 'info')
    merfi.config['check'] = parser.has('--check')

    parser.catch_help = self.help()
    parser.catch_version = merfi.__version__
    parser.mapper = self.mapper

    # bare invocation: show usage instead of dispatching
    if len(argv) <= 1:
        return parser.print_help()
    parser.dispatch()
    parser.catches_help()
    parser.catches_version()
class Binary(object):
    """CLI handler to create, re-upload, or delete binaries on a chacra host.

    Uploads are checksummed locally with sha512 and verified against the
    server after every POST/PUT.
    """

    _help = dedent("""
    Operate binaries on a remote chacra instance.

    Creating a new binary::

        chacractl binary create project/ref/distro/distro_version/arch /path/to/binary

    Options:

    create        Creates a new binary at a given distro version architecture
    delete        Deletes an existing binary from chacra
    --force       If the resource exists, force the upload
    """)
    help_menu = "create, update metadata, or delete binaries"
    options = ['create', '--force', 'delete']

    def __init__(self, argv):
        self.argv = argv

    @property
    def base_url(self):
        # all binary endpoints hang off of /binaries/ on the remote instance
        return os.path.join(
            chacractl.config['url'], 'binaries'
        )

    def sanitize_filename(self, line):
        """ lines may come with newlines and leading slashes make sure they
        are clean so that they can be processed """
        # returns None implicitly when the path is not an existing file
        line = line.strip('\n')
        if os.path.isfile(line):
            return os.path.abspath(line)

    def sanitize_url(self, url_part):
        # get rid of the leading slash to prevent issues when joining
        url = url_part.lstrip('/')
        # and add a trailing slash so that the request is done at the correct
        # canonical url
        if not url.endswith('/'):
            url = "%s/" % url
        return url

    def load_file(self, filepath):
        """Open ``filepath`` and return ``(open file object, sha512 hexdigest)``.

        The file object is rewound to offset 0 so the caller can stream it;
        the caller is responsible for closing it.
        """
        chsum = sha512()
        binary = open(filepath, 'rb')
        for chunk in iter(lambda: binary.read(4096), b''):
            chsum.update(chunk)
        binary.seek(0)
        return binary, chsum.hexdigest()

    def upload_is_verified(self, arch_url, filename, digest):
        """Return True when the server-side checksum of ``filename`` matches ``digest``."""
        r = requests.get(
            arch_url,
            verify=chacractl.config['ssl_verify'])
        r.raise_for_status()
        arch_data = r.json()
        remote_digest = arch_data[filename]['checksum']
        verified = remote_digest == digest
        if not verified:
            logging.error(
                'Checksum mismatch: server has wrong checksum for %s',
                filename)
            logging.error('local checksum: %s', digest)
            logging.error('remote checksum: %s', remote_digest)
        return verified

    def post(self, url, filepath):
        """Upload a new binary; falls back to PUT when it already exists.

        Raises SystemExit when the server-side checksum does not match after
        the upload (the corrupt remote copy is deleted first).
        """
        filename = os.path.basename(filepath)
        file_url = os.path.join(url, filename) + '/'
        # a HEAD tells us whether this exact binary is already on the server
        exists = requests.head(file_url,
                               verify=chacractl.config['ssl_verify'])
        if exists.status_code == 200:
            if not self.force:
                logger.warning(
                    'resource exists and --force was not used, will not upload'
                )
                logger.warning('SKIP %s', file_url)
                return
            return self.put(file_url, filepath)
        elif exists.status_code == 404:
            logger.info('POSTing file: %s', filepath)
            binary, digest = self.load_file(filepath)
            with binary:
                response = requests.post(
                    url,
                    files={'file': binary},
                    auth=chacractl.config['credentials'],
                    verify=chacractl.config['ssl_verify'])
            if response.status_code > 201:
                logger.warning("%s -> %s", response.status_code, response.text)
                response.raise_for_status()
            if not self.upload_is_verified(url, filename, digest):
                # Since this is a new file, attempt to delete it
                logging.error('Deleting corrupted file from server...')
                self.delete(file_url)
                raise SystemExit(
                    'Checksum mismatch: remote server has wrong checksum for %s'
                    % filepath)

    def put(self, url, filepath):
        """Re-upload an existing binary and verify the server-side checksum."""
        filename = os.path.basename(filepath)
        logger.info('resource exists and --force was used, will re-upload')
        logger.info('PUTing file: %s', filepath)
        binary, digest = self.load_file(filepath)
        with binary:
            response = requests.put(
                url,
                files={'file': binary},
                auth=chacractl.config['credentials'],
                verify=chacractl.config['ssl_verify'])
        if response.status_code > 201:
            logger.warning("%s -> %s", response.status_code, response.text)
        # trim off binary filename
        url = url.rsplit('/', 2)[0] + "/"
        if not self.upload_is_verified(url, filename, digest):
            # Maybe the old file with a different digest is still there, so
            # don't delete it
            raise SystemExit(
                'Checksum mismatch: server has wrong checksum for %s!'
                % filepath)

    def delete(self, url):
        """DELETE a binary resource, skipping when it is already gone."""
        exists = requests.head(url,
                               verify=chacractl.config['ssl_verify'])
        if exists.status_code == 404:
            logger.warning('resource already deleted')
            logger.warning('SKIP %s', url)
            return
        logger.info('DELETE file: %s', url)
        response = requests.delete(
            url,
            auth=chacractl.config['credentials'],
            verify=chacractl.config['ssl_verify'])
        # anything outside the 2xx range is surfaced (but not raised)
        if response.status_code < 200 or response.status_code > 299:
            logger.warning("%s -> %s", response.status_code, response.text)

    def main(self):
        """Entry point: route ``create``/``delete`` requests from the CLI."""
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self._help
        self.parser.parse_args()
        self.force = self.parser.has('--force')
        # handle posting binaries:
        if self.parser.has('create'):
            url_part = self.sanitize_url(self.parser.get('create'))
            if not sys.stdin.isatty():
                # read from stdin
                logger.info('reading input from stdin')
                for line in sys.stdin.readlines():
                    filename = self.sanitize_filename(line)
                    if not filename:
                        continue
                    url = os.path.join(self.base_url, url_part)
                    self.post(url, filename)
            else:
                filepath = self.sanitize_filename(self.argv[-1])
                if not filepath:
                    logger.warning(
                        'provided path does not exist: %s', self.argv[-1]
                    )
                    return
                url = os.path.join(self.base_url, url_part)
                self.post(url, filepath)
        elif self.parser.has('delete'):
            if self.parser.get('delete') is None:
                raise SystemExit('Specify a URL to delete a binary.')
            url_part = self.sanitize_url(self.parser.get('delete'))
            url = os.path.join(self.base_url, url_part)
            self.delete(url)
class Localbuild(object):
    """Build the current package locally via git-buildpackage + pbuilder."""

    help_menu = 'build a package on the local system'
    _help = """
Build a package on the local system, using pbuilder.

Options:
--dist    "xenial" or "trusty". If unspecified, rhcephpkg will choose one
          based on the current branch's name.

Rules for automatic distro selection:

1) If the branch suffix is an ubuntu distro name, use that.
   eg "ceph-3.0-xenial".
2) If a branch has a version number starting with "1.3", return "trusty".
   eg. "ceph-1.3-ubuntu"
3) If a branch has a version number starting with "2" return "xenial".
   eg. "ceph-2-ubuntu"
4) If a branch has a version number starting with "3" return "xenial".
   eg. "ceph-3.0-ubuntu"
5) Otherwise raise, because we need to add more rules.
"""
    name = 'localbuild'

    def __init__(self, argv):
        self.argv = argv
        self.options = ('--dist', )

    def main(self):
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()

        # Allow user to override the distro.
        if not self.parser.has('--dist'):
            distro = get_distro()
        else:
            distro = self.parser.get('--dist')
            if distro is None:
                raise SystemExit('Specify a distro to --dist')

        if self.parser.unknown_commands:
            log.error('unknown option %s',
                      ' '.join(self.parser.unknown_commands))
            return self.parser.print_help()

        self._run(distro)

    def help(self):
        return self._help

    def _run(self, distro):
        """ Build a package on the local system, using pbuilder. """
        pkg_name = util.package_name()

        os.environ['BUILDER'] = 'pbuilder'
        j_arg = self._get_j_arg(cpu_count())
        pbuilder_cache = '/var/cache/pbuilder/base-%s-amd64.tgz' % distro
        setup_pbuilder_cache(pbuilder_cache, distro)

        util.setup_pristine_tar_branch()

        # TODO: we should also probably check parent dir for leftovers and warn
        # the user to delete them (or delete them ourselves?)
        cmd = [
            'gbp',
            'buildpackage',
            '--git-dist=%s' % distro,
            '--git-arch=amd64',
            '--git-verbose',
            '--git-pbuilder',
            j_arg,
            '-us',
            '-uc',
        ]
        log.info('building %s with pbuilder', pkg_name)
        subprocess.check_call(cmd)

    def _get_j_arg(self, cpus, total_ram_gb=None):
        """Return a make-parallelism flag such as "-j4" or "-j8".

        j is the number of processors, capped at TOTAL_RAM_GB / 4, because a
        too-high j value makes the build fail with out-of-memory errors:
        an 8 GB system caps at -j2, 16 GB at -j4, 32 GB at -j8.
        """
        if total_ram_gb is None:
            # probe physical memory via sysconf and round up to whole GBs
            mem_bytes = os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES')
            # mem_bytes / 1 GiB is a decimal, eg. 7.707 on an 8GB system
            total_ram_gb = math.ceil(mem_bytes / (1024. ** 3))
        jobs = min(cpus, total_ram_gb / 4)
        return '-j%d' % max(jobs, 1)
class NewVersion(object):
    """Import a new upstream version into the debian packaging branch."""

    help_menu = 'Import a new version with git-buildpackage and uscan'
    _help = """
Import a new upstream version with "gbp import-orig --uscan".
This command makes it easier to rebase a package to a new upstream version.

Note: the package must use pristine-tar.

Optional Arguments:

[tarball]   The upstream tarball to import. Omit this to use uscan
            (debian/watch file) instead.

            Example: rhcephpkg new-version ansible_2.4.1.0.orig.tar.gz
            This will import the upstream ansible 2.4.1.0 tarball.

-B, --bug   The BZ(s) that this new version resolves.

            Example: rhcephpkg new-version -B "rhbz#12345 rhbz#67980"
            This will add rhbz#12345 and rhbz#67890 to the debian/changelog.
"""
    name = 'new-version'

    def __init__(self, argv):
        self.argv = argv
        self.options = [['-B', '--bug']]

    def main(self):
        """Parse the CLI, then run the import with optional tarball/BZ list."""
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        try:
            tarball = self.parser.unknown_commands[0]
        except IndexError:
            # no positional argument given: gbp will use uscan instead
            tarball = None
        bugstr = self.parser.get('--bug')
        self._run(tarball, bugstr)

    def help(self):
        return self._help

    def _run(self, tarball, bugstr):
        """Drive the import end-to-end: sanity checks, gbp import, dch, commit."""
        # Ensure we're on the right branch.
        current_branch = util.current_branch()
        debian_branch = util.current_debian_branch()
        if current_branch != debian_branch:
            log.error('current branch is "%s"' % current_branch)
            log.error('debian branch is "%s"' % debian_branch)
            raise RuntimeError('Must run `new-version` on debian branch')

        util.setup_pristine_tar_branch()
        self.ensure_gbp_settings()
        self.setup_upstream_branch()

        self.import_orig(tarball)
        version = self.upstream_version()
        self.run_dch(version, bugstr)
        self.commit()
        self.show()

    def ensure_gbp_settings(self):
        """ Ensure some gbp settings are correct. """
        parser = GbpOptionParser('import-orig')
        if parser.config.get('pristine-tar') != 'True':
            err = '"pristine-tar" is %s. Set to "True" in debian/gbp.conf.'
            raise RuntimeError(err % parser.config.get('pristine-tar'))
        if parser.config.get('merge-mode') != 'replace':
            err = '"merge-mode" is %s. Set to "replace" in debian/gbp.conf.'
            raise RuntimeError(err % parser.config.get('merge-mode'))
        # ensure upstream branch is unique for this debian branch
        debian_branch = parser.config.get('debian-branch')
        upstream_branch = parser.config.get('upstream-branch')
        expected = 'upstream/%s' % debian_branch
        if upstream_branch != expected:
            err = '"upstream-branch" is "%s". Set to "%s" in debian/gbp.conf.'
            raise RuntimeError(err % (upstream_branch, expected))

    def setup_upstream_branch(self):
        """ Ensure we have a local "upstream/foo" branch. """
        parser = GbpOptionParser('import-orig')
        upstream_branch = parser.config.get('upstream-branch')
        util.ensure_local_branch(upstream_branch)

    def import_orig(self, tarball=None):
        """ Import new upstream tarball, optionally with uscan. """
        cmd = ['gbp', 'import-orig', '--no-interactive']
        if tarball is None:
            # no tarball given: let uscan discover it via debian/watch
            cmd.append('--uscan')
        else:
            cmd.append(tarball)
        log.info(' '.join(cmd))
        subprocess.check_call(cmd)

    def upstream_version(self):
        """ Find the upstream version we just imported.

        git-buildpackage import-orig will generate this "upstream" tag
        automatically, and we can use it to discover the version of the
        current branch. It uses git-describe, like so:

            git describe --match 'upstream/*' --abbrev=0

        (Note: this method is similar to gbp.deb.git.DebianGitRepository
        debian_version_from_upstream(), but that appends the debian release
        number "-1", and we don't want that here.)
        """
        repo = DebianGitRepository('.')
        tag = repo.find_branch_tag('HEAD', 'HEAD', pattern='upstream/*')
        # should we get tagformat from GbpOptionParser instead of hardcoding?
        tagformat = "upstream/%(version)s"
        return repo.tag_to_version(tag, tagformat)

    def run_dch(self, version, bugstr):
        """ Edit debian/changelog for a new upstream release """
        version_release = version + '-2redhat1'
        text = 'Imported Upstream version %s' % version
        if bugstr:
            # append the resolved BZs to the changelog entry
            text = '%s (%s)' % (text, bugstr)
        # reuse previous distribution
        dist = changelog.distribution()
        cmd = ['dch', '-D', dist, '-v', version_release, text]
        log.info(' '.join(cmd))
        subprocess.check_call(cmd)

    def commit(self):
        """ Commit to Git, basing the message on our debian/changelog. """
        message = changelog.git_commit_message()
        # write the message to a temp file so multi-line bodies survive intact
        with tempfile.NamedTemporaryFile(mode='w+') as temp:
            temp.write(message)
            temp.flush()
            cmd = ['git', 'commit', 'debian/changelog', '-F', temp.name]
            subprocess.check_call(cmd)

    def show(self):
        """ Show our last Git commit. """
        subprocess.check_call(['git', 'show'])
class Binary(object):
    """CLI handler to create, re-upload, or delete binaries on a chacra host.

    Uploads stream the file with MultipartEncoder and are verified against a
    locally computed sha512 checksum after every POST/PUT.
    """

    _help = dedent("""
    Operate binaries on a remote chacra instance.

    Creating a new binary::

        chacractl binary create project/ref/distro/distro_version/arch /path/to/binary

    Options:

    create        Creates a new binary at a given distro version architecture
    delete        Deletes an existing binary from chacra
    --force       If the resource exists, force the upload
    """)
    help_menu = "create, update metadata, or delete binaries"
    options = ['create', '--force', 'delete']

    def __init__(self, argv):
        self.argv = argv

    @property
    def base_url(self):
        # all binary endpoints hang off of /binaries/ on the remote instance
        return os.path.join(chacractl.config['url'], 'binaries')

    def sanitize_filename(self, line):
        """ lines may come with newlines and leading slashes make sure they
        are clean so that they can be processed """
        # returns None implicitly when the path is not an existing file
        line = line.strip('\n')
        if os.path.isfile(line):
            return os.path.abspath(line)

    def sanitize_url(self, url_part):
        # get rid of the leading slash to prevent issues when joining
        url = url_part.lstrip('/')
        # and add a trailing slash so that the request is done at the correct
        # canonical url
        if not url.endswith('/'):
            url = "%s/" % url
        return url

    def get_checksum(self, filepath):
        """Return the sha512 hexdigest of ``filepath``, read in 4K chunks."""
        chsum = sha512()
        with open(filepath, 'rb') as binary:
            for chunk in iter(lambda: binary.read(4096), b''):
                chsum.update(chunk)
        return chsum.hexdigest()

    @retry()
    def upload_is_verified(self, arch_url, filename, digest):
        """Return True when the server-side checksum of ``filename`` matches ``digest``."""
        r = requests.get(arch_url, verify=chacractl.config['ssl_verify'])
        r.raise_for_status()
        arch_data = r.json()
        remote_digest = arch_data[filename]['checksum']
        verified = remote_digest == digest
        if not verified:
            logging.error(
                'Checksum mismatch: server has wrong checksum for %s',
                filename)
            logging.error('local checksum: %s', digest)
            logging.error('remote checksum: %s', remote_digest)
        return verified

    @retry()
    def post(self, url, filepath):
        """Upload a new binary; falls back to PUT when it already exists.

        Raises SystemExit when the server-side checksum does not match after
        the upload (the corrupt remote copy is deleted first).
        """
        filename = os.path.basename(filepath)
        file_url = os.path.join(url, filename) + '/'
        exists = requests.head(file_url,
                               verify=chacractl.config['ssl_verify'])
        digest = self.get_checksum(filepath)
        if exists.status_code == 200:
            if not self.force:
                logger.warning(
                    'resource exists and --force was not used, will not upload'
                )
                logger.warning('SKIP %s', file_url)
                return
            return self.put(file_url, filepath)
        elif exists.status_code == 404:
            length = os.path.getsize(filepath)
            logger.info('POSTing file: %s', filepath)
            # BUGFIX: open the file in a `with` block so the descriptor is
            # closed once the upload finishes; it previously leaked on
            # every call
            with open(filepath, 'rb') as binary:
                mpart = MultipartEncoder(fields={
                    'file': (filename, binary, 'text/plain')
                })
                response = requests.post(
                    url,
                    data=mpart,
                    headers={
                        'Content-Type': mpart.content_type,
                        'Content-Length': '%d' % length,
                    },
                    auth=chacractl.config['credentials'],
                    verify=chacractl.config['ssl_verify'])
            if response.status_code > 201:
                logger.warning("%s -> %s", response.status_code, response.text)
                response.raise_for_status()
            if not self.upload_is_verified(url, filename, digest):
                # Since this is a new file, attempt to delete it
                logging.error('Deleting corrupted file from server...')
                self.delete(file_url)
                raise SystemExit(
                    'Checksum mismatch: remote server has wrong checksum for %s'
                    % filepath)

    @retry()
    def put(self, url, filepath):
        """Re-upload an existing binary and verify the server-side checksum."""
        filename = os.path.basename(filepath)
        logger.info('resource exists and --force was used, will re-upload')
        logger.info('PUTing file: %s', filepath)
        digest = self.get_checksum(filepath)
        length = os.path.getsize(filepath)
        # BUGFIX: same descriptor-leak fix as in post()
        with open(filepath, 'rb') as binary:
            mpart = MultipartEncoder(
                fields={'file': (filename, binary, 'text/plain')})
            response = requests.put(
                url,
                data=mpart,
                headers={
                    'Content-Type': mpart.content_type,
                    'Content-Length': '%d' % length,
                },
                auth=chacractl.config['credentials'],
                verify=chacractl.config['ssl_verify'])
        if response.status_code > 201:
            logger.warning("%s -> %s", response.status_code, response.text)
        # trim off binary filename
        url = url.rsplit('/', 2)[0] + "/"
        if not self.upload_is_verified(url, filename, digest):
            # Maybe the old file with a different digest is still there, so
            # don't delete it
            raise SystemExit(
                'Checksum mismatch: server has wrong checksum for %s!'
                % filepath)

    @retry()
    def delete(self, url):
        """DELETE a binary resource, skipping when it is already gone."""
        exists = requests.head(url, verify=chacractl.config['ssl_verify'])
        if exists.status_code == 404:
            logger.warning('resource already deleted')
            logger.warning('SKIP %s', url)
            return
        logger.info('DELETE file: %s', url)
        response = requests.delete(
            url,
            auth=chacractl.config['credentials'],
            verify=chacractl.config['ssl_verify'])
        # anything outside the 2xx range is surfaced (but not raised)
        if response.status_code < 200 or response.status_code > 299:
            logger.warning("%s -> %s", response.status_code, response.text)

    def main(self):
        """Entry point: route ``create``/``delete`` requests from the CLI."""
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self._help
        self.parser.parse_args()
        self.force = self.parser.has('--force')
        # handle posting binaries:
        if self.parser.has('create'):
            url_part = self.sanitize_url(self.parser.get('create'))
            if not sys.stdin.isatty():
                # read from stdin
                logger.info('reading input from stdin')
                for line in sys.stdin.readlines():
                    filename = self.sanitize_filename(line)
                    if not filename:
                        continue
                    url = os.path.join(self.base_url, url_part)
                    self.post(url, filename)
            else:
                filepath = self.sanitize_filename(self.argv[-1])
                if not filepath:
                    logger.warning('provided path does not exist: %s',
                                   self.argv[-1])
                    return
                url = os.path.join(self.base_url, url_part)
                self.post(url, filepath)
        elif self.parser.has('delete'):
            if self.parser.get('delete') is None:
                raise SystemExit('Specify a URL to delete a binary.')
            url_part = self.sanitize_url(self.parser.get('delete'))
            url = os.path.join(self.base_url, url_part)
            self.delete(url)
def main(self, argv):
    """Parse global flags, load config and hosts, then dispatch a sub-command.

    Ordering matters: configuration must be loaded before logging can be
    set up, and node/host resolution before dispatching any checks.
    """
    options = [
        '--cluster',
        '--ssh-config',
        '--inventory',
        '--config',
        '--verbosity',
    ]
    parser = Transport(argv, options=options,
                       check_help=False,
                       check_version=False)
    parser.parse_args()

    self.config_path = parser.get('--config', configuration.location())

    # load medic configuration
    loaded_config = configuration.load(
        path=parser.get('--config', self.config_path))

    # this is the earliest we can have enough config to setup logging
    log.setup(loaded_config)

    ceph_medic.config.file = loaded_config
    global_options = dict(ceph_medic.config.file._sections['global'])

    # SSH config: CLI first, then the [global] section of the config file
    ceph_medic.config.ssh_config = parser.get(
        '--ssh-config', global_options.get('--ssh-config'))
    if ceph_medic.config.ssh_config:
        ssh_config_path = ceph_medic.config.ssh_config
        if not os.path.exists(ssh_config_path):
            terminal.error("the given ssh config path does not exist: %s" % ssh_config_path)
            sys.exit()

    ceph_medic.config.cluster_name = parser.get('--cluster', 'ceph')
    # NOTE(review): metadata name is hardcoded to 'ceph' even when --cluster
    # is given — confirm whether that is intentional
    ceph_medic.metadata['cluster_name'] = 'ceph'

    # Deployment Type
    deployment_type = ceph_medic.config.file.get_safe(
        'global', 'deployment_type', 'baremetal')
    if deployment_type in ['kubernetes', 'openshift', 'k8s', 'oc']:
        # containerized platforms resolve their nodes live, there is no
        # on-disk hosts file
        pod_hosts = hosts.container_platform(deployment_type)
        ceph_medic.config.nodes = pod_hosts
        ceph_medic.config.hosts_file = ':memory:'
        self.hosts_file = ':memory:'
    else:
        # Hosts file
        self.hosts_file = parser.get('--inventory', configuration.get_host_file())

        # find the hosts files, by the CLI first, fallback to the configuration
        # file, and lastly if none of those are found or defined, try to load
        # from well known locations (cwd, and /etc/ansible/)
        loaded_hosts = configuration.load_hosts(
            parser.get(
                '--inventory',
                global_options.get('--inventory', self.hosts_file)))
        ceph_medic.config.nodes = loaded_hosts.nodes
        ceph_medic.config.hosts_file = loaded_hosts.filename
        self.hosts_file = loaded_hosts.filename

    parser.catch_version = ceph_medic.__version__
    parser.mapper = self.mapper
    parser.catch_help = self.help(parser.subhelp())
    # bare invocation: show help instead of dispatching
    if len(argv) <= 1:
        return parser.print_help()
    ceph_medic.config.config_path = self.config_path
    parser.dispatch()
    parser.catches_help()
    parser.catches_version()

    # Verbosity
    verbosity = parser.get('--verbosity', 'debug')
    # BUGFIX: str has no `lowercase()` method; the previous call raised
    # AttributeError whenever this line was reached. Use str.lower().
    ceph_medic.config.verbosity = verbosity.lower()
class Binary(object):
    """Create, re-upload, or delete binary resources on a remote chacra
    instance, driven by CLI arguments (and optionally stdin)."""

    # user-facing help text rendered by the Transport parser
    _help = dedent("""
    Operate binaries on a remote chacra instance.

    Creating a new binary::

        chacractl binary create project/ref/distro/distro_version/arch /path/to/binary

    Options:

    create        Creates a new binary at a given distro version architecture
    delete        Deletes an existing binary from chacra
    --force       If the resource exists, force the upload
    """)
    help_menu = "create, update metadata, or delete binaries"
    options = ['create', '--force', 'delete']

    def __init__(self, argv):
        # raw CLI arguments; parsed later in main()
        self.argv = argv

    @property
    def base_url(self):
        # root endpoint for all binary operations on the remote chacra host
        return os.path.join(
            chacractl.config['url'], 'binaries'
        )

    def sanitize_filename(self, line):
        """
        lines may come with newlines and leading slashes make sure
        they are clean so that they can be processed

        Returns the absolute path when ``line`` names an existing file,
        otherwise (implicitly) None.
        """
        line = line.strip('\n')
        if os.path.isfile(line):
            return os.path.abspath(line)

    def sanitize_url(self, url_part):
        # get rid of the leading slash to prevent issues when joining
        url = url_part.lstrip('/')
        # and add a trailing slash so that the request is done at the correct
        # canonical url
        if not url.endswith('/'):
            url = "%s/" % url
        return url

    def post(self, url, filepath):
        """
        Upload ``filepath`` to ``url`` on the chacra instance.

        A HEAD request first checks whether the remote file exists:

        * 200: already present -- re-upload via PUT only when --force was
          given, otherwise skip.
        * 404: new binary -- POST it as a multipart upload.
        * NOTE(review): any other HEAD status (401, 500, ...) falls through
          both branches and the method silently does nothing -- confirm this
          is intentional.
        """
        filename = os.path.basename(filepath)
        # binaries live at <url>/<filename>/ -- trailing slash is canonical
        file_url = os.path.join(url, filename) + '/'
        exists = requests.head(file_url, verify=chacractl.config['ssl_verify'])
        if exists.status_code == 200:
            if not self.force:
                logger.warning(
                    'resource exists and --force was not used, will not upload'
                )
                logger.warning('SKIP %s', file_url)
                return
            return self.put(file_url, filepath)
        elif exists.status_code == 404:
            logger.info('POSTing file: %s', filepath)
            with open(filepath, 'rb') as binary:
                response = requests.post(
                    url,
                    files={'file': binary},
                    auth=chacractl.config['credentials'],
                    verify=chacractl.config['ssl_verify'])
                if response.status_code > 201:
                    logger.warning("%s -> %s", response.status_code,
                                   response.text)
                    response.raise_for_status()

    def put(self, url, filepath):
        """Re-upload an existing binary (the --force path from post()).

        NOTE(review): unlike post(), failures here are only logged -- no
        raise_for_status() -- so a failed PUT does not abort the run;
        confirm that asymmetry is intentional.
        """
        logger.info('resource exists and --force was used, will re-upload')
        logger.info('PUTing file: %s', filepath)
        with open(filepath, 'rb') as binary:
            response = requests.put(
                url,
                files={'file': binary},
                auth=chacractl.config['credentials'],
                verify=chacractl.config['ssl_verify'])
            if response.status_code > 201:
                logger.warning("%s -> %s", response.status_code, response.text)

    def delete(self, url):
        """Delete the binary at ``url``; a 404 on the HEAD pre-check means
        it is already gone and the call is skipped. Failures are logged,
        not raised."""
        exists = requests.head(url, verify=chacractl.config['ssl_verify'])
        if exists.status_code == 404:
            logger.warning('resource already deleted')
            logger.warning('SKIP %s', url)
            return
        logger.info('DELETE file: %s', url)
        response = requests.delete(
            url,
            auth=chacractl.config['credentials'],
            verify=chacractl.config['ssl_verify'])
        if response.status_code > 201:
            logger.warning("%s -> %s", response.status_code, response.text)

    def main(self):
        """Parse argv and route to post() or delete().

        For ``create``, file paths are read from stdin when it is piped
        (one path per line), otherwise the last argv token is used as the
        path to upload.
        """
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self._help
        self.parser.parse_args()
        self.force = self.parser.has('--force')
        # handle posting binaries:
        if self.parser.has('create'):
            url_part = self.sanitize_url(self.parser.get('create'))
            if not sys.stdin.isatty():
                # read from stdin
                logger.info('reading input from stdin')
                for line in sys.stdin.readlines():
                    filename = self.sanitize_filename(line)
                    if not filename:
                        # not an existing file -- skip silently
                        continue
                    url = os.path.join(self.base_url, url_part)
                    self.post(url, filename)
            else:
                # interactive: the binary path is the last CLI argument
                filepath = self.sanitize_filename(self.argv[-1])
                if not filepath:
                    logger.warning(
                        'provided path does not exist: %s', self.argv[-1]
                    )
                    return
                url = os.path.join(self.base_url, url_part)
                self.post(url, filepath)
        elif self.parser.has('delete'):
            url_part = self.sanitize_url(self.parser.get('delete'))
            url = os.path.join(self.base_url, url_part)
            self.delete(url)
class Project(object):
    """Create (and, unexposed, delete) projects on a remote chacra instance.

    NOTE(review): an identical ``class Project`` is defined again later in
    this module (with @retry-decorated methods); at import time that later
    definition wins, so this copy is effectively dead code -- consider
    removing one of the two.
    """

    # user-facing help text rendered by the Transport parser
    _help = dedent(
        """
    Handle projects on a remote chacra instance.

    Creating a new project::

        chacractl project create project

    Options:

    create        Creates a new project
    """
    )
    help_menu = "create projects"
    options = ["create"]

    def __init__(self, argv):
        # raw CLI arguments; parsed later in main()
        self.argv = argv

    @property
    def base_url(self):
        # projects are addressed under the 'binaries' endpoint
        return os.path.join(chacractl.config["url"], "binaries")

    def sanitize_url(self, url_part):
        # get rid of the leading slash to prevent issues when joining
        url = url_part.lstrip("/")
        # and add a trailing slash so that the request is done at the correct
        # canonical url
        if not url.endswith("/"):
            url = "%s/" % url
        return url

    def post(self, url):
        """Create the project at ``url`` unless the HEAD pre-check says it
        already exists (200 -> skip, 404 -> POST). Other HEAD statuses fall
        through silently."""
        exists = requests.head(url, verify=chacractl.config["ssl_verify"])
        if exists.status_code == 200:
            logger.warning("resource exists, will not upload")
            logger.warning("SKIP %s", url)
            return
        elif exists.status_code == 404:
            logger.info("POSTing to project: %s", url)
            response = requests.post(url,
                                     auth=chacractl.config["credentials"],
                                     verify=chacractl.config["ssl_verify"])
            if response.status_code > 201:
                logger.warning("%s -> %s", response.status_code, response.text)
                response.raise_for_status()

    def delete(self, url):
        # XXX This exists here but it is not yet implemented, e.g. nothing
        # calls this method
        exists = requests.head(url, verify=chacractl.config["ssl_verify"])
        if exists.status_code == 404:
            logger.warning("project already deleted")
            logger.warning("SKIP %s", url)
            return
        logger.info("DELETE project: %s", url)
        response = requests.delete(url,
                                   auth=chacractl.config["credentials"],
                                   verify=chacractl.config["ssl_verify"])
        if response.status_code > 201:
            logger.warning("%s -> %s", response.status_code, response.text)

    def main(self):
        """Parse argv and route to post() (or the unexposed delete())."""
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self._help
        self.parser.parse_args()
        # handle posting projects:
        if self.parser.has("create"):
            url_part = self.sanitize_url(self.parser.get("create"))
            if not sys.stdin.isatty():
                # read from stdin
                logger.info("reading input from stdin")
                # NOTE(review): ``line`` is never used -- this POSTs the same
                # URL once per stdin line (harmless since post() skips
                # existing resources, but probably not the intent; compare
                # Binary.main which does consume each line)
                for line in sys.stdin.readlines():
                    url = os.path.join(self.base_url, url_part)
                    self.post(url)
            else:
                url = os.path.join(self.base_url, url_part)
                self.post(url)
        # XXX this exists here but it not yet enabled from the CLI
        elif self.parser.has("delete"):
            url_part = self.sanitize_url(self.parser.get("delete"))
            url = os.path.join(self.base_url, url_part)
            self.delete(url)
class Project(object):
    """Manage project resources on a remote chacra instance.

    ``create`` is the only sub-command wired up from the CLI; ``delete``
    exists but is not yet exposed.
    """

    _help = dedent("""
    Handle projects on a remote chacra instance.

    Creating a new project::

        chacractl project create project

    Options:

    create        Creates a new project
    """)
    help_menu = "create projects"
    options = ['create']

    def __init__(self, argv):
        # raw CLI arguments, consumed by main()
        self.argv = argv

    @property
    def base_url(self):
        # projects are addressed under the 'binaries' endpoint
        return os.path.join(chacractl.config['url'], 'binaries')

    def sanitize_url(self, url_part):
        """Strip any leading slash and guarantee exactly one trailing one."""
        # a leading slash would break os.path.join, drop it
        cleaned = url_part.lstrip('/')
        # requests must hit the canonical URL, which ends with a slash
        return cleaned if cleaned.endswith('/') else "%s/" % cleaned

    @retry()
    def post(self, url):
        """Create the project unless the remote already has it.

        HEAD 200 -> skip; HEAD 404 -> POST (raising on HTTP errors);
        any other HEAD status falls through silently.
        """
        head = requests.head(url, verify=chacractl.config['ssl_verify'])
        status = head.status_code
        if status == 200:
            logger.warning('resource exists, will not upload')
            logger.warning('SKIP %s', url)
            return
        if status == 404:
            logger.info('POSTing to project: %s', url)
            response = requests.post(
                url,
                auth=chacractl.config['credentials'],
                verify=chacractl.config['ssl_verify'])
            if response.status_code > 201:
                logger.warning("%s -> %s", response.status_code, response.text)
                response.raise_for_status()

    @retry()
    def delete(self, url):
        # XXX This exists here but it is not yet implemented, e.g. nothing
        # calls this method
        head = requests.head(url, verify=chacractl.config['ssl_verify'])
        if head.status_code == 404:
            # already gone, nothing to do
            logger.warning('project already deleted')
            logger.warning('SKIP %s', url)
            return
        logger.info('DELETE project: %s', url)
        response = requests.delete(
            url,
            auth=chacractl.config['credentials'],
            verify=chacractl.config['ssl_verify'])
        if response.status_code > 201:
            logger.warning("%s -> %s", response.status_code, response.text)

    def main(self):
        """Parse argv and route to post() (or the unexposed delete())."""
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self._help
        self.parser.parse_args()
        # handle posting projects:
        if self.parser.has('create'):
            url_part = self.sanitize_url(self.parser.get('create'))
            target = os.path.join(self.base_url, url_part)
            if sys.stdin.isatty():
                self.post(target)
            else:
                # read from stdin
                logger.info('reading input from stdin')
                # NOTE(review): the line content is ignored -- one POST of
                # the same URL per stdin line, as in the original
                for _line in sys.stdin.readlines():
                    self.post(target)
        # XXX this exists here but it not yet enabled from the CLI
        elif self.parser.has('delete'):
            url_part = self.sanitize_url(self.parser.get('delete'))
            self.delete(os.path.join(self.base_url, url_part))
class Localbuild(object):
    """Build a Debian package on the local machine via gbp + pbuilder."""

    help_menu = 'build a package on the local system'
    _help = """
Build a package on the local system, using pbuilder.

Options:
--dist    "xenial" or "trusty". Defaults to "trusty".
"""
    name = 'localbuild'

    def __init__(self, argv):
        # raw CLI arguments, consumed by main()
        self.argv = argv
        self.options = ('--dist',)

    def main(self):
        """Parse argv, resolve the target distro, and run the build."""
        self.parser = Transport(self.argv, options=self.options)
        self.parser.catch_help = self.help()
        self.parser.parse_args()
        # FIXME: stop hardcoding trusty. Use the git branch name instead,
        # translating "-ubuntu" into this local computer's own distro.
        distro = 'trusty'
        # Allow user to override the distro.
        if self.parser.has('--dist'):
            requested = self.parser.get('--dist')
            if requested is None:
                raise SystemExit('Specify a distro to --dist')
            distro = requested
        self._run(distro)

    def help(self):
        return self._help

    def _run(self, distro):
        """ Build a package on the local system, using pbuilder. """
        pkg_name = util.package_name()
        os.environ['BUILDER'] = 'pbuilder'
        j_arg = self._get_j_arg(cpu_count())
        pbuilder_cache = '/var/cache/pbuilder/base-%s-amd64.tgz' % distro
        if not os.path.isfile(pbuilder_cache):
            # first run on this distro: bootstrap the pbuilder base tarball
            log.info('initializing pbuilder cache %s', pbuilder_cache)
            subprocess.check_call([
                'sudo', 'pbuilder', 'create',
                '--debootstrapopts', '--variant=buildd',
                '--basetgz', pbuilder_cache,
                '--distribution', distro,
            ])
        # TODO: we should also probably check parent dir for leftovers and warn
        # the user to delete them (or delete them ourselves?)
        log.info('building %s with pbuilder', pkg_name)
        subprocess.check_call([
            'gbp', 'buildpackage',
            '--git-dist=%s' % distro,
            '--git-arch=amd64',
            '--git-verbose',
            '--git-pbuilder',
            j_arg,
            '-us', '-uc',
        ])

    def _get_j_arg(self, cpus, total_ram_gb=None):
        """
        Returns a string like "-j4" or "-j8". j is the number of processors,
        with a maximum of x, where x = TOTAL_RAM_GB / 4.

        We want to use all our processors (a high "j" value), but the build
        process will fail with an "out of memory" error out if this j value is
        too high.

        An 8 GB system would have a maximum of -j2
        A 16 GB system would have a maximum of -j4
        A 32 GB system would have a maximum of -j8
        """
        if total_ram_gb is None:
            # probe physical memory through sysconf when not supplied
            mem_bytes = os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES')
            # mem_gib is a decimal, eg. 7.707 on 8GB system
            mem_gib = mem_bytes / (1024. ** 3)
            # Round up to the nearest GB for our purposes.
            total_ram_gb = math.ceil(mem_gib)
        # cap parallelism by RAM/4, but never go below -j1
        return '-j%d' % max(min(cpus, total_ram_gb / 4), 1)