def _take_action(self, client, parsed_args):
    if parsed_args.json:
        blob = osc_utils.read_blob_file_contents(parsed_args.json)
        try:
            template = json.loads(blob)
        except ValueError as e:
            raise exceptions.CommandError(
                'An error occurred when reading '
                'template from file %s: %s' % (parsed_args.json, e))
        if 'neutron_management_network' in template:
            template['net_id'] = template.pop('neutron_management_network')
        data = client.cluster_templates.create(**template).to_dict()
    else:
        if not parsed_args.name or not parsed_args.node_groups:
            raise exceptions.CommandError(
                'At least --name, --node-groups arguments should be '
                'specified or json template should be provided with '
                '--json argument')
        configs = None
        if parsed_args.configs:
            blob = osc_utils.read_blob_file_contents(parsed_args.configs)
            try:
                configs = json.loads(blob)
            except ValueError as e:
                raise exceptions.CommandError(
                    'An error occurred when reading '
                    'configs from file %s: %s' % (parsed_args.configs, e))
        shares = None
        if parsed_args.shares:
            blob = osc_utils.read_blob_file_contents(parsed_args.shares)
            try:
                shares = json.loads(blob)
            except ValueError as e:
                raise exceptions.CommandError(
                    'An error occurred when reading '
                    'shares from file %s: %s' % (parsed_args.shares, e))
        plugin, plugin_version, node_groups = (
            utils._cluster_templates_configure_ng(self.app,
                                                  parsed_args.node_groups,
                                                  client))
        data = utils.create_cluster_template(self.app, client, plugin,
                                             plugin_version, parsed_args,
                                             configs, shares, node_groups)
    return data
def _read_filters(self, path):
    """Read and parse rules from path

    Expect the file to contain a valid JSON structure.

    :param path: path to the file
    :return: loaded and valid dictionary with filters
    :raises exception.CommandError: In case the file cannot be accessed
        or the content is not a valid JSON.

    Example of the content of the file:
        {
            "interface": "admin",
            "service_id": "1b501a"
        }

    """
    blob = utils.read_blob_file_contents(path)
    try:
        rules = json.loads(blob)
    except ValueError as e:
        msg = _("An error occurred when reading filters from file "
                "%(path)s: %(error)s") % {"path": path, "error": e}
        raise exceptions.CommandError(msg)
    else:
        return rules
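The read-a-file-then-parse-JSON-then-wrap-ValueError pattern above is repeated across almost every command in this collection. A minimal sketch of a shared helper is shown below; the helper name `_load_json_blob` is illustrative only and not part of the original code, and it assumes `read_blob_file_contents` and `CommandError` from osc_lib are available as in the snippets above.

# Hypothetical consolidation of the repeated "read file, parse JSON,
# wrap ValueError in CommandError" pattern; a sketch, not original code.
import json

from osc_lib import exceptions
from osc_lib import utils


def _load_json_blob(path, what='template'):
    """Read a file and parse it as JSON, raising CommandError on failure."""
    blob = utils.read_blob_file_contents(path)
    try:
        return json.loads(blob)
    except ValueError as e:
        raise exceptions.CommandError(
            'An error occurred when reading %s from file %s: %s'
            % (what, path, e))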
def take_action(self, parsed_args):
    self.log.debug("take_action(%s)" % parsed_args)
    client = self.app.client_manager.data_processing

    cluster_id = utils.get_resource_id(client.clusters,
                                       parsed_args.cluster)

    shares = None
    if parsed_args.shares:
        blob = osc_utils.read_blob_file_contents(parsed_args.shares)
        try:
            shares = json.loads(blob)
        except ValueError as e:
            raise exceptions.CommandError('An error occurred when reading '
                                          'shares from file %s: %s' %
                                          (parsed_args.shares, e))

    update_dict = utils.create_dict_from_kwargs(
        name=parsed_args.name,
        description=parsed_args.description,
        is_public=parsed_args.is_public,
        is_protected=parsed_args.is_protected,
        shares=shares)

    data = client.clusters.update(cluster_id, **update_dict).cluster

    _format_cluster_output(data)
    data = utils.prepare_data(data, CLUSTER_FIELDS)
    return self.dict2columns(data)
def take_action(self, parsed_args):
    identity_client = self.app.client_manager.identity
    if parsed_args.remote_id_file:
        file_content = utils.read_blob_file_contents(
            parsed_args.remote_id_file)
        remote_ids = file_content.splitlines()
        remote_ids = list(map(str.strip, remote_ids))
    else:
        remote_ids = (parsed_args.remote_id
                      if parsed_args.remote_id else None)

    domain_id = None
    if parsed_args.domain:
        domain_id = common.find_domain(identity_client,
                                       parsed_args.domain).id

    idp = identity_client.federation.identity_providers.create(
        id=parsed_args.identity_provider_id,
        remote_ids=remote_ids,
        description=parsed_args.description,
        domain_id=domain_id,
        enabled=parsed_args.enabled)

    idp._info.pop('links', None)
    remote_ids = utils.format_list(idp._info.pop('remote_ids', []))
    idp._info['remote_ids'] = remote_ids
    return zip(*sorted(six.iteritems(idp._info)))
def take_action(self, parsed_args):
    federation_client = self.app.client_manager.identity.federation

    # Always set remote_ids if either is passed in
    if parsed_args.remote_id_file:
        file_content = utils.read_blob_file_contents(
            parsed_args.remote_id_file)
        remote_ids = file_content.splitlines()
        remote_ids = list(map(str.strip, remote_ids))
    elif parsed_args.remote_id:
        remote_ids = parsed_args.remote_id

    # Setup keyword args for the client
    kwargs = {}
    if parsed_args.description:
        kwargs['description'] = parsed_args.description
    if parsed_args.enable:
        kwargs['enabled'] = True
    if parsed_args.disable:
        kwargs['enabled'] = False
    if parsed_args.remote_id_file or parsed_args.remote_id:
        kwargs['remote_ids'] = remote_ids

    federation_client.identity_providers.update(
        parsed_args.identity_provider, **kwargs)
def take_action(self, parsed_args):
    identity_client = self.app.client_manager.identity
    if parsed_args.remote_id_file:
        file_content = utils.read_blob_file_contents(
            parsed_args.remote_id_file)
        remote_ids = file_content.splitlines()
        remote_ids = list(map(str.strip, remote_ids))
    else:
        remote_ids = (parsed_args.remote_id
                      if parsed_args.remote_id else None)

    domain_id = None
    if parsed_args.domain:
        domain_id = common.find_domain(identity_client,
                                       parsed_args.domain).id

    idp = identity_client.federation.identity_providers.create(
        id=parsed_args.identity_provider_id,
        remote_ids=remote_ids,
        description=parsed_args.description,
        domain_id=domain_id,
        enabled=parsed_args.enabled)

    idp._info.pop('links', None)
    remote_ids = format_columns.ListColumn(idp._info.pop('remote_ids', []))
    idp._info['remote_ids'] = remote_ids
    return zip(*sorted(idp._info.items()))
def _create_take_action(self, client, app, parsed_args):
    if parsed_args.json:
        blob = osc_utils.read_blob_file_contents(parsed_args.json)
        try:
            template = json.loads(blob)
        except ValueError as e:
            raise exceptions.CommandError('An error occurred when reading '
                                          'template from file %s: %s' %
                                          (parsed_args.json, e))
        data = client.node_group_templates.create(**template).to_dict()
    else:
        if (not parsed_args.name or not parsed_args.plugin or
                not parsed_args.plugin_version or not parsed_args.flavor or
                not parsed_args.processes):
            raise exceptions.CommandError(
                'At least --name, --plugin, --plugin-version, --processes,'
                ' --flavor arguments should be specified or json template '
                'should be provided with --json argument')
        configs = None
        if parsed_args.configs:
            blob = osc_utils.read_blob_file_contents(parsed_args.configs)
            try:
                configs = json.loads(blob)
            except ValueError as e:
                raise exceptions.CommandError(
                    'An error occurred when reading '
                    'configs from file %s: %s' % (parsed_args.configs, e))
        shares = None
        if parsed_args.shares:
            blob = osc_utils.read_blob_file_contents(parsed_args.shares)
            try:
                shares = json.loads(blob)
            except ValueError as e:
                raise exceptions.CommandError(
                    'An error occurred when reading '
                    'shares from file %s: %s' % (parsed_args.shares, e))

        compute_client = app.client_manager.compute
        flavor_id = osc_utils.find_resource(compute_client.flavors,
                                            parsed_args.flavor).id

        data = create_node_group_templates(client, app, parsed_args,
                                           flavor_id, configs, shares)
    return data
def take_action(self, parsed_args):
    self.log.debug("take_action(%s)", parsed_args)
    client = self.app.client_manager.data_processing

    if parsed_args.json:
        blob = osc_utils.read_blob_file_contents(parsed_args.json)
        try:
            template = jsonutils.loads(blob)
        except ValueError as e:
            raise exceptions.CommandError('An error occurred when reading '
                                          'template from file %s: %s' %
                                          (parsed_args.json, e))
        data = client.jobs.create(**template).to_dict()
    else:
        if parsed_args.interface:
            # Read the interface definition from the file passed via
            # --interface (not from --json, which is unset in this branch).
            blob = osc_utils.read_blob_file_contents(parsed_args.interface)
            try:
                parsed_args.interface = jsonutils.loads(blob)
            except ValueError as e:
                raise exceptions.CommandError(
                    'An error occurred when reading '
                    'interface from file %s: %s' %
                    (parsed_args.interface, e))

        mains_ids = [utils.get_resource_id(client.job_binaries, m)
                     for m in parsed_args.mains] if parsed_args.mains else None
        libs_ids = [utils.get_resource_id(client.job_binaries, m)
                    for m in parsed_args.libs] if parsed_args.libs else None

        data = client.jobs.create(
            name=parsed_args.name, type=parsed_args.type, mains=mains_ids,
            libs=libs_ids, description=parsed_args.description,
            interface=parsed_args.interface, is_public=parsed_args.public,
            is_protected=parsed_args.protected).to_dict()

    _format_job_template_output(data)
    data = utils.prepare_data(data, JOB_TEMPLATE_FIELDS)
    return self.dict2columns(data)
def _take_action(self, client, parsed_args, ct_id):
    if parsed_args.json:
        blob = osc_utils.read_blob_file_contents(parsed_args.json)
        try:
            template = json.loads(blob)
        except ValueError as e:
            raise exceptions.CommandError(
                'An error occurred when reading '
                'template from file %s: %s' % (parsed_args.json, e))
        data = client.cluster_templates.update(
            ct_id, **template).to_dict()
    else:
        plugin, plugin_version, node_groups = None, None, None
        if parsed_args.node_groups:
            plugin, plugin_version, node_groups = (
                utils._cluster_templates_configure_ng(
                    self.app, parsed_args.node_groups, client))

        configs = None
        if parsed_args.configs:
            blob = osc_utils.read_blob_file_contents(parsed_args.configs)
            try:
                configs = json.loads(blob)
            except ValueError as e:
                raise exceptions.CommandError(
                    'An error occurred when reading '
                    'configs from file %s: %s' % (parsed_args.configs, e))

        shares = None
        if parsed_args.shares:
            blob = osc_utils.read_blob_file_contents(parsed_args.shares)
            try:
                shares = json.loads(blob)
            except ValueError as e:
                raise exceptions.CommandError(
                    'An error occurred when reading '
                    'shares from file %s: %s' % (parsed_args.shares, e))

        data = utils.update_cluster_template(self.app, client, plugin,
                                             plugin_version, parsed_args,
                                             configs, shares, node_groups,
                                             ct_id)
    return data
def _take_action(self, client, parsed_args):
    cluster = utils.get_resource(client.clusters, parsed_args.cluster)

    if parsed_args.json:
        blob = osc_utils.read_blob_file_contents(parsed_args.json)
        try:
            template = jsonutils.loads(blob)
        except ValueError as e:
            raise exceptions.CommandError('An error occurred when reading '
                                          'template from file %s: %s' %
                                          (parsed_args.json, e))
        data = client.clusters.scale(cluster.id, template).cluster
    else:
        scale_object = {"add_node_groups": [], "resize_node_groups": []}
        scale_node_groups = dict(
            map(lambda x: x.split(':', 1), parsed_args.instances))
        cluster_ng_map = {
            ng['node_group_template_id']: ng['name']
            for ng in cluster.node_groups
        }
        for name, count in scale_node_groups.items():
            ngt = utils.get_resource(client.node_group_templates, name)
            if ngt.id in cluster_ng_map:
                scale_object["resize_node_groups"].append({
                    "name": cluster_ng_map[ngt.id],
                    "count": int(count)
                })
            else:
                scale_object["add_node_groups"].append({
                    "node_group_template_id": ngt.id,
                    "name": ngt.name,
                    "count": int(count)
                })
        if not scale_object['add_node_groups']:
            del scale_object['add_node_groups']
        if not scale_object['resize_node_groups']:
            del scale_object['resize_node_groups']

        data = client.clusters.scale(cluster.id, scale_object).cluster

    sys.stdout.write(
        'Cluster "{cluster}" scaling has been started.\n'.format(
            cluster=parsed_args.cluster))
    if parsed_args.wait:
        if not osc_utils.wait_for_status(client.clusters.get, data['id']):
            self.log.error('Error occurred during cluster scaling: %s' %
                           cluster.id)
        data = client.clusters.get(cluster.id).cluster
    return data
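As a quick, standalone illustration of how the "name:count" values in --instances above become a scale object, the sketch below strips out the API lookups; the function name and the membership check against a plain set of existing node group names are illustrative simplifications, not the original command logic.

# Minimal sketch of the "name:count" parsing used above; the real command
# resolves node group template names to IDs through the Sahara client.
def build_scale_object(instances, existing_ng_names):
    scale_object = {"add_node_groups": [], "resize_node_groups": []}
    for entry in instances:                      # e.g. "worker:4"
        name, count = entry.split(':', 1)
        if name in existing_ng_names:
            scale_object["resize_node_groups"].append(
                {"name": name, "count": int(count)})
        else:
            scale_object["add_node_groups"].append(
                {"name": name, "count": int(count)})
    return scale_object


# Example: resize the existing "worker" group and add a new "gpu" group.
print(build_scale_object(["worker:4", "gpu:2"], {"worker"}))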
def take_action(self, parsed_args):
    blob = utils.read_blob_file_contents(parsed_args.rules)

    identity_client = self.app.client_manager.identity
    policy = identity_client.policies.create(blob=blob,
                                             type=parsed_args.type)

    policy._info.pop('links')
    policy._info.update({'rules': policy._info.pop('blob')})
    return zip(*sorted(policy._info.items()))
def take_action(self, parsed_args):
    blob = utils.read_blob_file_contents(parsed_args.rules)

    identity_client = self.app.client_manager.identity
    policy = identity_client.policies.create(
        blob=blob, type=parsed_args.type
    )

    policy._info.pop('links')
    policy._info.update({'rules': policy._info.pop('blob')})
    return zip(*sorted(six.iteritems(policy._info)))
def take_action(self, parsed_args):
    self.log.debug("take_action(%s)", parsed_args)
    client = self.app.client_manager.data_processing

    if parsed_args.json:
        blob = osc_utils.read_blob_file_contents(parsed_args.json)
        try:
            template = jsonutils.loads(blob)
        except ValueError as e:
            raise exceptions.CommandError('An error occurred when reading '
                                          'template from file %s: %s' %
                                          (parsed_args.json, e))
        data = client.job_binaries.create(**template).to_dict()
    else:
        if parsed_args.data:
            data = open(parsed_args.data).read()
            jbi_id = client.job_binary_internals.create(
                parsed_args.name, data).id
            parsed_args.url = 'internal-db://' + jbi_id

        if parsed_args.password_prompt:
            parsed_args.password = osc_utils.get_password(self.app.stdin,
                                                          confirm=False)

        if parsed_args.password and not parsed_args.username:
            raise exceptions.CommandError(
                'Username via --username should be provided with password')

        if parsed_args.username and not parsed_args.password:
            raise exceptions.CommandError(
                'Password should be provided via --password or entered '
                'interactively with --password-prompt')

        if parsed_args.password and parsed_args.username:
            extra = {
                'user': parsed_args.username,
                'password': parsed_args.password
            }
        else:
            extra = None

        data = client.job_binaries.create(
            name=parsed_args.name, url=parsed_args.url,
            description=parsed_args.description, extra=extra,
            is_public=parsed_args.public,
            is_protected=parsed_args.protected).to_dict()

    data = utils.prepare_data(data, JOB_BINARY_FIELDS)
    return self.dict2columns(data)
def take_action(self, parsed_args):
    self.log.debug("take_action(%s)", parsed_args)
    client = self.app.client_manager.data_processing

    jb_id = utils.get_resource_id(client.job_binaries,
                                  parsed_args.job_binary)

    if parsed_args.json:
        blob = osc_utils.read_blob_file_contents(parsed_args.json)
        try:
            template = jsonutils.loads(blob)
        except ValueError as e:
            raise exceptions.CommandError('An error occurred when reading '
                                          'template from file %s: %s' %
                                          (parsed_args.json, e))
        data = client.job_binaries.update(jb_id, template).to_dict()
    else:
        if parsed_args.password_prompt:
            parsed_args.password = osc_utils.get_password(self.app.stdin,
                                                          confirm=False)
        if parsed_args.secret_key_prompt:
            parsed_args.secret_key = osc_utils.get_password(self.app.stdin,
                                                            confirm=False)

        extra = {}
        if parsed_args.password:
            extra['password'] = parsed_args.password
        if parsed_args.username:
            extra['user'] = parsed_args.username
        if parsed_args.access_key:
            extra['accesskey'] = parsed_args.access_key
        if parsed_args.secret_key:
            extra['secretkey'] = parsed_args.secret_key
        if parsed_args.s3_endpoint:
            extra['endpoint'] = parsed_args.s3_endpoint
        if not extra:
            extra = None

        update_fields = utils.create_dict_from_kwargs(
            name=parsed_args.name, url=parsed_args.url,
            description=parsed_args.description, extra=extra,
            is_public=parsed_args.is_public,
            is_protected=parsed_args.is_protected)

        data = client.job_binaries.update(jb_id, update_fields).to_dict()

    data = utils.prepare_data(data, JOB_BINARY_FIELDS)
    return self.dict2columns(data)
def take_action(self, parsed_args):
    self.log.debug("take_action(%s)" % parsed_args)
    client = self.app.client_manager.data_processing

    if parsed_args.json:
        blob = osc_utils.read_blob_file_contents(parsed_args.json)
        try:
            template = jsonutils.loads(blob)
        except ValueError as e:
            raise exceptions.CommandError(
                'An error occurred when reading '
                'template from file %s: %s' % (parsed_args.json, e))
        data = client.jobs.create(**template).to_dict()
    else:
        if parsed_args.interface:
            # Read the interface definition from the file passed via
            # --interface (not from --json, which is unset in this branch).
            blob = osc_utils.read_blob_file_contents(parsed_args.interface)
            try:
                parsed_args.interface = jsonutils.loads(blob)
            except ValueError as e:
                raise exceptions.CommandError(
                    'An error occurred when reading '
                    'interface from file %s: %s' %
                    (parsed_args.interface, e))

        mains_ids = [utils.get_resource_id(client.job_binaries, m)
                     for m in parsed_args.mains] if parsed_args.mains else None
        libs_ids = [utils.get_resource_id(client.job_binaries, m)
                    for m in parsed_args.libs] if parsed_args.libs else None

        data = client.jobs.create(
            name=parsed_args.name, type=parsed_args.type, mains=mains_ids,
            libs=libs_ids, description=parsed_args.description,
            interface=parsed_args.interface, is_public=parsed_args.public,
            is_protected=parsed_args.protected).to_dict()

    _format_job_template_output(data)
    data = utils.prepare_data(data, JOB_TEMPLATE_FIELDS)
    return self.dict2columns(data)
def take_action(self, parsed_args):
    identity_client = self.app.client_manager.identity
    blob = None

    if parsed_args.rules:
        blob = utils.read_blob_file_contents(parsed_args.rules)

    kwargs = {}
    if blob:
        kwargs['blob'] = blob
    if parsed_args.type:
        kwargs['type'] = parsed_args.type

    identity_client.policies.update(parsed_args.policy, **kwargs)
def _read_rules(self, path):
    """Read and parse rules from path

    Expect the file to contain a valid JSON structure.

    :param path: path to the file
    :return: loaded and valid dictionary with rules
    :raises exception.CommandError: In case the file cannot be accessed or
        the content is not a valid JSON.

    Example of the content of the file:
        [
            {
                "local": [
                    {
                        "group": {
                            "id": "85a868"
                        }
                    }
                ],
                "remote": [
                    {
                        "type": "orgPersonType",
                        "any_one_of": [
                            "Employee"
                        ]
                    },
                    {
                        "type": "sn",
                        "any_one_of": [
                            "Young"
                        ]
                    }
                ]
            }
        ]

    """
    blob = utils.read_blob_file_contents(path)
    try:
        rules = json.loads(blob)
    except ValueError as e:
        msg = _("An error occurred when reading rules from file "
                "%(path)s: %(error)s") % {"path": path, "error": e}
        raise exceptions.CommandError(msg)
    else:
        return rules
def take_action(self, parsed_args):
    self.log.debug("take_action(%s)" % parsed_args)
    client = self.app.client_manager.data_processing

    if parsed_args.json:
        blob = osc_utils.read_blob_file_contents(parsed_args.json)
        try:
            template = jsonutils.loads(blob)
        except ValueError as e:
            raise exceptions.CommandError(
                'An error occurred when reading '
                'template from file %s: %s' % (parsed_args.json, e))
        data = client.job_binaries.create(**template).to_dict()
    else:
        if parsed_args.data:
            data = open(parsed_args.data).read()
            jbi_id = client.job_binary_internals.create(
                parsed_args.name, data).id
            parsed_args.url = 'internal-db://' + jbi_id

        if parsed_args.password_prompt:
            parsed_args.password = osc_utils.get_password(
                self.app.stdin, confirm=False)

        if parsed_args.password and not parsed_args.username:
            raise exceptions.CommandError(
                'Username via --username should be provided with password')

        if parsed_args.username and not parsed_args.password:
            raise exceptions.CommandError(
                'Password should be provided via --password or entered '
                'interactively with --password-prompt')

        if parsed_args.password and parsed_args.username:
            extra = {
                'user': parsed_args.username,
                'password': parsed_args.password
            }
        else:
            extra = None

        data = client.job_binaries.create(
            name=parsed_args.name, url=parsed_args.url,
            description=parsed_args.description, extra=extra,
            is_public=parsed_args.public,
            is_protected=parsed_args.protected).to_dict()

    data = utils.prepare_data(data, JOB_BINARY_FIELDS)
    return self.dict2columns(data)
def take_action(self, parsed_args):
    identity_client = self.app.client_manager.identity
    blob = None

    if parsed_args.rules:
        blob = utils.read_blob_file_contents(parsed_args.rules)

    kwargs = {}
    if blob:
        kwargs['blob'] = blob
    if parsed_args.type:
        kwargs['type'] = parsed_args.type

    if not kwargs:
        sys.stdout.write(_('Policy not updated, no arguments present\n'))
        return
    identity_client.policies.update(parsed_args.policy, **kwargs)
def take_action(self, parsed_args):
    self.log.debug("take_action(%s)" % parsed_args)
    client = self.app.client_manager.data_processing

    blob = osc_utils.read_blob_file_contents(parsed_args.json)
    try:
        update_dict = json.loads(blob)
    except ValueError as e:
        raise exceptions.CommandError(
            'An error occurred when reading '
            'update dict from file %s: %s' % (parsed_args.json, e))

    plugin = client.plugins.update(parsed_args.plugin, update_dict)
    data = plugin.to_dict()
    data['versions'] = osc_utils.format_list(data['versions'])
    items = _serialize_label_items(data)
    data = utils.prepare_data(
        data, ['versions', 'name', 'description', 'title'])
    data = utils.extend_columns(self.dict2columns(data), items)

    return data
def _take_action(self, client, parsed_args):
    if (not parsed_args.node_groups):
        raise exceptions.CommandError('--node_groups should be specified')

    blob = osc_utils.read_blob_file_contents(parsed_args.json)
    try:
        template = json.loads(blob)
    except ValueError as e:
        raise exceptions.CommandError(
            'An error occurred when reading '
            'template from file %s: %s' % (parsed_args.json, e))

    if parsed_args.default_image_id:
        template['cluster_template']['default_image_id'] = (
            parsed_args.default_image_id)
    else:
        template['cluster_template']['default_image_id'] = None
    if parsed_args.name:
        template['cluster_template']['name'] = parsed_args.name

    if 'neutron_management_network' in template['cluster_template']:
        template['cluster_template']['net_id'] = (
            template['cluster_template'].pop('neutron_management_network'))

    plugin, plugin_version, node_groups = (
        utils._cluster_templates_configure_ng_configure_node_groups(
            self.app, parsed_args.node_groups, client))

    if (('plugin_version' in template['cluster_template'] and
            template['cluster_template']['plugin_version'] !=
            plugin_version) or
            ('plugin' in template['cluster_template'] and
                template['cluster_template']['plugin'] != plugin)):
        raise exceptions.CommandError(
            'Plugin or plugin version do not match between template '
            'and given node group templates')

    template['cluster_template']['node_groups'] = node_groups

    data = client.cluster_templates.create(
        **template['cluster_template']).to_dict()
    return data
def take_action(self, parsed_args):
    identity_client = self.app.client_manager.identity
    if parsed_args.remote_id_file:
        file_content = utils.read_blob_file_contents(
            parsed_args.remote_id_file)
        remote_ids = file_content.splitlines()
        remote_ids = list(map(str.strip, remote_ids))
    else:
        remote_ids = (parsed_args.remote_id
                      if parsed_args.remote_id else None)
    idp = identity_client.federation.identity_providers.create(
        id=parsed_args.identity_provider_id,
        remote_ids=remote_ids,
        description=parsed_args.description,
        enabled=parsed_args.enabled)

    idp._info.pop('links', None)
    remote_ids = utils.format_list(idp._info.pop('remote_ids', []))
    idp._info['remote_ids'] = remote_ids
    return zip(*sorted(six.iteritems(idp._info)))
def _take_action(self, client, parsed_args):
    network_client = self.app.client_manager.network

    if parsed_args.json:
        blob = osc_utils.read_blob_file_contents(parsed_args.json)
        try:
            template = jsonutils.loads(blob)
        except ValueError as e:
            raise exceptions.CommandError(
                'An error occurred when reading '
                'template from file %s: %s' % (parsed_args.json, e))

        if 'neutron_management_network' in template:
            template['net_id'] = template.pop('neutron_management_network')

        if 'count' in template:
            parsed_args.count = template['count']

        data = client.clusters.create(**template).to_dict()
    else:
        if not parsed_args.name or not parsed_args.cluster_template \
                or not parsed_args.image:
            raise exceptions.CommandError(
                'At least --name, --cluster-template, --image arguments '
                'should be specified or json template should be provided '
                'with --json argument')

        plugin, plugin_version, template_id = utils._get_plugin_version(
            self.app, parsed_args.cluster_template, client)

        image_id = utils.get_resource_id(client.images, parsed_args.image)

        net_id = (network_client.find_network(
            parsed_args.neutron_network, ignore_missing=False).id
            if parsed_args.neutron_network else None)

        data = utils.create_cluster(client, self.app, parsed_args, plugin,
                                    plugin_version, template_id, image_id,
                                    net_id)
    return data
def _take_action(self, client, parsed_args):
    cluster_id = utils.get_resource_id(client.clusters,
                                       parsed_args.cluster)

    shares = None
    if parsed_args.shares:
        blob = osc_utils.read_blob_file_contents(parsed_args.shares)
        try:
            shares = jsonutils.loads(blob)
        except ValueError as e:
            raise exceptions.CommandError('An error occurred when reading '
                                          'shares from file %s: %s' %
                                          (parsed_args.shares, e))

    update_dict = utils.create_dict_from_kwargs(
        name=parsed_args.name,
        description=parsed_args.description,
        is_public=parsed_args.is_public,
        is_protected=parsed_args.is_protected,
        shares=shares)

    data = client.clusters.update(cluster_id, **update_dict).cluster
    return data
def _import_take_action(self, client, parsed_args):
    if (not parsed_args.image_id or not parsed_args.flavor_id):
        raise exceptions.CommandError(
            'At least --image_id and --flavor_id should be specified')
    blob = osc_utils.read_blob_file_contents(parsed_args.json)
    try:
        template = json.loads(blob)
    except ValueError as e:
        raise exceptions.CommandError('An error occurred when reading '
                                      'template from file %s: %s' %
                                      (parsed_args.json, e))
    template['node_group_template']['floating_ip_pool'] = (
        parsed_args.floating_ip_pool)
    template['node_group_template']['image_id'] = (
        parsed_args.image_id)
    template['node_group_template']['flavor_id'] = (
        parsed_args.flavor_id)
    template['node_group_template']['security_groups'] = (
        parsed_args.security_groups)
    if parsed_args.name:
        template['node_group_template']['name'] = parsed_args.name
    data = client.node_group_templates.create(
        **template['node_group_template']).to_dict()
    return data
def take_action(self, parsed_args):
    federation_client = self.app.client_manager.identity.federation

    # Basic argument checking
    if (not parsed_args.enable and not parsed_args.disable and
            not parsed_args.remote_id and not parsed_args.remote_id_file and
            not parsed_args.description):
        LOG.error(_('No changes requested'))
        return (None, None)

    # Always set remote_ids if either is passed in
    if parsed_args.remote_id_file:
        file_content = utils.read_blob_file_contents(
            parsed_args.remote_id_file)
        remote_ids = file_content.splitlines()
        remote_ids = list(map(str.strip, remote_ids))
    elif parsed_args.remote_id:
        remote_ids = parsed_args.remote_id

    # Setup keyword args for the client
    kwargs = {}
    if parsed_args.description:
        kwargs['description'] = parsed_args.description
    if parsed_args.enable:
        kwargs['enabled'] = True
    if parsed_args.disable:
        kwargs['enabled'] = False
    if parsed_args.remote_id_file or parsed_args.remote_id:
        kwargs['remote_ids'] = remote_ids

    identity_provider = federation_client.identity_providers.update(
        parsed_args.identity_provider, **kwargs)
    identity_provider._info.pop('links', None)
    return zip(*sorted(six.iteritems(identity_provider._info)))
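The remote-ID file handling shared by the identity-provider commands above is another candidate for a small helper. The sketch below is illustrative only: the helper name is not from the original source, it assumes osc_lib provides read_blob_file_contents as used above, and it additionally drops blank lines, which the original code does not do.

# Hypothetical helper for the repeated remote-ID file parsing; a sketch.
from osc_lib import utils


def _parse_remote_id_file(path):
    """Return a list of stripped, non-empty remote IDs read from path."""
    file_content = utils.read_blob_file_contents(path)
    remote_ids = [line.strip() for line in file_content.splitlines()]
    return [remote_id for remote_id in remote_ids if remote_id]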
def take_action(self, parsed_args):
    self.log.debug("take_action(%s)" % parsed_args)
    client = self.app.client_manager.data_processing

    ngt_id = utils.get_resource_id(
        client.node_group_templates, parsed_args.node_group_template)

    if parsed_args.json:
        blob = osc_utils.read_blob_file_contents(parsed_args.json)
        try:
            template = json.loads(blob)
        except ValueError as e:
            raise exceptions.CommandError(
                'An error occurred when reading '
                'template from file %s: %s' % (parsed_args.json, e))
        data = client.node_group_templates.update(
            ngt_id, **template).to_dict()
    else:
        configs = None
        if parsed_args.configs:
            blob = osc_utils.read_blob_file_contents(parsed_args.configs)
            try:
                configs = json.loads(blob)
            except ValueError as e:
                raise exceptions.CommandError(
                    'An error occurred when reading '
                    'configs from file %s: %s' % (parsed_args.configs, e))

        shares = None
        if parsed_args.shares:
            blob = osc_utils.read_blob_file_contents(parsed_args.shares)
            try:
                shares = json.loads(blob)
            except ValueError as e:
                raise exceptions.CommandError(
                    'An error occurred when reading '
                    'shares from file %s: %s' % (parsed_args.shares, e))

        flavor_id = None
        if parsed_args.flavor:
            compute_client = self.app.client_manager.compute
            flavor_id = osc_utils.find_resource(
                compute_client.flavors, parsed_args.flavor).id

        update_dict = utils.create_dict_from_kwargs(
            name=parsed_args.name,
            plugin_name=parsed_args.plugin,
            hadoop_version=parsed_args.plugin_version,
            flavor_id=flavor_id,
            description=parsed_args.description,
            volumes_per_node=parsed_args.volumes_per_node,
            volumes_size=parsed_args.volumes_size,
            node_processes=parsed_args.processes,
            floating_ip_pool=parsed_args.floating_ip_pool,
            security_groups=parsed_args.security_groups,
            auto_security_group=parsed_args.use_auto_security_group,
            availability_zone=parsed_args.availability_zone,
            volume_type=parsed_args.volumes_type,
            is_proxy_gateway=parsed_args.is_proxy_gateway,
            volume_local_to_instance=parsed_args.volume_locality,
            use_autoconfig=parsed_args.use_autoconfig,
            is_public=parsed_args.is_public,
            is_protected=parsed_args.is_protected,
            node_configs=configs,
            shares=shares,
            volumes_availability_zone=parsed_args.volumes_availability_zone)

        data = client.node_group_templates.update(
            ngt_id, **update_dict).to_dict()

    _format_ngt_output(data)
    data = utils.prepare_data(data, NGT_FIELDS)
    return self.dict2columns(data)
def _update_take_action(self, client, app, parsed_args):
    ngt_id = get_resource_id(client.node_group_templates,
                             parsed_args.node_group_template)

    if parsed_args.json:
        blob = osc_utils.read_blob_file_contents(parsed_args.json)
        try:
            template = json.loads(blob)
        except ValueError as e:
            raise exceptions.CommandError('An error occurred when reading '
                                          'template from file %s: %s' %
                                          (parsed_args.json, e))
        data = client.node_group_templates.update(ngt_id,
                                                  **template).to_dict()
    else:
        configs = None
        if parsed_args.configs:
            blob = osc_utils.read_blob_file_contents(parsed_args.configs)
            try:
                configs = json.loads(blob)
            except ValueError as e:
                raise exceptions.CommandError(
                    'An error occurred when reading '
                    'configs from file %s: %s' % (parsed_args.configs, e))

        shares = None
        if parsed_args.shares:
            blob = osc_utils.read_blob_file_contents(parsed_args.shares)
            try:
                shares = json.loads(blob)
            except ValueError as e:
                raise exceptions.CommandError(
                    'An error occurred when reading '
                    'shares from file %s: %s' % (parsed_args.shares, e))

        flavor_id = None
        if parsed_args.flavor:
            compute_client = self.app.client_manager.compute
            flavor_id = osc_utils.find_resource(compute_client.flavors,
                                                parsed_args.flavor).id

        update_dict = create_dict_from_kwargs(
            name=parsed_args.name,
            plugin_name=parsed_args.plugin,
            hadoop_version=parsed_args.plugin_version,
            flavor_id=flavor_id,
            description=parsed_args.description,
            volumes_per_node=parsed_args.volumes_per_node,
            volumes_size=parsed_args.volumes_size,
            node_processes=parsed_args.processes,
            floating_ip_pool=parsed_args.floating_ip_pool,
            security_groups=parsed_args.security_groups,
            auto_security_group=parsed_args.use_auto_security_group,
            availability_zone=parsed_args.availability_zone,
            volume_type=parsed_args.volumes_type,
            is_proxy_gateway=parsed_args.is_proxy_gateway,
            volume_local_to_instance=parsed_args.volume_locality,
            use_autoconfig=parsed_args.use_autoconfig,
            is_public=parsed_args.is_public,
            is_protected=parsed_args.is_protected,
            node_configs=configs,
            shares=shares,
            volumes_availability_zone=(
                parsed_args.volumes_availability_zone),
            volume_mount_prefix=parsed_args.volumes_mount_prefix)

        if app.api_version['data_processing'] == '2':
            if 'hadoop_version' in update_dict:
                update_dict.pop('hadoop_version')
                update_dict['plugin_version'] = parsed_args.plugin_version
            if parsed_args.boot_from_volume is not None:
                update_dict['boot_from_volume'] = (
                    parsed_args.boot_from_volume)
            if parsed_args.boot_volume_type is not None:
                update_dict['boot_volume_type'] = (
                    parsed_args.boot_volume_type)
            if parsed_args.boot_volume_availability_zone is not None:
                update_dict['boot_volume_availability_zone'] = (
                    parsed_args.boot_volume_availability_zone)
            if parsed_args.boot_volume_local_to_instance is not None:
                update_dict['boot_volume_local_to_instance'] = (
                    parsed_args.boot_volume_local_to_instance)

        data = client.node_group_templates.update(ngt_id,
                                                  **update_dict).to_dict()
    return data
def take_action(self, parsed_args):
    self.log.debug("take_action(%s)" % parsed_args)
    client = self.app.client_manager.data_processing

    if parsed_args.json:
        blob = osc_utils.read_blob_file_contents(parsed_args.json)
        try:
            template = json.loads(blob)
        except ValueError as e:
            raise exceptions.CommandError(
                'An error occurred when reading '
                'template from file %s: %s' % (parsed_args.json, e))

        if 'neutron_management_network' in template:
            template['net_id'] = template.pop('neutron_management_network')

        data = client.cluster_templates.create(**template).to_dict()
    else:
        if not parsed_args.name or not parsed_args.node_groups:
            raise exceptions.CommandError(
                'At least --name, --node-groups arguments should be '
                'specified or json template should be provided with '
                '--json argument')

        configs = None
        if parsed_args.configs:
            blob = osc_utils.read_blob_file_contents(parsed_args.configs)
            try:
                configs = json.loads(blob)
            except ValueError as e:
                raise exceptions.CommandError(
                    'An error occurred when reading '
                    'configs from file %s: %s' % (parsed_args.configs, e))

        shares = None
        if parsed_args.shares:
            blob = osc_utils.read_blob_file_contents(parsed_args.shares)
            try:
                shares = json.loads(blob)
            except ValueError as e:
                raise exceptions.CommandError(
                    'An error occurred when reading '
                    'shares from file %s: %s' % (parsed_args.shares, e))

        plugin, plugin_version, node_groups = _configure_node_groups(
            parsed_args.node_groups, client)

        data = client.cluster_templates.create(
            name=parsed_args.name,
            plugin_name=plugin,
            hadoop_version=plugin_version,
            description=parsed_args.description,
            node_groups=node_groups,
            use_autoconfig=parsed_args.autoconfig,
            cluster_configs=configs,
            shares=shares,
            is_public=parsed_args.public,
            is_protected=parsed_args.protected).to_dict()

    _format_ct_output(data)
    data = utils.prepare_data(data, CT_FIELDS)
    return self.dict2columns(data)
def _take_action(self, client, parsed_args):
    if parsed_args.json:
        blob = osc_utils.read_blob_file_contents(parsed_args.json)
        try:
            template = jsonutils.loads(blob)
        except ValueError as e:
            raise exceptions.CommandError(
                'An error occurred when reading '
                'template from file %s: %s' % (parsed_args.json, e))

        if 'job_configs' in template:
            template['configs'] = template.pop('job_configs')

        data = utils.create_job_json(client, self.app, template)
    else:
        if not parsed_args.cluster or not parsed_args.job_template:
            raise exceptions.CommandError(
                'At least --cluster, --job-template arguments should be '
                'specified or json template should be provided with '
                '--json argument')
        job_configs = {}

        if parsed_args.interface:
            # Read the interface definition from the file passed via
            # --interface (not from --json, which is unset in this branch).
            blob = osc_utils.read_blob_file_contents(parsed_args.interface)
            try:
                parsed_args.interface = jsonutils.loads(blob)
            except ValueError as e:
                raise exceptions.CommandError(
                    'An error occurred when reading '
                    'interface from file %s: %s' %
                    (parsed_args.interface, e))

        if parsed_args.config_json:
            blob = osc_utils.read_blob_file_contents(parsed_args.configs)
            try:
                job_configs['configs'] = jsonutils.loads(blob)
            except ValueError as e:
                raise exceptions.CommandError(
                    'An error occurred when reading '
                    'configs from file %s: %s' % (parsed_args.configs, e))
        elif parsed_args.configs:
            job_configs['configs'] = dict(
                map(lambda x: x.split(':', 1), parsed_args.configs))

        if parsed_args.args:
            job_configs['args'] = parsed_args.args

        if parsed_args.params:
            job_configs['params'] = dict(
                map(lambda x: x.split(':', 1), parsed_args.params))

        jt_id = utils.get_job_template_id(self.app, client, parsed_args)
        cluster_id = utils.get_resource_id(
            client.clusters, parsed_args.cluster)
        if parsed_args.input not in [None, "", "None"]:
            input_id = utils.get_resource_id(
                client.data_sources, parsed_args.input)
        else:
            input_id = None
        if parsed_args.output not in [None, "", "None"]:
            output_id = utils.get_resource_id(
                client.data_sources, parsed_args.output)
        else:
            output_id = None

        data = utils.create_job(client, self.app, jt_id, cluster_id,
                                input_id, output_id, job_configs,
                                parsed_args)

    sys.stdout.write(
        'Job "{job}" has been started successfully.\n'.format(
            job=data['id']))
    return data
def take_action(self, parsed_args):
    self.log.debug("take_action(%s)", parsed_args)
    client = self.app.client_manager.data_processing

    ct_id = utils.get_resource_id(client.cluster_templates,
                                  parsed_args.cluster_template)

    if parsed_args.json:
        blob = osc_utils.read_blob_file_contents(parsed_args.json)
        try:
            template = json.loads(blob)
        except ValueError as e:
            raise exceptions.CommandError('An error occurred when reading '
                                          'template from file %s: %s' %
                                          (parsed_args.json, e))
        data = client.cluster_templates.update(ct_id, **template).to_dict()
    else:
        plugin, plugin_version, node_groups = None, None, None
        if parsed_args.node_groups:
            plugin, plugin_version, node_groups = _configure_node_groups(
                parsed_args.node_groups, client)

        configs = None
        if parsed_args.configs:
            blob = osc_utils.read_blob_file_contents(parsed_args.configs)
            try:
                configs = json.loads(blob)
            except ValueError as e:
                raise exceptions.CommandError(
                    'An error occurred when reading '
                    'configs from file %s: %s' % (parsed_args.configs, e))

        shares = None
        if parsed_args.shares:
            blob = osc_utils.read_blob_file_contents(parsed_args.shares)
            try:
                shares = json.loads(blob)
            except ValueError as e:
                raise exceptions.CommandError(
                    'An error occurred when reading '
                    'shares from file %s: %s' % (parsed_args.shares, e))

        update_dict = utils.create_dict_from_kwargs(
            name=parsed_args.name,
            plugin_name=plugin,
            hadoop_version=plugin_version,
            description=parsed_args.description,
            node_groups=node_groups,
            use_autoconfig=parsed_args.use_autoconfig,
            cluster_configs=configs,
            shares=shares,
            is_public=parsed_args.is_public,
            is_protected=parsed_args.is_protected,
            domain_name=parsed_args.domain_name)

        data = client.cluster_templates.update(ct_id,
                                               **update_dict).to_dict()

    _format_ct_output(data)
    data = utils.prepare_data(data, CT_FIELDS)
    return self.dict2columns(data)
def take_action(self, parsed_args):
    self.log.debug("take_action(%s)", parsed_args)
    client = self.app.client_manager.data_processing

    if parsed_args.json:
        blob = osc_utils.read_blob_file_contents(parsed_args.json)
        try:
            template = json.loads(blob)
        except ValueError as e:
            raise exceptions.CommandError('An error occurred when reading '
                                          'template from file %s: %s' %
                                          (parsed_args.json, e))

        if 'neutron_management_network' in template:
            template['net_id'] = template.pop('neutron_management_network')

        data = client.cluster_templates.create(**template).to_dict()
    else:
        if not parsed_args.name or not parsed_args.node_groups:
            raise exceptions.CommandError(
                'At least --name, --node-groups arguments should be '
                'specified or json template should be provided with '
                '--json argument')

        configs = None
        if parsed_args.configs:
            blob = osc_utils.read_blob_file_contents(parsed_args.configs)
            try:
                configs = json.loads(blob)
            except ValueError as e:
                raise exceptions.CommandError(
                    'An error occurred when reading '
                    'configs from file %s: %s' % (parsed_args.configs, e))

        shares = None
        if parsed_args.shares:
            blob = osc_utils.read_blob_file_contents(parsed_args.shares)
            try:
                shares = json.loads(blob)
            except ValueError as e:
                raise exceptions.CommandError(
                    'An error occurred when reading '
                    'shares from file %s: %s' % (parsed_args.shares, e))

        plugin, plugin_version, node_groups = _configure_node_groups(
            parsed_args.node_groups, client)

        data = client.cluster_templates.create(
            name=parsed_args.name,
            plugin_name=plugin,
            hadoop_version=plugin_version,
            description=parsed_args.description,
            node_groups=node_groups,
            use_autoconfig=parsed_args.autoconfig,
            cluster_configs=configs,
            shares=shares,
            is_public=parsed_args.public,
            is_protected=parsed_args.protected,
            domain_name=parsed_args.domain_name).to_dict()

    _format_ct_output(data)
    data = utils.prepare_data(data, CT_FIELDS)
    return self.dict2columns(data)
def take_action(self, parsed_args):
    self.log.debug("take_action(%s)" % parsed_args)
    client = self.app.client_manager.data_processing

    ct_id = utils.get_resource_id(
        client.cluster_templates, parsed_args.cluster_template)

    if parsed_args.json:
        blob = osc_utils.read_blob_file_contents(parsed_args.json)
        try:
            template = json.loads(blob)
        except ValueError as e:
            raise exceptions.CommandError(
                'An error occurred when reading '
                'template from file %s: %s' % (parsed_args.json, e))
        data = client.cluster_templates.update(
            ct_id, **template).to_dict()
    else:
        plugin, plugin_version, node_groups = None, None, None
        if parsed_args.node_groups:
            plugin, plugin_version, node_groups = _configure_node_groups(
                parsed_args.node_groups, client)

        configs = None
        if parsed_args.configs:
            blob = osc_utils.read_blob_file_contents(parsed_args.configs)
            try:
                configs = json.loads(blob)
            except ValueError as e:
                raise exceptions.CommandError(
                    'An error occurred when reading '
                    'configs from file %s: %s' % (parsed_args.configs, e))

        shares = None
        if parsed_args.shares:
            blob = osc_utils.read_blob_file_contents(parsed_args.shares)
            try:
                shares = json.loads(blob)
            except ValueError as e:
                raise exceptions.CommandError(
                    'An error occurred when reading '
                    'shares from file %s: %s' % (parsed_args.shares, e))

        update_dict = utils.create_dict_from_kwargs(
            name=parsed_args.name,
            plugin_name=plugin,
            hadoop_version=plugin_version,
            description=parsed_args.description,
            node_groups=node_groups,
            use_autoconfig=parsed_args.use_autoconfig,
            cluster_configs=configs,
            shares=shares,
            is_public=parsed_args.is_public,
            is_protected=parsed_args.is_protected)

        data = client.cluster_templates.update(
            ct_id, **update_dict).to_dict()

    _format_ct_output(data)
    data = utils.prepare_data(data, CT_FIELDS)
    return self.dict2columns(data)
def take_action(self, parsed_args):
    self.log.debug("take_action(%s)" % parsed_args)
    client = self.app.client_manager.data_processing

    if parsed_args.json:
        blob = osc_utils.read_blob_file_contents(parsed_args.json)
        try:
            template = jsonutils.loads(blob)
        except ValueError as e:
            raise exceptions.CommandError('An error occurred when reading '
                                          'template from file %s: %s' %
                                          (parsed_args.json, e))

        if 'job_configs' in template:
            template['configs'] = template.pop('job_configs')

        data = client.job_executions.create(**template).to_dict()
    else:
        if not parsed_args.cluster or not parsed_args.job_template:
            raise exceptions.CommandError(
                'At least --cluster, --job-template arguments should be '
                'specified or json template should be provided with '
                '--json argument')
        job_configs = {}

        if parsed_args.interface:
            # Read the interface definition from the file passed via
            # --interface (not from --json, which is unset in this branch).
            blob = osc_utils.read_blob_file_contents(parsed_args.interface)
            try:
                parsed_args.interface = jsonutils.loads(blob)
            except ValueError as e:
                raise exceptions.CommandError(
                    'An error occurred when reading '
                    'interface from file %s: %s' %
                    (parsed_args.interface, e))

        if parsed_args.config_json:
            blob = osc_utils.read_blob_file_contents(parsed_args.configs)
            try:
                job_configs['configs'] = jsonutils.loads(blob)
            except ValueError as e:
                raise exceptions.CommandError(
                    'An error occurred when reading '
                    'configs from file %s: %s' % (parsed_args.configs, e))
        elif parsed_args.configs:
            job_configs['configs'] = dict(
                map(lambda x: x.split(':', 1), parsed_args.configs))

        if parsed_args.args:
            job_configs['args'] = parsed_args.args

        if parsed_args.params:
            job_configs['params'] = dict(
                map(lambda x: x.split(':', 1), parsed_args.params))

        jt_id = utils.get_resource_id(client.jobs, parsed_args.job_template)
        cluster_id = utils.get_resource_id(client.clusters,
                                           parsed_args.cluster)
        input_id = utils.get_resource_id(client.data_sources,
                                         parsed_args.input)
        output_id = utils.get_resource_id(client.data_sources,
                                          parsed_args.output)

        data = client.job_executions.create(
            job_id=jt_id, cluster_id=cluster_id, input_id=input_id,
            output_id=output_id, interface=parsed_args.interface,
            configs=job_configs, is_public=parsed_args.public,
            is_protected=parsed_args.protected).to_dict()

    sys.stdout.write(
        'Job "{job}" has been started successfully.\n'.format(
            job=data['id']))

    _format_job_output(data)
    data = utils.prepare_data(data, JOB_FIELDS)
    return self.dict2columns(data)
def take_action(self, parsed_args):
    self.log.debug("take_action(%s)", parsed_args)
    client = self.app.client_manager.data_processing
    network_client = self.app.client_manager.network

    if parsed_args.json:
        # Create the cluster directly from a JSON template file.
        blob = osc_utils.read_blob_file_contents(parsed_args.json)
        try:
            template = json.loads(blob)
        except ValueError as e:
            raise exceptions.CommandError(
                'An error occurred when reading '
                'template from file %s: %s' % (parsed_args.json, e))

        if 'neutron_management_network' in template:
            template['net_id'] = template.pop('neutron_management_network')

        if 'count' in template:
            # Keep the count so the multi-cluster handling below also
            # applies to the --json path.
            parsed_args.count = template['count']

        data = client.clusters.create(**template).to_dict()
    else:
        if (not parsed_args.name or not parsed_args.cluster_template
                or not parsed_args.image):
            raise exceptions.CommandError(
                'At least --name, --cluster-template and --image arguments '
                'should be specified, or a json template should be provided '
                'with the --json argument')

        plugin, plugin_version, template_id = _get_plugin_version(
            parsed_args.cluster_template, client)

        image_id = utils.get_resource_id(client.images, parsed_args.image)

        net_id = (network_client.find_network(
            parsed_args.neutron_network,
            ignore_missing=False).id if parsed_args.neutron_network else None)

        data = client.clusters.create(
            name=parsed_args.name,
            plugin_name=plugin,
            hadoop_version=plugin_version,
            cluster_template_id=template_id,
            default_image_id=image_id,
            description=parsed_args.description,
            is_transient=parsed_args.transient,
            user_keypair_id=parsed_args.user_keypair,
            net_id=net_id,
            count=parsed_args.count,
            is_public=parsed_args.public,
            is_protected=parsed_args.protected).to_dict()

    if parsed_args.count and parsed_args.count > 1:
        clusters = [utils.get_resource(client.clusters, cluster_id)
                    for cluster_id in data['clusters']]

        if parsed_args.wait:
            for cluster in clusters:
                if not osc_utils.wait_for_status(client.clusters.get,
                                                 cluster.id):
                    self.log.error(
                        'Error occurred during cluster creation: %s',
                        cluster.id)

        data = {}
        for cluster in clusters:
            data[cluster.name] = cluster.id
    else:
        if parsed_args.wait:
            if not osc_utils.wait_for_status(client.clusters.get,
                                             data['id']):
                self.log.error(
                    'Error occurred during cluster creation: %s',
                    data['id'])
            data = client.clusters.get(data['id']).to_dict()

        _format_cluster_output(data)
        data = utils.prepare_data(data, CLUSTER_FIELDS)

    return self.dict2columns(data)
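# A minimal sketch (illustrative only; every value is a placeholder) of a
# template file for the cluster create path above, assuming its keys mirror
# the keyword arguments used in the non-json branch. A
# 'neutron_management_network' key is accepted and renamed to 'net_id', and a
# 'count' greater than 1 triggers the multiple-clusters handling shown above.
import json

example_cluster_template = {
    "name": "demo-cluster",
    "plugin_name": "vanilla",
    "hadoop_version": "2.7.1",
    "cluster_template_id": "00000000-0000-0000-0000-00000000000a",
    "default_image_id": "00000000-0000-0000-0000-00000000000b",
    "neutron_management_network": "00000000-0000-0000-0000-00000000000c",
    "user_keypair_id": "demo-keypair",
    "count": 2,
    "is_public": False,
    "is_protected": False,
}

with open("cluster_create_template.json", "w") as f:
    json.dump(example_cluster_template, f, indent=2)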
def take_action(self, parsed_args):
    self.log.debug("take_action(%s)", parsed_args)
    client = self.app.client_manager.data_processing

    if parsed_args.json:
        blob = osc_utils.read_blob_file_contents(parsed_args.json)
        try:
            template = jsonutils.loads(blob)
        except ValueError as e:
            raise exceptions.CommandError(
                'An error occurred when reading '
                'template from file %s: %s' % (parsed_args.json, e))
        data = client.job_binaries.create(**template).to_dict()
    else:
        if parsed_args.password_prompt:
            parsed_args.password = osc_utils.get_password(
                self.app.stdin, confirm=False)

        if parsed_args.secret_key_prompt:
            parsed_args.secret_key = osc_utils.get_password(
                self.app.stdin, confirm=False)

        if not parsed_args.password:
            parsed_args.password = parsed_args.secret_key

        if not parsed_args.username:
            parsed_args.username = parsed_args.access_key

        if parsed_args.password and not parsed_args.username:
            raise exceptions.CommandError(
                'Username via --username, or S3 access key via '
                '--access-key should be provided with password')

        if parsed_args.username and not parsed_args.password:
            raise exceptions.CommandError(
                'Password should be provided via --password or '
                '--secret-key, or entered interactively with '
                '--password-prompt or --secret-key-prompt')

        if parsed_args.password and parsed_args.username:
            if not parsed_args.url:
                raise exceptions.CommandError(
                    'URL must be provided via --url')
            if parsed_args.url.startswith('s3'):
                if not parsed_args.s3_endpoint:
                    raise exceptions.CommandError(
                        'S3 job binaries need an endpoint provided via '
                        '--s3-endpoint')
                extra = {
                    'accesskey': parsed_args.username,
                    'secretkey': parsed_args.password,
                    'endpoint': parsed_args.s3_endpoint,
                }
            else:
                extra = {
                    'user': parsed_args.username,
                    'password': parsed_args.password
                }
        else:
            extra = None

        data = client.job_binaries.create(
            name=parsed_args.name,
            url=parsed_args.url,
            description=parsed_args.description,
            extra=extra,
            is_public=parsed_args.public,
            is_protected=parsed_args.protected).to_dict()

    data = utils.prepare_data(data, jb_v1.JOB_BINARY_FIELDS)

    return self.dict2columns(data)
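# A standalone sketch (hypothetical helper, not called by the command above)
# that mirrors how the branch above assembles the 'extra' credentials dict:
# S3-style URLs get accesskey/secretkey/endpoint, other URLs get
# user/password, and missing credentials produce no extra data at all.
def build_job_binary_extra(username, password, url, s3_endpoint=None):
    if not (username and password):
        return None
    if url.startswith('s3'):
        if not s3_endpoint:
            raise ValueError('S3 job binaries need an endpoint')
        return {
            'accesskey': username,
            'secretkey': password,
            'endpoint': s3_endpoint,
        }
    return {
        'user': username,
        'password': password,
    }

# Example shapes:
#   build_job_binary_extra('ak', 'sk', 's3://bucket/binary', 'http://s3:9000')
#       -> {'accesskey': 'ak', 'secretkey': 'sk', 'endpoint': 'http://s3:9000'}
#   build_job_binary_extra('demo', 'secret', 'swift://container/binary')
#       -> {'user': 'demo', 'password': 'secret'}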
def _take_action(self, client, parsed_args):
    if parsed_args.json:
        # Create the job directly from a JSON template file.
        blob = osc_utils.read_blob_file_contents(parsed_args.json)
        try:
            template = jsonutils.loads(blob)
        except ValueError as e:
            raise exceptions.CommandError(
                'An error occurred when reading '
                'template from file %s: %s' % (parsed_args.json, e))
        if 'job_configs' in template:
            # The API expects 'configs'; templates may use 'job_configs'.
            template['configs'] = template.pop('job_configs')
        data = utils.create_job_json(client, self.app, template)
    else:
        if not parsed_args.cluster or not parsed_args.job_template:
            raise exceptions.CommandError(
                'At least --cluster and --job-template arguments should be '
                'specified, or a json template should be provided with the '
                '--json argument')

        job_configs = {}
        if parsed_args.interface:
            # Read the interface definition from the file given via
            # --interface.
            blob = osc_utils.read_blob_file_contents(parsed_args.interface)
            try:
                parsed_args.interface = jsonutils.loads(blob)
            except ValueError as e:
                raise exceptions.CommandError(
                    'An error occurred when reading '
                    'interface from file %s: %s' % (parsed_args.interface, e))

        if parsed_args.config_json:
            # Job configs supplied as a JSON file via --config-json.
            blob = osc_utils.read_blob_file_contents(parsed_args.config_json)
            try:
                job_configs['configs'] = jsonutils.loads(blob)
            except ValueError as e:
                raise exceptions.CommandError(
                    'An error occurred when reading '
                    'configs from file %s: %s' % (parsed_args.config_json, e))
        elif parsed_args.configs:
            # Job configs supplied as key:value pairs via --configs.
            job_configs['configs'] = dict(
                map(lambda x: x.split(':', 1), parsed_args.configs))

        if parsed_args.args:
            job_configs['args'] = parsed_args.args

        if parsed_args.params:
            job_configs['params'] = dict(
                map(lambda x: x.split(':', 1), parsed_args.params))

        jt_id = utils.get_job_template_id(self.app, client, parsed_args)
        cluster_id = utils.get_resource_id(client.clusters,
                                           parsed_args.cluster)

        if parsed_args.input not in [None, "", "None"]:
            input_id = utils.get_resource_id(client.data_sources,
                                             parsed_args.input)
        else:
            input_id = None

        if parsed_args.output not in [None, "", "None"]:
            output_id = utils.get_resource_id(client.data_sources,
                                              parsed_args.output)
        else:
            output_id = None

        data = utils.create_job(client, self.app, jt_id, cluster_id,
                                input_id, output_id, job_configs,
                                parsed_args)

    sys.stdout.write(
        'Job "{job}" has been started successfully.\n'.format(
            job=data['id']))

    return data
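# A quick sketch (illustrative only) of the key:value parsing used above for
# --configs and --params: split(':', 1) splits on the first colon only, so
# values are free to contain further colons (URLs, host:port pairs, ...).
config_pairs = ["mapred.reduce.tasks:2", "fs.defaultFS:hdfs://namenode:8020"]

configs = dict(map(lambda x: x.split(':', 1), config_pairs))
assert configs == {
    "mapred.reduce.tasks": "2",
    "fs.defaultFS": "hdfs://namenode:8020",
}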