def test_get_resource_id(self):
    """Ids pass straight through; names are resolved via find_unique."""
    fake_uuid = '82065b4d-2c79-420d-adc3-310de275e922'
    manager = mock.Mock()
    manager.find_unique.return_value = mock.Mock(id=fake_uuid)

    # A value that already looks like an id must be returned untouched,
    # without any API round-trip.
    self.assertEqual(fake_uuid, utils.get_resource_id(manager, fake_uuid))
    manager.get.assert_not_called()
    manager.find_unique.assert_not_called()

    # A name must be resolved through find_unique exactly once.
    self.assertEqual(fake_uuid, utils.get_resource_id(manager, 'name'))
    manager.find_unique.assert_called_once_with(name='name')
def take_action(self, parsed_args):
    """Start deletion of the requested clusters; optionally wait."""
    self.log.debug("take_action(%s)", parsed_args)
    client = self.app.client_manager.data_processing
    delete_attr = self._choose_delete_mode(parsed_args)
    started = []
    for cluster_arg in parsed_args.cluster:
        cluster_id = utils.get_resource_id(client.clusters, cluster_arg)
        # Delete mode (e.g. plain vs. force) is chosen by the subclass.
        getattr(client.clusters, delete_attr)(cluster_id)
        started.append((cluster_id, cluster_arg))
        sys.stdout.write(
            'Cluster "{cluster}" deletion has been started.\n'.format(
                cluster=cluster_arg))
    if parsed_args.wait:
        for cluster_id, cluster_arg in started:
            if utils.wait_for_delete(client.clusters, cluster_id):
                sys.stdout.write(
                    'Cluster "{cluster}" has been removed '
                    'successfully.\n'.format(cluster=cluster_arg))
            else:
                self.log.error(
                    'Error occurred during cluster deleting: %s' %
                    cluster_id)
def take_action(self, parsed_args):
    """Unregister every image named on the command line."""
    self.log.debug("take_action(%s)", parsed_args)
    client = self.app.client_manager.data_processing
    for image_arg in parsed_args.image:
        client.images.unregister_image(
            utils.get_resource_id(client.images, image_arg))
        sys.stdout.write('Image "{image}" has been unregistered '
                         'successfully.\n'.format(image=image_arg))
def take_action(self, parsed_args):
    """Delete each requested cluster template."""
    self.log.debug("take_action(%s)", parsed_args)
    client = self.app.client_manager.data_processing
    for template_arg in parsed_args.cluster_template:
        template_id = utils.get_resource_id(
            client.cluster_templates, template_arg)
        client.cluster_templates.delete(template_id)
        sys.stdout.write('Cluster template "{ct}" has been removed '
                         'successfully.\n'.format(ct=template_arg))
def take_action(self, parsed_args):
    """Delete each requested data source."""
    self.log.debug("take_action(%s)", parsed_args)
    client = self.app.client_manager.data_processing
    for source_arg in parsed_args.data_source:
        source_id = utils.get_resource_id(client.data_sources, source_arg)
        client.data_sources.delete(source_id)
        sys.stdout.write('Data Source "{ds}" has been removed '
                         'successfully.\n'.format(ds=source_arg))
def take_action(self, parsed_args):
    """Remove the given cluster templates one by one."""
    self.log.debug("take_action(%s)", parsed_args)
    templates = self.app.client_manager.data_processing.cluster_templates
    for name_or_id in parsed_args.cluster_template:
        templates.delete(utils.get_resource_id(templates, name_or_id))
        sys.stdout.write(
            'Cluster template "{ct}" has been removed '
            'successfully.\n'.format(ct=name_or_id))
def take_action(self, parsed_args):
    """Unregister the given images one by one."""
    self.log.debug("take_action(%s)", parsed_args)
    images = self.app.client_manager.data_processing.images
    for name_or_id in parsed_args.image:
        images.unregister_image(utils.get_resource_id(images, name_or_id))
        sys.stdout.write(
            'Image "{image}" has been unregistered '
            'successfully.\n'.format(image=name_or_id))
def take_action(self, parsed_args):
    """Delete each requested node group template."""
    self.log.debug("take_action(%s)", parsed_args)
    client = self.app.client_manager.data_processing
    for name_or_id in parsed_args.node_group_template:
        template_id = utils.get_resource_id(
            client.node_group_templates, name_or_id)
        client.node_group_templates.delete(template_id)
        sys.stdout.write(
            'Node group template "{ngt}" has been removed '
            'successfully.\n'.format(ngt=name_or_id))
def take_action(self, parsed_args):
    """Trigger a keypair update for a single cluster."""
    self.log.debug("take_action(%s)", parsed_args)
    clusters = self.app.client_manager.data_processing.clusters
    clusters.update_keypair(
        utils.get_resource_id(clusters, parsed_args.cluster))
    sys.stdout.write(
        'Cluster "{cluster}" keypair has been updated.\n'.format(
            cluster=parsed_args.cluster))
    # Nothing to render in tabular form for this action.
    return {}, {}
def take_action(self, parsed_args):
    """Update the keypair of the selected cluster."""
    self.log.debug("take_action(%s)", parsed_args)
    client = self.app.client_manager.data_processing
    target = utils.get_resource_id(client.clusters, parsed_args.cluster)
    client.clusters.update_keypair(target)
    message = ('Cluster "{cluster}" keypair has been updated.\n'
               .format(cluster=parsed_args.cluster))
    sys.stdout.write(message)
    # No columns/rows to display.
    return {}, {}
def take_action(self, parsed_args):
    """Export a cluster template as indented JSON to a file or stdout."""
    self.log.debug("take_action(%s)", parsed_args)
    client = self.app.client_manager.data_processing
    template_id = utils.get_resource_id(client.cluster_templates,
                                        parsed_args.cluster_template)
    response = client.cluster_templates.export(template_id)
    serialized = json.dumps(response._info, indent=4) + "\n"
    if parsed_args.file:
        with open(parsed_args.file, "w+") as out:
            out.write(serialized)
    else:
        sys.stdout.write(serialized)
def take_action(self, parsed_args):
    """Dump the chosen cluster template as JSON (file or stdout)."""
    self.log.debug("take_action(%s)", parsed_args)
    client = self.app.client_manager.data_processing
    template_id = utils.get_resource_id(
        client.cluster_templates, parsed_args.cluster_template)
    exported = client.cluster_templates.export(template_id)
    text = json.dumps(exported._info, indent=4) + "\n"
    if parsed_args.file:
        with open(parsed_args.file, "w+") as handle:
            handle.write(text)
    else:
        sys.stdout.write(text)
def take_action(self, parsed_args):
    """Replace an image's tag set and display the updated image."""
    self.log.debug("take_action(%s)", parsed_args)
    client = self.app.client_manager.data_processing
    image_id = utils.get_resource_id(client.images, parsed_args.image)
    data = client.images.update_tags(image_id, parsed_args.tags).to_dict()
    # Tags are shown as a formatted list rather than a raw sequence.
    data['tags'] = osc_utils.format_list(data['tags'])
    return self.dict2columns(utils.prepare_data(data, IMAGE_FIELDS))
def take_action(self, parsed_args):
    """Run the subclass-specific template action and format the result."""
    self.log.debug("take_action(%s)", parsed_args)
    client = self.app.client_manager.data_processing
    template_id = utils.get_resource_id(client.cluster_templates,
                                        parsed_args.cluster_template)
    data = self._take_action(client, parsed_args, template_id)
    _format_ct_output(self.app, data)
    return self.dict2columns(utils.prepare_data(data, CT_FIELDS))
def take_action(self, parsed_args):
    """Dispatch to the template-specific action and render its output."""
    self.log.debug("take_action(%s)", parsed_args)
    client = self.app.client_manager.data_processing
    template_id = utils.get_resource_id(
        client.cluster_templates, parsed_args.cluster_template)
    result = self._take_action(client, parsed_args, template_id)
    _format_ct_output(self.app, result)
    prepared = utils.prepare_data(result, CT_FIELDS)
    return self.dict2columns(prepared)
def take_action(self, parsed_args):
    """Update a job binary from a JSON template or from CLI flags.

    With --json, the whole update payload comes from the file.
    Otherwise the payload is assembled from individual arguments,
    with credentials gathered into an ``extra`` dict.
    """
    self.log.debug("take_action(%s)", parsed_args)
    client = self.app.client_manager.data_processing
    jb_id = utils.get_resource_id(client.job_binaries,
                                  parsed_args.job_binary)
    if parsed_args.json:
        blob = osc_utils.read_blob_file_contents(parsed_args.json)
        try:
            template = jsonutils.loads(blob)
        except ValueError as e:
            raise exceptions.CommandError('An error occurred when reading '
                                          'template from file %s: %s' %
                                          (parsed_args.json, e))
        data = client.job_binaries.update(jb_id, template).to_dict()
    else:
        # Interactive prompts replace the corresponding CLI values.
        if parsed_args.password_prompt:
            parsed_args.password = osc_utils.get_password(self.app.stdin,
                                                          confirm=False)
        if parsed_args.secret_key_prompt:
            parsed_args.secret_key = osc_utils.get_password(self.app.stdin,
                                                            confirm=False)
        # Only supplied credential pieces are sent; an empty dict
        # collapses to None so "extra" is omitted from the update.
        extra = {}
        if parsed_args.password:
            extra['password'] = parsed_args.password
        if parsed_args.username:
            extra['user'] = parsed_args.username
        if parsed_args.access_key:
            extra['accesskey'] = parsed_args.access_key
        if parsed_args.secret_key:
            extra['secretkey'] = parsed_args.secret_key
        if parsed_args.s3_endpoint:
            extra['endpoint'] = parsed_args.s3_endpoint
        if not extra:
            extra = None
        update_fields = utils.create_dict_from_kwargs(
            name=parsed_args.name, url=parsed_args.url,
            description=parsed_args.description, extra=extra,
            is_public=parsed_args.is_public,
            is_protected=parsed_args.is_protected)
        data = client.job_binaries.update(jb_id, update_fields).to_dict()
    data = utils.prepare_data(data, JOB_BINARY_FIELDS)
    return self.dict2columns(data)
def take_action(self, parsed_args):
    """Update a job binary, taking the payload from --json or flags.

    Raises CommandError if the supplied JSON file cannot be parsed.
    """
    self.log.debug("take_action(%s)", parsed_args)
    client = self.app.client_manager.data_processing
    jb_id = utils.get_resource_id(
        client.job_binaries, parsed_args.job_binary)
    if parsed_args.json:
        blob = osc_utils.read_blob_file_contents(parsed_args.json)
        try:
            template = jsonutils.loads(blob)
        except ValueError as e:
            raise exceptions.CommandError(
                'An error occurred when reading '
                'template from file %s: %s' % (parsed_args.json, e))
        data = client.job_binaries.update(jb_id, template).to_dict()
    else:
        # Prompted secrets override anything passed on the command line.
        if parsed_args.password_prompt:
            parsed_args.password = osc_utils.get_password(
                self.app.stdin, confirm=False)
        if parsed_args.secret_key_prompt:
            parsed_args.secret_key = osc_utils.get_password(
                self.app.stdin, confirm=False)
        # Credentials are collected into "extra"; if none were given
        # the dict becomes None so the field is left out of the update.
        extra = {}
        if parsed_args.password:
            extra['password'] = parsed_args.password
        if parsed_args.username:
            extra['user'] = parsed_args.username
        if parsed_args.access_key:
            extra['accesskey'] = parsed_args.access_key
        if parsed_args.secret_key:
            extra['secretkey'] = parsed_args.secret_key
        if parsed_args.s3_endpoint:
            extra['endpoint'] = parsed_args.s3_endpoint
        if not extra:
            extra = None
        update_fields = utils.create_dict_from_kwargs(
            name=parsed_args.name, url=parsed_args.url,
            description=parsed_args.description, extra=extra,
            is_public=parsed_args.is_public,
            is_protected=parsed_args.is_protected
        )
        data = client.job_binaries.update(
            jb_id, update_fields).to_dict()
    data = utils.prepare_data(data, JOB_BINARY_FIELDS)
    return self.dict2columns(data)
def take_action(self, parsed_args):
    """Create a job template from a JSON file or from CLI arguments.

    Raises CommandError when a supplied JSON or interface file cannot
    be read or parsed.
    """
    self.log.debug("take_action(%s)", parsed_args)
    client = self.app.client_manager.data_processing
    if parsed_args.json:
        blob = osc_utils.read_blob_file_contents(parsed_args.json)
        try:
            template = jsonutils.loads(blob)
        except ValueError as e:
            raise exceptions.CommandError(
                'An error occurred when reading '
                'template from file %s: %s' % (parsed_args.json, e))
        data = utils.create_job_template_json(self.app, client, **template)
    else:
        if parsed_args.interface:
            # Bug fix: the interface description lives in the file named
            # by --interface; this previously read parsed_args.json,
            # which is None on this branch and would crash.
            blob = osc_utils.read_blob_file_contents(parsed_args.interface)
            try:
                parsed_args.interface = jsonutils.loads(blob)
            except ValueError as e:
                raise exceptions.CommandError(
                    'An error occurred when reading '
                    'interface from file %s: %s' % (parsed_args.interface,
                                                    e))
        # Resolve main/lib binaries to ids; omit the key entirely when
        # the corresponding argument was not given.
        mains_ids = [utils.get_resource_id(client.job_binaries, m)
                     for m in parsed_args.mains] if parsed_args.mains \
            else None
        libs_ids = [utils.get_resource_id(client.job_binaries, m)
                    for m in parsed_args.libs] if parsed_args.libs else None
        data = utils.create_job_templates(self.app, client, mains_ids,
                                          libs_ids, parsed_args)
    _format_job_template_output(data)
    data = utils.prepare_data(data, JOB_TEMPLATE_FIELDS)
    return self.dict2columns(data)
def take_action(self, parsed_args):
    """Update a data source's fields and (type-specific) credentials."""
    self.log.debug("take_action(%s)", parsed_args)
    client = self.app.client_manager.data_processing
    # Credentials are type-specific: swift takes user/password, s3 takes
    # key material plus endpoint/ssl/bucket options.
    credentials = {}
    if parsed_args.type == 'swift':
        if parsed_args.username:
            credentials['user'] = parsed_args.username
        if parsed_args.password:
            credentials['password'] = parsed_args.password
    elif parsed_args.type == 's3':
        if parsed_args.access_key:
            credentials['accesskey'] = parsed_args.access_key
        if parsed_args.secret_key:
            credentials['secretkey'] = parsed_args.secret_key
        if parsed_args.s3_endpoint:
            credentials['endpoint'] = parsed_args.s3_endpoint
        # NOTE(review): the equality tests below presumably detect that
        # the enable/disable flag pair was set consistently (argparse
        # likely stores them as complementary booleans) — confirm
        # against the parser definition; at face value "enable ==
        # disable" looks inverted.
        if parsed_args.enable_s3_ssl == parsed_args.disable_s3_ssl:
            credentials['ssl'] = parsed_args.enable_s3_ssl
        if (parsed_args.enable_s3_bucket_in_path ==
                parsed_args.disable_s3_bucket_in_path):
            credentials['bucket_in_path'] = (
                parsed_args.enable_s3_bucket_in_path)
    # An empty credentials dict collapses to None so the field is
    # omitted from the update payload.
    if not credentials:
        credentials = None
    update_fields = utils.create_dict_from_kwargs(
        name=parsed_args.name, description=parsed_args.description,
        type=parsed_args.type, url=parsed_args.url,
        credentials=credentials, is_public=parsed_args.is_public,
        is_protected=parsed_args.is_protected)
    ds_id = utils.get_resource_id(client.data_sources,
                                  parsed_args.data_source)
    data = client.data_sources.update(ds_id, update_fields).data_source
    data = utils.prepare_data(data, DATA_SOURCE_FIELDS)
    return self.dict2columns(data)
def take_action(self, parsed_args):
    """Download a job binary's payload to a local file.

    The destination defaults to the binary's name; an existing file is
    never overwritten (CommandError is raised instead).
    """
    self.log.debug("take_action(%s)", parsed_args)
    client = self.app.client_manager.data_processing
    if not parsed_args.file:
        # Fall back to the binary's name as the destination path.
        parsed_args.file = parsed_args.job_binary
    if path.exists(parsed_args.file):
        msg = ('File "%s" already exists. Chose another one with '
               '--file argument.' % parsed_args.file)
        raise exceptions.CommandError(msg)
    else:
        jb_id = utils.get_resource_id(
            client.job_binaries, parsed_args.job_binary)
        data = client.job_binaries.get_file(jb_id)
        # Bug fix: the payload is written elsewhere in this file with
        # mode 'wb'; text mode ('w') breaks for bytes content on
        # Python 3, so open the destination in binary mode here too.
        with open(parsed_args.file, 'wb') as f:
            f.write(data)
        sys.stdout.write(
            'Job binary "{jb}" has been downloaded '
            'successfully.\n'.format(jb=parsed_args.job_binary))
def take_action(self, parsed_args):
    """Save a job binary's contents to a local file (binary mode)."""
    self.log.debug("take_action(%s)", parsed_args)
    client = self.app.client_manager.data_processing
    if not parsed_args.file:
        # Default the destination path to the binary's name.
        parsed_args.file = parsed_args.job_binary
    if path.exists(parsed_args.file):
        # Refuse to clobber an existing file.
        raise exceptions.CommandError(
            'File "%s" already exists. Chose another one with '
            '--file argument.' % parsed_args.file)
    jb_id = utils.get_resource_id(client.job_binaries,
                                  parsed_args.job_binary)
    payload = client.job_binaries.get_file(jb_id)
    with open(parsed_args.file, 'wb') as out:
        out.write(payload)
    sys.stdout.write(
        'Job binary "{jb}" has been downloaded '
        'successfully.\n'.format(jb=parsed_args.job_binary))
def _take_action(self, client, parsed_args):
    """Create a cluster from a JSON template or from CLI arguments."""
    network_client = self.app.client_manager.network
    if parsed_args.json:
        blob = osc_utils.read_blob_file_contents(parsed_args.json)
        try:
            template = jsonutils.loads(blob)
        except ValueError as e:
            raise exceptions.CommandError(
                'An error occurred when reading '
                'template from file %s: %s' % (parsed_args.json, e))
        # Rename template keys to the names the client call expects.
        if 'neutron_management_network' in template:
            template['net_id'] = template.pop('neutron_management_network')
        # The template's count also drives post-create handling.
        if 'count' in template:
            parsed_args.count = template['count']
        data = client.clusters.create(**template).to_dict()
    else:
        # Without --json, the three core arguments are mandatory.
        if not parsed_args.name or not parsed_args.cluster_template \
                or not parsed_args.image:
            raise exceptions.CommandError(
                'At least --name , --cluster-template, --image arguments '
                'should be specified or json template should be provided '
                'with --json argument')
        plugin, plugin_version, template_id = utils._get_plugin_version(
            self.app, parsed_args.cluster_template, client)
        image_id = utils.get_resource_id(client.images, parsed_args.image)
        # Resolve the neutron network only when one was requested.
        net_id = (network_client.find_network(
            parsed_args.neutron_network, ignore_missing=False).id
            if parsed_args.neutron_network else None)
        data = utils.create_cluster(client, self.app, parsed_args,
                                    plugin, plugin_version, template_id,
                                    image_id, net_id)
    return data
def _take_action(self, client, parsed_args):
    """Create a cluster, driven either by --json or by CLI arguments."""
    network_client = self.app.client_manager.network
    if parsed_args.json:
        blob = osc_utils.read_blob_file_contents(parsed_args.json)
        try:
            template = jsonutils.loads(blob)
        except ValueError as e:
            raise exceptions.CommandError('An error occurred when reading '
                                          'template from file %s: %s' %
                                          (parsed_args.json, e))
        # Map the template's network key onto the client's net_id kwarg.
        if 'neutron_management_network' in template:
            template['net_id'] = template.pop('neutron_management_network')
        if 'count' in template:
            parsed_args.count = template['count']
        data = client.clusters.create(**template).to_dict()
    else:
        # name, cluster template and image are required without --json.
        if not parsed_args.name or not parsed_args.cluster_template \
                or not parsed_args.image:
            raise exceptions.CommandError(
                'At least --name , --cluster-template, --image arguments '
                'should be specified or json template should be provided '
                'with --json argument')
        plugin, plugin_version, template_id = utils._get_plugin_version(
            self.app, parsed_args.cluster_template, client)
        image_id = utils.get_resource_id(client.images, parsed_args.image)
        # Neutron lookup is strict (ignore_missing=False) when given.
        net_id = (network_client.find_network(parsed_args.neutron_network,
                                              ignore_missing=False).id
                  if parsed_args.neutron_network else None)
        data = utils.create_cluster(client, self.app, parsed_args,
                                    plugin, plugin_version, template_id,
                                    image_id, net_id)
    return data
def take_action(self, parsed_args):
    """Show or update the health-verification status of a cluster."""
    self.log.debug("take_action(%s)", parsed_args)
    client = self.app.client_manager.data_processing
    if parsed_args.show:
        # Show mode: render the cluster's current health checks.
        data = utils.get_resource(client.clusters,
                                  parsed_args.cluster).to_dict()
        ver_data, ver_fields = _prepare_health_checks(data)
        data = utils.prepare_data(ver_data, ver_fields)
        return self.dict2columns(data)
    else:
        cluster_id = utils.get_resource_id(client.clusters,
                                           parsed_args.cluster)
        client.clusters.verification_update(cluster_id,
                                            parsed_args.status)
        # Only a START request gets a confirmation message.
        if parsed_args.status == 'START':
            print_status = 'started'
            sys.stdout.write(
                'Cluster "{cluster}" health verification has been '
                '{status}.\n'.format(cluster=parsed_args.cluster,
                                     status=print_status))
        return {}, {}
def _take_action(self, client, parsed_args):
    """Update mutable cluster fields (name, description, acl, shares)."""
    cluster_id = utils.get_resource_id(client.clusters,
                                       parsed_args.cluster)
    shares = None
    if parsed_args.shares:
        # Shares are described in a JSON file given on the command line.
        raw = osc_utils.read_blob_file_contents(parsed_args.shares)
        try:
            shares = jsonutils.loads(raw)
        except ValueError as e:
            raise exceptions.CommandError('An error occurred when reading '
                                          'shares from file %s: %s' %
                                          (parsed_args.shares, e))
    fields = utils.create_dict_from_kwargs(
        name=parsed_args.name,
        description=parsed_args.description,
        is_public=parsed_args.is_public,
        is_protected=parsed_args.is_protected,
        shares=shares)
    return client.clusters.update(cluster_id, **fields).cluster
def _take_action(self, client, parsed_args):
    """Apply the requested updates to a cluster and return it."""
    target = utils.get_resource_id(client.clusters, parsed_args.cluster)
    shares = None
    if parsed_args.shares:
        contents = osc_utils.read_blob_file_contents(parsed_args.shares)
        try:
            shares = jsonutils.loads(contents)
        except ValueError as e:
            raise exceptions.CommandError(
                'An error occurred when reading '
                'shares from file %s: %s' % (parsed_args.shares, e))
    # Only explicitly supplied values make it into the payload.
    payload = utils.create_dict_from_kwargs(
        name=parsed_args.name,
        description=parsed_args.description,
        is_public=parsed_args.is_public,
        is_protected=parsed_args.is_protected,
        shares=shares)
    data = client.clusters.update(target, **payload).cluster
    return data
def take_action(self, parsed_args):
    """Display or change a cluster's health-verification state."""
    self.log.debug("take_action(%s)", parsed_args)
    client = self.app.client_manager.data_processing
    if parsed_args.show:
        # --show: fetch the cluster and present its health checks.
        data = utils.get_resource(
            client.clusters, parsed_args.cluster).to_dict()
        ver_data, ver_fields = _prepare_health_checks(data)
        data = utils.prepare_data(ver_data, ver_fields)
        return self.dict2columns(data)
    else:
        cluster_id = utils.get_resource_id(
            client.clusters, parsed_args.cluster)
        client.clusters.verification_update(
            cluster_id, parsed_args.status)
        # A confirmation line is printed only for START requests.
        if parsed_args.status == 'START':
            print_status = 'started'
            sys.stdout.write(
                'Cluster "{cluster}" health verification has been '
                '{status}.\n'.format(cluster=parsed_args.cluster,
                                     status=print_status))
        return {}, {}
def take_action(self, parsed_args):
    """Delete clusters, reporting progress; optionally wait for removal."""
    self.log.debug("take_action(%s)", parsed_args)
    client = self.app.client_manager.data_processing
    mode = self._choose_delete_mode(parsed_args)
    pending = []
    for name_or_id in parsed_args.cluster:
        cid = utils.get_resource_id(client.clusters, name_or_id)
        # The concrete delete call (e.g. regular vs. force) is selected
        # by the subclass via _choose_delete_mode.
        getattr(client.clusters, mode)(cid)
        pending.append((cid, name_or_id))
        sys.stdout.write(
            'Cluster "{cluster}" deletion has been started.\n'.format(
                cluster=name_or_id))
    if not parsed_args.wait:
        return
    for cid, name_or_id in pending:
        if not utils.wait_for_delete(client.clusters, cid):
            self.log.error(
                'Error occurred during cluster deleting: %s' % cid)
        else:
            sys.stdout.write(
                'Cluster "{cluster}" has been removed '
                'successfully.\n'.format(cluster=name_or_id))
def _take_action(self, client, parsed_args):
    """Launch a job from a JSON template or from CLI arguments."""
    if parsed_args.json:
        blob = osc_utils.read_blob_file_contents(parsed_args.json)
        try:
            template = jsonutils.loads(blob)
        except ValueError as e:
            raise exceptions.CommandError('An error occurred when reading '
                                          'template from file %s: %s' %
                                          (parsed_args.json, e))
        # Historical key name: the API expects "configs".
        if 'job_configs' in template:
            template['configs'] = template.pop('job_configs')
        data = utils.create_job_json(client, self.app, template)
    else:
        if not parsed_args.cluster or not parsed_args.job_template:
            raise exceptions.CommandError(
                'At least --cluster, --job-template, arguments should be '
                'specified or json template should be provided with '
                '--json argument')
        job_configs = {}
        if parsed_args.interface:
            # NOTE(review): this reads parsed_args.json, which is None on
            # this branch — it likely should read the --interface file;
            # confirm before changing.
            blob = osc_utils.read_blob_file_contents(parsed_args.json)
            try:
                parsed_args.interface = jsonutils.loads(blob)
            except ValueError as e:
                raise exceptions.CommandError(
                    'An error occurred when reading '
                    'interface from file %s: %s' % (parsed_args.json, e))
        if parsed_args.config_json:
            blob = osc_utils.read_blob_file_contents(parsed_args.configs)
            try:
                job_configs['configs'] = jsonutils.loads(blob)
            except ValueError as e:
                # NOTE(review): the blob came from parsed_args.configs but
                # the message names parsed_args.json — looks like a typo.
                raise exceptions.CommandError(
                    'An error occurred when reading '
                    'configs from file %s: %s' % (parsed_args.json, e))
        elif parsed_args.configs:
            # "key:value" pairs; split on the first colon only.
            job_configs['configs'] = dict(
                map(lambda x: x.split(':', 1), parsed_args.configs))
        if parsed_args.args:
            job_configs['args'] = parsed_args.args
        if parsed_args.params:
            job_configs['params'] = dict(
                map(lambda x: x.split(':', 1), parsed_args.params))
        jt_id = utils.get_job_template_id(self.app, client, parsed_args)
        cluster_id = utils.get_resource_id(client.clusters,
                                           parsed_args.cluster)
        # The literal string "None" counts as "not provided" here.
        if parsed_args.input not in [None, "", "None"]:
            input_id = utils.get_resource_id(client.data_sources,
                                             parsed_args.input)
        else:
            input_id = None
        if parsed_args.output not in [None, "", "None"]:
            output_id = utils.get_resource_id(client.data_sources,
                                              parsed_args.output)
        else:
            output_id = None
        data = utils.create_job(client, self.app,
                                jt_id, cluster_id, input_id, output_id,
                                job_configs, parsed_args)
    sys.stdout.write('Job "{job}" has been started successfully.\n'.format(
        job=data['id']))
    return data
def _take_action(self, client, parsed_args):
    """Start a job execution from a JSON template or CLI arguments."""
    if parsed_args.json:
        blob = osc_utils.read_blob_file_contents(parsed_args.json)
        try:
            template = jsonutils.loads(blob)
        except ValueError as e:
            raise exceptions.CommandError(
                'An error occurred when reading '
                'template from file %s: %s' % (parsed_args.json, e))
        # The API's field is "configs", not "job_configs".
        if 'job_configs' in template:
            template['configs'] = template.pop('job_configs')
        data = utils.create_job_json(client, self.app, template)
    else:
        if not parsed_args.cluster or not parsed_args.job_template:
            raise exceptions.CommandError(
                'At least --cluster, --job-template, arguments should be '
                'specified or json template should be provided with '
                '--json argument')
        job_configs = {}
        if parsed_args.interface:
            # NOTE(review): parsed_args.json is None on this branch, so
            # this read presumably should target the --interface file;
            # verify before fixing.
            blob = osc_utils.read_blob_file_contents(parsed_args.json)
            try:
                parsed_args.interface = jsonutils.loads(blob)
            except ValueError as e:
                raise exceptions.CommandError(
                    'An error occurred when reading '
                    'interface from file %s: %s' % (parsed_args.json, e))
        if parsed_args.config_json:
            blob = osc_utils.read_blob_file_contents(parsed_args.configs)
            try:
                job_configs['configs'] = jsonutils.loads(blob)
            except ValueError as e:
                # NOTE(review): message names parsed_args.json although
                # the data was read from parsed_args.configs.
                raise exceptions.CommandError(
                    'An error occurred when reading '
                    'configs from file %s: %s' % (parsed_args.json, e))
        elif parsed_args.configs:
            # Each entry is "key:value"; only the first colon splits.
            job_configs['configs'] = dict(
                map(lambda x: x.split(':', 1), parsed_args.configs))
        if parsed_args.args:
            job_configs['args'] = parsed_args.args
        if parsed_args.params:
            job_configs['params'] = dict(
                map(lambda x: x.split(':', 1), parsed_args.params))
        jt_id = utils.get_job_template_id(self.app, client, parsed_args)
        cluster_id = utils.get_resource_id(
            client.clusters, parsed_args.cluster)
        # Treat the literal string "None" like a missing data source.
        if parsed_args.input not in [None, "", "None"]:
            input_id = utils.get_resource_id(
                client.data_sources, parsed_args.input)
        else:
            input_id = None
        if parsed_args.output not in [None, "", "None"]:
            output_id = utils.get_resource_id(
                client.data_sources, parsed_args.output)
        else:
            output_id = None
        data = utils.create_job(client, self.app,
                                jt_id, cluster_id, input_id, output_id,
                                job_configs, parsed_args)
    sys.stdout.write(
        'Job "{job}" has been started successfully.\n'.format(
            job=data['id']))
    return data