def get_snapshot_policy_compliance(
        self, storage_group_name, last_week=False, last_four_weeks=False,
        from_epoch=None, to_epoch=None, from_time_string=None,
        to_time_string=None):
    """Get compliance attributes on a storage group.

    Either one of the preset windows (last_week / last_four_weeks) or an
    explicit time range (epoch or human-readable) may be supplied; the
    combination is validated by verify_input_params.

    :param storage_group_name: storage group name -- str
    :param last_week: compliance in last week -- bool
    :param last_four_weeks: compliance in last four weeks -- bool
    :param from_epoch: timestamp since epoch -- str
        e.g 1606820929 (seconds)
    :param to_epoch: timestamp since epoch -- str
        e.g 1606820929 (seconds)
    :param from_time_string: human readable date -- str
        e.g 2020-12-01 15:00
    :param to_time_string: human readable date -- str
        e.g 2020-12-01 15:00
    :returns: resource -- dict
    :raises: InvalidInputException
    """
    # Guard clause: a storage group must always be named.
    if not storage_group_name:
        error_msg = 'Storage group name cannot be None.'
        LOG.exception(error_msg)
        raise exception.InvalidInputException(data=error_msg)

    # Delegate time-window validation; a non-empty message means the
    # supplied combination of params is invalid.
    error_msg, query_params = self.verify_input_params(
        last_week, last_four_weeks, from_epoch, to_epoch,
        from_time_string, to_time_string)
    if error_msg:
        LOG.exception(error_msg)
        raise exception.InvalidInputException(data=error_msg)

    return self.get_resource(
        category=REPLICATION, resource_level=SYMMETRIX,
        resource_level_id=self.array_id, resource_type=STORAGEGROUP,
        resource_type_id=storage_group_name, resource=COMPLIANCE,
        object_type=SNAPSHOT, params=query_params)
def format_time_input(time_in, return_seconds=False, return_milliseconds=False):
    """Format timestamp as seconds/milliseconds for use in REST requests.

    Exactly one of return_seconds or return_milliseconds must be True.

    :param time_in: timestamp in seconds or milliseconds since epoch
        -- int/float
    :param return_seconds: return time in seconds -- bool
    :param return_milliseconds: return time in milliseconds -- bool
    :returns: timestamp -- int
    :raises: InvalidInputException
    """
    # Validate input params
    if not isinstance(time_in, (int, float)):
        msg = ('Invalid input: {i}, input must be of type int or float, input '
               'is {f}.'.format(i=time_in, f=type(time_in)))
        LOG.error(msg)
        raise exception.InvalidInputException(msg)
    if not any([return_seconds, return_milliseconds]) or (all(
            [return_seconds, return_milliseconds])):
        msg = ('Invalid input param selection, one of return_seconds or '
               'return_milliseconds must be True.')
        LOG.error(msg)
        raise exception.InvalidInputException(msg)

    # Determine if the time_in is in format seconds or milliseconds since
    # epoch. We can deduce that it is milliseconds if the time input is
    # greater than the current time in seconds since epoch. There are edge
    # cases, especially if date was derived from a string representation
    # without timezone info, or the clocks are being changed. This is
    # mitigated by applying an offset of one year to the calculation.
    # Millisecond times are an order of magnitude greater so this is safe.
    time_is_milliseconds = time_in > time.time() + ONE_YEAR

    if not time_is_milliseconds and return_milliseconds:
        # Fix: scale before truncating so fractional seconds in a float
        # input are preserved in the millisecond result (previously
        # int(time_in) * 1000 discarded the sub-second component).
        return int(time_in * 1000)
    elif time_is_milliseconds and return_seconds:
        return int(time_in) // 1000
    else:
        return int(time_in)
def delete_snapshot_policy(self, snapshot_policy_name):
    """Delete a snapshot policy

    :param snapshot_policy_name: the snapshot policy name -- str
    :raises: InvalidInputException
    """
    # A policy name is mandatory for the delete call.
    if not snapshot_policy_name:
        error_msg = 'Snapshot policy name cannot be None.'
        LOG.exception(error_msg)
        raise exception.InvalidInputException(data=error_msg)

    self.delete_resource(
        category=REPLICATION, resource_level=SYMMETRIX,
        resource_level_id=self.array_id, resource_type=SNAPSHOT_POLICY,
        resource_type_id=snapshot_policy_name)
def format_metrics(metrics):
    """Format metrics input for inclusion in REST request.

    Take metric parameters and format them correctly to be used in
    REST request body. Valid input types are string and list.

    :param metrics: metric(s) -- str or list
    :returns: metrics -- list
    :raises: InvalidInputException
    """
    if isinstance(metrics, list):
        # Already a list, pass straight through.
        return metrics
    if isinstance(metrics, str):
        # Normalise the 'all' selector to its canonical capitalisation
        # before wrapping the single metric in a list.
        if metrics.lower() == pc.ALL:
            return [pc.All_CAP]
        return [metrics]
    msg = ('Unknown input parameter type, please pass in '
           '<string> or <list> input type.')
    LOG.error(msg)
    raise exception.InvalidInputException(msg)
def write_binary_data_to_file(data, file_extension, file_name, dir_path=None):
    """Write Unisphere binary data to file.

    :param data: Unisphere REST response with data for writing -- json
        response
    :param file_extension: file extension used for writing to file -- str
    :param file_name: file name -- str
    :param dir_path: file write directory path -- str
    :returns: path of the written file -- pathlib.Path
    :raises: InvalidInputException
    """
    # Set file write directory
    if dir_path:
        try:
            path = Path(dir_path)
            # Explicit check instead of assert so the validation is not
            # stripped when Python runs with the -O (optimise) flag.
            if not path.is_dir():
                raise NotADirectoryError(dir_path)
        except (TypeError, NotADirectoryError) as error:
            msg = ('Invalid file path supplied for download '
                   'location: {f}'.format(f=dir_path))
            LOG.error(msg)
            raise exception.InvalidInputException(msg) from error
    else:
        # No path set, use current working directory
        path = Path.cwd()

    # Set download file name, replacing any supplied extension with the
    # required one (e.g. .zip)
    f_name = Path(file_name)
    pdf_name = f_name.with_suffix(file_extension)
    # Join directory & OS idempotent path
    file_write_path = Path.joinpath(path, pdf_name)

    # Stream the binary response data to file in chunks
    with open(file_write_path, FILE_WRITE_MODE) as fd:
        LOG.info('Writing settings to: {p}'.format(p=file_write_path))
        for chunk in data.iter_content(chunk_size=128):
            fd.write(chunk)
    LOG.info('File writing complete.')

    return file_write_path
def create_snapshot_policy(
        self, snapshot_policy_name, interval, cloud_retention_days=None,
        cloud_provider_name=None, local_snapshot_policy_secure=False,
        local_snapshot_policy_snapshot_count=None, offset_mins=None,
        compliance_count_warning=None, compliance_count_critical=None,
        _async=False):
    """Create a new snapshot policy.

    At least one of the cloud policy inputs (cloud_provider_name with
    cloud_retention_days) or the local policy input
    (local_snapshot_policy_snapshot_count) must be supplied; the cloud
    settings take precedence when both are given.

    :param snapshot_policy_name: the snapshot policy name -- str
    :param interval: The value of the interval counter for snapshot
        policy execution. Must be one of '10 Minutes', '12 Minutes',
        '15 Minutes', '20 Minutes', '30 Minutes', '1 Hour', '2 Hours',
        '3 Hours', '4 Hours', '6 Hours', '8 Hours', '12 Hours',
        '1 Day', '7 Days' -- enum
    :param cloud_retention_days: part of cloud_snapshot_policy_details
        number of days to retain the policy -- int
    :param cloud_provider_name: part of cloud_snapshot_policy_details
        the cloud provider name -- str
    :param local_snapshot_policy_secure: secure snapshots may only be
        terminated after they expire or by Dell EMC support -- bool
    :param local_snapshot_policy_snapshot_count: the max snapshot count
        of the policy -- int
    :param offset_mins: Defines when, within the interval the snapshots
        will be taken for a specified Snapshot Policy. The offset must
        be less than the interval of the Snapshot Policy. The format
        must be in minutes -- int
    :param compliance_count_warning: The Number of snapshots which are
        not failed or bad when compliance changes to warning. -- int
    :param compliance_count_critical: The Number of snapshots which are
        not failed or bad when compliance changes to critical. -- int
    :param _async: is the operation asynchronous -- bool
    :raises: InvalidInputException
    """
    payload = dict()
    if snapshot_policy_name:
        payload.update({'snapshot_policy_name': snapshot_policy_name})
    else:
        msg = 'Snapshot policy name cannot be None.'
        LOG.exception(msg)
        raise exception.InvalidInputException(data=msg)
    # Cloud policy branch takes precedence over local policy when both
    # sets of inputs are supplied.
    if cloud_provider_name:
        if not cloud_retention_days:
            msg = ('If cloud_provider_name is set, cloud_retention_days '
                   'cannot be None.')
            LOG.exception(msg)
            raise exception.InvalidInputException(data=msg)
        cloud_snapshot_policy_details = {
            'cloud_retention_days': cloud_retention_days,
            'cloud_provider_name': cloud_provider_name
        }
        payload.update({
            'cloud_snapshot_policy_details': cloud_snapshot_policy_details
        })
    elif local_snapshot_policy_snapshot_count:
        local_snapshot_policy_details = {
            'snapshot_count': local_snapshot_policy_snapshot_count
        }
        if local_snapshot_policy_secure:
            local_snapshot_policy_details.update(
                {'secure': local_snapshot_policy_secure})
            # Secure is applied only at creation time; warn the caller
            # that it cannot be toggled on an existing policy.
            LOG.warning('The secure snap option cannot be enabled or '
                        'disabled on an existing policy. Secure '
                        'snapshots may only be terminated after '
                        'they expire or by customer-authorized '
                        'Dell EMC support.')
        payload.update({
            'local_snapshot_policy_details': local_snapshot_policy_details
        })
    else:
        msg = ('One of cloud snapshot policy or local snapshot policy '
               'must be chosen. Check that you have the minimum '
               'parameters set.')
        LOG.exception(msg)
        raise exception.InvalidInputException(data=msg)
    msg = ('The interval supplied must be one of \'10 Minutes\', '
           '\'12 Minutes\', \'15 Minutes\' etc.')
    if interval:
        # Case-insensitive match against the supported intervals; the
        # canonical spelling from the constants list is what gets sent
        # to the REST API.
        try:
            index = [
                x.lower() for x in (constants.SNAPSHOT_POLICY_INTERVALS)
            ].index(interval.lower())
        except ValueError as error:
            LOG.exception(msg)
            raise exception.InvalidInputException(data=msg) from error
        payload.update(
            {'interval': constants.SNAPSHOT_POLICY_INTERVALS[index]})
    else:
        message = 'interval cannot be None. {}'.format(msg)
        LOG.exception(message)
        raise exception.InvalidInputException(data=message)
    # Optional attributes are only included when supplied.
    if offset_mins:
        payload.update({'offset_mins': offset_mins})
    if compliance_count_warning:
        payload.update(
            {'compliance_count_warning': compliance_count_warning})
    if compliance_count_critical:
        payload.update(
            {'compliance_count_critical': compliance_count_critical})
    if _async:
        payload.update(constants.ASYNC_UPDATE)
    return self.create_resource(category=REPLICATION,
                                resource_level=SYMMETRIX,
                                resource_level_id=self.array_id,
                                resource_type=SNAPSHOT_POLICY,
                                payload=payload)
def modify_snapshot_policy(
        self, snapshot_policy_name, action, interval=None,
        offset_mins=None, snapshot_count=None,
        compliance_count_warning=None, compliance_count_critical=None,
        storage_group_names=None, new_snapshot_policy_name=None,
        _async=False):
    """Modify a snapshot policy

    This can be action: [Modify, Suspend, Resume,
    AssociateToStorageGroups, DisassociateFromStorageGroups]. A modify
    of the snapshot policy or adding or removing storage groups
    associated with the policy.

    :param snapshot_policy_name: the snapshot policy name -- str
    :param action: the modification action, must be one of
        'AssociateToStorageGroups', 'DisassociateFromStorageGroups'
        'Modify', 'Suspend', 'Resume' -- enum
    :param interval: The value of the interval counter for snapshot
        policy execution. Must be one of '10 Minutes', '12 Minutes',
        '15 Minutes', '20 Minutes', '30 Minutes', '1 Hour', '2 Hours',
        '3 Hours', '4 Hours', '6 Hours', '8 Hours', '12 Hours',
        '1 Day', '7 Days' -- enum
    :param offset_mins: The number of minutes after 00:00 on Monday to
        first run the service policy. The offset must be less than the
        interval of the Snapshot Policy. The format must be in
        minutes -- int
    :param snapshot_count: The maximum number of snapshots that should
        be maintained for a specified Snapshot Policy. The maximum
        count must be between 1 to 1024. -- int
    :param compliance_count_warning: The Number of snapshots which are
        not failed or bad when compliance changes to warning. The
        warning compliance count cannot be set to 0 and must be less
        than or equal to the maximum count of the Snapshot
        Policy. -- int
    :param compliance_count_critical: The Number of snapshots which
        are not failed or bad when compliance changes to critical. If
        the warning compliance count is also set, the critical
        compliance count must be less than or equal to that. -- int
    :param storage_group_names: List of storage group names -- list
    :param new_snapshot_policy_name: change the name if set -- str
    :param _async: is the operation asynchronous -- bool
    :raises: InvalidInputException
    """
    payload = dict()
    if not snapshot_policy_name:
        msg = 'Snapshot policy name cannot be None.'
        LOG.exception(msg)
        raise exception.InvalidInputException(data=msg)
    msg = ('The action supplied must be one of \'Modify\', '
           '\'Suspend\', \'Resume\', \'AssociateToStorageGroups\', '
           '\'DisassociateFromStorageGroups\'.')
    if action:
        # Case-insensitive lookup of the action; the canonical spelling
        # from the constants list is used in the request payload.
        try:
            index = [
                x.lower() for x in (constants.SNAPSHOT_POLICY_ACTIONS)
            ].index(action.lower())
        except ValueError as error:
            LOG.exception(msg)
            raise exception.InvalidInputException(data=msg) from error
    else:
        message = 'The action cannot be None. {}'.format(msg)
        LOG.exception(message)
        raise exception.InvalidInputException(data=message)
    payload.update({'action': constants.SNAPSHOT_POLICY_ACTIONS[index]})
    if action.lower() == constants.ASSOCIATE_TO_STORAGE_GROUPS.lower():
        LOG.info('Associating storage groups to {spn}.'.format(
            spn=snapshot_policy_name))
        if not storage_group_names:
            msg = 'storage_group_names cannot be None.'
            LOG.exception(msg)
            raise exception.InvalidInputException(data=msg)
        associate_to_storage_group_param = dict()
        storage_group_name_param = {
            'storage_group_name': storage_group_names
        }
        associate_to_storage_group_param.update(
            {'associate_to_storage_group': storage_group_name_param})
        payload.update(associate_to_storage_group_param)
    elif action.lower() == (
            constants.DISASSOCIATE_FROM_STORAGE_GROUPS.lower()):
        LOG.info('Disassociating storage groups from {spn}.'.format(
            spn=snapshot_policy_name))
        if not storage_group_names:
            msg = 'storage_group_names cannot be None.'
            LOG.exception(msg)
            raise exception.InvalidInputException(data=msg)
        disassociate_from_storage_group_param = dict()
        storage_group_name_param = {
            'storage_group_name': storage_group_names
        }
        disassociate_from_storage_group_param.update(
            {'disassociate_from_storage_group': storage_group_name_param})
        payload.update(disassociate_from_storage_group_param)
    elif action.lower() == constants.MODIFY_POLICY.lower():
        LOG.info('Modifying {spn}.'.format(spn=snapshot_policy_name))
        # Only the attributes the caller supplied are included in the
        # modify payload; an empty payload is rejected below.
        modify_param = dict()
        if new_snapshot_policy_name:
            modify_param.update(
                {'snapshot_policy_name': new_snapshot_policy_name})
        if interval:
            # policy_interval_enum presumably maps the interval display
            # name (e.g. '10 Minutes') to its value in minutes --
            # TODO confirm against the module-level mapping definition.
            modify_param.update(
                {'interval_mins': policy_interval_enum.get(interval)})
        if offset_mins:
            modify_param.update({'offset_mins': offset_mins})
        if snapshot_count:
            modify_param.update({'snapshot_count': snapshot_count})
        if compliance_count_warning:
            modify_param.update(
                {'compliance_count_warning': compliance_count_warning})
        if compliance_count_critical:
            modify_param.update(
                {'compliance_count_critical': compliance_count_critical})
        if modify_param:
            payload.update({'modify': modify_param})
        else:
            msg = 'No modify payload received.'
            LOG.exception(msg)
            raise exception.InvalidInputException(data=msg)
    elif action.lower() == constants.SUSPEND_POLICY.lower():
        # Suspend/Resume need no extra payload beyond the action.
        LOG.info('Suspending {spn}.'.format(spn=snapshot_policy_name))
    elif action.lower() == constants.RESUME_POLICY.lower():
        LOG.info('Resuming {spn}.'.format(spn=snapshot_policy_name))
    if _async:
        payload.update(constants.ASYNC_UPDATE)
    return self.modify_resource(category=REPLICATION,
                                resource_level=SYMMETRIX,
                                resource_level_id=self.array_id,
                                resource_type=SNAPSHOT_POLICY,
                                resource_type_id=snapshot_policy_name,
                                payload=payload)
def create_metrodr_environment(
        self, storage_group_name, environment_name, metro_r1_array_id,
        metro_r2_array_id, dr_array_id, dr_replication_mode,
        metro_r2_storage_group_name=None, dr_storage_group_name=None,
        force_new_metro_r1_dr_rdfg=True, force_new_metro_r2_dr_rdfg=True,
        _async=True):
    """Protects Non-SRDF Storage Group with Metro and DR legs.

    Note: This function is set to run as an Asynchronous job on the
    server by default as there is the potential for this task to take a
    few minutes. Storage Groups and SRDF groups are created
    automatically for the user end result is
    R2--SRDF/A--R11--Metro--R2.

    :param storage_group_name: name of storage group containing devices
        to be replicated in Metro DR environment -- str
    :param environment_name: name of Metro Dr Environment up to 16
        characters -- str
    :param metro_r1_array_id: 12 Digit Serial Number of R1 Array for
        SRDF Metro Source Array -- int
    :param metro_r2_array_id: 12 Digit Serial Number of SRDF Metro R2
        Array -- int
    :param dr_array_id: 12 Digit Serial Number of Disaster Recovery
        Array, replication -- int
    :param dr_replication_mode: Asynchronous or AdaptiveCopyDisk -- str
    :param metro_r2_storage_group_name: Name for R2 Storage Group of
        metro SRDF pairing, only used if R2 group naming is required to
        be different from source -- str
    :param dr_storage_group_name: Name for Storage Group at DR, only
        used if group naming is required to be different from
        source -- str
    :param force_new_metro_r1_dr_rdfg: whether or not to create a new
        RDFG to be created for Metro R1 array to DR array, or will
        autoselect from existing -- bool
    :param force_new_metro_r2_dr_rdfg: whether or not to create a new
        RDFG to be created for Metro R2 array to DR array, or will
        autoselect from existing -- bool
    :param _async: if call should be executed asynchronously or
        synchronously -- bool
    :returns: details of newly created metro dr environment -- dict
    :raises: InvalidInputException
    """
    # Remote storage group names default to the source group name when
    # no overrides are supplied.
    dr_storage_group_name = dr_storage_group_name or storage_group_name
    metro_r2_storage_group_name = (
        metro_r2_storage_group_name or storage_group_name)

    if dr_replication_mode:
        # Normalise the caller-supplied mode to the canonical constant.
        mode_upper = dr_replication_mode.upper()
        if 'ASYNCHRONOUS' in mode_upper:
            dr_replication_mode = ASYNCHRONOUS
        elif 'ADAPTIVECOPY' in mode_upper:
            dr_replication_mode = ADAPTIVE_COPY
        else:
            msg = ('DR Replication Mode must be either Asynchronous or '
                   'AdaptiveCopyDisk')
            LOG.exception(msg)
            raise exception.InvalidInputException(message=msg)

    payload = {
        'action': 'CreateEnvironment',
        'create_environment_param': {
            'storage_group_name': storage_group_name,
            'environment_name': environment_name,
            'metro_r2_array_id': metro_r2_array_id,
            'metro_r2_storage_group_name': metro_r2_storage_group_name,
            'dr_array_id': dr_array_id,
            'force_new_metro_r1_dr_rdfg': force_new_metro_r1_dr_rdfg,
            'force_new_metro_r2_dr_rdfg': force_new_metro_r2_dr_rdfg,
            'dr_replication_mode': dr_replication_mode,
            'dr_storage_group_name': dr_storage_group_name,
            'metro_establish': True,
            'dr_establish': True
        }
    }
    if _async:
        payload.update(ASYNC_UPDATE)

    # The request is issued against the Metro R1 array, not self.array_id.
    return self.create_resource(category=REPLICATION,
                                resource_level=SYMMETRIX,
                                resource_level_id=metro_r1_array_id,
                                resource_type=METRO_DR,
                                payload=payload)
def modify_metrodr_environment(
        self, environment_name, action, metro=False, dr=False,
        keep_r2=False, force=False, symforce=False, _async=False,
        dr_replication_mode=None):
    """Performs Functions to modify state of MetroDR environment.

    :param environment_name: name of Metro Dr Environment up to 16
        characters -- str
    :param action: action to be performed on Environment, Establish,
        Failover, Failback, Restore, SetMode, Split, UpdateR1 -- str
    :param metro: directs action towards R11--R21 Metro Device leg of
        Metro DR environment -- bool
    :param dr: directs action towards Device Pairs on Disaster Recovery
        leg of Metro DR environment -- bool
    :param keep_r2: Used with Suspend Option, Ensures that the R2 data
        on Metro remains available to host -- bool
    :param force: forces operation to complete, used with caution, not
        recommended as part of fully automated workflow -- bool
    :param symforce: forces operation to complete, used with caution,
        requires ALLOW_SRDF_SYMFORCE parameter to be set in solutions
        enabler options file, default is not enabled, not recommended
        as part of fully automated workflow -- bool
    :param _async: if call should be executed asynchronously or
        synchronously -- bool
    :param dr_replication_mode: set mode of DR link, AdaptiveCopyDisk
        or Asynchronous -- str
    :returns: details of metro dr environment and state -- dict
    :raises: VolumeBackendAPIException, InvalidInputException
    """
    # Map the caller's action string to the canonical action name; an
    # unknown action is rejected up front.
    metro_dr_action = constants.METRO_DR_ACTIONS.get(action.upper())
    if not metro_dr_action:
        msg = ('SRDF Action must be one of [Establish, Split, Suspend, '
               'Recover, Restore, Resume, Failover, Failback, Update_R1, '
               'SetMode]')
        LOG.exception(msg)
        raise exception.VolumeBackendAPIException(data=msg)
    payload = {'action': metro_dr_action}

    # Each action carries its own parameter sub-dict under a key looked
    # up from METRO_DR_ACTION_PARAMS.
    action_params = constants.METRO_DR_ACTION_PARAMS.get(
        metro_dr_action.upper())
    if metro_dr_action == 'Suspend':
        payload[action_params] = {'metro': metro,
                                  'force': force,
                                  'keep_r2': keep_r2,
                                  'dr': dr,
                                  'symforce': symforce}
    elif metro_dr_action in ('Failover', 'Failback', 'Split', 'UpdateR1'):
        payload[action_params] = {'force': force,
                                  'symforce': symforce}
    elif metro_dr_action in ('Establish', 'Restore'):
        if (metro and dr) and metro_dr_action == 'Restore':
            msg = ('Restore Operation can only be performed on a single '
                   'SRDF leg, please choice either Metro or DR not both')
            LOG.exception(msg)
            raise exception.InvalidInputException(message=msg)
        payload[action_params] = {'metro': metro,
                                  'force': force,
                                  'dr': dr,
                                  'symforce': symforce}
    elif metro_dr_action == 'SetMode':
        payload[action_params] = {'mode': dr_replication_mode,
                                  'force': force,
                                  'symforce': symforce}

    if _async:
        payload.update(ASYNC_UPDATE)

    return self.modify_resource(category=REPLICATION,
                                resource_level=SYMMETRIX,
                                resource_level_id=self.array_id,
                                resource_type=METRO_DR,
                                resource_type_id=environment_name,
                                payload=payload)
def file_transfer_request(self, method, uri, timeout=None, download=False,
                          r_obj=None, upload=False, form_data=None):
    """Send a file transfer request via REST to the target API.

    Valid methods are 'POST' and 'PUT'. Exactly one of download or
    upload must be True.

    :param method: request method -- str
    :param uri: target uri -- str
    :param timeout: optional timeout override -- int
    :param download: if download request -- bool
    :param r_obj: download request payload -- dict
    :param upload: if upload request -- bool
    :param form_data: upload multipart form data -- dict
    :returns: server response, status code -- dict, int
    :raises: InvalidInputException, VolumeBackendAPIException, Timeout,
        SSLError, ConnectionError, HTTPError
    """
    # Download and upload use different accept/content headers.
    if download and not upload:
        headers = {
            CONTENT_TYPE: APP_JSON,
            ACCEPT: APP_OCT,
            USER_AGENT: ua_details,
            APP_TYPE: self.headers.get('application-type')
        }
    elif upload and not download:
        headers = {
            ACCEPT_ENC: APP_MPART,
            USER_AGENT: ua_details,
            APP_TYPE: self.headers.get('application-type')
        }
    else:
        msg = ('You must select one of upload/download for '
               'file_transfer_request method.')
        LOG.error(msg)
        raise exception.InvalidInputException(msg)

    timeout_val = self.timeout if not timeout else timeout
    data = json.dumps(r_obj, sort_keys=True, indent=4) if r_obj else None
    url = '{base_url}{uri}'.format(base_url=self.base_url, uri=uri)

    try:
        ft_session = self.establish_rest_session(headers=headers)
        # Close the session in every case -- previously it was only
        # closed on the success path, leaking the session whenever the
        # request raised (timeout, SSL or connection errors).
        try:
            response = ft_session.request(method=method, url=url,
                                          timeout=timeout_val,
                                          stream=download, data=data,
                                          files=form_data)
        finally:
            ft_session.close()
        status_code = response.status_code
        LOG.debug('{method} request to {url} has returned with a status '
                  'code of: {sc}.'.format(method=method, url=url,
                                          sc=status_code))
        return response, status_code

    except requests.Timeout as error:
        # A timed-out request may still have been applied server-side.
        LOG.error(
            'The {method} request to URL {url} timed-out, but may have '
            'been successful. Please check the array. Exception received: '
            '{exc}.'.format(method=method, url=url, exc=error))
        return None, None

    except r_exc.SSLError as error:
        msg = (
            'The connection to {base} has encountered an SSL error. '
            'Please check your SSL config or supplied SSL cert in Cinder '
            'configuration. SSL Exception message: {m}'.format(
                base=self.base_url, m=error))
        raise r_exc.SSLError(msg) from error

    except (r_exc.ConnectionError, r_exc.HTTPError) as error:
        # Re-raise with the same exception class the caller would catch,
        # wrapping a more descriptive message.
        exc_class, __, __ = sys.exc_info()
        msg = (
            'The {met} to Unisphere server {base} has experienced a {exc} '
            'error. Please check your Unisphere server connection and '
            'availability. Exception message: {msg}'.format(
                met=method, base=self.base_url,
                exc=error.__class__.__name__, msg=error))
        raise exc_class(msg) from error

    except Exception as error:
        exp_message = (
            'The {method} request to URL {url} failed with exception: '
            '{e}.'.format(method=method, url=url, e=error))
        raise exception.VolumeBackendAPIException(data=exp_message)
def _validate_real_time_input(
        self, start_date, end_date, category, metrics, instance_id):
    """Validate user input for real-time metrics collection.

    :param start_date: timestamp in milliseconds since epoch -- int
    :param end_date: timestamp in milliseconds since epoch -- int
    :param category: category id -- str
    :param metrics: performance metrics -- list
    :param instance_id: instance id -- str
    :raises: VolumeBackendAPIException, InvalidInputException
    """
    msg = None
    # Category validation
    if category not in self.get_categories():
        # Allow for no 's' at the end of StorageGroups, StorageGroup is
        # still valid but not returned in category list
        if category != pc.SG:
            msg = (
                'Real-time performance category "{user_cat}" is not '
                'one of {uni_cat}.'.format(
                    user_cat=category, uni_cat=self.get_categories()))
    # Metrics validation
    elif metrics != [pc.All_CAP] and not (
            all(metric in self.get_category_metrics(
                category) for metric in metrics)):
        msg = (
            'The supplied real-time metrics {user_met} are not '
            'valid. Valid options are "All", and one or more of '
            '{uni_met}'.format(
                user_met=metrics,
                uni_met=self.get_category_metrics(category)))
    # Required input validation
    elif category != pc.ARRAY and not instance_id:
        msg = ('For real-time performance data other than from the '
               '"Array" category an instance_id must be specified.')
    # Instance ID key validation against known real-time keys
    elif instance_id and instance_id not in self.get_category_keys(
            category=category):
        msg = (
            'Instance ID "{inst}" is not one of {cat} real-time '
            'performance keys {uni_keys}'.format(
                inst=instance_id, cat=category,
                uni_keys=self.get_category_keys(category=category)))
    # Timestamp type validation -- must run before any arithmetic on the
    # dates (previously end_date - start_date was computed up front,
    # raising a raw TypeError for non-numeric input before this message
    # could be produced).
    elif not isinstance(end_date, int) or not isinstance(start_date, int):
        msg = ('Start and end dates must be of type <int> and in '
               'milliseconds since epoch format.')
    else:
        delta = end_date - start_date
        if delta < pc.ONE_MINUTE:
            # Requests shorter than one minute are only invalid when
            # they butt up against local "now".
            ct, one_min = int(time.time()) * 1000, pc.ONE_MINUTE
            if (ct - end_date < one_min) or (ct - start_date < one_min):
                msg = ('Real-time timestamps cannot be for intervals of '
                       'less than one minute if the start or end '
                       'timestamps are within one minute of local time.')
        elif delta > pc.ONE_HOUR:
            msg = ('It is not possible to query for more than one hour '
                   'of real-time performance data in one request.')
        elif self.recency:
            if not self.is_timestamp_current(int(end_date), self.recency):
                msg = ('Timestamp "{t}" failed recency check of {rec} '
                       'minutes.'.format(t=end_date, rec=self.recency))

    if msg:
        LOG.error(msg)
        raise exception.InvalidInputException(msg)
def upload_settings(self, file_password, file_path=None, array_id=None,
                    binary_data=None):
    """Upload Unisphere and/or system settings to Unisphere.

    Allows for importing a zip file or binary data that contains
    settings that were previously exported from Unisphere.

    The settings that a file upload may include are:

    - All settings:
        - Unisphere settings:
            - Alert notifications
            - Performance metrics
            - Performance preferences
            - Performance user templates
        - System settings:
            - Alert policies
            - Alert level notifications
            - Performance thresholds
            - System thresholds

    A password that was specified during export needs to be provided
    during import operation. This is to assure that the imported file
    has not been tampered with.

    It is possible to upload system settings for more than one array,
    to do so pass a list of array IDs in to array_id input parameter.
    For Unisphere settings an array ID is not required.

    :param file_password: password that file has been signed with -- str
    :param file_path: full file location -- str
    :param array_id: array id -- str
    :param binary_data: binary settings data -- bytes
    :returns: upload details -- dict
    :raises: InvalidInputException
    """
    # Work around: We need to provide the array ID for all upload requests
    array_id = self.array_id if not array_id else array_id
    # Multiple target arrays are sent as a comma-separated string.
    array_id = ','.join(array_id) if isinstance(
        array_id, list) else array_id

    if binary_data:
        # Explicit type check instead of assert so the validation is
        # not stripped when Python runs with the -O (optimise) flag.
        if not isinstance(binary_data, bytes):
            msg = ('You must provide valid bytes data before upload to '
                   'Unisphere can proceed.')
            LOG.error(msg)
            raise exception.InvalidInputException(msg)
        data = binary_data
    else:
        try:
            f_path = Path(file_path)
            if not f_path.is_file():
                raise FileNotFoundError(file_path)
            LOG.info('Uploading settings from {p}'.format(p=f_path))
            # NOTE(review): the file handle is handed to the request
            # layer via form_data; presumably closed after the upload
            # completes -- confirm with common.upload_file.
            data = open(f_path, FILE_READ_MODE)
        except (TypeError, FileNotFoundError) as error:
            msg = ('Invalid file path supplied for settings upload '
                   'location: {f}'.format(f=file_path))
            LOG.error(msg)
            raise exception.InvalidInputException(msg) from error

    form_data = {
        ZIP_FILE: data,
        TGT_ARRAYS: array_id,
        FILE_PASSWORD: file_password
    }
    return self.common.upload_file(category=SYSTEM,
                                   resource_level=SETTINGS,
                                   resource_type=IMPORT_FILE,
                                   form_data=form_data)
def download_system_settings(
        self, file_password, dir_path=None, file_name=None, array_id=None,
        return_binary=False, exclude_alert_policy_settings=False,
        alert_level_notification_settings=False,
        exclude_system_threshold_settings=False,
        exclude_performance_threshold_settings=False):
    """Export System settings.

    - System settings:
        - Alert policies
        - Alert level notifications
        - Performance thresholds
        - System thresholds

    By default settings will be written to a zip file in the current
    working directory unless a supplied directory path and/or file name
    are provided. If any extension is provided in the file name it will
    be replaced with .zip before the data is written to file.

    If instead the file writing process should be handled outside of
    PyU4V or uploaded directly to Unisphere without any file handling
    set return_binary to True. The response dict will have the settings
    binary data included in key 'binary_data'.

    :param file_password: password to sign file (required) -- str
    :param dir_path: file save location -- str
    :param file_name: zip file name -- str
    :param array_id: array id -- str
    :param return_binary: return settings binary data -- bool
    :param exclude_alert_policy_settings: exclude alert policy
        settings -- bool
    :param alert_level_notification_settings: exclude alert level
        notification settings -- bool
    :param exclude_system_threshold_settings: exclude system threshold
        settings -- bool
    :param exclude_performance_threshold_settings: exclude performance
        threshold settings -- bool
    :returns: export details -- dict
    :raises: InvalidInputException
    """
    exclude_flags = [
        exclude_alert_policy_settings,
        exclude_system_threshold_settings,
        exclude_performance_threshold_settings,
        alert_level_notification_settings]
    # Reject the request when every settings category is excluded.
    if False not in exclude_flags:
        msg = ('Invalid system settings input parameters supplied, at '
               'least one settings category must be not be excluded in '
               'the input parameters.')
        LOG.error(msg)
        raise exception.InvalidInputException(msg)

    # Build the exclusion list from whichever flags are set.
    flag_setting_pairs = [
        (exclude_alert_policy_settings, SYS_ALERT_SETTINGS),
        (alert_level_notification_settings, SYS_ALERT_NOTIFI_SETTINGS),
        (exclude_system_threshold_settings, SYS_THRESH_SETTINGS),
        (exclude_performance_threshold_settings, SYS_PERF_THRESH_SETTINGS)]
    exclusion_list = [
        setting for flag, setting in flag_setting_pairs if flag]

    array_id = array_id if array_id else self.array_id
    request_body = {
        FILE_PASSWORD: file_password,
        SRC_ARRAY: array_id,
        EXCLUDE_SYS_SETTINGS: exclusion_list,
        EXCLUDE_UNI_SETTINGS: [ALL_SETTINGS]
    }
    return self._download_settings(request_body=request_body,
                                   dir_path=dir_path, file_name=file_name,
                                   return_binary=return_binary)
def download_unisphere_settings(
        self, file_password, dir_path=None, file_name=None,
        return_binary=False, exclude_alert_notification_settings=False,
        exclude_performance_preference_settings=False,
        exclude_performance_user_templates=False,
        exclude_performance_metric_settings=False):
    """Download Unisphere settings.

    - Unisphere settings:
        - Alert notifications
        - Performance metrics
        - Performance preferences
        - Performance user templates

    By default settings will be written to a zip file in the current
    working directory unless a supplied directory path and/or file name
    are provided. If any extension is provided in the file name it will
    be replaced with .zip before the data is written to file.

    If instead the file writing process should be handled outside of
    PyU4V or uploaded directly to Unisphere without any file handling
    set return_binary to True. The response dict will have the settings
    binary data included in key 'binary_data'.

    :param file_password: password to sign file (required) -- str
    :param dir_path: file save location -- str
    :param file_name: zip file name -- str
    :param return_binary: return settings binary data -- bool
    :param exclude_alert_notification_settings: exclude alert
        notification settings -- bool
    :param exclude_performance_preference_settings: exclude performance
        preference settings -- bool
    :param exclude_performance_user_templates: exclude performance user
        templates -- bool
    :param exclude_performance_metric_settings: exclude performance
        metric settings -- bool
    :returns: download details -- dict
    :raises: InvalidInputException
    """
    exclude_flags = [
        exclude_alert_notification_settings,
        exclude_performance_preference_settings,
        exclude_performance_user_templates,
        exclude_performance_metric_settings]
    # Reject the request when every settings category is excluded.
    if False not in exclude_flags:
        msg = ('Invalid Unisphere settings input parameters supplied, at '
               'least one settings category must be not be excluded in '
               'the input parameters.')
        LOG.error(msg)
        raise exception.InvalidInputException(msg)

    # Build the exclusion list from whichever flags are set.
    flag_setting_pairs = [
        (exclude_alert_notification_settings, UNI_ALERT_SETTINGS),
        (exclude_performance_preference_settings, UNI_PERF_PREF_SETTINGS),
        (exclude_performance_user_templates, UNI_PERF_USER_SETTINGS),
        (exclude_performance_metric_settings, UNI_PERF_METRIC_SETTINGS)]
    exclusion_list = [
        setting for flag, setting in flag_setting_pairs if flag]

    request_body = {
        FILE_PASSWORD: file_password,
        SRC_ARRAY: self.array_id,
        EXCLUDE_SYS_SETTINGS: [ALL_SETTINGS],
        EXCLUDE_UNI_SETTINGS: exclusion_list
    }
    return self._download_settings(request_body=request_body,
                                   dir_path=dir_path, file_name=file_name,
                                   return_binary=return_binary)