Example no. 1
0
class NetAppHandler(object):

    # NetApp SNMP OIDs for the system serial number and the trap payload data
    OID_SERIAL_NUM = '1.3.6.1.4.1.789.1.1.9.0'
    OID_TRAP_DATA = '1.3.6.1.4.1.789.1.1.12.0'

    SECONDS_TO_MS = 1000

    def __init__(self, **kwargs):
        self.ssh_pool = SSHPool(**kwargs)

    @staticmethod
    def parse_alert(alert):
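        """Parse an SNMP trap into a delfin alert model.

        The trap data OID value is expected to look like
        'alert_name:description'; severity is looked up in
        netapp_constants.SEVERITY_MAP.
        """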
        try:
            alert_info = alert.get(NetAppHandler.OID_TRAP_DATA)
            alert_arr = alert_info.split(":")
            if len(alert_arr) > 1:
                alert_name = alert_arr[0]
                description = alert_arr[1]
                if netapp_constants.SEVERITY_MAP.get(alert_name):
                    severity = netapp_constants.SEVERITY_MAP.get(alert_name)
                    a = {
                        'alert_id': '',
                        'alert_name': alert_name,
                        'severity': severity,
                        'category': constants.Category.EVENT,
                        'type': constants.EventType.EQUIPMENT_ALARM,
                        'occur_time': int(time.time()) *
                        NetAppHandler.SECONDS_TO_MS,
                        'description': description,
                        'sequence_number': '',
                        'resource_type': constants.DEFAULT_RESOURCE_TYPE,
                        'location': ''
                    }
                    return a
        except exception.DelfinException as e:
            err_msg = "Failed to parse alert from " \
                      "netapp_fas fas: %s" % (six.text_type(e.msg))
            LOG.error(err_msg)
            raise e
        except Exception as err:
            err_msg = "Failed to parse alert from " \
                      "netapp_fas fas: %s" % (six.text_type(err))
            LOG.error(err_msg)
            raise exception.InvalidResults(err_msg)

    def login(self):
        try:
            self.exec_ssh_command('version')
        except Exception as e:
            LOG.error("Failed to login netapp_fas %s" %
                      (six.text_type(e)))
            raise e

    @staticmethod
    def do_exec(command_str, ssh):
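        """Run command_str over an open SSH connection.

        Returns decoded stdout if the command produced output,
        otherwise decoded stderr.
        """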
        result = None
        try:
            utils.check_ssh_injection(command_str)
            if command_str is not None and ssh is not None:
                stdin, stdout, stderr = ssh.exec_command(command_str)
                res, err = stdout.read(), stderr.read()
                re = res if res else err
                result = re.decode()
        except paramiko.AuthenticationException as ae:
            LOG.error('doexec Authentication error:{}'.format(ae))
            raise exception.InvalidUsernameOrPassword()
        except Exception as e:
            err = six.text_type(e)
            LOG.error('doexec InvalidUsernameOrPassword error')
            if 'timed out' in err:
                raise exception.SSHConnectTimeout()
            elif 'No authentication methods available' in err \
                    or 'Authentication failed' in err:
                raise exception.InvalidUsernameOrPassword()
            elif 'not a valid RSA private key file' in err:
                raise exception.InvalidPrivateKey()
            else:
                raise exception.SSHException(err)
        return result

    def exec_ssh_command(self, command):
        try:
            with self.ssh_pool.item() as ssh:
                ssh_info = NetAppHandler.do_exec(command, ssh)
            return ssh_info
        except Exception as e:
            msg = "Failed to ssh netapp_fas %s: %s" % \
                  (command, six.text_type(e))
            raise exception.SSHException(msg)

    @staticmethod
    def change_capacity_to_bytes(unit):
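        """Return the byte multiplier for a capacity unit suffix.

        'TB', 'GB', 'MB' and 'KB' map to the binary multipliers
        units.Ti/Gi/Mi/Ki; any other suffix falls back to 1.
        """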
        unit = unit.upper()
        if unit == 'TB':
            res = units.Ti
        elif unit == 'GB':
            res = units.Gi
        elif unit == 'MB':
            res = units.Mi
        elif unit == 'KB':
            res = units.Ki
        else:
            res = 1
        return int(res)

    def parse_string(self, value):
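        """Convert a capacity string such as '512MB' to bytes.

        Plain digit strings are treated as a byte count already.
        """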
        capacity = 0
        if value:
            if value.isdigit():
                capacity = float(value)
            else:
                unit = value[-2:]
                capacity = float(value[:-2]) * int(
                    self.change_capacity_to_bytes(unit))
        return capacity

    def get_storage(self):
        try:
            STATUS_MAP = {
                'ok': constants.StorageStatus.NORMAL,
                'ok-with-suppressed': constants.StorageStatus.NORMAL,
                'degraded': constants.StorageStatus.ABNORMAL,
                'unreachable': constants.StorageStatus.ABNORMAL
            }
            raw_capacity = total_capacity = used_capacity = free_capacity = 0
            system_info = self.exec_ssh_command(
                netapp_constants.CLUSTER_SHOW_COMMAND)
            version = self.exec_ssh_command(
                netapp_constants.VERSION_SHOW_COMMAND)
            status_info = self.exec_ssh_command(
                netapp_constants.STORAGE_STATUS_COMMAND)
            version_arr = version.split('\n')
            status = STATUS_MAP.get(status_info.split("\n")[2])
            disk_list = self.list_disks(None)
            pool_list = self.list_storage_pools(None)
            storage_map = {}
            self.handle_detail(system_info, storage_map, split=':')
            for disk in disk_list:
                raw_capacity += disk['capacity']

            for pool in pool_list:
                total_capacity += pool['total_capacity']
                free_capacity += pool['free_capacity']
                used_capacity += pool['used_capacity']

            s = {
                "name": storage_map['ClusterName'],
                "vendor": netapp_constants.STORAGE_VENDOR,
                "model": '',
                "status": status,
                "serial_number": storage_map['ClusterSerialNumber'],
                "firmware_version": version_arr[0],
                "location": '',
                "total_capacity": total_capacity,
                "raw_capacity": raw_capacity,
                "used_capacity": used_capacity,
                "free_capacity": free_capacity
            }
            return s
        except exception.DelfinException as e:
            err_msg = "Failed to get storage from " \
                      "netapp_fas fas: %s" % (six.text_type(e.msg))
            LOG.error(err_msg)
            raise e
        except Exception as err:
            err_msg = "Failed to get storage from " \
                      "netapp_fas fas: %s" % (six.text_type(err))
            LOG.error(err_msg)
            raise exception.InvalidResults(err_msg)

    @staticmethod
    def handle_detail(system_info, storage_map, split):
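        """Parse 'Key: value' style output lines into storage_map.

        Each non-empty line is split on the given separator followed by a
        space; spaces are stripped from the key and the remainder (if any)
        becomes the value.
        """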
        detail_arr = system_info.split('\n')
        for detail in detail_arr:
            if detail is not None and detail != '':
                strinfo = detail.split(split + " ")
                key = strinfo[0].replace(' ', '')
                value = ''
                if len(strinfo) > 1:
                    value = strinfo[1]
                storage_map[key] = value

    def get_aggregate(self, storage_id):
        STATUS_MAP = {
            'online': constants.StoragePoolStatus.NORMAL,
            'creating': constants.StoragePoolStatus.NORMAL,
            'mounting': constants.StoragePoolStatus.NORMAL,
            'relocating': constants.StoragePoolStatus.NORMAL,
            'quiesced': constants.StoragePoolStatus.OFFLINE,
            'quiescing': constants.StoragePoolStatus.OFFLINE,
            'unmounted': constants.StoragePoolStatus.OFFLINE,
            'unmounting': constants.StoragePoolStatus.OFFLINE,
            'destroying': constants.StoragePoolStatus.ABNORMAL,
            'partial': constants.StoragePoolStatus.ABNORMAL,
            'frozen': constants.StoragePoolStatus.ABNORMAL,
            'reverted': constants.StoragePoolStatus.NORMAL,
            'restricted': constants.StoragePoolStatus.ABNORMAL,
            'inconsistent': constants.StoragePoolStatus.ABNORMAL,
            'iron_restricted': constants.StoragePoolStatus.ABNORMAL,
            'unknown': constants.StoragePoolStatus.OFFLINE,
            'offline': constants.StoragePoolStatus.OFFLINE,
            'failed': constants.StoragePoolStatus.ABNORMAL,
            'remote_cluster': constants.StoragePoolStatus.NORMAL,
        }
        agg_list = []
        agg_info = self.exec_ssh_command(
            netapp_constants.AGGREGATE_SHOW_DETAIL_COMMAND)
        agg_arr = agg_info.split(
            netapp_constants.AGGREGATE_SPLIT_STR)
        agg_map = {}
        for agg in agg_arr[1:]:
            self.handle_detail(agg, agg_map, split=':')
            status = STATUS_MAP.get(agg_map['State'])
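            # The short-looking keys used below ('e' here, and 'ame', 'k',
            # 'r' in later methods) appear to be the tail of the first field
            # label left over after splitting the CLI output on the
            # corresponding *_SPLIT_STR constants.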
            p = {
                'name': agg_map['e'],
                'storage_id': storage_id,
                'native_storage_pool_id': agg_map['UUIDString'],
                'description': '',
                'status': status,
                'storage_type': constants.StorageType.UNIFIED,
                'subscribed_capacity': '',
                'total_capacity':
                    int(self.parse_string(agg_map['Size'])),
                'used_capacity':
                    int(self.parse_string(agg_map['UsedSize'])),
                'free_capacity':
                    int(self.parse_string(agg_map['AvailableSize'])),
            }
            agg_list.append(p)
        return agg_list

    def get_pool(self, storage_id):
        pool_list = []
        pool_info = self.exec_ssh_command(
            netapp_constants.POOLS_SHOW_DETAIL_COMMAND)
        pool_arr = pool_info.split(netapp_constants.POOLS_SPLIT_STR)
        pool_map = {}
        for pool_str in pool_arr[1:]:
            self.handle_detail(pool_str, pool_map, split=':')
            status = \
                constants.StoragePoolStatus.NORMAL \
                if pool_map['IsPoolHealthy?'] == 'true' \
                else constants.StoragePoolStatus.OFFLINE
            p = {
                'name': pool_map['ame'],
                'storage_id': storage_id,
                'native_storage_pool_id': pool_map['UUIDofStoragePool'],
                'description': '',
                'status': status,
                'storage_type': constants.StorageType.BLOCK,
                'subscribed_capacity': '',
                'total_capacity':
                    int(self.parse_string(pool_map['StoragePoolTotalSize'])),
                'used_capacity':
                    int(self.parse_string(pool_map['StoragePoolTotalSize'])) -
                    int(self.parse_string(pool_map['StoragePoolUsableSize'])),
                'free_capacity':
                    int(self.parse_string(pool_map['StoragePoolUsableSize']))
            }
            pool_list.append(p)
        return pool_list

    def list_storage_pools(self, storage_id):
        try:
            pool_list = self.get_pool(storage_id)
            agg_list = self.get_aggregate(storage_id)
            return agg_list + pool_list
        except exception.DelfinException as e:
            err_msg = "Failed to get storage pool from " \
                      "netapp_fas fas: %s" % (six.text_type(e))
            LOG.error(err_msg)
            raise e
        except Exception as err:
            err_msg = "Failed to get storage pool from " \
                      "netapp_fas fas: %s" % (six.text_type(err))
            LOG.error(err_msg)
            raise exception.InvalidResults(err_msg)

    def list_volumes(self, storage_id):
        try:
            STATUS_MAP = {
                'online': constants.VolumeStatus.AVAILABLE,
                'offline': constants.VolumeStatus.ERROR,
                'nvfail': constants.VolumeStatus.ERROR,
                'space-error': constants.VolumeStatus.ERROR,
                'foreign-lun-error': constants.VolumeStatus.ERROR,
            }
            volume_list = []
            volume_info = self.exec_ssh_command(
                netapp_constants.LUN_SHOW_DETAIL_COMMAND)
            volume_arr = volume_info.split(netapp_constants.LUN_SPLIT_STR)
            fs_list = self.list_filesystems(storage_id)
            volume_map = {}
            for volume_str in volume_arr[1:]:
                self.handle_detail(volume_str, volume_map, split=':')
                if volume_map:
                    pool_id = ''
                    status = STATUS_MAP.get(volume_map['State'])
                    for fs in fs_list:
                        if fs['name'] == volume_map['VolumeName']:
                            pool_id = fs['native_pool_id']
                    type = constants.VolumeType.THIN \
                        if volume_map['SpaceAllocation'] == 'enabled' \
                        else constants.VolumeType.THICK
                    v = {
                        'name': volume_map['LUNName'],
                        'storage_id': storage_id,
                        'description': '',
                        'status': status,
                        'native_volume_id': volume_map['LUNUUID'],
                        'native_storage_pool_id': pool_id,
                        'wwn': '',
                        'compressed': '',
                        'deduplicated': '',
                        'type': type,
                        'total_capacity':
                            int(self.parse_string(volume_map['LUNSize'])),
                        'used_capacity':
                            int(self.parse_string(volume_map['UsedSize'])),
                        'free_capacity':
                            int(self.parse_string(volume_map['LUNSize'])) -
                            int(self.parse_string(volume_map['UsedSize']))
                    }
                    volume_list.append(v)
            return volume_list
        except exception.DelfinException as e:
            err_msg = "Failed to get storage volume from " \
                      "netapp_fas fas: %s" % (six.text_type(e))
            LOG.error(err_msg)
            raise e
        except Exception as err:
            err_msg = "Failed to get storage volume from " \
                      "netapp_fas fas: %s" % (six.text_type(err))
            LOG.error(err_msg)
            raise exception.InvalidResults(err_msg)

    def list_alerts(self, query_para):
        try:
            alert_list = []
            alert_info = self.exec_ssh_command(
                netapp_constants.ALTER_SHOW_DETAIL_COMMAND)
            event_info = self.exec_ssh_command(
                netapp_constants.EVENT_SHOW_DETAIL_COMMAND)
            """Query the two alarms separately"""
            AlertHandler.list_events(self, event_info, query_para, alert_list)
            AlertHandler.list_alerts(self, alert_info, query_para, alert_list)
            return alert_list
        except exception.DelfinException as e:
            err_msg = "Failed to get storage alert: %s" % (six.text_type(e))
            LOG.error(err_msg)
            raise e
        except Exception as err:
            err_msg = "Failed to get storage alert: %s" % (six.text_type(err))
            LOG.error(err_msg)
            raise exception.InvalidResults(err_msg)

    def clear_alert(self, alert):
        try:
            ssh_command = \
                netapp_constants.CLEAR_ALERT_COMMAND + alert['alert_id']
            self.exec_ssh_command(ssh_command)
        except exception.DelfinException as e:
            err_msg = "Failed to get storage alert from " \
                      "netapp_fas fas: %s" % (six.text_type(e))
            LOG.error(err_msg)
            raise e
        except Exception as err:
            err_msg = "Failed to get storage alert from " \
                      "netapp_fas fas: %s" % (six.text_type(err))
            LOG.error(err_msg)
            raise exception.InvalidResults(err_msg)

    def list_controllers(self, storage_id):
        try:
            controller_list = []
            controller_info = self.exec_ssh_command(
                netapp_constants.CONTROLLER_SHOW_DETAIL_COMMAND)
            controller_arr = controller_info.split(
                netapp_constants.CONTROLLER_SPLIT_STR)
            controller_map = {}
            for controller_str in controller_arr[1:]:
                self.handle_detail(controller_str, controller_map, split=':')
                if controller_map:
                    status = constants.ControllerStatus.NORMAL \
                        if controller_map['Health'] == 'true' \
                        else constants.ControllerStatus.OFFLINE
                    c = {
                        'name': controller_map['e'],
                        'storage_id': storage_id,
                        'native_controller_id': controller_map['SystemID'],
                        'status': status,
                        'location': controller_map['Location'],
                        'soft_version': '',
                        'cpu_info': '',
                        'memory_size': '',
                    }
                    controller_list.append(c)
            return controller_list
        except exception.DelfinException as e:
            err_msg = "Failed to get storage controllers from " \
                      "netapp_fas fas: %s" % (six.text_type(e))
            LOG.error(err_msg)
            raise e

        except Exception as err:
            err_msg = "Failed to get storage controllers from " \
                      "netapp_fas fas: %s" % (six.text_type(err))
            LOG.error(err_msg)
            raise exception.InvalidResults(err_msg)

    def get_network_port(self, storage_id):
        try:
            LOGICAL_TYPE_MAP = {
                'data': constants.PortLogicalType.FRONTEND,
                'cluster': constants.PortLogicalType.FRONTEND,
                'node-mgmt': constants.PortLogicalType.MANAGEMENT,
                'cluster-mgmt': constants.PortLogicalType.INTERNAL,
                'intercluster': constants.PortLogicalType.INTERCONNECT,
            }
            ports_list = []
            interfaces_info = self.exec_ssh_command(
                netapp_constants.INTERFACE_SHOW_DETAIL_COMMAND)
            interface_arr = interfaces_info.split(
                netapp_constants.INTERFACE_SPLIT_STR)
            interface_map = {}
            # Traverse the interfaces to collect per-port IP information
            for interface_info in interface_arr[1:]:
                self.handle_detail(interface_info, interface_map, split=':')
                logical_type = LOGICAL_TYPE_MAP.get(interface_map['Role'])
                type = interface_map['DataProtocol']
                # Reset the address fields for each interface so addresses
                # from previous ports do not accumulate in later entries
                ipv4 = ipv4_mask = ipv6 = ipv6_mask = '-'
                if interface_map['Addressfamily'] == 'ipv4':
                    ipv4 = interface_map['NetworkAddress']
                    ipv4_mask = interface_map['Netmask']
                else:
                    ipv6 = interface_map['NetworkAddress']
                    ipv6_mask = interface_map['Netmask']
                p = {
                    'name': interface_map['LogicalInterfaceName'],
                    'storage_id': storage_id,
                    'native_port_id': interface_map['LogicalInterfaceName'],
                    'location': '',
                    'connection_status':
                        constants.PortConnectionStatus.CONNECTED
                        if interface_map['OperationalStatus'] == 'up'
                        else constants.PortConnectionStatus.DISCONNECTED,
                    'health_status':
                        constants.PortHealthStatus.NORMAL
                        if interface_map['OperationalStatus'] == 'healthy'
                        else constants.PortHealthStatus.ABNORMAL,
                    'type': type,
                    'logical_type': logical_type,
                    'speed': '',
                    'max_speed': '',
                    'native_parent_id': '',
                    'wwn': '',
                    'mac_address': '',
                    'ipv4': ipv4,
                    'ipv4_mask': ipv4_mask,
                    'ipv6': ipv6,
                    'ipv6_mask': ipv6_mask,
                }
                ports_list.append(p)
            return ports_list
        except exception.DelfinException as e:
            err_msg = "Failed to get storage ports from " \
                      "netapp_fas fas: %s" % (six.text_type(e))
            LOG.error(err_msg)
            raise e

        except Exception as err:
            err_msg = "Failed to get storage ports from " \
                      "netapp_fas fas: %s" % (six.text_type(err))
            LOG.error(err_msg)
            raise exception.InvalidResults(err_msg)

    def get_eth_port(self, storage_id):
        try:
            eth_list = []
            eth_info = self.exec_ssh_command(
                netapp_constants.PORT_SHOW_DETAIL_COMMAND)
            eth_arr = eth_info.split(
                netapp_constants.PORT_SPLIT_STR)
            for eth in eth_arr[1:]:
                eth_map = {}
                self.handle_detail(eth, eth_map, split=':')
                e = {
                    'name': eth_map['Port'],
                    'storage_id': storage_id,
                    'native_port_id': eth_map['Port'],
                    'location': '',
                    'connection_status':
                        constants.PortConnectionStatus.CONNECTED
                        if eth_map['Link'] == 'up'
                        else constants.PortConnectionStatus.DISCONNECTED,
                    'health_status':
                        constants.PortHealthStatus.NORMAL
                        if eth_map['PortHealthStatus'] == 'healthy'
                        else constants.PortHealthStatus.ABNORMAL,
                    'type': constants.PortType.ETH,
                    'logical_type': '',
                    'speed': eth_map['SpeedOperational'],
                    'max_speed': eth_map['MTU'],
                    'native_parent_id': '',
                    'wwn': '',
                    'mac_address': eth_map['MACAddress'],
                    'ipv4': '',
                    'ipv4_mask': '',
                    'ipv6': '',
                    'ipv6_mask': '',
                }
                eth_list.append(e)
            return eth_list
        except exception.DelfinException as e:
            err_msg = "Failed to get storage ports from " \
                      "netapp_fas fas: %s" % (six.text_type(e))
            LOG.error(err_msg)
            raise e

        except Exception as err:
            err_msg = "Failed to get storage ports from " \
                      "netapp_fas fas: %s" % (six.text_type(err))
            LOG.error(err_msg)
            raise exception.InvalidResults(err_msg)

    def get_fc_port(self, storage_id):
        try:
            TYPE_MAP = {
                'fibre-channel': constants.PortType.FC,
                'ethernet': constants.PortType.FCOE
            }
            fc_list = []
            fc_info = self.exec_ssh_command(
                netapp_constants.FC_PORT_SHOW_DETAIL_COMMAND)
            fc_arr = fc_info.split(
                netapp_constants.PORT_SPLIT_STR)
            for fc in fc_arr[1:]:
                fc_map = {}
                self.handle_detail(fc, fc_map, split=':')
                type = TYPE_MAP.get(fc_map['PhysicalProtocol'])
                f = {
                    'name': fc_map['Adapter'],
                    'storage_id': storage_id,
                    'native_port_id': fc_map['Adapter'],
                    'location': '',
                    'connection_status':
                        constants.PortConnectionStatus.CONNECTED
                        if fc_map['AdministrativeStatus'] == 'up'
                        else constants.PortConnectionStatus.DISCONNECTED,
                    'health_status':
                        constants.PortHealthStatus.NORMAL
                        if fc_map['OperationalStatus'] == 'online'
                        else constants.PortHealthStatus.ABNORMAL,
                    'type': type,
                    'logical_type': '',
                    'speed': fc_map['DataLinkRate(Gbit)'],
                    'max_speed': fc_map['MaximumSpeed'],
                    'native_parent_id': '',
                    'wwn': fc_map['AdapterWWNN'],
                    'mac_address': '',
                    'ipv4': '',
                    'ipv4_mask': '',
                    'ipv6': '',
                    'ipv6_mask': '',
                }
                fc_list.append(f)
            return fc_list
        except exception.DelfinException as e:
            err_msg = "Failed to get storage ports from " \
                      "netapp_fas fas: %s" % (six.text_type(e))
            LOG.error(err_msg)
            raise e

        except Exception as err:
            err_msg = "Failed to get storage ports from " \
                      "netapp_fas fas: %s" % (six.text_type(err))
            LOG.error(err_msg)
            raise exception.InvalidResults(err_msg)

    def list_ports(self, storage_id):
        ports_list = \
            self.get_network_port(storage_id) + \
            self.get_fc_port(storage_id) + \
            self.get_eth_port(storage_id)
        return ports_list

    def list_disks(self, storage_id):
        try:
            TYPE_MAP = {
                'ATA': constants.DiskPhysicalType.SATA,
                # The original left the following entries pointing at the
                # DiskPhysicalType class itself; concrete values are assumed
                # here, with types lacking an obvious equivalent mapped to
                # UNKNOWN.
                'BSAS': constants.DiskPhysicalType.SATA,
                'FCAL': constants.DiskPhysicalType.FC,
                'FSAS': constants.DiskPhysicalType.SAS,
                'LUN ': constants.DiskPhysicalType.UNKNOWN,
                'SAS': constants.DiskPhysicalType.SAS,
                'MSATA': constants.DiskPhysicalType.SATA,
                'SSD': constants.DiskPhysicalType.SSD,
                'VMDISK': constants.DiskPhysicalType.UNKNOWN,
                'unknown': constants.DiskPhysicalType.UNKNOWN,
            }
            LOGICAL_MAP = {
                'aggregate': constants.DiskLogicalType.MEMBER,
                'spare': constants.DiskLogicalType.HOTSPARE,
                'unknown': constants.DiskLogicalType.UNKNOWN,
                'free': constants.DiskLogicalType.FREE,
                # Container types without an obvious delfin equivalent are
                # assumed to map to UNKNOWN; the original pointed these at
                # the DiskLogicalType class itself.
                'broken': constants.DiskLogicalType.UNKNOWN,
                'foreign': constants.DiskLogicalType.UNKNOWN,
                'labelmaint': constants.DiskLogicalType.UNKNOWN,
                'maintenance': constants.DiskLogicalType.UNKNOWN,
                'shared': constants.DiskLogicalType.UNKNOWN,
                'unassigned': constants.DiskLogicalType.UNKNOWN,
                'unsupported': constants.DiskLogicalType.UNKNOWN,
                'remote': constants.DiskLogicalType.UNKNOWN,
                'mediator': constants.DiskLogicalType.UNKNOWN,
            }
            disks_list = []
            physicals_list = []
            disks_info = self.exec_ssh_command(
                netapp_constants.DISK_SHOW_DETAIL_COMMAND)
            disks_arr = disks_info.split(
                netapp_constants.DISK_SPLIT_STR)
            physicals_info = self.exec_ssh_command(
                netapp_constants.DISK_SHOW_PHYSICAL_COMMAND)
            disks_map = {}
            physical_arr = physicals_info.split('\n')
            speed = physical_type = firmware = '-'
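            # The physical disk listing appears to hold one disk per every
            # other line starting at index 2; the parsed columns later
            # provide each disk's type, firmware and speed.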
            for i in range(2, len(physical_arr), 2):
                physicals_list.append(physical_arr[i].split())
            for disk_str in disks_arr[1:]:
                self.handle_detail(disk_str, disks_map, split=':')
                logical_type = LOGICAL_MAP.get(disks_map['ContainerType'])
                """Map disk physical information"""
                for physical_info in physicals_list:
                    if len(physical_info) > 6:
                        if physical_info[0] == disks_map['k']:
                            physical_type = TYPE_MAP.get(physical_info[1])
                            speed = physical_info[5]
                            firmware = physical_info[4]
                status = constants.DiskStatus.NORMAL \
                    if disks_map['Errors:'] is None \
                    or disks_map['Errors:'] == "" \
                    else constants.DiskStatus.OFFLINE
                d = {
                    'name': disks_map['k'],
                    'storage_id': storage_id,
                    'native_disk_id': disks_map['k'],
                    'serial_number': disks_map['SerialNumber'],
                    'manufacturer': disks_map['Vendor'],
                    'model': disks_map['Model'],
                    'firmware': firmware,
                    'speed': speed,
                    'capacity':
                        int(self.parse_string(disks_map['PhysicalSize'])),
                    'status': status,
                    'physical_type': physical_type,
                    'logical_type': logical_type,
                    'health_score': '',
                    'native_disk_group_id': disks_map['Aggregate'],
                    'location': '',
                }
                disks_list.append(d)
            return disks_list
        except exception.DelfinException as e:
            err_msg = "Failed to get storage disks from " \
                      "netapp_fas fas: %s" % (six.text_type(e))
            LOG.error(err_msg)
            raise e

        except Exception as err:
            err_msg = "Failed to get storage disks from " \
                      "netapp_fas fas: %s" % (six.text_type(err))
            LOG.error(err_msg)
            raise exception.InvalidResults(err_msg)

    def list_qtrees(self, storage_id):
        try:
            qt_list = []
            qt_info = self.exec_ssh_command(
                netapp_constants.QTREE_SHOW_DETAIL_COMMAND)
            qt_arr = qt_info.split(netapp_constants.QTREE_SPLIT_STR)
            qt_map = {}
            for qt in qt_arr[1:]:
                self.handle_detail(qt, qt_map, split=':')
                q = {
                    'name': qt_map['QtreeName'],
                    'storage_id': storage_id,
                    'native_qtree_id': qt_map['Actual(Non-Junction)QtreePath'],
                    'native_filesystem_id': qt_map['VolumeName'],
                    'security_mode': qt_map['SecurityStyle'],
                }
                qt_list.append(q)

            return qt_list
        except exception.DelfinException as err:
            err_msg = "Failed to get storage qtrees from " \
                      "netapp_fas fas: %s" % (six.text_type(err))
            LOG.error(err_msg)
            raise err

        except Exception as err:
            err_msg = "Failed to get storage qtrees from " \
                      "netapp_fas fas: %s" % (six.text_type(err))
            LOG.error(err_msg)
            raise exception.InvalidResults(err_msg)

    def list_shares(self, storage_id):
        try:
            shares_list = []
            cifs_share_info = self.exec_ssh_command(
                netapp_constants.CIFS_SHARE_SHOW_DETAIL_COMMAND)
            cifs_share_arr = cifs_share_info.split(
                netapp_constants.CIFS_SHARE_SPLIT_STR)
            protocol_info = self.exec_ssh_command(
                netapp_constants.SHARE_AGREEMENT_SHOW_COMMAND)
            cifs_share_map = {}
            protocol_map = {}
            protocol_arr = protocol_info.split('\n')
            for protocol in protocol_arr[2:]:
                agr_arr = protocol.split()
                if len(agr_arr) > 1:
                    protocol_map[agr_arr[0]] = agr_arr[1]
            for cifs_share in cifs_share_arr[1:]:
                self.handle_detail(cifs_share, cifs_share_map, split=':')
                protocol = protocol_map.get(cifs_share_map['r'])
                s = {
                    'name': cifs_share_map['Share'],
                    'storage_id': storage_id,
                    'native_share_id': cifs_share_map['Share'],
                    'native_filesystem_id': cifs_share_map['VolumeName'],
                    'path': cifs_share_map['Path'],
                    'protocol': protocol
                }
                shares_list.append(s)
            return shares_list
        except exception.DelfinException as err:
            err_msg = "Failed to get storage shares from " \
                      "netapp_fas fas: %s" % (six.text_type(err))
            LOG.error(err_msg)
            raise err

        except Exception as err:
            err_msg = "Failed to get storage shares from " \
                      "netapp_fas fas: %s" % (six.text_type(err))
            LOG.error(err_msg)
            raise exception.InvalidResults(err_msg)

    def list_filesystems(self, storage_id):
        try:
            STATUS_MAP = {
                'online': constants.FilesystemStatus.NORMAL,
                'restricted': constants.FilesystemStatus.FAULTY,
                'offline': constants.FilesystemStatus.NORMAL,
                'force-online': constants.FilesystemStatus.FAULTY,
                'force-offline': constants.FilesystemStatus.FAULTY,
            }
            fs_list = []
            fs_info = self.exec_ssh_command(
                netapp_constants.FS_SHOW_DETAIL_COMMAND)
            fs_arr = fs_info.split(
                netapp_constants.FS_SPLIT_STR)
            thin_fs_info = self.exec_ssh_command(
                netapp_constants.THIN_FS_SHOW_COMMAND)
            pool_list = self.list_storage_pools(storage_id)
            thin_fs_arr = thin_fs_info.split("\n")
            type = constants.FSType.THICK
            fs_map = {}
            for fs_str in fs_arr[1:]:
                self.handle_detail(fs_str, fs_map, split=':')
                if fs_map:
                    pool_id = ""
                    """get pool id"""
                    for pool in pool_list:
                        if pool['name'] == fs_map['AggregateName']:
                            pool_id = pool['native_storage_pool_id']
                    deduplicated = False \
                        if fs_map['SpaceSavedbyDeduplication'] == '0B' \
                        else True
                    if len(thin_fs_arr) > 2:
                        for thin_vol in thin_fs_arr[2:]:
                            thin_arr = thin_vol.split()
                            if len(thin_arr) > 4:
                                if thin_arr[1] == fs_map['VolumeName']:
                                    type = constants.FSType.THIN
                    compressed = \
                        fs_map['VolumeContainsSharedorCompressedData'] \
                        != 'false'
                    status = STATUS_MAP.get(fs_map['VolumeState'])
                    f = {
                        'name': fs_map['VolumeName'],
                        'storage_id': storage_id,
                        'native_filesystem_id': fs_map['VolumeName'],
                        'native_pool_id': pool_id,
                        'compressed': compressed,
                        'deduplicated': deduplicated,
                        'worm': fs_map['SnapLockType'],
                        'status': status,
                        'type': type,
                        'total_capacity':
                            int(self.parse_string(fs_map['VolumeSize'])),
                        'used_capacity':
                            int(self.parse_string(fs_map['UsedSize'])),
                        'free_capacity':
                            int(self.parse_string(fs_map['VolumeSize'])) -
                            int(self.parse_string(fs_map['UsedSize']))
                    }
                    fs_list.append(f)
            return fs_list
        except exception.DelfinException as e:
            err_msg = "Failed to get storage volume from " \
                      "netapp_fas fas: %s" % (six.text_type(e))
            LOG.error(err_msg)
            raise e
        except Exception as err:
            err_msg = "Failed to get storage volume from " \
                      "netapp_fas fas: %s" % (six.text_type(err))
            LOG.error(err_msg)
            raise exception.InvalidResults(err_msg)

    def add_trap_config(self, context, trap_config):
        pass

    def remove_trap_config(self, context, trap_config):
        pass
Example no. 2
0
class SSHHandler(object):
    # Show the value of a configuration parameter
    CONFIG_GET = 'config_get [ name=machine_serial_number ]'
    # Print the current system version
    VERSION_GET = 'version_get'

    def __init__(self, **kwargs):
        self.kwargs = kwargs
        self.ssh_pool = SSHPool(**kwargs)

    def login(self):
        try:
            with self.ssh_pool.item() as ssh:
                SSHHandler.do_exec('lssystem', ssh)
        except Exception as e:
            LOG.error("Failed to login ibm a9000r %s" % (six.text_type(e)))
            raise e

    # Show the configuration parameter value used for machine_serial_number
    def get_storage_serial_number(self):
        serial_number = None
        try:
            serial_number = self.exec_ssh_command(SSHHandler.CONFIG_GET)
        except Exception as e:
            LOG.error("Get all storage machine_serial_number error: %s",
                      six.text_type(e))
        return serial_number

    # Print the current system version, used for the firmware_version field
    def get_storage_version_get(self):
        firmware_version = None
        try:
            firmware_version = self.exec_ssh_command(SSHHandler.VERSION_GET)
        except Exception as e:
            LOG.error("Get all storage firmware_version error: %s",
                      six.text_type(e))
        return firmware_version

    def exec_ssh_command(self, command):
        try:
            with self.ssh_pool.item() as ssh:
                ssh_info = SSHHandler.do_exec(command, ssh)
            return ssh_info
        except Exception as e:
            msg = "Failed to ssh ibm a9000r ssh_handler %s: %s" % \
                  (command, six.text_type(e))
            raise exception.SSHException(msg)

    @staticmethod
    def do_exec(command_str, ssh):
        """Execute command"""
        result = None
        try:
            utils.check_ssh_injection(command_str)
            if command_str is not None and ssh is not None:
                stdin, stdout, stderr = ssh.exec_command(command_str)
                res, err = stdout.read(), stderr.read()
                re = res if res else err
                result = re.decode()
        except paramiko.AuthenticationException as ae:
            LOG.error('doexec Authentication error:{}'.format(ae))
            raise exception.InvalidUsernameOrPassword()
        except Exception as e:
            err = six.text_type(e)
            LOG.error('doexec InvalidUsernameOrPassword error')
            if 'timed out' in err:
                raise exception.SSHConnectTimeout()
            elif 'No authentication methods available' in err \
                    or 'Authentication failed' in err:
                raise exception.InvalidUsernameOrPassword()
            elif 'not a valid RSA private key file' in err:
                raise exception.InvalidPrivateKey()
            else:
                raise exception.SSHException(err)
        return result
Example no. 3
0
class SSHHandler(object):
    OID_ERR_ID = '1.3.6.1.4.1.2.6.190.4.3'
    OID_SEQ_NUMBER = '1.3.6.1.4.1.2.6.190.4.9'
    OID_LAST_TIME = '1.3.6.1.4.1.2.6.190.4.10'
    OID_OBJ_TYPE = '1.3.6.1.4.1.2.6.190.4.11'
    OID_OBJ_NAME = '1.3.6.1.4.1.2.6.190.4.17'
    OID_SEVERITY = '1.3.6.1.6.3.1.1.4.1.0'

    TRAP_SEVERITY_MAP = {
        '1.3.6.1.4.1.2.6.190.1': constants.Severity.CRITICAL,
        '1.3.6.1.4.1.2.6.190.2': constants.Severity.WARNING,
        '1.3.6.1.4.1.2.6.190.3': constants.Severity.INFORMATIONAL,
    }

    SEVERITY_MAP = {
        "warning": "Warning",
        "informational": "Informational",
        "error": "Major"
    }

    SECONDS_TO_MS = 1000

    def __init__(self, **kwargs):
        self.ssh_pool = SSHPool(**kwargs)

    @staticmethod
    def handle_split(split_str, split_char, arr_number):
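        """Return the part of split_str before (arr_number=0) or after
        (arr_number=1) the first occurrence of split_char, stripped."""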
        split_value = ''
        if split_str is not None and split_str != '':
            tmp_value = split_str.split(split_char, 1)
            if arr_number == 1 and len(tmp_value) > 1:
                split_value = tmp_value[arr_number].strip()
            elif arr_number == 0:
                split_value = tmp_value[arr_number].strip()
        return split_value

    @staticmethod
    def parse_alert(alert):
        try:
            alert_model = dict()
            alert_name = SSHHandler.handle_split(
                alert.get(SSHHandler.OID_ERR_ID), ':', 1)
            error_info = SSHHandler.handle_split(
                alert.get(SSHHandler.OID_ERR_ID), ':', 0)
            alert_id = SSHHandler.handle_split(error_info, '=', 1)
            severity = SSHHandler.TRAP_SEVERITY_MAP.get(
                alert.get(SSHHandler.OID_SEVERITY),
                constants.Severity.INFORMATIONAL)
            alert_model['alert_id'] = str(alert_id)
            alert_model['alert_name'] = alert_name
            alert_model['severity'] = severity
            alert_model['category'] = constants.Category.FAULT
            alert_model['type'] = constants.EventType.EQUIPMENT_ALARM
            alert_model['sequence_number'] = SSHHandler. \
                handle_split(alert.get(SSHHandler.OID_SEQ_NUMBER), '=', 1)
            timestamp = SSHHandler. \
                handle_split(alert.get(SSHHandler.OID_LAST_TIME), '=', 1)
            time_type = '%a %b %d %H:%M:%S %Y'
            occur_time = int(time.mktime(time.strptime(timestamp, time_type)))
            alert_model['occur_time'] = int(occur_time *
                                            SSHHandler.SECONDS_TO_MS)
            alert_model['description'] = alert_name
            alert_model['resource_type'] = SSHHandler.handle_split(
                alert.get(SSHHandler.OID_OBJ_TYPE), '=', 1)
            alert_model['location'] = SSHHandler.handle_split(
                alert.get(SSHHandler.OID_OBJ_NAME), '=', 1)
            return alert_model
        except Exception as e:
            LOG.error(e)
            msg = ("Failed to build alert model as some attributes missing "
                   "in alert message:%s.") % (six.text_type(e))
            raise exception.InvalidResults(msg)

    def login(self):
        try:
            with self.ssh_pool.item() as ssh:
                SSHHandler.do_exec('lssystem', ssh)
        except Exception as e:
            LOG.error("Failed to login ibm storwize_svc %s" %
                      (six.text_type(e)))
            raise e

    @staticmethod
    def do_exec(command_str, ssh):
        """Execute command"""
        result = None
        try:
            utils.check_ssh_injection(command_str)
            if command_str is not None and ssh is not None:
                stdin, stdout, stderr = ssh.exec_command(command_str)
                res, err = stdout.read(), stderr.read()
                re = res if res else err
                result = re.decode()
        except paramiko.AuthenticationException as ae:
            LOG.error('doexec Authentication error:{}'.format(ae))
            raise exception.InvalidUsernameOrPassword()
        except Exception as e:
            err = six.text_type(e)
            LOG.error('doexec InvalidUsernameOrPassword error')
            if 'timed out' in err:
                raise exception.SSHConnectTimeout()
            elif 'No authentication methods available' in err \
                    or 'Authentication failed' in err:
                raise exception.InvalidUsernameOrPassword()
            elif 'not a valid RSA private key file' in err:
                raise exception.InvalidPrivateKey()
            else:
                raise exception.SSHException(err)
        return result

    def exec_ssh_command(self, command):
        try:
            with self.ssh_pool.item() as ssh:
                ssh_info = SSHHandler.do_exec(command, ssh)
            return ssh_info
        except Exception as e:
            msg = "Failed to ssh ibm storwize_svc %s: %s" % \
                  (command, six.text_type(e))
            raise exception.SSHException(msg)

    def change_capacity_to_bytes(self, unit):
        unit = unit.upper()
        if unit == 'TB':
            result = units.Ti
        elif unit == 'GB':
            result = units.Gi
        elif unit == 'MB':
            result = units.Mi
        elif unit == 'KB':
            result = units.Ki
        else:
            result = 1
        return int(result)

    def parse_string(self, value):
        capacity = 0
        if value:
            if value.isdigit():
                capacity = float(value)
            else:
                unit = value[-2:]
                capacity = float(value[:-2]) * int(
                    self.change_capacity_to_bytes(unit))
        return capacity

    def get_storage(self):
        try:
            system_info = self.exec_ssh_command('lssystem')
            enclosure_info = self.exec_ssh_command('lsenclosure -delim :')
            enclosure_res = enclosure_info.split('\n')
            enclosure = enclosure_res[1].split(':')
            serial_number = enclosure[7]
            storage_map = {}
            self.handle_detail(system_info, storage_map, split=' ')

            status = 'normal' if storage_map.get('statistics_status') == 'on' \
                else 'offline'
            location = storage_map.get('location')
            free_capacity = self.parse_string(
                storage_map.get('total_free_space'))
            used_capacity = self.parse_string(
                storage_map.get('total_used_capacity'))
            raw_capacity = self.parse_string(
                storage_map.get('total_drive_raw_capacity'))
            subscribed_capacity = self.parse_string(
                storage_map.get('virtual_capacity'))
            firmware_version = ''
            if storage_map.get('code_level') is not None:
                firmware_version = storage_map.get('code_level').split(' ')[0]
            s = {
                'name': storage_map.get('name'),
                'vendor': 'IBM',
                'model': storage_map.get('product_name'),
                'status': status,
                'serial_number': serial_number,
                'firmware_version': firmware_version,
                'location': location,
                'total_capacity': int(free_capacity + used_capacity),
                'raw_capacity': int(raw_capacity),
                'subscribed_capacity': int(subscribed_capacity),
                'used_capacity': int(used_capacity),
                'free_capacity': int(free_capacity)
            }
            return s
        except exception.DelfinException as e:
            err_msg = "Failed to get storage: %s" % (six.text_type(e.msg))
            LOG.error(err_msg)
            raise e
        except Exception as err:
            err_msg = "Failed to get storage: %s" % (six.text_type(err))
            LOG.error(err_msg)
            raise exception.InvalidResults(err_msg)

    def handle_detail(self, detail_info, detail_map, split):
        detail_arr = detail_info.split('\n')
        for detail in detail_arr:
            if detail is not None and detail != '':
                strinfo = detail.split(split, 1)
                key = strinfo[0]
                value = ''
                if len(strinfo) > 1:
                    value = strinfo[1]
                detail_map[key] = value

    def list_storage_pools(self, storage_id):
        try:
            pool_list = []
            pool_info = self.exec_ssh_command('lsmdiskgrp')
            pool_res = pool_info.split('\n')
            for i in range(1, len(pool_res)):
                if pool_res[i] is None or pool_res[i] == '':
                    continue

                pool_str = ' '.join(pool_res[i].split())
                strinfo = pool_str.split(' ')
                detail_command = 'lsmdiskgrp %s' % strinfo[0]
                detail_info = self.exec_ssh_command(detail_command)
                pool_map = {}
                self.handle_detail(detail_info, pool_map, split=' ')
                status = 'normal' if pool_map.get('status') == 'online' \
                    else 'offline'
                total_cap = self.parse_string(pool_map.get('capacity'))
                free_cap = self.parse_string(pool_map.get('free_capacity'))
                used_cap = self.parse_string(pool_map.get('used_capacity'))
                subscribed_capacity = self.parse_string(
                    pool_map.get('virtual_capacity'))
                p = {
                    'name': pool_map.get('name'),
                    'storage_id': storage_id,
                    'native_storage_pool_id': pool_map.get('id'),
                    'description': '',
                    'status': status,
                    'storage_type': constants.StorageType.BLOCK,
                    'subscribed_capacity': int(subscribed_capacity),
                    'total_capacity': int(total_cap),
                    'used_capacity': int(used_cap),
                    'free_capacity': int(free_cap)
                }
                pool_list.append(p)

            return pool_list
        except exception.DelfinException as e:
            err_msg = "Failed to get storage pool: %s" % (six.text_type(e))
            LOG.error(err_msg)
            raise e
        except Exception as err:
            err_msg = "Failed to get storage pool: %s" % (six.text_type(err))
            LOG.error(err_msg)
            raise exception.InvalidResults(err_msg)

    def list_volumes(self, storage_id):
        try:
            volume_list = []
            volume_info = self.exec_ssh_command('lsvdisk')
            volume_res = volume_info.split('\n')
            for i in range(1, len(volume_res)):
                if volume_res[i] is None or volume_res[i] == '':
                    continue
                volume_str = ' '.join(volume_res[i].split())
                strinfo = volume_str.split(' ')
                volume_name = strinfo[1]
                detail_command = 'lsvdisk -delim : %s' % volume_name
                detail_info = self.exec_ssh_command(detail_command)
                volume_map = {}
                self.handle_detail(detail_info, volume_map, split=':')
                status = 'normal' if volume_map.get('status') == 'online' \
                    else 'offline'
                volume_type = 'thin' if volume_map.get('se_copy') == 'yes' \
                    else 'thick'
                total_capacity = self.parse_string(volume_map.get('capacity'))
                free_capacity = self.parse_string(
                    volume_map.get('free_capacity'))
                used_capacity = self.parse_string(
                    volume_map.get('used_capacity'))
                compressed = True
                deduplicated = True
                if volume_map.get('compressed_copy') == 'no':
                    compressed = False
                if volume_map.get('deduplicated_copy') == 'no':
                    deduplicated = False

                v = {
                    'name': volume_map.get('name'),
                    'storage_id': storage_id,
                    'description': '',
                    'status': status,
                    'native_volume_id': str(volume_map.get('id')),
                    'native_storage_pool_id': volume_map.get('mdisk_grp_id'),
                    'wwn': str(volume_map.get('vdisk_UID')),
                    'type': volume_type,
                    'total_capacity': int(total_capacity),
                    'used_capacity': int(used_capacity),
                    'free_capacity': int(free_capacity),
                    'compressed': compressed,
                    'deduplicated': deduplicated
                }
                volume_list.append(v)

            return volume_list
        except exception.DelfinException as e:
            err_msg = "Failed to get storage volume: %s" % (six.text_type(e))
            LOG.error(err_msg)
            raise e
        except Exception as err:
            err_msg = "Failed to get storage volume: %s" % (six.text_type(err))
            LOG.error(err_msg)
            raise exception.InvalidResults(err_msg)

    def list_alerts(self, query_para):
        try:
            alert_list = []
            alert_info = self.exec_ssh_command('lseventlog -monitoring yes')
            alert_res = alert_info.split('\n')
            for i in range(1, len(alert_res)):
                if alert_res[i] is None or alert_res[i] == '':
                    continue
                alert_str = ' '.join(alert_res[i].split())
                strinfo = alert_str.split(' ', 1)
                detail_command = 'lseventlog %s' % strinfo[0]
                detail_info = self.exec_ssh_command(detail_command)
                alert_map = {}
                self.handle_detail(detail_info, alert_map, split=' ')
                occur_time = int(alert_map.get('last_timestamp_epoch')) * \
                    self.SECONDS_TO_MS
                if not alert_util.is_alert_in_time_range(
                        query_para, occur_time):
                    continue
                alert_name = alert_map.get('event_id_text', '')
                event_id = alert_map.get('event_id')
                location = alert_map.get('object_name', '')
                resource_type = alert_map.get('object_type', '')
                severity = self.SEVERITY_MAP.get(
                    alert_map.get('notification_type'))

                alert_model = {
                    'alert_id': event_id,
                    'alert_name': alert_name,
                    'severity': severity,
                    'category': constants.Category.FAULT,
                    'type': 'EquipmentAlarm',
                    'sequence_number': alert_map.get('sequence_number'),
                    'occur_time': occur_time,
                    'description': alert_name,
                    'resource_type': resource_type,
                    'location': location
                }
                alert_list.append(alert_model)

            return alert_list
        except exception.DelfinException as e:
            err_msg = "Failed to get storage alert: %s" % (six.text_type(e))
            LOG.error(err_msg)
            raise e
        except Exception as err:
            err_msg = "Failed to get storage alert: %s" % (six.text_type(err))
            LOG.error(err_msg)
            raise exception.InvalidResults(err_msg)
Example no. 4
0
class SSHHandler(object):
    OID_ERR_ID = '1.3.6.1.4.1.2.6.190.4.3'
    OID_SEQ_NUMBER = '1.3.6.1.4.1.2.6.190.4.9'
    OID_LAST_TIME = '1.3.6.1.4.1.2.6.190.4.10'
    OID_OBJ_TYPE = '1.3.6.1.4.1.2.6.190.4.11'
    OID_OBJ_NAME = '1.3.6.1.4.1.2.6.190.4.17'
    OID_SEVERITY = '1.3.6.1.6.3.1.1.4.1.0'

    TRAP_SEVERITY_MAP = {
        '1.3.6.1.4.1.2.6.190.1': constants.Severity.CRITICAL,
        '1.3.6.1.4.1.2.6.190.2': constants.Severity.WARNING,
        '1.3.6.1.4.1.2.6.190.3': constants.Severity.INFORMATIONAL,
    }

    SEVERITY_MAP = {
        "warning": "Warning",
        "informational": "Informational",
        "error": "Major"
    }
    CONTRL_STATUS_MAP = {
        "online": constants.ControllerStatus.NORMAL,
        "offline": constants.ControllerStatus.OFFLINE,
        "service": constants.ControllerStatus.NORMAL,
        "flushing": constants.ControllerStatus.UNKNOWN,
        "pending": constants.ControllerStatus.UNKNOWN,
        "adding": constants.ControllerStatus.UNKNOWN,
        "deleting": constants.ControllerStatus.UNKNOWN
    }

    DISK_PHYSICAL_TYPE = {
        'fc': constants.DiskPhysicalType.FC,
        'sas_direct': constants.DiskPhysicalType.SAS
    }
    VOLUME_PERF_METRICS = {
        'readIops': 'ro',
        'writeIops': 'wo',
        'readThroughput': 'rb',
        'writeThroughput': 'wb',
        'readIoSize': 'rb',
        'writeIoSize': 'wb',
        'responseTime': 'res_time',
        'throughput': 'tb',
        'iops': 'to',
        'ioSize': 'tb',
        'cacheHitRatio': 'hrt',
        'readCacheHitRatio': 'rhr',
        'writeCacheHitRatio': 'whr'
    }
    DISK_PERF_METRICS = {
        'readIops': 'ro',
        'writeIops': 'wo',
        'readThroughput': 'rb',
        'writeThroughput': 'wb',
        'responseTime': 'res_time',
        'throughput': 'tb',
        'iops': 'to'
    }
    CONTROLLER_PERF_METRICS = {
        'readIops': 'ro',
        'writeIops': 'wo',
        'readThroughput': 'rb',
        'writeThroughput': 'wb',
        'responseTime': 'res_time',
        'throughput': 'tb',
        'iops': 'to'
    }
    PORT_PERF_METRICS = {
        'readIops': 'ro',
        'writeIops': 'wo',
        'readThroughput': 'rb',
        'writeThroughput': 'wb',
        'throughput': 'tb',
        'responseTime': 'res_time',
        'iops': 'to'
    }
    TARGET_RESOURCE_RELATION = {
        constants.ResourceType.DISK: 'mdsk',
        constants.ResourceType.VOLUME: 'vdsk',
        constants.ResourceType.PORT: 'port',
        constants.ResourceType.CONTROLLER: 'node'
    }
    RESOURCE_PERF_MAP = {
        constants.ResourceType.DISK: DISK_PERF_METRICS,
        constants.ResourceType.VOLUME: VOLUME_PERF_METRICS,
        constants.ResourceType.PORT: PORT_PERF_METRICS,
        constants.ResourceType.CONTROLLER: CONTROLLER_PERF_METRICS
    }
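    # The *_PERF_METRICS dicts above map delfin metric names to the counter
    # keys built by package_xml_data() from the /dumps/iostats XML files;
    # RESOURCE_PERF_MAP picks the dict for a given resource type, and
    # TARGET_RESOURCE_RELATION names the XML tag ('vdsk', 'mdsk', 'port',
    # 'node') whose attributes carry that resource's counters.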
    SECONDS_TO_MS = 1000
    ALERT_NOT_FOUND_CODE = 'CMMVC8275E'
    BLOCK_SIZE = 512
    BYTES_TO_BIT = 8

    def __init__(self, **kwargs):
        self.ssh_pool = SSHPool(**kwargs)

    @staticmethod
    def handle_split(split_str, split_char, arr_number):
        split_value = ''
        if split_str is not None and split_str != '':
            tmp_value = split_str.split(split_char, 1)
            if arr_number == 1 and len(tmp_value) > 1:
                split_value = tmp_value[arr_number].strip()
            elif arr_number == 0:
                split_value = tmp_value[arr_number].strip()
        return split_value
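
    # Minimal illustration of handle_split (input strings are hypothetical):
    #   handle_split('errID=123 : Node error', '=', 1) -> '123 : Node error'
    #   handle_split('errID=123 : Node error', ':', 0) -> 'errID=123'
    # i.e. the stripped left (0) or right (1) half of the first split on
    # split_char, or '' when that half does not exist.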

    @staticmethod
    def parse_alert(alert):
        try:
            alert_model = dict()
            alert_name = SSHHandler.handle_split(
                alert.get(SSHHandler.OID_ERR_ID), ':', 1)
            error_info = SSHHandler.handle_split(
                alert.get(SSHHandler.OID_ERR_ID), ':', 0)
            alert_id = SSHHandler.handle_split(error_info, '=', 1)
            severity = SSHHandler.TRAP_SEVERITY_MAP.get(
                alert.get(SSHHandler.OID_SEVERITY),
                constants.Severity.INFORMATIONAL)
            alert_model['alert_id'] = str(alert_id)
            alert_model['alert_name'] = alert_name
            alert_model['severity'] = severity
            alert_model['category'] = constants.Category.FAULT
            alert_model['type'] = constants.EventType.EQUIPMENT_ALARM
            alert_model['sequence_number'] = SSHHandler. \
                handle_split(alert.get(SSHHandler.OID_SEQ_NUMBER), '=', 1)
            timestamp = SSHHandler. \
                handle_split(alert.get(SSHHandler.OID_LAST_TIME), '=', 1)
            time_type = '%a %b %d %H:%M:%S %Y'
            occur_time = int(time.mktime(time.strptime(timestamp, time_type)))
            alert_model['occur_time'] = int(occur_time *
                                            SSHHandler.SECONDS_TO_MS)
            alert_model['description'] = alert_name
            alert_model['resource_type'] = SSHHandler.handle_split(
                alert.get(SSHHandler.OID_OBJ_TYPE), '=', 1)
            alert_model['location'] = SSHHandler.handle_split(
                alert.get(SSHHandler.OID_OBJ_NAME), '=', 1)
            return alert_model
        except Exception as e:
            LOG.error(e)
            msg = ("Failed to build alert model as some attributes missing "
                   "in alert message:%s.") % (six.text_type(e))
            raise exception.InvalidResults(msg)
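
    # Illustrative trap payload for parse_alert (all values hypothetical):
    #   alert = {
    #       SSHHandler.OID_ERR_ID: 'errID=981004 : FC port degraded',
    #       SSHHandler.OID_SEQ_NUMBER: 'sequence=120',
    #       SSHHandler.OID_LAST_TIME: 'last time=Mon Oct 26 08:42:35 2020',
    #       SSHHandler.OID_OBJ_TYPE: 'object type=node',
    #       SSHHandler.OID_OBJ_NAME: 'object name=node1',
    #       SSHHandler.OID_SEVERITY: '1.3.6.1.4.1.2.6.190.2',
    #   }
    # would yield alert_id '981004', severity WARNING, and an occur_time in
    # epoch milliseconds parsed with the '%a %b %d %H:%M:%S %Y' pattern.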

    def login(self):
        try:
            with self.ssh_pool.item() as ssh:
                result = SSHHandler.do_exec('lssystem', ssh)
                if 'is not a recognized command' in result:
                    raise exception.InvalidIpOrPort()
        except Exception as e:
            LOG.error("Failed to login ibm storwize_svc %s" %
                      (six.text_type(e)))
            raise e

    @staticmethod
    def do_exec(command_str, ssh):
        """Execute command"""
        try:
            utils.check_ssh_injection(command_str.split())
            if command_str is not None and ssh is not None:
                stdin, stdout, stderr = ssh.exec_command(command_str)
                res, err = stdout.read(), stderr.read()
                re = res if res else err
                result = re.decode()
        except paramiko.AuthenticationException as ae:
            LOG.error('doexec Authentication error:{}'.format(ae))
            raise exception.InvalidUsernameOrPassword()
        except Exception as e:
            err = six.text_type(e)
            LOG.error('doexec InvalidUsernameOrPassword error')
            if 'timed out' in err:
                raise exception.SSHConnectTimeout()
            elif 'No authentication methods available' in err \
                    or 'Authentication failed' in err:
                raise exception.InvalidUsernameOrPassword()
            elif 'not a valid RSA private key file' in err:
                raise exception.InvalidPrivateKey()
            else:
                raise exception.SSHException(err)
        return result

    def exec_ssh_command(self, command):
        try:
            with self.ssh_pool.item() as ssh:
                ssh_info = SSHHandler.do_exec(command, ssh)
            return ssh_info
        except Exception as e:
            msg = "Failed to ssh ibm storwize_svc %s: %s" % \
                  (command, six.text_type(e))
            raise exception.SSHException(msg)

    def change_capacity_to_bytes(self, unit):
        unit = unit.upper()
        if unit == 'TB':
            result = units.Ti
        elif unit == 'GB':
            result = units.Gi
        elif unit == 'MB':
            result = units.Mi
        elif unit == 'KB':
            result = units.Ki
        else:
            result = 1
        return int(result)

    def parse_string(self, value):
        capacity = 0
        if value:
            if value.isdigit():
                capacity = float(value)
            else:
                unit = value[-2:]
                capacity = float(value[:-2]) * int(
                    self.change_capacity_to_bytes(unit))
        return capacity
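
    # For example, parse_string('1.50TB') returns 1.5 * units.Ti bytes and
    # parse_string('300') returns 300.0; an unrecognized suffix falls back to
    # a multiplier of 1.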

    def get_storage(self):
        try:
            system_info = self.exec_ssh_command('lssystem')
            storage_map = {}
            self.handle_detail(system_info, storage_map, split=' ')
            serial_number = storage_map.get('id')
            status = 'normal' if storage_map.get('statistics_status') == 'on' \
                else 'offline'
            location = storage_map.get('location')
            free_capacity = self.parse_string(
                storage_map.get('total_free_space'))
            used_capacity = self.parse_string(
                storage_map.get('total_used_capacity'))
            raw_capacity = self.parse_string(
                storage_map.get('total_mdisk_capacity'))
            subscribed_capacity = self.parse_string(
                storage_map.get('virtual_capacity'))
            firmware_version = ''
            if storage_map.get('code_level') is not None:
                firmware_version = storage_map.get('code_level').split(' ')[0]
            s = {
                'name': storage_map.get('name'),
                'vendor': 'IBM',
                'model': storage_map.get('product_name'),
                'status': status,
                'serial_number': serial_number,
                'firmware_version': firmware_version,
                'location': location,
                'total_capacity': int(free_capacity + used_capacity),
                'raw_capacity': int(raw_capacity),
                'subscribed_capacity': int(subscribed_capacity),
                'used_capacity': int(used_capacity),
                'free_capacity': int(free_capacity)
            }
            return s
        except exception.DelfinException as e:
            err_msg = "Failed to get storage: %s" % (six.text_type(e.msg))
            LOG.error(err_msg)
            raise e
        except Exception as err:
            err_msg = "Failed to get storage: %s" % (six.text_type(err))
            LOG.error(err_msg)
            raise exception.InvalidResults(err_msg)

    def handle_detail(self, detail_info, detail_map, split):
        detail_arr = detail_info.split('\n')
        for detail in detail_arr:
            if detail is not None and detail != '':
                strinfo = detail.split(split, 1)
                key = strinfo[0]
                value = ''
                if len(strinfo) > 1:
                    value = strinfo[1]
                detail_map[key] = value
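
    # handle_detail() flattens "key<split>value" lines, as printed by e.g.
    # 'lssystem' or 'lsmdiskgrp <id>', into a dict (sample output invented):
    #   handle_detail('id 0000020060C06B2A\nname Cluster_demo', m, split=' ')
    #   -> m == {'id': '0000020060C06B2A', 'name': 'Cluster_demo'}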

    def list_storage_pools(self, storage_id):
        try:
            pool_list = []
            pool_info = self.exec_ssh_command('lsmdiskgrp')
            pool_res = pool_info.split('\n')
            for i in range(1, len(pool_res)):
                if pool_res[i] is None or pool_res[i] == '':
                    continue

                pool_str = ' '.join(pool_res[i].split())
                strinfo = pool_str.split(' ')
                detail_command = 'lsmdiskgrp %s' % strinfo[0]
                detail_info = self.exec_ssh_command(detail_command)
                pool_map = {}
                self.handle_detail(detail_info, pool_map, split=' ')
                status = 'normal' if pool_map.get('status') == 'online' \
                    else 'offline'
                total_cap = self.parse_string(pool_map.get('capacity'))
                free_cap = self.parse_string(pool_map.get('free_capacity'))
                used_cap = self.parse_string(pool_map.get('used_capacity'))
                subscribed_capacity = self.parse_string(
                    pool_map.get('virtual_capacity'))
                p = {
                    'name': pool_map.get('name'),
                    'storage_id': storage_id,
                    'native_storage_pool_id': pool_map.get('id'),
                    'description': '',
                    'status': status,
                    'storage_type': constants.StorageType.BLOCK,
                    'subscribed_capacity': int(subscribed_capacity),
                    'total_capacity': int(total_cap),
                    'used_capacity': int(used_cap),
                    'free_capacity': int(free_cap)
                }
                pool_list.append(p)

            return pool_list
        except exception.DelfinException as e:
            err_msg = "Failed to get storage pool: %s" % (six.text_type(e))
            LOG.error(err_msg)
            raise e
        except Exception as err:
            err_msg = "Failed to get storage pool: %s" % (six.text_type(err))
            LOG.error(err_msg)
            raise exception.InvalidResults(err_msg)

    def list_volumes(self, storage_id):
        try:
            volume_list = []
            volume_info = self.exec_ssh_command('lsvdisk')
            volume_res = volume_info.split('\n')
            for i in range(1, len(volume_res)):
                if volume_res[i] is None or volume_res[i] == '':
                    continue
                volume_str = ' '.join(volume_res[i].split())
                strinfo = volume_str.split(' ')
                volume_id = strinfo[0]
                detail_command = 'lsvdisk -delim : %s' % volume_id
                detail_info = self.exec_ssh_command(detail_command)
                volume_map = {}
                self.handle_detail(detail_info, volume_map, split=':')
                status = 'normal' if volume_map.get('status') == 'online' \
                    else 'offline'
                volume_type = 'thin' if volume_map.get('se_copy') == 'yes' \
                    else 'thick'
                total_capacity = self.parse_string(volume_map.get('capacity'))
                free_capacity = self.parse_string(
                    volume_map.get('free_capacity'))
                used_capacity = self.parse_string(
                    volume_map.get('used_capacity'))
                compressed = True
                deduplicated = True
                if volume_map.get('compressed_copy') == 'no':
                    compressed = False
                if volume_map.get('deduplicated_copy') == 'no':
                    deduplicated = False

                v = {
                    'name': volume_map.get('name'),
                    'storage_id': storage_id,
                    'description': '',
                    'status': status,
                    'native_volume_id': str(volume_map.get('id')),
                    'native_storage_pool_id': volume_map.get('mdisk_grp_id'),
                    'wwn': str(volume_map.get('vdisk_UID')),
                    'type': volume_type,
                    'total_capacity': int(total_capacity),
                    'used_capacity': int(used_capacity),
                    'free_capacity': int(free_capacity),
                    'compressed': compressed,
                    'deduplicated': deduplicated
                }
                volume_list.append(v)
            return volume_list
        except exception.DelfinException as e:
            err_msg = "Failed to get storage volume: %s" % (six.text_type(e))
            LOG.error(err_msg)
            raise e
        except Exception as err:
            err_msg = "Failed to get storage volume: %s" % (six.text_type(err))
            LOG.error(err_msg)
            raise exception.InvalidResults(err_msg)

    def list_alerts(self, query_para):
        try:
            alert_list = []
            alert_info = self.exec_ssh_command('lseventlog -monitoring yes '
                                               '-message no')
            alert_res = alert_info.split('\n')
            for i in range(1, len(alert_res)):
                if alert_res[i] is None or alert_res[i] == '':
                    continue
                alert_str = ' '.join(alert_res[i].split())
                strinfo = alert_str.split(' ', 1)
                detail_command = 'lseventlog %s' % strinfo[0]
                detail_info = self.exec_ssh_command(detail_command)
                alert_map = {}
                self.handle_detail(detail_info, alert_map, split=' ')
                occur_time = int(alert_map.get('last_timestamp_epoch')) * \
                    self.SECONDS_TO_MS
                if not alert_util.is_alert_in_time_range(
                        query_para, occur_time):
                    continue
                alert_name = alert_map.get('event_id_text', '')
                event_id = alert_map.get('event_id')
                location = alert_map.get('object_name', '')
                resource_type = alert_map.get('object_type', '')
                severity = self.SEVERITY_MAP.get(
                    alert_map.get('notification_type'))
                if severity == 'Informational' or severity is None:
                    continue
                alert_model = {
                    'alert_id': event_id,
                    'alert_name': alert_name,
                    'severity': severity,
                    'category': constants.Category.FAULT,
                    'type': 'EquipmentAlarm',
                    'sequence_number': alert_map.get('sequence_number'),
                    'occur_time': occur_time,
                    'description': alert_name,
                    'resource_type': resource_type,
                    'location': location
                }
                alert_list.append(alert_model)

            return alert_list
        except exception.DelfinException as e:
            err_msg = "Failed to get storage alert: %s" % (six.text_type(e))
            LOG.error(err_msg)
            raise e
        except Exception as err:
            err_msg = "Failed to get storage alert: %s" % (six.text_type(err))
            LOG.error(err_msg)
            raise exception.InvalidResults(err_msg)

    def fix_alert(self, alert):
        command_line = 'cheventlog -fix %s' % alert
        result = self.exec_ssh_command(command_line)
        if result:
            if self.ALERT_NOT_FOUND_CODE not in result:
                raise exception.InvalidResults(six.text_type(result))
            LOG.warning("Alert %s doesn't exist.", alert)

    def list_controllers(self, storage_id):
        try:
            controller_list = []
            controller_cmd = 'lsnode'
            control_info = self.exec_ssh_command(controller_cmd)
            if 'command not found' in control_info:
                controller_cmd = 'lsnodecanister'
                control_info = self.exec_ssh_command(controller_cmd)
            control_res = control_info.split('\n')
            for i in range(1, len(control_res)):
                if control_res[i] is None or control_res[i] == '':
                    continue
                control_str = ' '.join(control_res[i].split())
                str_info = control_str.split(' ')
                control_id = str_info[0]
                detail_command = '%s %s' % (controller_cmd, control_id)
                detail_info = self.exec_ssh_command(detail_command)
                control_map = {}
                self.handle_detail(detail_info, control_map, split=' ')
                status = SSHHandler.CONTRL_STATUS_MAP.get(
                    control_map.get('status'),
                    constants.ControllerStatus.UNKNOWN)
                controller_result = {
                    'name': control_map.get('name'),
                    'storage_id': storage_id,
                    'native_controller_id': control_map.get('id'),
                    'status': status,
                    'soft_version': control_map.get('code_level',
                                                    '').split(' ')[0],
                    'location': control_map.get('name')
                }
                controller_list.append(controller_result)
            return controller_list
        except Exception as err:
            err_msg = "Failed to get controller attributes from Storwize: %s"\
                      % (six.text_type(err))
            LOG.error(err_msg)
            raise exception.InvalidResults(err_msg)

    def list_disks(self, storage_id):
        try:
            disk_list = []
            disk_info = self.exec_ssh_command('lsmdisk')
            disk_res = disk_info.split('\n')
            for i in range(1, len(disk_res)):
                if disk_res[i] is None or disk_res[i] == '':
                    continue
                control_str = ' '.join(disk_res[i].split())
                str_info = control_str.split(' ')
                disk_id = str_info[0]
                detail_command = 'lsmdisk %s' % disk_id
                detail_info = self.exec_ssh_command(detail_command)
                disk_map = {}
                self.handle_detail(detail_info, disk_map, split=' ')
                status = constants.DiskStatus.NORMAL
                if disk_map.get('status') == 'offline':
                    status = constants.DiskStatus.OFFLINE
                physical_type = SSHHandler.DISK_PHYSICAL_TYPE.get(
                    disk_map.get('fabric_type'),
                    constants.DiskPhysicalType.UNKNOWN)
                location = '%s_%s' % (disk_map.get('controller_name'),
                                      disk_map.get('name'))
                disk_result = {
                    'name': disk_map.get('name'),
                    'storage_id': storage_id,
                    'native_disk_id': disk_map.get('id'),
                    'capacity':
                    int(self.parse_string(disk_map.get('capacity'))),
                    'status': status,
                    'physical_type': physical_type,
                    'native_disk_group_id': disk_map.get('mdisk_grp_name'),
                    'location': location
                }
                disk_list.append(disk_result)
            return disk_list
        except Exception as err:
            err_msg = "Failed to get disk attributes from Storwize: %s" % \
                      (six.text_type(err))
            raise exception.InvalidResults(err_msg)

    def get_fc_port(self, storage_id):
        port_list = []
        fc_info = self.exec_ssh_command('lsportfc')
        fc_res = fc_info.split('\n')
        for i in range(1, len(fc_res)):
            if fc_res[i] is None or fc_res[i] == '':
                continue
            control_str = ' '.join(fc_res[i].split())
            str_info = control_str.split(' ')
            port_id = str_info[0]
            detail_command = 'lsportfc %s' % port_id
            detail_info = self.exec_ssh_command(detail_command)
            port_map = {}
            self.handle_detail(detail_info, port_map, split=' ')
            status = constants.PortHealthStatus.NORMAL
            conn_status = constants.PortConnectionStatus.CONNECTED
            if port_map.get('status') != 'active':
                status = constants.PortHealthStatus.ABNORMAL
                conn_status = constants.PortConnectionStatus.DISCONNECTED
            port_type = constants.PortType.FC
            if port_map.get('type') == 'ethernet':
                port_type = constants.PortType.ETH
            location = '%s_%s' % (port_map.get('node_name'),
                                  port_map.get('id'))
            speed = None
            if port_map.get('port_speed')[:-2].isdigit():
                speed = int(
                    self.handle_port_bps(port_map.get('port_speed'), 'fc'))
            port_result = {
                'name': location,
                'storage_id': storage_id,
                'native_port_id': port_map.get('id'),
                'location': location,
                'connection_status': conn_status,
                'health_status': status,
                'type': port_type,
                'speed': speed,
                'native_parent_id': port_map.get('node_name'),
                'wwn': port_map.get('WWPN')
            }
            port_list.append(port_result)
        return port_list

    def get_iscsi_port(self, storage_id):
        port_list = []
        for i in range(1, 3):
            port_array = []
            port_command = 'lsportip %s' % i
            port_info = self.exec_ssh_command(port_command)
            port_arr = port_info.split('\n')
            port_map = {}
            for detail in port_arr:
                if detail is not None and detail != '':
                    strinfo = detail.split(' ', 1)
                    key = strinfo[0]
                    value = ''
                    if len(strinfo) > 1:
                        value = strinfo[1]
                    port_map[key] = value
                else:
                    if len(port_map) > 1:
                        port_array.append(port_map)
                        port_map = {}
                        continue
            for port in port_array:
                if port.get('failover') == 'yes':
                    continue
                status = constants.PortHealthStatus.ABNORMAL
                if port.get('state') == 'online':
                    status = constants.PortHealthStatus.NORMAL
                conn_status = constants.PortConnectionStatus.DISCONNECTED
                if port.get('link_state') == 'active':
                    conn_status = constants.PortConnectionStatus.CONNECTED
                port_type = constants.PortType.ETH
                location = '%s_%s' % (port.get('node_name'), port.get('id'))
                port_result = {
                    'name': location,
                    'storage_id': storage_id,
                    'native_port_id': location,
                    'location': location,
                    'connection_status': conn_status,
                    'health_status': status,
                    'type': port_type,
                    'speed': int(self.handle_port_bps(port.get('speed'),
                                                      'eth')),
                    'native_parent_id': port.get('node_name'),
                    'mac_address': port.get('MAC'),
                    'ipv4': port.get('IP_address'),
                    'ipv4_mask': port.get('mask'),
                    'ipv6': port.get('IP_address_6')
                }
                port_list.append(port_result)
        return port_list

    @staticmethod
    def change_speed_to_bytes(unit):
        unit = unit.upper()
        if unit == 'TB':
            result = units.T
        elif unit == 'GB':
            result = units.G
        elif unit == 'MB':
            result = units.M
        elif unit == 'KB':
            result = units.k
        else:
            result = 1
        return int(result)

    def handle_port_bps(self, value, port_type):
        speed = 0
        if value:
            if value.isdigit():
                speed = float(value)
            else:
                if port_type == 'fc':
                    unit = value[-2:]
                    speed = float(value[:-2]) * int(
                        self.change_speed_to_bytes(unit))
                else:
                    unit = value[-4:-2]
                    speed = float(value[:-4]) * int(
                        self.change_speed_to_bytes(unit))
        return speed
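
    # Rough examples (speed strings as reported by lsportfc/lsportip are
    # assumed): handle_port_bps('8Gb', 'fc') -> 8 * units.G, while for
    # ethernet the trailing '/s' is stripped first, so
    # handle_port_bps('1Gb/s', 'eth') -> 1 * units.G.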

    def list_ports(self, storage_id):
        try:
            port_list = []
            port_list.extend(self.get_fc_port(storage_id))
            port_list.extend(self.get_iscsi_port(storage_id))
            return port_list
        except Exception as err:
            err_msg = "Failed to get ports attributes from Storwize: %s" % \
                      (six.text_type(err))
            raise exception.InvalidResults(err_msg)

    @staticmethod
    def handle_stats_filename(file_name, file_map):
        name_arr = file_name.split('_')
        file_type = '%s_%s_%s' % (name_arr[0], name_arr[1], name_arr[2])
        file_time = '20%s%s' % (name_arr[3], name_arr[4])
        time_pattern = '%Y%m%d%H%M%S'
        tools = Tools()
        occur_time = tools.time_str_to_timestamp(file_time, time_pattern)
        if file_map.get(file_type):
            file_map[file_type][occur_time] = file_name
        else:
            file_map[file_type] = {occur_time: file_name}
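
    # Illustrative iostats dump name (node/date parts are hypothetical):
    #   'Nv_stats_78G0123-1_201026_170134'
    #   -> file_type 'Nv_stats_78G0123-1', file_time '20201026170134'
    # so file_map groups the dumps by type and indexes them by the timestamp
    # encoded in the file name.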

    def get_stats_filelist(self, file_map):
        stats_file_command = 'lsdumps -prefix /dumps/iostats'
        file_list = self.exec_ssh_command(stats_file_command)
        file_line = file_list.split('\n')
        for file in islice(file_line, 1, None):
            if file:
                file_arr = ' '.join(file.split()).split(' ')
                if len(file_arr) > 1:
                    file_name = file_arr[1]
                    SSHHandler.handle_stats_filename(file_name, file_map)
        for file_stats in file_map:
            file_map[file_stats] = sorted(file_map.get(file_stats).items(),
                                          key=lambda x: x[0],
                                          reverse=False)

    def package_data(self, storage_id, resource_type, metrics, metric_map):
        resource_id = None
        resource_name = None
        unit = None
        for resource_info in metric_map:
            if resource_type == constants.ResourceType.PORT:
                port_info = self.get_fc_port(storage_id)
                if port_info:
                    for fc_port in port_info:
                        if resource_info.strip('0x').upper() == fc_port.get(
                                'wwn').upper():
                            resource_id = fc_port.get('native_port_id')
                            resource_name = fc_port.get('name')
                            break
            else:
                resource_arr = resource_info.split('_')
                resource_id = resource_arr[0]
                resource_name = resource_arr[1]
            for target in metric_map.get(resource_info):
                if resource_type == constants.ResourceType.PORT:
                    unit = consts.PORT_CAP[target]['unit']
                elif resource_type == constants.ResourceType.VOLUME:
                    unit = consts.VOLUME_CAP[target]['unit']
                elif resource_type == constants.ResourceType.DISK:
                    unit = consts.DISK_CAP[target]['unit']
                elif resource_type == constants.ResourceType.CONTROLLER:
                    unit = consts.CONTROLLER_CAP[target]['unit']
                if 'responseTime' == target:
                    for res_time in metric_map.get(resource_info).get(target):
                        for iops_time in metric_map.get(resource_info).get(
                                'iops'):
                            if res_time == iops_time:
                                res_value = metric_map.get(resource_info).get(
                                    target).get(res_time)
                                iops_value = metric_map.get(resource_info).get(
                                    'iops').get(iops_time)
                                res_value = \
                                    res_value / iops_value if iops_value else 0
                                res_value = round(res_value, 3)
                                metric_map[resource_info][target][res_time] = \
                                    res_value
                                break
                labels = {
                    'storage_id': storage_id,
                    'resource_type': resource_type,
                    'resource_id': resource_id,
                    'resource_name': resource_name,
                    'type': 'RAW',
                    'unit': unit
                }
                metric_value = constants.metric_struct(
                    name=target,
                    labels=labels,
                    values=metric_map.get(resource_info).get(target))
                metrics.append(metric_value)

    @staticmethod
    def count_metric_data(last_data, now_data, interval, target, metric_type,
                          metric_map, res_id):
        if not target:
            return
        if 'CACHEHITRATIO' not in metric_type.upper():
            value = SSHHandler.count_difference(now_data.get(target),
                                                last_data.get(target))
        else:
            value = now_data.get(
                SSHHandler.VOLUME_PERF_METRICS.get(metric_type))
        if 'THROUGHPUT' in metric_type.upper():
            value = value / interval / units.Mi
        elif 'IOSIZE' in metric_type.upper():
            value = value / units.Ki
        elif 'IOPS' in metric_type.upper() or 'RESPONSETIME' \
                in metric_type.upper():
            value = value / interval
        value = round(value, 3)
        if metric_map.get(res_id):
            if metric_map.get(res_id).get(metric_type):
                if metric_map.get(res_id).get(metric_type).get(
                        now_data.get('time')):
                    metric_map[res_id][metric_type][now_data.get('time')] \
                        += value
                else:
                    metric_map[res_id][metric_type][now_data.get('time')] \
                        = value
            else:
                metric_map[res_id][metric_type] = {now_data.get('time'): value}
        else:
            metric_map[res_id] = {metric_type: {now_data.get('time'): value}}

    @staticmethod
    def count_difference(now_value, last_value):
        value = 0
        if now_value >= last_value:
            value = now_value - last_value
        else:
            value = now_value
        return value
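
    # count_difference() guards against a counter reset between two samples:
    # if the newer cumulative value is smaller than the older one, the raw
    # new value is used instead of a negative delta.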

    @staticmethod
    def handle_volume_cache_hit(now_data, last_data):
        rh = SSHHandler.count_difference(now_data.get('rh'),
                                         last_data.get('rh'))
        wh = SSHHandler.count_difference(now_data.get('wh'),
                                         last_data.get('wh'))
        rht = SSHHandler.count_difference(now_data.get('rht'),
                                          last_data.get('rht'))
        wht = SSHHandler.count_difference(now_data.get('wht'),
                                          last_data.get('wht'))
        rhr = rh * 100 / rht if rht > 0 else 0
        whr = wh * 100 / wht if wht > 0 else 0
        hrt = rhr + whr
        now_data['rhr'] = rhr
        now_data['whr'] = whr
        now_data['hrt'] = hrt
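
    # The read/write cache hit ratios are derived from the deltas of the
    # cumulative hit ('rh'/'wh') and total ('rht'/'wht') counters between two
    # samples, as percentages; 'hrt', which feeds cacheHitRatio, is their sum.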

    def get_data_from_each_file(self, file, metric_map, target_list,
                                resource_type, last_data):
        with self.ssh_pool.item() as ssh:
            local_path = '%s/%s' % (os.path.abspath(os.path.join(
                os.getcwd())), consts.LOCAL_FILE_PATH)
            file_xml = Tools.get_remote_file_to_xml(ssh, file[1], local_path,
                                                    consts.REMOTE_FILE_PATH)
            if not file_xml:
                return
            for data in file_xml:
                if re.sub(u"\\{.*?}", "", data.tag) == \
                        SSHHandler.TARGET_RESOURCE_RELATION.get(
                            resource_type):
                    if resource_type == constants.ResourceType.PORT:
                        if data.attrib.get('fc_wwpn'):
                            resource_info = data.attrib.get('fc_wwpn')
                        else:
                            continue
                    elif resource_type == constants. \
                            ResourceType.CONTROLLER:
                        resource_info = '%s_%s' % (int(
                            data.attrib.get('node_id'),
                            16), data.attrib.get('id'))
                    else:
                        resource_info = '%s_%s' % (data.attrib.get('idx'),
                                                   data.attrib.get('id'))
                    now_data = SSHHandler.package_xml_data(
                        data.attrib, file[0], resource_type)
                    if last_data.get(resource_info):
                        interval = (
                            int(file[0]) -
                            last_data.get(resource_info).get('time')) / units.k
                        if interval <= 0:
                            break
                        if resource_type == constants.ResourceType.VOLUME:
                            SSHHandler.handle_volume_cache_hit(
                                now_data, last_data.get(resource_info))
                        for target in target_list:
                            device_target = SSHHandler. \
                                RESOURCE_PERF_MAP.get(resource_type)
                            SSHHandler.count_metric_data(
                                last_data.get(resource_info), now_data,
                                interval, device_target.get(target), target,
                                metric_map, resource_info)
                        last_data[resource_info] = now_data
                    else:
                        last_data[resource_info] = now_data

    def get_stats_from_file(self, file_list, metric_map, target_list,
                            resource_type, start_time, end_time):
        if not file_list:
            return
        find_first_file = False
        recent_file = None
        last_data = {}
        for file in file_list:
            if file[0] >= start_time and file[0] <= end_time:
                if find_first_file is False:
                    if recent_file:
                        self.get_data_from_each_file(recent_file, metric_map,
                                                     target_list,
                                                     resource_type, last_data)
                    self.get_data_from_each_file(file, metric_map, target_list,
                                                 resource_type, last_data)
                    find_first_file = True
                else:
                    self.get_data_from_each_file(file, metric_map, target_list,
                                                 resource_type, last_data)
            recent_file = file
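
    # Because the iostats counters are cumulative, the file immediately
    # before the requested window (recent_file) is parsed first so that the
    # first in-window sample has a baseline to diff against.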

    @staticmethod
    def package_xml_data(file_data, file_time, resource_type):
        rb = 0
        wb = 0
        res_time = 0
        rh = 0
        wh = 0
        rht = 0
        wht = 0
        if resource_type == constants.ResourceType.PORT:
            rb = int(file_data.get('cbr')) + int(file_data.get('hbr')) + int(
                file_data.get('lnbr')) + int(
                    file_data.get('rmbr')) * SSHHandler.BYTES_TO_BIT
            wb = int(file_data.get('cbt')) + int(file_data.get('hbt')) + int(
                file_data.get('lnbt')) + int(
                    file_data.get('rmbt')) * SSHHandler.BYTES_TO_BIT
            ro = int(file_data.get('cer')) + int(file_data.get('her')) + int(
                file_data.get('lner')) + int(file_data.get('rmer'))
            wo = int(file_data.get('cet')) + int(file_data.get('het')) + int(
                file_data.get('lnet')) + int(file_data.get('rmet'))
            res_time = int(file_data.get('dtdt', 0)) / units.Ki
        else:
            if resource_type == constants.ResourceType.VOLUME:
                rb = int(file_data.get('rb')) * SSHHandler.BLOCK_SIZE
                wb = int(file_data.get('wb')) * SSHHandler.BLOCK_SIZE
                rh = int(file_data.get('ctrhs'))
                wh = int(file_data.get('ctwhs'))
                rht = int(file_data.get('ctrs'))
                wht = int(file_data.get('ctws'))
                res_time = int(file_data.get('xl'))
            elif resource_type == constants.ResourceType.DISK:
                rb = int(file_data.get('rb')) * SSHHandler.BLOCK_SIZE
                wb = int(file_data.get('wb')) * SSHHandler.BLOCK_SIZE
                res_time = int(file_data.get('rq')) + int(file_data.get('wq'))
            elif resource_type == constants.ResourceType.CONTROLLER:
                rb = int(file_data.get('rb')) * SSHHandler.BYTES_TO_BIT
                wb = int(file_data.get('wb')) * SSHHandler.BYTES_TO_BIT
                res_time = int(file_data.get('rq')) + int(file_data.get('wq'))
            ro = int(file_data.get('ro'))
            wo = int(file_data.get('wo'))
        now_data = {
            'rb': rb,
            'wb': wb,
            'ro': ro,
            'wo': wo,
            'tb': rb + wb,
            'to': ro + wo,
            'rh': rh,
            'wh': wh,
            'rht': rht,
            'wht': wht,
            'res_time': res_time,
            'time': int(file_time)
        }
        return now_data

    def get_stats_file_data(self, file_map, res_type, metrics, storage_id,
                            target_list, start_time, end_time):
        metric_map = {}
        for file_type in file_map:
            file_list = file_map.get(file_type)
            if 'Nv' in file_type and res_type == constants.ResourceType.VOLUME:
                self.get_stats_from_file(file_list, metric_map, target_list,
                                         constants.ResourceType.VOLUME,
                                         start_time, end_time)
            elif 'Nm' in file_type and res_type == constants.ResourceType.DISK:
                self.get_stats_from_file(file_list, metric_map, target_list,
                                         constants.ResourceType.DISK,
                                         start_time, end_time)
            elif 'Nn' in file_type and res_type == constants.ResourceType.PORT:
                self.get_stats_from_file(file_list, metric_map, target_list,
                                         constants.ResourceType.PORT,
                                         start_time, end_time)
            elif 'Nn' in file_type and res_type == \
                    constants.ResourceType.CONTROLLER:
                self.get_stats_from_file(file_list, metric_map, target_list,
                                         constants.ResourceType.CONTROLLER,
                                         start_time, end_time)
        self.package_data(storage_id, res_type, metrics, metric_map)

    def collect_perf_metrics(self, storage_id, resource_metrics, start_time,
                             end_time):
        metrics = []
        file_map = {}
        try:
            self.get_stats_filelist(file_map)
            if resource_metrics.get(constants.ResourceType.VOLUME):
                self.get_stats_file_data(
                    file_map, constants.ResourceType.VOLUME, metrics,
                    storage_id,
                    resource_metrics.get(constants.ResourceType.VOLUME),
                    start_time, end_time)
            if resource_metrics.get(constants.ResourceType.DISK):
                self.get_stats_file_data(
                    file_map, constants.ResourceType.DISK, metrics, storage_id,
                    resource_metrics.get(constants.ResourceType.DISK),
                    start_time, end_time)
            if resource_metrics.get(constants.ResourceType.PORT):
                self.get_stats_file_data(
                    file_map, constants.ResourceType.PORT, metrics, storage_id,
                    resource_metrics.get(constants.ResourceType.PORT),
                    start_time, end_time)
            if resource_metrics.get(constants.ResourceType.CONTROLLER):
                self.get_stats_file_data(
                    file_map, constants.ResourceType.CONTROLLER, metrics,
                    storage_id,
                    resource_metrics.get(constants.ResourceType.CONTROLLER),
                    start_time, end_time)
        except Exception as err:
            err_msg = "Failed to collect metrics from svc: %s" % \
                      (six.text_type(err))
            LOG.error(err_msg)
            raise exception.InvalidResults(err_msg)
        return metrics
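
    # Minimal usage sketch (connection kwargs and ids are placeholders, not
    # the exact SSHPool signature):
    #   handler = SSHHandler(host='192.0.2.10', port=22,
    #                        username='admin', password='***')
    #   metrics = handler.collect_perf_metrics(
    #       storage_id='storage-uuid',
    #       resource_metrics={constants.ResourceType.VOLUME: ['iops']},
    #       start_time=1603720800000, end_time=1603724400000)
    # start_time/end_time are assumed to be epoch milliseconds, matching the
    # timestamps encoded in the iostats file names.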

    def get_latest_perf_timestamp(self):
        latest_time = 0
        stats_file_command = 'lsdumps -prefix /dumps/iostats'
        file_list = self.exec_ssh_command(stats_file_command)
        file_line = file_list.split('\n')
        for file in islice(file_line, 1, None):
            if file:
                file_arr = ' '.join(file.split()).split(' ')
                if len(file_arr) > 1:
                    file_name = file_arr[1]
                    name_arr = file_name.split('_')
                    file_time = '20%s%s' % (name_arr[3], name_arr[4])
                    time_pattern = '%Y%m%d%H%M%S'
                    tools = Tools()
                    occur_time = tools.time_str_to_timestamp(
                        file_time, time_pattern)
                    if latest_time < occur_time:
                        latest_time = occur_time
        return latest_time