def query_file_pool_stats(self):
    """
    Query filepool policies from the Isilon cluster.

    Returns:
        list of dict: one entry per filepool policy, tagged with the
        cluster host and carrying the policy's apply_order,
        birth_cluster_id, description, name, id, state and state_details.
    """
    # Configure HTTP basic authorization: basicAuth
    configuration = isi_sdk_8_1_1.Configuration()
    configuration.host = 'https://' + self._host + ':8080'
    configuration.username = self._username
    configuration.password = self._password
    configuration.verify_ssl = self._verify_ssl

    # Suppress urllib3's InsecureRequestWarning spam when certificate
    # verification is intentionally disabled.
    if self._verify_ssl is False:
        urllib3.disable_warnings()

    # create an instance of the API class
    api_instance = isi_sdk_8_1_1.FilepoolApi(
        isi_sdk_8_1_1.ApiClient(configuration))
    api_response = api_instance.list_filepool_policies()

    # Flatten each SDK policy object into a plain dict.
    return [{
        'host': self._host,
        'apply_order': r.apply_order,
        'birth_cluster_id': r.birth_cluster_id,
        'description': r.description,
        'name': r.name,
        'id': r.id,
        'state': r.state,
        'state_details': r.state_details,
    } for r in api_response.policies]
# Exemplo n.º 2
# 0
def main():
    """Exercise the Namespace access-point API: list, create, set/read an
    ACL for a test user, then remove everything that was created."""
    # build the connection configuration from the shared test constants
    configuration = isi_sdk.Configuration()
    configuration.username = test_constants.USERNAME
    configuration.password = test_constants.PASSWORD
    configuration.verify_ssl = test_constants.VERIFY_SSL
    configuration.host = test_constants.HOST

    # one client shared by both API endpoints
    client = isi_sdk.ApiClient(configuration)
    namespace_api = isi_sdk.NamespaceApi(client)
    auth_api = isi_sdk.AuthApi(client)

    # list the currently defined access points
    print('Access points: {}'.format(
        namespace_api.list_access_points().namespaces))

    # list the protocol versions the namespace server supports
    supported = namespace_api.list_access_points(versions=True).versions
    print('Protocol versions of namespace access server: {}'.format(supported))

    # create an access point named 'user1' rooted at /ifs/home
    create_params = isi_sdk.AccessPointCreateParams(path='/ifs/home')
    namespace_api.create_access_point('user1', access_point=create_params)
    print('Access points: {}'.format(
        namespace_api.list_access_points().namespaces))

    # create the matching test user
    new_user = isi_sdk.AuthUserCreateParams(
        name='user1',
        password='******',
        home_directory='/ifs/home/user1')
    auth_api.create_auth_user(new_user)

    # grant the test user file_read access via an ACL entry
    namespace_acl = isi_sdk.NamespaceAcl(
        authoritative='acl',
        acl=[isi_sdk.AclObject(trustee={'name': 'user1', 'type': 'user'},
                               accesstype='allow',
                               accessrights=['file_read'],
                               op='add')])
    namespace_api.set_acl('user1', acl=True, nsaccess=True,
                          namespace_acl=namespace_acl)

    # read the ACL back
    print('ACL: {}'.format(
        namespace_api.get_acl('user1', acl=True, nsaccess=True)))

    # remove the test access point, user and home directory
    namespace_api.delete_access_point('user1')
    auth_api.delete_auth_user('user1')
    namespace_api.delete_directory('ifs/home/user1', recursive=True)
    print('Successful clean up')
def get_isilon_connection(module_params):
    """Build an Isilon ApiClient from Ansible module parameters.

    Returns None when the Isilon SDK is not installed (callers are
    expected to have checked HAS_ISILON_SDK themselves).
    """
    if not HAS_ISILON_SDK:
        return None
    conn = isi_sdk.Configuration()
    host = module_params['onefs_host']
    port = module_params['port_no']
    # append the port only when one was supplied
    conn.host = host if port is None else host + ":" + port
    conn.verify_ssl = module_params['verify_ssl']
    conn.username = module_params['api_user']
    conn.password = module_params['api_password']
    return isi_sdk.ApiClient(conn)
def main():
    """End-to-end walkthrough of the Namespace directory API.

    Creates, copies, moves and queries directories, reads attributes,
    manages extended attributes and ACLs, and cleans up every temporary
    path it creates. Connection details come from ``test_constants``.
    """
    # configure username and password
    configuration = isi_sdk.Configuration()
    configuration.username = test_constants.USERNAME
    configuration.password = test_constants.PASSWORD
    configuration.verify_ssl = test_constants.VERIFY_SSL
    configuration.host = test_constants.HOST

    # configure client connection
    api_client = isi_sdk.ApiClient(configuration)
    api = isi_sdk.NamespaceApi(api_client)

    # create a directory
    api.create_directory('ifs/ns_src/ns_dir',
                         x_isi_ifs_target_type='container',
                         recursive=True,
                         overwrite=True)

    # recursively copy directory from /ifs/ns_src to /ifs/ns_dest
    api.copy_directory('ifs/ns_dest',
                       x_isi_ifs_copy_source='/namespace/ifs/ns_src',
                       merge=True)
    print('Copied directory: {}'.format(
        api.get_directory_contents('ifs/ns_dest').children[0].name))
    api.delete_directory('ifs/ns_dest', recursive=True)

    # move directory from /ifs/ns_src to /ifs/ns_dest
    api.move_directory('ifs/ns_src',
                       x_isi_ifs_set_location='/namespace/ifs/ns_dest')
    print('Moved directory: {}'.format(
        api.get_directory_contents('ifs/ns_dest').children[0].name))
    api.delete_directory('ifs/ns_dest', recursive=True)

    # get directory attributes from response headers
    sdk_resp = api.get_directory_attributes_with_http_info('ifs/data')
    # the third index of the response is the response headers
    print('Directory attributes from headers: {}'.format(sdk_resp[2]))

    # get default directory detail
    details = api.get_directory_contents(
        'ifs', detail='default').children[0].to_dict()
    # drop keys whose values are falsy so only populated details are shown
    details = dict((k, v) for k, v in details.items() if v)
    print('Default directory details: {}'.format(details))
    # get directory last modified time
    print('Last modified time: {}'.format(
        api.get_directory_contents(
            'ifs', detail='last_modified').children[0].last_modified))

    # use resume token to paginate requests
    resume = api.get_directory_contents('ifs', limit=3).resume
    api.get_directory_contents('ifs', resume=resume)

    # get extended attributes on a directory
    print('Directory metadata attributes: {}'.format(
        api.get_directory_metadata('ifs', metadata=True)))
    # create extended attribute
    meta = isi_sdk.NamespaceMetadata(action='update',
                                     attrs=[
                                         isi_sdk.NamespaceMetadataAttrs(
                                             name='test',
                                             value='42',
                                             op='update',
                                             namespace='user')
                                     ])
    # set extended attribute on a directory
    api.set_directory_metadata('ifs', metadata=True, directory_metadata=meta)
    # remove extended attribute (same action, per-attr op='delete')
    meta = isi_sdk.NamespaceMetadata(action='update',
                                     attrs=[
                                         isi_sdk.NamespaceMetadataAttrs(
                                             name='test',
                                             value='42',
                                             op='delete',
                                             namespace='user')
                                     ])
    api.set_directory_metadata('ifs', metadata=True, directory_metadata=meta)

    # set access control list on a directory
    test_dir = 'ifs/ns_src'
    api.create_directory(test_dir,
                         x_isi_ifs_target_type='container',
                         x_isi_ifs_access_control='0770')
    print('Directory ACL: {}'.format(api.get_acl(test_dir, acl=True)))

    # give everyone read permissions on the directory
    acl_body = isi_sdk.NamespaceAcl(authoritative='mode', mode='0444')
    api.set_acl(test_dir, acl=True, namespace_acl=acl_body)
    print('Set directory permissions: {}'.format(
        api.get_acl(test_dir, acl=True).mode))
    api.delete_directory(test_dir)

    # build directory query: entries modified since a date AND >= 1000 bytes
    query = isi_sdk.DirectoryQuery(
        result=['name', 'size', 'last_modified', 'owner'],
        scope=isi_sdk.DirectoryQueryScope(
            logic='and',
            conditions=[
                isi_sdk.DirectoryQueryScopeConditions(
                    operator='>=',
                    attr='last_modified',
                    value="Thu, 15 Dec 2011 06:41:04"),
                isi_sdk.DirectoryQueryScopeConditions(operator='>=',
                                                      attr='size',
                                                      value=1000)
            ]))
    # exhaustive list of optional details
    details = ('access_time,atime_val,block_size,blocks,btime_val,'
               'change_time,create_time,ctime_val,gid,group,id,'
               'is_hidden,mode,mtime_val,nlink,stub,type,uid,'
               'container,container_path')
    # execute directory query
    query_resp = api.query_directory('ifs/data',
                                     query=True,
                                     directory_query=query,
                                     detail=details,
                                     max_depth=2,
                                     limit=10)
    print('Query results for /ifs/data: {}'.format(query_resp))
    # request remaining results in chunks until the resume token runs out
    while query_resp.resume:
        query_resp = api.query_directory('ifs/data',
                                         query=True,
                                         directory_query=query,
                                         resume=query_resp.resume)
        print('Resume query results: {}'.format(query_resp))

    print('Successful clean up')
# Exemplo n.º 5
# 0
def main():
    """End-to-end walkthrough of the Namespace file API.

    Creates, copies, clones and moves files, manages extended attributes
    and ACLs, and deletes everything it created. Connection details come
    from ``test_constants``.
    """
    # configure username and password
    configuration = isi_sdk.Configuration()
    configuration.username = test_constants.USERNAME
    configuration.password = test_constants.PASSWORD
    configuration.verify_ssl = test_constants.VERIFY_SSL
    configuration.host = test_constants.HOST

    # configure client connection
    api_client = isi_sdk.ApiClient(configuration)
    api = isi_sdk.NamespaceApi(api_client)

    # create a file
    contents = 'Lorem ipsum dolor sit amet, est eu nobis volutpat maluisset.'
    api.create_file('ifs/data/lorem',
                    x_isi_ifs_target_type='object',
                    file_contents=contents,
                    x_isi_ifs_access_control='600')
    # fetch file contents
    print('File contents: {}'.format(api.get_file_contents('ifs/data/lorem')))

    # copy file /ifs/data/lorem to /ifs/data/ipsum
    api.copy_file('ifs/data/ipsum',
                  x_isi_ifs_copy_source='/namespace/ifs/data/lorem')
    api.delete_file('ifs/data/ipsum')

    # clone file /ifs/data/lorem to /ifs/data/ipsum
    api.copy_file('ifs/data/ipsum',
                  x_isi_ifs_copy_source='/namespace/ifs/data/lorem',
                  clone=True,
                  overwrite=True)
    api.delete_file('ifs/data/ipsum')

    # move file /ifs/data/lorem to /ifs/data/ipsum
    api.move_file('ifs/data/lorem',
                  x_isi_ifs_set_location='/namespace/ifs/data/ipsum')

    # show the file's current extended attributes
    print(api.get_file_metadata('ifs/data/ipsum', metadata=True))

    # create extended attribute
    meta = isi_sdk.NamespaceMetadata(action='update',
                                     attrs=[
                                         isi_sdk.NamespaceMetadataAttrs(
                                             name='test',
                                             value='42',
                                             op='update',
                                             namespace='user')
                                     ])
    api.set_file_metadata('ifs/data/ipsum', metadata=True, file_metadata=meta)

    # assert that extended attribute value was set
    for attr in api.get_file_metadata('ifs/data/ipsum', metadata=True).attrs:
        if attr.name == 'test':
            print('Extended attribute was set: {}'.format(attr.value == '42'))

    attrs = api.get_file_attributes_with_http_info('ifs/data/ipsum')
    # the third index of the response is the response headers
    print('File attribute headers: {}'.format(attrs[2]))
    api.delete_file('ifs/data/ipsum')

    # set access control list on a file
    api.create_file('ifs/data/lorem',
                    x_isi_ifs_target_type='object',
                    x_isi_ifs_access_control='private_read',
                    file_contents='Lorem ipsum dolor sit amet.')

    # get current file permissions
    acl = api.get_acl('ifs/data/lorem', acl=True)
    print('ACL mode is {}'.format(acl.mode))

    # modify file permissions
    acl_body = isi_sdk.NamespaceAcl(authoritative='mode', mode='0555')
    api.set_acl('ifs/data/lorem', acl=True, namespace_acl=acl_body)
    acl = api.get_acl('ifs/data/lorem', acl=True)
    print('New ACL mode is {}'.format(acl.mode))

    api.delete_file('ifs/data/lorem')
    print('Successful clean up')
    def query_sync_iq_stats(self):
        """
        Query SyncIQ job report data.

        Notable report fields:
        start_time -- the time the job started, in unix epoch seconds.
        end_time -- the time the job ended, in unix epoch seconds.
        network_bytes_to_source -- total bytes sent to the source by the job.
        network_bytes_to_target -- total bytes sent to the target by the job.

        Returns:
            list of dict: one flattened entry per SyncIQ report, tagged
            with the host and a single collection timestamp (ms).
        """
        # Configure HTTP basic authorization: basicAuth
        configuration = isi_sdk_8_1_1.Configuration()
        configuration.host = 'https://' + self._host + ':8080'
        configuration.username = self._username
        configuration.password = self._password
        configuration.verify_ssl = self._verify_ssl

        if self._verify_ssl is False:
            urllib3.disable_warnings()

        # create an instance of the API class
        api_instance = isi_sdk_8_1_1.SyncApi(
            isi_sdk_8_1_1.ApiClient(configuration))
        resume = ""
        sync_reports = []
        # one timestamp (ms since epoch) shared by every row of this collection
        collection_time = int(round(time.time() * 1000))
        LOG.info("Collecting SyncIQ data from Isilon host [%s] " % self._host)
        # Page through reports; the API returns resume=None on the last page.
        while resume is not None:
            if resume:
                api_response = api_instance.get_sync_reports(resume=resume,
                                                             limit=MAX_RESULTS)
            else:
                # first request carries no resume token
                api_response = api_instance.get_sync_reports(limit=MAX_RESULTS)
            resume = api_response.resume
            sync_reports.extend([{
                'host': self._host,
                'collection_time': collection_time,
                'sync_job_id': r.id,
                'policy_id': r.policy_id,
                'total_data_bytes': r.total_data_bytes,
                'job_id': r.job_id,
                'duration': r.duration,
                'start_time': r.start_time,
                'end_time': r.end_time,
                'state': r.state,
                'data_replicated': r.network_bytes_to_source,
                'data_to_be_replicated': r.network_bytes_to_target,
            } for r in api_response.reports])

        return sync_reports
    def _make_cloud_job_file(self, cloud_job, file_name, file_state,
                             file_size):
        """Build one flattened per-file result row for a cloud job.

        Copies the job-level fields (host, id, type, state, create and
        completion times) and attaches the given file name/state/size.
        """
        return {
            'host': cloud_job['host'],
            'cloud_job_id': cloud_job['cloud_job_id'],
            'cloud_job_type': cloud_job['cloud_job_type'],
            'effective_state': cloud_job['effective_state'],
            'create_time': cloud_job['create_time'],
            'file_name': file_name,
            'file_state': file_state,
            'file_sizes': file_size,
            # completion_time passes through unchanged (None for running jobs)
            'completion_time': cloud_job['completion_time'],
        }

    def query_cloudpool_stats(self):
        """
        Query CloudPools job and per-file data.

        Lists every cloud job, then for each job with tiered files fetches
        the file names/states and resolves their sizes over SSH via
        self._remote_file_size. Jobs without files get a single
        'No files tiered' placeholder row.

        Returns:
            tuple: (cloud_jobs_files, cloud_jobs) where cloud_jobs_files is
            a flat list of per-file dicts and cloud_jobs the per-job dicts
            (each annotated with an aggregate 'file_size').
        """
        # Configure HTTP basic authorization: basicAuth
        configuration = isi_sdk_8_1_1.Configuration()
        configuration.host = 'https://' + self._host + ':8080'
        configuration.username = self._username
        configuration.password = self._password
        configuration.verify_ssl = self._verify_ssl

        if self._verify_ssl is False:
            urllib3.disable_warnings()

        # create an instance of the API class
        api_instance = isi_sdk_8_1_1.CloudApi(
            isi_sdk_8_1_1.ApiClient(configuration))
        resume = ""
        cloud_jobs = []

        # Page through all cloud jobs; resume is None on the last page.
        while resume is not None:
            if resume:
                api_response = api_instance.list_cloud_jobs(resume=resume,
                                                            limit=MAX_RESULTS)
            else:
                api_response = api_instance.list_cloud_jobs(limit=MAX_RESULTS)
            resume = api_response.resume

            cloud_jobs.extend([{
                'cloud_job_id': r.id,
                'host': self._host,
                'create_time': r.create_time,
                'effective_state': r.effective_state,
                'cloud_job_type': r.type,
                'completion_time': r.completion_time,
                'files_total': r.files.total,
                'files_total_canceled': r.files.total_canceled,
                'files_total_failed': r.files.total_failed,
                'files_total_pending': r.files.total_pending,
                'files_total_processing': r.files.total_processing,
                'files_total_succeeded': r.files.total_succeeded,
            } for r in api_response.jobs])

        # For each job with files_total != 0, fetch its per-file details.
        cloud_jobs_files = []
        for cloud_job in cloud_jobs:
            resume = ""
            files = {}
            LOG.info(
                "Collecting cloudpool data from Isilon host [%s] and Job with id [%s]"
                % (self._host, str(cloud_job['cloud_job_id'])))
            if cloud_job['files_total'] != 0:
                # Page through the job's file list.
                while resume is not None:
                    if resume:
                        api_response = api_instance.get_cloud_jobs_file(
                            str(cloud_job['cloud_job_id']),
                            resume=resume,
                            limit=MAX_RESULTS,
                            batch='true')
                    else:
                        api_response = api_instance.get_cloud_jobs_file(
                            str(cloud_job['cloud_job_id']),
                            limit=MAX_RESULTS,
                            batch='true')
                    resume = api_response.resume

                    for f in api_response.files:
                        # Each entry is a repr-style string; parse it once.
                        entry = ast.literal_eval(f)
                        name = entry['name'].encode(FILE_ENCODING)
                        # NOTE(review): on Python 3 the encoded name is bytes
                        # and never equals the str '<missing>'; this filter
                        # only takes effect on Python 2 — confirm runtime.
                        if name != '<missing>':
                            files[name] = entry['state']

                if len(files) != 0:
                    # Resolve file sizes via SSH access on the cluster.
                    LOG.info(
                        "Collecting cloudpool data from Isilon host [%s] and Job with id [%s] and files %s"
                        % (self._host, str(
                            cloud_job['cloud_job_id']), str(files.keys())))
                    file_with_sizes = self._remote_file_size(
                        self._host, self._username, self._password,
                        files.keys())
                    cloud_file_size = 0
                    for f in file_with_sizes:
                        cloud_jobs_files.append(self._make_cloud_job_file(
                            cloud_job, f['name'], files[f['name']],
                            f['size']))
                        cloud_file_size += int(f['size'])
                    cloud_job['file_size'] = cloud_file_size
                else:
                    # Job reported files but none were resolvable.
                    cloud_jobs_files.append(self._make_cloud_job_file(
                        cloud_job, 'No files tiered', None, 0))
                    cloud_job['file_size'] = 0
            else:
                # Job tiered no files at all.
                cloud_jobs_files.append(self._make_cloud_job_file(
                    cloud_job, 'No files tiered', None, 0))
                cloud_job['file_size'] = 0

        return cloud_jobs_files, cloud_jobs