def clean_up_environment():
    """Remove the test pool and every periodic-snapshot schedule.

    Force-deletes pool 0, then collects the ids of all periodic
    snapshots (both NAS-share and volume type) and deletes each one
    with its snapshot data cleared (``clear=1``).
    """
    # delete pool
    server.webapi('delete', 'pool/0?force=1')
    # delete period snap
    period_snap_request_nas = server.webapi(
        'get', 'periodicsnap?page=1&type=nasshare')
    period_snap_nas_id = [
        entry["id"]
        for entry in json.loads(period_snap_request_nas["text"])
    ]
    period_snap_request_vol = server.webapi('get',
                                            'periodicsnap?page=1&type=volume')
    # renamed loop variable: the original reused "period_nas" for volumes
    period_snap_vol_id = [
        entry["id"]
        for entry in json.loads(period_snap_request_vol["text"])
    ]
    # "snap_id" instead of "id", which shadowed the builtin
    for snap_id in period_snap_nas_id + period_snap_vol_id:
        server.webapi('delete', 'periodicsnap/' + str(snap_id) + '?clear=1')
def precondition():
    """Reset the environment and build the three pools the bgasched
    tests expect (raid5 / raid1 / raid0); does nothing if fewer than
    six drives are available."""
    # delete old bgasched
    clean_up_environment()
    # create pool
    drives = find_pd_id()
    if len(drives) < 6:
        return
    pool_specs = [
        ("testBgasched1", drives[:3], "raid5"),
        ("testBgasched2", drives[3:5], "raid1"),
        ("testBgasched3", drives[5:6], "raid0"),
    ]
    for pool_name, pool_pds, level in pool_specs:
        server.webapi('post', 'pool', {
            "name": pool_name,
            "pds": pool_pds,
            "raid_level": level
        })
    return
def precondition():
    """Create one pool plus the volumes and NAS shares used by the
    periodic-snapshot tests."""
    drive_ids = find_pd_id()
    # create pool
    server.webapi('post', 'pool', {
        "name": "test_periodsnap_pool",
        "pds": drive_ids[:3],
        "raid_level": "raid5"
    })
    # create volume snapshot
    for idx in range(15):
        server.webapi('post', 'volume', {
            'pool_id': 0,
            'name': 'test_periodsnap' + str(idx),
            'capacity': '2GB'
        })
    # create nasShare snapshot
    for idx in range(10):
        server.webapi('post', 'nasshare', {
            'pool_id': 0,
            'name': 'test_periodsnap_' + str(idx),
            'capacity': '2GB'
        })
def clean_up_environment():
    """Tear down the protocol-test fixtures: pool 0 and its NAS user."""
    # force-delete the pool even if it still has children
    server.webapi('delete', 'pool/0?force=1')
    # remove the NAS user created for the protocol tests
    server.webapi('delete', 'dsuser/test_protocol')
def precondition():
    """Build the ACL test fixtures: a pool, 2 NAS shares, 10 users and
    10 groups.

    The controller must leave any AD domain first, because adding a
    local user or group fails while a domain is joined.
    """
    # disable domain, if enabled domain, to add user or group will happen error
    server.webapi('post', 'domain/leave')
    try:
        pdId = find_pd_id()
        # create pool
        server.webapi('post', 'pool', {
            "name": "test_acl_pool",
            "pds": pdId[:3],
            "raid_level": "raid5"
        })
        # create nasShare
        for i in range(2):
            server.webapi('post', 'nasshare', {
                'pool_id': 0,
                'name': 'test_acl_nas_' + str(i),
                'capacity': '2GB'
            })
        # create nas user
        for i in range(10):
            server.webapi('post', 'dsuser', {
                "id": 'test_acl_' + str(i),
                "password": '******'
            })
        # create nas group via the editbegin -> editnext -> editsave wizard;
        # a leading editcancel clears any stale wizard session
        for i in range(10):
            server.webapi('post', 'dsgroup/editcancel')
            step1 = server.webapi('post', 'dsgroup/editbegin', {
                "page": 1,
                "page_size": 20
            })
            token = json.loads(step1["text"])[0]["token"]
            get_page_data = json.loads(step1["text"])[0]["page_data"]
            # [0, uid] marks every listed user as not-a-member
            page_data = [[0, uid["uid"]] for uid in get_page_data]
            server.webapi('post', 'dsgroup/editnext', {
                "page": 1,
                "page_size": 20,
                "token": token,
                "page_data": page_data
            })
            server.webapi(
                'post', 'dsgroup/editsave', {
                    "id": 'test_acl_group_' + str(i),
                    "token": token,
                    "page_data": page_data
                })
            server.webapi('post', 'dsgroup/editcancel')
    except Exception:
        # BUG FIX: was a bare `except:` which also swallows
        # SystemExit/KeyboardInterrupt; keep the best-effort log but
        # only for real errors.
        tolog("precondition is failed\r\n")
def precondition_1():
    """Prepare the migrate-test fixtures.

    Stops any volume currently acting as a migration source, then
    (when drives are available) creates four pools on controller 1,
    ten exported source volumes spread across them, and one snapshot.
    """
    try:
        # stop all migrate
        vol_request = server.webapi('get', 'volume?page=1&page_size=100')
        if isinstance(vol_request, dict):
            for vol in json.loads(vol_request["text"]):
                if 'adv_role' in vol.keys() and vol['adv_role'] == 'Source':
                    server.webapi('post',
                                  'migrate/' + str(vol['id']) + '/stop',
                                  {"location": 1})
        pdId = find_pd_id()
        if len(pdId) > 0:
            # create pool
            server.webapi(
                'post', 'pool', {
                    "name": "T_migrate_0",
                    "pds": pdId[:3],
                    "raid_level": "raid5",
                    'ctrl_id': 1
                })
            server.webapi(
                'post', 'pool', {
                    "name": "T_migrate_1",
                    "pds": pdId[3:6],
                    "raid_level": "raid5",
                    'ctrl_id': 1
                })
            server.webapi(
                'post', 'pool', {
                    "name": "T_migrate_2",
                    "pds": [pdId[6]],
                    "raid_level": "raid0",
                    'ctrl_id': 1
                })
            server.webapi(
                'post', 'pool', {
                    "name": "T_migrate_3",
                    "pds": [15],
                    "raid_level": "raid0",
                    'ctrl_id': 1
                })
            # create source volume (i also doubles as thin_prov flag for
            # the first two volumes, exactly as the original did)
            for i in range(10):
                if i <= 1:
                    server.webapi(
                        'post', 'volume', {
                            'pool_id': 2,
                            'name': 'T_migrate_vol_' + str(i),
                            'capacity': '900GB',
                            'thin_prov': i
                        })
                elif i == 2 or i == 3:
                    server.webapi(
                        'post', 'volume', {
                            'pool_id': 0,
                            'name': 'T_migrate_vol_' + str(i),
                            'capacity': '1GB',
                            'block': '64kb',
                            'sector': '4kb',
                            'compress': 'gzip',
                            'sync': 'disabled',
                            'logbias': 'throughput',
                            'thin_prov': 0
                        })
                else:
                    server.webapi(
                        'post', 'volume', {
                            'pool_id': 1,
                            'name': 'T_migrate_vol_' + str(i),
                            'capacity': '2GB',
                            'thin_prov': 0
                        })
                server.webapi('post', 'volume/' + str(i) + '/export')
            server.webapi('post', 'snapshot', {
                "name": "test_migrate1",
                "type": 'volume',
                "source_id": 2
            })
        return
    except Exception:
        # BUG FIX: the original `except ():` matches no exception type,
        # so this handler could never run and errors propagated anyway.
        tolog('precondition is failed\r\n')
def precondition():
    """Create the pool, NAS share, twenty DS users and one DS group
    needed by the ACL tests."""
    # create pool
    drive_ids = find_pd_id()
    server.webapi('post', 'pool', {
        "name": "test_ACL",
        "pds": drive_ids[:3],
        "raid_level": "raid5"
    })
    # create nas share
    server.webapi('post', 'nasshare', {
        "pool_id": 0,
        "name": 'test_ACL_nasShare',
        "capacity": "10GB"
    })
    # create DSUser
    for user_idx in range(20):
        server.webapi('post', 'dsuser', {
            "id": 'test_ACL_' + str(user_idx),
            "password": '******'
        })
    # create group: cancel any stale wizard, then begin -> next -> save
    server.webapi('post', 'dsgroup/addgrpcancel')
    begin_resp = server.webapi('post', 'dsgroup/addgrpbegin', {
        "id": 'test_ACL_group',
        "page": 1,
        "page_size": 20
    })
    token = str(json.loads(begin_resp["text"])[0]["token"])
    # [1, 1001] marks uid 1001 as a member of the new group
    member_rows = [[1, 1001]]
    next_resp = server.webapi('post', 'dsgroup/addgrpnext', {
        "token": token,
        "page_data": member_rows,
        "page": 1,
        "page_size": 20
    })
    save_resp = server.webapi('post', 'dsgroup/addgrpsave', {
        "id": 'test_ACL_group',
        "token": token,
        "page_data": member_rows
    })
def post_dedicated_spare_revertible_0():
    """Add a non-revertible dedicated spare via the API, verify the
    returned spare record, then knock a pool drive offline and check
    that a rebuild starts.
    """
    FailFlag = False
    tolog('add dedicated spare drive that is not revertible by api\r\n')
    # test data
    # NOTE(review): find_pd_id() is assumed to return two drive-id
    # sequences (two drive classes) here — confirm against its definition.
    pdId2, pdId4 = find_pd_id()
    settings = {
        "dedicated": 'Dedicated',
        "revertible": 0,
        "pd_id": pdId4[-1],
        "pool_list": [0, 1]
    }
    # precondition
    server.webapi(
        'post', 'pool', {
            'name': 'test_spare1',
            'raid_level': 'RAID5',
            'pds': [pdId2[0], pdId2[1], pdId2[2]]
        })
    server.webapi(
        'post', 'pool', {
            'name': 'test_spare2',
            'raid_level': 'RAID5',
            'pds': [pdId4[0], pdId4[1], pdId4[2]]
        })
    tolog('Expect: ' + json.dumps(settings) + '\r\n')
    result = server.webapi('post', 'spare', settings)
    # a str response is the error text; a dict response is success
    if isinstance(result, str):
        FailFlag = True
        tolog('Fail: ' + result + '\r\n')
    else:
        spareResponse = server.webapi('get', 'spare')
        if isinstance(spareResponse, dict):
            spareInfo = json.loads(spareResponse["text"])
            tolog('Actual: ' + json.dumps(spareInfo[0]) + '\r\n')
            # substring check of each submitted value in the JSON dump
            for value in settings.values():
                if str(value) not in json.dumps(spareInfo[0]):
                    FailFlag = True
                    tolog("Fail: please check out parameter " + str(value) +
                          '\r\n')
    # force drive offline so the dedicated spare should kick in;
    # NOTE(review): reconstructed as running regardless of the POST
    # outcome — confirm against the original formatting.
    server.webapiurl('post', 'phydrv', str(pdId2[0]) + '/offline')
    time.sleep(3)
    rbResponse = server.webapi('get', 'rebuild')
    if isinstance(rbResponse, str):
        FailFlag = True
        tolog('Fail: ' + rbResponse + '\r\n')
    if FailFlag:
        tolog(Fail)
    else:
        tolog(Pass)
def problem_body_add_group_step3():
    """Negative test: dsgroup/editsave must reject a missing and an
    empty request body after a begin/next wizard session is open."""
    # precondition: edit cancel
    server.webapi('post', 'dsgroup/editcancel')
    # step1
    step1 = server.webapi('post', 'dsgroup/editbegin', {
        "page": 1,
        "page_size": 500
    })
    token = json.loads(step1["text"])[0]["token"]
    # step2
    # BUG FIX: the original posted to 'dsgroup/editbegin' again with a
    # token/page_data payload; the wizard's second step is
    # 'dsgroup/editnext' (see the other group flows in this suite).
    step2 = server.webapi('post', 'dsgroup/editnext', {
        "token": token,
        "page_data": [[1, 1001]]
    })
    tolog('Expect: missing body should return error\r\n')
    result1 = server.webapi('post', 'dsgroup/editsave')
    if isinstance(result1, dict):
        result_assert.FailFlag = True
        tolog('Fail: that return 200 when body is missing\r\n')
        server.webapi('post', 'dsgroup/editcancel')
    else:
        server.webapi('post', 'dsgroup/editcancel')
        tolog('Actual: ' + result1 + '\r\n')
    # step1
    step1 = server.webapi('post', 'dsgroup/editbegin', {
        "page": 1,
        "page_size": 500
    })
    token = json.loads(step1["text"])[0]["token"]
    # step2 (same fix as above)
    step2 = server.webapi('post', 'dsgroup/editnext', {
        "token": token,
        "page_data": [[1, 1001]]
    })
    tolog('Expect: body is empty that should return error\r\n')
    result2 = server.webapi('post', 'dsgroup/editsave', {})
    if isinstance(result2, dict):
        result_assert.FailFlag = True
        tolog('Fail: that return 200 when body is empty\r\n')
        server.webapi('post', 'dsgroup/editcancel')
    else:
        server.webapi('post', 'dsgroup/editcancel')
        tolog('Actual: ' + result2 + '\r\n')
    result_assert.result_assert()
def addVolume():
    """Create ten volumes exercising every settable parameter, verify
    each against the GET result, then create one volume with only the
    mandatory parameters and verify its defaults.
    """
    FailFlag = False
    tolog('add volume by api use all of settings \r\n')
    # test data: parallel value lists indexed by test-case number i —
    # [0] name, [1] pool_id, [2] capacity, [3] block, [4] sector,
    # [5] compress, [6] sync, [7] thin_prov, [8] quantity,
    # [9] unused in this function
    plId = findPlId()
    settingsList = [[
        'n', '12', 'Name_11', '1_name', '1' * 31, '2' * 32, 'a', '1', 'b' * 31,
        'T' * 32
    ], [plId[0] for i in range(10)],
                    [
                        '1GB', '2GB', '3GB', '4GB', '5GB', '6GB', '9GB',
                        '10GB', '1TB', '2TB'
                    ],
                    [
                        '512b', '1kb', '2kb', '4kb', '8kb', '16kb', '32kb',
                        '64kb', '128kb', '512b'
                    ],
                    [
                        '512b', '1kb', '2kb', '4kb', '512b', '1kb', '2kb',
                        '4kb', '512b', '1kb'
                    ],
                    [
                        'on', 'off', 'lzjb', 'gzip', 'gzip-1', 'gzip-2',
                        'gzip-8', 'gzip-9', 'zle', 'lz4'
                    ],
                    [
                        'standard', 'always', 'disabled', 'standard', 'always',
                        'disabled', 'standard', 'always', 'disabled', 'always'
                    ], [0, 1, 0, 1, 0, 1, 0, 1, 0, 1],
                    [1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
                    [0, 1, 0, 1, 0, 1, 0, 1, 0, 1]]
    # expected display strings the API echoes back:
    # [0] sector, [1] thin_prov, [2] block
    checkpoint = [[
        '512 Bytes', '1 KB', '2 KB', '4 KB', '512 Bytes', '1 KB', '2 KB',
        '4 KB', '512 Bytes', '1 KB'
    ],
                  [
                      'Disabled', 'Enabled', 'Disabled', 'Enabled',
                      'Disabled', 'Enabled', 'Disabled', 'Enabled',
                      'Disabled', 'Enabled'
                  ],
                  [
                      '512 Bytes', '1 KB', '2 KB', '4 KB', '8 KB', '16 KB',
                      '32 KB', '64 KB', '128 KB', '512 Bytes'
                  ]]
    for i in range(len(settingsList[5])):
        settings = {
            "name": settingsList[0][i],
            "pool_id": settingsList[1][i],
            "capacity": settingsList[2][i],
            "block": settingsList[3][i],
            "sector": settingsList[4][i],
            "compress": settingsList[5][i],
            "sync": settingsList[6][i],
            "thin_prov": settingsList[7][i],
            "quantity": settingsList[8][i]
        }
        tolog('Expect: ' + json.dumps(settings) + '\r\n')
        result = server.webapi('post', 'volume', settings)
        # a str response is the error text
        if isinstance(result, str):
            FailFlag = True
            tolog('Fail: ' + result + '\r\n')
        check = server.webapi('get', 'volume')
        checkResult = json.loads(check["text"])
        for cr in checkResult:
            # assumes volume ids are assigned sequentially from 0 —
            # TODO confirm against the API
            if cr["id"] == i:
                tolog('Actual: ' + json.dumps(cr) + '\r\n')
                if settings["name"] != cr["name"]:
                    FailFlag = True
                    tolog('Fail: please check out parameter name')
                if settings["pool_id"] != cr["pool_id"]:
                    FailFlag = True
                    tolog('Fail: please check out parameter pool_id')
                if settings["sync"] != cr["sync"]:
                    FailFlag = True
                    tolog('Fail: please check out parameter sync')
                if cr["sector"] != checkpoint[0][i]:
                    FailFlag = True
                    tolog('Fail: please check out parameter sector ')
                if cr["thin_prov"] != checkpoint[1][i]:
                    FailFlag = True
                    tolog('Fail: please check out parameter thin_prov')
                if cr["block"] != checkpoint[2][i]:
                    FailFlag = True
                    tolog('Fail: please check out parameter block')
    tolog('add volume by api use must settings \r\n')
    # test data
    mustParameters = {
        "name": 'Must_parameters',
        "capacity": '1GB',
        "pool_id": 0
    }
    defaultValues = {
        "block": '8 KB',
        "sector": '512 Bytes',
        "compress": 'off',
        "sync": 'always'
    }
    # NOTE: dict.items() + dict.items() is Python-2-only list
    # concatenation
    tolog('Expect: ' +
          json.dumps(dict(mustParameters.items() + defaultValues.items())) +
          '\r\n')
    mustsettings = server.webapi('post', 'volume', mustParameters)
    if isinstance(mustsettings, str):
        FailFlag = True
        tolog('Fail: ' + mustsettings + '\r\n')
    check2 = server.webapi(
        'get',
        "volume?page=1&page_size=50&search=name+like+'%Must_parameters%'")
    # NOTE(review): the response is parsed before the isinstance(check2,
    # dict) guard below, so an error (str) response would raise here
    checkResult2 = json.loads(check2["text"])[0]
    tolog('Actual: ' + json.dumps(checkResult2) + '\r\n')
    if isinstance(check2, dict):
        if checkResult2["name"] != mustParameters["name"]:
            FailFlag = True
            tolog('Fail: please check out parameter name \r\n')
        if checkResult2["pool_id"] != mustParameters["pool_id"]:
            FailFlag = True
            tolog('Fail: please check out parameter pool_id \r\n')
        # 'compress' is skipped because the API echoes it differently
        # — presumably; TODO confirm
        for key in defaultValues:
            if key != 'compress':
                if defaultValues[key] != checkResult2[key]:
                    FailFlag = True
                    tolog('Fail: please check out parameter ' + key + '\r\n')
    if FailFlag:
        tolog(Fail)
    else:
        tolog(Pass)
def modNASShare(c):
    """Modify NAS share 0 three times over the CLI connection *c*,
    verifying each parameter set in the share's view output.

    Returns the fail flag (True when any check failed).
    """
    Failflag = False
    tolog('Modify NASShare \r\n')
    # precondition
    server.webapi('post', 'nasshare/0/unmount')
    time.sleep(3)
    # test data: three parameter sets, applied one per iteration
    parameters = {
        "name": ['test_modify', '1', 'N' * 31],
        "sync": ['standard', 'always', 'disabled'],
        "compress": ['lz4', 'gzip', 'zle'],
        "logbias": ['throughput', 'latency', 'throughput'],
        "thinprov": ['enable', 'disable', 'enable']
    }
    # display strings the CLI echoes for the thinprov values above
    thinprov = ['Enabled', 'Disabled', 'Enabled']
    for i in range(3):
        settings = 'name=' + parameters["name"][i] + ',' + \
                   'sync=' + parameters["sync"][i] + ',' + \
                   'compress=' + parameters["compress"][i] + ',' + \
                   'logbias=' + parameters["logbias"][i] + ',' + \
                   'thinprov=' + parameters["thinprov"][i]
        tolog('Expect: The NASShare 0 can be modified \r\n')
        result = SendCmd(c, 'nasshare -a mod -i 0 -s "' + settings + '"')
        if 'Error (' in result:
            Failflag = True
            tolog('Fail: To modify NASShare 0 is failed')
        else:
            tolog('Actual: The NASShare 0 is modified \r\n')
            checkResult = SendCmd(c, 'nasshare -v -i 0')
            if parameters["name"][i] not in checkResult:
                Failflag = True
                tolog('Fail: please checkout parameter ' +
                      parameters["name"][i] + '\r\n')
            if parameters["sync"][i] not in checkResult:
                Failflag = True
                tolog('Fail: please checkout parameter ' +
                      parameters["sync"][i] + '\r\n')
            if parameters["compress"][i] not in checkResult:
                Failflag = True
                tolog('Fail: please checkout parameter ' +
                      parameters["compress"][i] + '\r\n')
            if parameters["logbias"][i] not in checkResult:
                Failflag = True
                tolog('Fail: please checkout parameter ' +
                      parameters["logbias"][i] + '\r\n')
            if thinprov[i] not in checkResult:
                Failflag = True
                tolog('Fail: please checkout parameter ' +
                      parameters["thinprov"][i] + '\r\n')
    else:
        # NOTE(review): reconstructed as a for/else, which always runs
        # because the loop has no break — the original attachment of
        # this else is ambiguous; confirm against the pre-mangled file.
        tolog('No NASShare can be used')
    if Failflag:
        tolog(Fail)
    else:
        tolog(Pass)
    return Failflag
def precondition():
    """Build the phydrv-test fixtures: an FC target (when the known
    initiator is present), two pools, two spares, and SSD read/write
    caches."""
    pdId = find_pd_id()
    # to add target
    fabric_request = server.webapi('get', 'fcinitiator')
    try:
        fabric_info = json.loads(fabric_request["text"])
        for fabric in fabric_info:
            if len(fabric["initiators"]) != 0:
                for initiator in fabric["initiators"]:
                    if initiator['wwnn'] == '25-00-00-01-55-59-ea-9d':
                        server.webapi('post', 'target',
                                      {"name": '25-00-00-01-55-59-ea-9d'})
    except Exception:
        # BUG FIX: was a bare `except:`, which also swallows
        # SystemExit/KeyboardInterrupt; keep the best-effort log only
        # for real errors (e.g. unparsable response text).
        tolog('precondition is failed\n')
    ssd_id = []
    # create pool
    server.webapi('post', 'pool', {
        "name": "test_phy_0",
        "pds": [pdId[0]],
        "raid_level": "RAID0"
    })
    server.webapi('post', 'pool', {
        "name": "test_phy_1",
        "pds": pdId[1:4],
        "raid_level": "raid5"
    })
    # create spare
    server.webapi('post', 'spare', {
        "pd_id": pdId[4],
        "dedicated": 'global',
        "revertible": 0
    })
    server.webapi(
        'post', 'spare', {
            "pd_id": pdId[5],
            "dedicated": 'dedicated',
            "revertible": 0,
            "pool_list": [0]
        })
    # create cache: first SSD becomes the read cache, the rest the
    # write cache
    pd_request = server.webapi('get', 'phydrv')
    pd_info = json.loads(pd_request["text"])
    for info in pd_info:
        if info["media_type"] == 'SSD':
            ssd_id.append(info["id"])
    server.webapi('post', 'rcache/attach', {"pd_list": [ssd_id[0]]})
    server.webapi('post', 'wcache/attach', {
        "pd_list": ssd_id[1:],
        "pool_list": []
    })
def addDailyBgasched():
    """Create a daily schedule for each bga type and compare the GET
    result against what was posted.

    Relies on module-level ``bga_type`` (list of type codes) and
    ``deleteBgaschedule`` — presumably defined elsewhere in this file;
    verify before reuse. Returns the fail flag.
    """
    Failflag = False
    # common daily-recurrence payload, extended per type below
    dayTypeParameters = {
        "status": 1,
        "start_time": 1439,
        "interval": 255,
        "day_start": 30,
        "month_start": 12,
        "year_start": 2037,
        "range_end": 2,
        "recurrence_count": 255
    }
    for i in range(3):
        if bga_type[i] == 'rc':
            # NOTE: dict(a.items() + b.items()) is Python-2-only merging
            parameters = dict(
                dayTypeParameters.items() + {
                    'bga_type': bga_type[i],
                    'recurrence_type': 1,
                    'rc_pools': [0],
                    "rc_fix": 0,
                    "rc_pause": 0
                }.items())
            server.webapi('post', 'bgaschedule', parameters)
            tolog('Expect:' + json.dumps(parameters) + '\r\n')
            # the API echoes recurrence_type 1 back as the string "Daily"
            expectResult = dict(
                dayTypeParameters.items() + {
                    'bga_type': bga_type[i],
                    "recurrence_type": "Daily",
                    'rc_pools': [0],
                    "rc_fix": 0,
                    "rc_pause": 0
                }.items())
            check = server.webapi('get', 'bgaschedule')
            result = json.loads(check['text'])
            for r in result:
                if r["bga_type"] == 'rc':
                    actualResult = r
                    tolog('Actual:' + json.dumps(r) + '\r\n')
                    for key in expectResult.keys():
                        if expectResult[key] != actualResult[key]:
                            Failflag = True
                            tolog('Fail: parameters ' +
                                  str(expectResult[key]) + '!=' +
                                  str(actualResult[key]))
            deleteBgaschedule()
        else:
            parameters = dict(dayTypeParameters.items() + {
                'bga_type': bga_type[i],
                'recurrence_type': 1
            }.items())
            server.webapi('post', 'bgaschedule', parameters)
            tolog('Expect:' + json.dumps(parameters) + '\r\n')
            expectResult = dict(dayTypeParameters.items() + {
                'bga_type': bga_type[i],
                'recurrence_type': "Daily"
            }.items())
            check = server.webapi('get', 'bgaschedule')
            result = json.loads(check['text'])
            for r in result:
                if r["bga_type"] == 'sc':
                    actualResult = r
                    tolog('Actual:' + json.dumps(r) + '\r\n')
                    for key in expectResult.keys():
                        if expectResult[key] != actualResult[key]:
                            Failflag = True
                            tolog('Fail: parameters ' +
                                  str(expectResult[key]) + '!=' +
                                  str(actualResult[key]))
                elif r["bga_type"] == 'brc':
                    actualResult = r
                    tolog('Actual:' + json.dumps(r) + '\r\n')
                    for key in expectResult:
                        if expectResult[key] != actualResult[key]:
                            Failflag = True
                            tolog('Fail: parameters ' +
                                  str(expectResult[key]) + '!=' +
                                  str(actualResult[key]))
            deleteBgaschedule()
    return Failflag
def BgascheduleApiDelete():
    """Delete each known bgaschedule entry (brc / rc_1 / rc_2 / sc)
    via the API and verify the type no longer appears in a follow-up
    GET.

    NOTE(review): the inner-loop break placement below is reconstructed
    from mangled formatting — confirm against the pre-mangled file.
    """
    Failflag = False
    ResponseInfo = server.webapi('get', 'bgaschedule')
    bgaInfo = json.loads(ResponseInfo['text'])
    tolog('To delete bgaschedule by api \r\n')
    if len(bgaInfo) > 1:
        for bga in bgaInfo:
            if bga['id'] == 'brc':
                # delete brc type
                tolog('Expect: delete brc type \r\n')
                server.webapiurl('delete', 'bgaschedule', 'brc')
                check = server.webapi('get', 'bgaschedule')
                result = json.loads(check["text"])
                for r in result:
                    if 'brc' in r.values():
                        Failflag = True
                        tolog('Fail: Did not delete brc type')
                    else:
                        tolog('Actual: brc type deletes successfully \r\n')
                        break
            elif bga['id'] == 'rc_1':
                # delete rc type
                tolog('Expect: delete rc type\r\n')
                server.webapiurl('delete', 'bgaschedule', 'rc_1')
                check = server.webapi('get', 'bgaschedule')
                result = json.loads(check["text"])
                for r in result:
                    if 'rc' in r.values():
                        Failflag = True
                        tolog('Fail: Did not delete rc type')
                    else:
                        tolog('Actual: rc type deletes successfully\r\n')
                        break
            elif bga['id'] == 'rc_2':
                # delete rc type
                tolog('Expect: delete rc type\r\n')
                server.webapiurl('delete', 'bgaschedule', 'rc_2')
                check = server.webapi('get', 'bgaschedule')
                result = json.loads(check["text"])
                for r in result:
                    if 'rc' in r.values():
                        Failflag = True
                        tolog('Fail: Did not delete rc type')
                    else:
                        tolog('Actual: rc type deletes successfully\r\n')
                        break
            elif bga['id'] == 'sc':
                # delete sc type
                tolog('Expect: delete sc type\r\n')
                server.webapiurl('delete', 'bgaschedule', 'sc')
                check = server.webapi('get', 'bgaschedule')
                result = json.loads(check["text"])
                for r in result:
                    if 'sc' in r.values():
                        Failflag = True
                        tolog('Fail: Did not delete sc type')
                    else:
                        tolog('Actual: sc type deletes successfully\r\n')
                        break
    if Failflag:
        tolog(Fail)
    else:
        tolog(Pass)
def BgascheduleApiPut():
    """Exercise PUT /bgaschedule/<id> for the three schedule types
    (rc_1 / brc / sc) across monthly, range_end, weekly and daily
    recurrence settings, comparing each GET result to what was PUT.

    The comparison loops skip any key whose expected value equals the
    recurrence-type code (3 / 2 / 1) — presumably to skip
    'recurrence_type', which the API echoes back as a string; note it
    also skips any other key that happens to hold that value.
    """
    Failflag = False
    tolog("To modify bgaschedule by api")
    # precondition: create different types of bgaschedule
    for bga_type in ['rc', 'brc', 'sc']:
        if bga_type == 'rc':
            server.webapi('post',
                          'bgaschedule',
                          parameters={
                              'bga_type': bga_type,
                              'status': 0,
                              'start_time': 0,
                              'recurrence_type': 1,
                              "day_start": 1,
                              "month_start": 1,
                              "year_start": 1970,
                              "range_end": 1,
                              'rc_fix': 0,
                              'rc_pause': 0,
                              'rc_pools': [0]
                          })
        else:
            server.webapi('post',
                          'bgaschedule',
                          parameters={
                              'bga_type': bga_type,
                              'status': 0,
                              'start_time': 0,
                              'recurrence_type': 1,
                              "day_start": 1,
                              "month_start": 1,
                              "year_start": 1970,
                              "range_end": 1
                          })
    # testing data: parallel value lists, one column per PUT iteration
    bga_id = ['rc_1', 'brc', 'sc']
    dailySettingList = {
        "status": [0, 1, 1],
        "start_time": [0, 512, 1439],
        "recurrence_type": [1, 1, 1],
        "interval": [1, 125, 255],
        "day_start": [1, 16, 31],
        "month_start": [1, 6, 12],
        "year_start": [1970, 2017, 2037],
        "range_end": [1, 1, 1]
    }
    weeklySettingList = {
        "status": [0, 0, 1, 1, 0, 1, 1],
        "start_time": [0, 0, 512, 512, 1439, 1439, 513],
        "recurrence_type": [2, 2, 2, 2, 2, 2, 2],
        "interval": [1, 1, 25, 25, 52, 52, 2],
        "day_mask": [32, 68, 41, 85, 91, 111, 127],
        "day_start": [1, 1, 16, 16, 31, 31, 15],
        "month_start": [1, 1, 6, 6, 12, 12, 7],
        "year_start": [1970, 1970, 2017, 2017, 2037, 2037, 2020],
        "range_end": [1, 1, 1, 1, 1, 1, 1]
    }
    monthlySettingListPart = {
        "status": [0, 0, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1],
        "start_time":
        [0, 0, 512, 512, 1439, 1439, 513, 512, 1439, 1439, 513, 513],
        "month_mask": [
            4095, 4079, 3567, 3517, 3435, 2922, 2730, 2356, 2338, 2314, 2112,
            32
        ],
        "day_start": [1, 1, 16, 16, 25, 25, 15, 16, 31, 31, 15, 15],
        "month_start": [1, 1, 6, 6, 12, 12, 7, 6, 12, 12, 7, 8],
        "year_start": [
            1970, 1970, 2017, 2017, 2037, 2037, 2020, 2017, 2037, 2037, 2020,
            2019
        ],
        "range_end": [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
        "recurrence_type": [3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3]
    }
    # To modify monthly bgaschedule
    tolog('To modify monthly bgaschedule \r\n')
    for id in bga_id:
        if id == 'rc_1':
            # NOTE: dict(a.items() + b.items()) is Python-2-only merging
            monthlySettingList = dict(
                monthlySettingListPart.items() + {
                    "day_pattern": [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                    "day_of_month":
                    [1, 1, 16, 16, 31, 31, 15, 16, 28, 28, 15, 15],
                    "rc_fix": [0, 0, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1],
                    "rc_pause": [1, 1, 0, 0, 0, 1, 0, 1, 0, 1, 1, 1],
                    "rc_pools": [[1], [1], [0], [0], [1], [1], [0], [0], [1],
                                 [1], [0], [0]]
                }.items())
            expectResult = {}
            for i in range(12):
                # column i of every list becomes this iteration's payload
                for k in monthlySettingList:
                    expectResult[k] = monthlySettingList[k][i]
                tolog('Expect: ' + json.dumps(expectResult) + '\r\n')
                server.webapi('put', 'bgaschedule/' + id, expectResult)
                check = server.webapi('get', 'bgaschedule')
                result = json.loads(check['text'])
                for r in result:
                    if r["bga_type"] == 'rc':
                        tolog('Actual:' + json.dumps(r) + '\r\n')
                        actualResult = r
                        for key in expectResult:
                            if expectResult[key] != 3:
                                if expectResult[key] != actualResult[key]:
                                    Failflag = True
                                    tolog('Fail: parameters ' +
                                          str(expectResult[key]) + '!=' +
                                          str(actualResult[key]))
        elif id == 'brc':
            monthlySettingList = dict(
                monthlySettingListPart.items() + {
                    "day_pattern": [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
                    "day_of_week": [0, 0, 1, 1, 2, 2, 3, 4, 4, 5, 6, 6]
                }.items())
            expectResult = {}
            for i in range(12):
                for k in monthlySettingList:
                    expectResult[k] = monthlySettingList[k][i]
                tolog('Expect: ' + json.dumps(expectResult) + '\r\n')
                server.webapi('put', 'bgaschedule/' + id, expectResult)
                check = server.webapi('get', 'bgaschedule')
                result = json.loads(check['text'])
                for r in result:
                    if r["bga_type"] == 'brc':
                        tolog('Actual:' + json.dumps(r) + '\r\n')
                        actualResult = r
                        for key in expectResult:
                            if expectResult[key] != 3:
                                if expectResult[key] != actualResult[key]:
                                    Failflag = True
                                    tolog('Fail: parameters ' +
                                          str(expectResult[key]) + '!=' +
                                          str(actualResult[key]))
        elif id == 'sc':
            monthlySettingList = dict(
                monthlySettingListPart.items() + {
                    "day_pattern": [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
                    "day_of_week": [0, 0, 1, 1, 2, 2, 3, 4, 4, 5, 6, 6]
                }.items())
            expectResult = {}
            for i in range(12):
                for k in monthlySettingList:
                    expectResult[k] = monthlySettingList[k][i]
                tolog('Expect: ' + json.dumps(expectResult) + '\r\n')
                server.webapi('put', 'bgaschedule/' + id, expectResult)
                check = server.webapi('get', 'bgaschedule')
                result = json.loads(check['text'])
                for r in result:
                    if r["bga_type"] == 'sc':
                        tolog('Actual:' + json.dumps(r) + '\r\n')
                        actualResult = r
                        for key in expectResult:
                            if expectResult[key] != 3:
                                if expectResult[key] != actualResult[key]:
                                    Failflag = True
                                    tolog('Fail: parameters ' +
                                          str(expectResult[key]) + '!=' +
                                          str(actualResult[key]))
    # modify the range_end settings (2 = by count, 3 = until a date)
    range_endList = [2, 3]
    for id in bga_id:
        for range_end in range_endList:
            if id == 'rc_1' and range_end == 2:
                for recurrence_count in [0, 255]:
                    tolog('Expect: ' + json.dumps({
                        "range_end": 2,
                        "recurrence_count": recurrence_count
                    }) + '\r\n')
                    server.webapi('put', 'bgaschedule/' + id, {
                        "range_end": 2,
                        "recurrence_count": recurrence_count
                    })
                    check = server.webapi('get', 'bgaschedule')
                    result = json.loads(check['text'])
                    for r in result:
                        if r["id"] == "rc_1":
                            actualResult = r
                            tolog("Actual: " + json.dumps(actualResult) +
                                  '\r\n')
                            if r["range_end"] != 2 and r[
                                    "recurrence_count"] != 0:
                                Failflag = True
                                tolog(
                                    'Fail: parameters "range_end" !=2 and "recurrence_count" != 0'
                                )
            elif id == 'brc' and range_end == 3:
                # [day bounds, month bounds, year bounds]
                untilSetting = [[1, 31], [1, 12], [1970, 2037]]
                for i in range(2):
                    tolog('Expect: ' + json.dumps({
                        "range_end": 3,
                        "day_end": untilSetting[0][i],
                        "month_end": untilSetting[1][i],
                        "year_end": untilSetting[2][i]
                    }) + '\r\n')
                    server.webapi(
                        'put', 'bgaschedule/' + id, {
                            "range_end": 3,
                            "day_end": untilSetting[0][i],
                            "month_end": untilSetting[1][i],
                            "year_end": untilSetting[2][i]
                        })
                    check = server.webapi('get', 'bgaschedule')
                    result = json.loads(check['text'])
                    for r in result:
                        if r["id"] == "brc":
                            actualResult = r
                            tolog("Actual: " + json.dumps(actualResult) +
                                  '\r\n')
                            if r["range_end"] != 3 and r[
                                    "year_end"] != untilSetting[2][i]:
                                Failflag = True
                                tolog(
                                    'Fail: parameters "range_end" !=3 and "year_end" != '
                                    + str(untilSetting[2][i]))
            elif id == 'sc' and range_end == 3:
                untilSetting = [[1, 31], [1, 12], [1970, 2037]]
                for i in range(2):
                    tolog('Expect: ' + json.dumps({
                        "range_end": 3,
                        "day_end": untilSetting[0][i],
                        "month_end": untilSetting[1][i],
                        "year_end": untilSetting[2][i]
                    }) + '\r\n')
                    server.webapi(
                        'put', 'bgaschedule/' + id, {
                            "range_end": 3,
                            "day_end": untilSetting[0][i],
                            "month_end": untilSetting[1][i],
                            "year_end": untilSetting[2][i]
                        })
                    check = server.webapi('get', 'bgaschedule')
                    result = json.loads(check['text'])
                    for r in result:
                        if r["id"] == "sc":
                            actualResult = r
                            tolog("Actual: " + json.dumps(actualResult) +
                                  '\r\n')
                            if r["range_end"] != 3 and r[
                                    "month_end"] != untilSetting[1][i]:
                                Failflag = True
                                tolog(
                                    'Fail: parameters "range_end" !=3 and "month_end" != '
                                    + str(untilSetting[1][i]))
    # To modify weekly bgaschedule
    tolog('To modify weekly bgaschedule \r\n')
    for id in bga_id:
        if id == 'rc_1':
            weeklySetting = dict(weeklySettingList.items() + {
                "rc_fix": [0, 0, 1, 1, 0, 1, 1],
                "rc_pause": [1, 1, 0, 0, 0, 1, 1]
            }.items())
            for i in range(7):
                expectResult = {}
                for k in weeklySetting:
                    expectResult[k] = weeklySetting[k][i]
                tolog('Expect: ' + json.dumps(expectResult) + '\r\n')
                server.webapi('put', 'bgaschedule/' + id, expectResult)
                check = server.webapi('get', 'bgaschedule')
                result = json.loads(check["text"])
                for r in result:
                    if r["id"] == 'rc_1':
                        actualResult = r
                        tolog('Actual: ' + json.dumps(actualResult) + '\r\n')
                        for key in expectResult:
                            if expectResult[key] != 2:
                                if expectResult[key] != r[key]:
                                    Failflag = True
                                    tolog('Fail: parameters ' +
                                          str(expectResult[key]) + '!=' +
                                          str(r[key]))
        elif id == 'brc':
            for i in range(7):
                expectResult = {}
                for k in weeklySettingList:
                    expectResult[k] = weeklySettingList[k][i]
                tolog('Expect: ' + json.dumps(expectResult) + '\r\n')
                server.webapi('put', 'bgaschedule/' + id, expectResult)
                check = server.webapi('get', 'bgaschedule')
                result = json.loads(check["text"])
                for r in result:
                    if r["id"] == 'brc':
                        actualResult = r
                        tolog('Actual: ' + json.dumps(actualResult) + '\r\n')
                        for key in expectResult:
                            if expectResult[key] != 2:
                                if expectResult[key] != r[key]:
                                    Failflag = True
                                    tolog('Fail: parameters ' +
                                          str(expectResult[key]) + '!=' +
                                          str(r[key]))
        elif id == 'sc':
            for i in range(7):
                expectResult = {}
                for k in weeklySettingList:
                    expectResult[k] = weeklySettingList[k][i]
                tolog('Expect: ' + json.dumps(expectResult) + '\r\n')
                server.webapi('put', 'bgaschedule/' + id, expectResult)
                check = server.webapi('get', 'bgaschedule')
                result = json.loads(check["text"])
                for r in result:
                    if r["id"] == 'sc':
                        actualResult = r
                        tolog('Actual: ' + json.dumps(actualResult) + '\r\n')
                        for key in expectResult:
                            if expectResult[key] != 2:
                                if expectResult[key] != r[key]:
                                    Failflag = True
                                    tolog('Fail: parameters ' +
                                          str(expectResult[key]) + '!=' +
                                          str(r[key]))
    # To modify daily bgaschedule
    tolog('To modify daily bgaschedule \r\n')
    for id in bga_id:
        if id == 'rc_1':
            dailySetting = dict(dailySettingList.items() + {
                "rc_fix": [0, 0, 1],
                "rc_pause": [1, 0, 1]
            }.items())
            for i in range(3):
                expectResult = {}
                for k in dailySetting:
                    expectResult[k] = dailySetting[k][i]
                tolog('Expect: ' + json.dumps(expectResult) + '\r\n')
                server.webapi('put', 'bgaschedule/' + id, expectResult)
                check = server.webapi('get', 'bgaschedule')
                result = json.loads(check["text"])
                for r in result:
                    if r["id"] == 'rc_1':
                        actualResult = r
                        tolog('Actual: ' + json.dumps(actualResult) + '\r\n')
                        for key in expectResult:
                            if expectResult[key] != 1:
                                if expectResult[key] != r[key]:
                                    Failflag = True
                                    tolog('Fail: parameters ' +
                                          str(expectResult[key]) + '!=' +
                                          str(r[key]))
        elif id == 'brc':
            for i in range(3):
                expectResult = {}
                for k in dailySettingList:
                    expectResult[k] = dailySettingList[k][i]
                tolog('Expect: ' + json.dumps(expectResult) + '\r\n')
                server.webapi('put', 'bgaschedule/' + id, expectResult)
                check = server.webapi('get', 'bgaschedule')
                result = json.loads(check["text"])
                for r in result:
                    if r["id"] == 'brc':
                        actualResult = r
                        tolog('Actual: ' + json.dumps(actualResult) + '\r\n')
                        for key in expectResult:
                            if expectResult[key] != 1:
                                if expectResult[key] != r[key]:
                                    Failflag = True
                                    tolog('Fail: parameters ' +
                                          str(expectResult[key]) + '!=' +
                                          str(r[key]))
        elif id == 'sc':
            for i in range(3):
                expectResult = {}
                for k in dailySettingList:
                    expectResult[k] = dailySettingList[k][i]
                tolog('Expect: ' + json.dumps(expectResult) + '\r\n')
                server.webapi('put', 'bgaschedule/' + id, expectResult)
                check = server.webapi('get', 'bgaschedule')
                result = json.loads(check["text"])
                for r in result:
                    if r["id"] == 'sc':
                        actualResult = r
                        tolog('Actual: ' + json.dumps(actualResult) + '\r\n')
                        for key in expectResult:
                            if expectResult[key] != 1:
                                if expectResult[key] != r[key]:
                                    Failflag = True
                                    tolog('Fail: parameters ' +
                                          str(expectResult[key]) + '!=' +
                                          str(r[key]))
    if Failflag:
        tolog(Fail)
    else:
        tolog(Pass)
def BgascheduleApiPost():
    """Exercise POST /bgaschedule for daily, weekly and monthly recurrence.

    For each recurrence type, one schedule is created per BGA type
    ('rc', 'brc', 'sc'), read back via GET /bgaschedule, and every posted
    parameter is compared against the value the controller reports.
    Overall pass/fail is logged via tolog(Fail)/tolog(Pass).

    NOTE(review): the dict(a.items() + b.items()) merge idiom is Python 2
    only; on Python 3 .items() views cannot be concatenated with '+'.
    """
    Failflag = False
    FailFlagList = []
    tolog("To add bgaschedule by api\r\n")

    # precondition
    def deleteBgaschedule():
        # Remove every known schedule id so each sub-test starts clean.
        ResponseInfo = server.webapi('get', 'bgaschedule')
        bgaInfo = json.loads(ResponseInfo['text'])
        if len(bgaInfo) > 1:
            for bga in bgaInfo:
                if bga['id'] == 'brc':
                    server.webapiurl('delete', 'bgaschedule', 'brc')
                elif bga['id'] == 'rc_1':
                    server.webapiurl('delete', 'bgaschedule', 'rc_1')
                elif bga['id'] == 'rc_2':
                    server.webapiurl('delete', 'bgaschedule', 'rc_2')
                elif bga['id'] == 'sc':
                    server.webapiurl('delete', 'bgaschedule', 'sc')

    deleteBgaschedule()
    pdId = findPlId()
    # Three pools are needed so 'rc' schedules have a pool list to target.
    if len(pdId) >= 5:
        server.webapi(
            'post', 'pool', {
                'name': 'testBgaschedApi1',
                'sector': '512B',
                'raid_level': 'RAID1',
                'ctrl_id': 1,
                'force_sync': 0,
                'pds': [pdId[0], pdId[1]]
            })
        server.webapi(
            'post', 'pool', {
                'name': 'testBgaschedApi2',
                'sector': '512B',
                'raid_level': 'RAID1',
                'ctrl_id': 1,
                'force_sync': 0,
                'pds': [pdId[2], pdId[3]]
            })
        server.webapi(
            'post', 'pool', {
                'name': 'testBgaschedApi3',
                'sector': '512B',
                'raid_level': 'RAID0',
                'ctrl_id': 1,
                'force_sync': 0,
                'pds': [pdId[4]]
            })
    bga_type = ['rc', 'brc', 'sc']

    # add bgasched of daily type
    tolog('add bgasched of daily type\r\n')

    def addDailyBgasched():
        # Returns True if any posted parameter did not round-trip.
        Failflag = False
        dayTypeParameters = {
            "status": 1,
            "start_time": 1439,
            "interval": 255,
            "day_start": 30,
            "month_start": 12,
            "year_start": 2037,
            "range_end": 2,
            "recurrence_count": 255
        }
        for i in range(3):
            if bga_type[i] == 'rc':
                # 'rc' additionally takes rc_pools / rc_fix / rc_pause.
                parameters = dict(
                    dayTypeParameters.items() + {
                        'bga_type': bga_type[i],
                        'recurrence_type': 1,
                        'rc_pools': [0],
                        "rc_fix": 0,
                        "rc_pause": 0
                    }.items())
                server.webapi('post', 'bgaschedule', parameters)
                tolog('Expect:' + json.dumps(parameters) + '\r\n')
                # GET reports recurrence_type as a string ("Daily"),
                # not the numeric code that was posted.
                expectResult = dict(
                    dayTypeParameters.items() + {
                        'bga_type': bga_type[i],
                        "recurrence_type": "Daily",
                        'rc_pools': [0],
                        "rc_fix": 0,
                        "rc_pause": 0
                    }.items())
                check = server.webapi('get', 'bgaschedule')
                result = json.loads(check['text'])
                for r in result:
                    if r["bga_type"] == 'rc':
                        actualResult = r
                        tolog('Actual:' + json.dumps(r) + '\r\n')
                        for key in expectResult.keys():
                            if expectResult[key] != actualResult[key]:
                                Failflag = True
                                tolog('Fail: parameters ' +
                                      str(expectResult[key]) + '!=' +
                                      str(actualResult[key]))
                deleteBgaschedule()
            else:
                parameters = dict(dayTypeParameters.items() + {
                    'bga_type': bga_type[i],
                    'recurrence_type': 1
                }.items())
                server.webapi('post', 'bgaschedule', parameters)
                tolog('Expect:' + json.dumps(parameters) + '\r\n')
                expectResult = dict(dayTypeParameters.items() + {
                    'bga_type': bga_type[i],
                    'recurrence_type': "Daily"
                }.items())
                check = server.webapi('get', 'bgaschedule')
                result = json.loads(check['text'])
                for r in result:
                    if r["bga_type"] == 'sc':
                        actualResult = r
                        tolog('Actual:' + json.dumps(r) + '\r\n')
                        for key in expectResult.keys():
                            if expectResult[key] != actualResult[key]:
                                Failflag = True
                                tolog('Fail: parameters ' +
                                      str(expectResult[key]) + '!=' +
                                      str(actualResult[key]))
                    elif r["bga_type"] == 'brc':
                        actualResult = r
                        tolog('Actual:' + json.dumps(r) + '\r\n')
                        for key in expectResult:
                            if expectResult[key] != actualResult[key]:
                                Failflag = True
                                tolog('Fail: parameters ' +
                                      str(expectResult[key]) + '!=' +
                                      str(actualResult[key]))
                deleteBgaschedule()
        return Failflag

    FailFlagList.append(addDailyBgasched())

    # add bgasched of weekly type
    tolog('add bgasched of weekly type \r\n')

    def addWeeklyBgasched():
        # Same round-trip check as addDailyBgasched, weekly parameters.
        Failflag = False
        weeklyTypeParameters = {
            "status": 0,
            "start_time": 0,
            "interval": 1,
            "day_mask": 127,
            "day_start": 1,
            "month_start": 1,
            "year_start": 1971,
            "range_end": 3,
            "day_end": 1,
            "month_end": 1,
            "year_end": 2018
        }
        for i in range(3):
            if bga_type[i] == 'rc':
                parameters = dict(
                    weeklyTypeParameters.items() + {
                        'bga_type': bga_type[i],
                        'recurrence_type': 2,
                        'rc_pools': [0],
                        "rc_fix": 1,
                        "rc_pause": 1
                    }.items())
                server.webapi('post', 'bgaschedule', parameters)
                tolog('Expect:' + json.dumps(parameters) + '\r\n')
                expectResult = dict(
                    weeklyTypeParameters.items() + {
                        'bga_type': bga_type[i],
                        "recurrence_type": "Weekly",
                        'rc_pools': [0],
                        "rc_fix": 1,
                        "rc_pause": 1
                    }.items())
                check = server.webapi('get', 'bgaschedule')
                result = json.loads(check['text'])
                for r in result:
                    if r["bga_type"] == 'rc':
                        actualResult = r
                        tolog('Actual:' + json.dumps(r) + '\r\n')
                        for key in expectResult.keys():
                            if expectResult[key] != actualResult[key]:
                                Failflag = True
                                tolog('Fail: parameters ' +
                                      str(expectResult[key]) + '!=' +
                                      str(actualResult[key]))
                deleteBgaschedule()
            else:
                parameters = dict(weeklyTypeParameters.items() + {
                    'bga_type': bga_type[i],
                    'recurrence_type': 2
                }.items())
                server.webapi('post', 'bgaschedule', parameters)
                tolog('Expect:' + json.dumps(parameters) + '\r\n')
                expectResult = dict(weeklyTypeParameters.items() + {
                    'bga_type': bga_type[i],
                    "recurrence_type": "Weekly"
                }.items())
                check = server.webapi('get', 'bgaschedule')
                result = json.loads(check['text'])
                for r in result:
                    if r["bga_type"] == 'sc':
                        actualResult = r
                        tolog('Actual:' + json.dumps(r) + '\r\n')
                        for key in expectResult.keys():
                            if expectResult[key] != actualResult[key]:
                                Failflag = True
                                tolog('Fail: parameters ' +
                                      str(expectResult[key]) + '!=' +
                                      str(actualResult[key]))
                    elif r["bga_type"] == 'brc':
                        actualResult = r
                        tolog('Actual:' + json.dumps(r) + '\r\n')
                        for key in expectResult:
                            if expectResult[key] != actualResult[key]:
                                Failflag = True
                                tolog('Fail: parameters ' +
                                      str(expectResult[key]) + '!=' +
                                      str(actualResult[key]))
                deleteBgaschedule()
        return Failflag

    FailFlagList.append(addWeeklyBgasched())

    # add bgasched of monthly type
    tolog('add bgasched of monthly type\r\n')

    def addmonthlyBgasched():
        # Same round-trip check, monthly parameters.
        Failflag = False
        monthlyTypeParameters = {
            "status": 1,
            "start_time": 0,
            "day_pattern": 0,
            "day_of_month": 5,
            "month_mask": 4095,
            "day_start": 28,
            "month_start": 9,
            "year_start": 2018,
            "range_end": 3,
            "day_end": 1,
            "month_end": 1,
            "year_end": 2019
        }
        for i in range(3):
            if bga_type[i] == 'rc':
                parameters = dict(
                    monthlyTypeParameters.items() + {
                        'bga_type': bga_type[i],
                        'recurrence_type': 3,
                        'rc_pools': [0],
                        "rc_fix": 1,
                        "rc_pause": 1
                    }.items())
                server.webapi('post', 'bgaschedule', parameters)
                tolog('Expect:' + json.dumps(parameters) + '\r\n')
                expectResult = dict(
                    monthlyTypeParameters.items() + {
                        'bga_type': bga_type[i],
                        "recurrence_type": "Monthly",
                        'rc_pools': [0],
                        "rc_fix": 1,
                        "rc_pause": 1
                    }.items())
                check = server.webapi('get', 'bgaschedule')
                result = json.loads(check['text'])
                for r in result:
                    if r["bga_type"] == 'rc':
                        actualResult = r
                        tolog('Actual:' + json.dumps(r) + '\r\n')
                        for key in expectResult.keys():
                            if expectResult[key] != actualResult[key]:
                                Failflag = True
                                tolog('Fail: parameters ' +
                                      str(expectResult[key]) + '!=' +
                                      str(actualResult[key]))
                deleteBgaschedule()
            else:
                parameters = dict(monthlyTypeParameters.items() + {
                    'bga_type': bga_type[i],
                    'recurrence_type': 3
                }.items())
                server.webapi('post', 'bgaschedule', parameters)
                tolog('Expect:' + json.dumps(parameters) + '\r\n')
                expectResult = dict(monthlyTypeParameters.items() + {
                    'bga_type': bga_type[i],
                    "recurrence_type": "Monthly"
                }.items())
                check = server.webapi('get', 'bgaschedule')
                result = json.loads(check['text'])
                for r in result:
                    if r["bga_type"] == 'sc':
                        actualResult = r
                        tolog('Actual:' + json.dumps(r) + '\r\n')
                        for key in expectResult.keys():
                            if expectResult[key] != actualResult[key]:
                                Failflag = True
                                tolog('Fail: parameters ' +
                                      str(expectResult[key]) + '!=' +
                                      str(actualResult[key]))
                    elif r["bga_type"] == 'brc':
                        actualResult = r
                        tolog('Actual:' + json.dumps(r) + '\r\n')
                        for key in expectResult:
                            if expectResult[key] != actualResult[key]:
                                Failflag = True
                                tolog('Fail: parameters ' +
                                      str(expectResult[key]) + '!=' +
                                      str(actualResult[key]))
                deleteBgaschedule()
        return Failflag

    FailFlagList.append(addmonthlyBgasched())
    # Any sub-test failing fails the whole case.
    for Flag in FailFlagList:
        if Flag == True:
            Failflag = True
    if Failflag:
        tolog(Fail)
    else:
        tolog(Pass)
def get_spare(): FailFlag = False # test data pdId2, pdId4 = find_pd_id() # precondition server.webapi( 'post', 'pool', { 'name': 'test_spare1', 'raid_level': 'RAID5', 'pds': [pdId2[0], pdId2[1], pdId2[2]] }) key = ["dedicated", "revertible", "pd_id", "pool_list"] values = [['Global', 0, pdId2[-1], []], ['Global', 1, pdId4[0], []], ['Dedicated', 0, pdId4[1], [0]], ['Dedicated', 1, pdId4[2], [0]]] for i in range(len(values)): tolog('Expect: ' + json.dumps(dict(zip(key, values[i]))) + '\r\n') pre = server.webapi('post', 'spare', dict(zip(key, values[i]))) if isinstance(pre, str): tolog('please check out precondition: ' + pre + '\r\n') # list all of spare tolog('list all of spare drive\r\n') result = server.webapi('get', 'spare') if isinstance(result, str): FailFlag = True tolog('Fail: ' + result + '\r\n') else: checkResult = json.loads(result["text"]) if len(checkResult) < 4: FailFlag = True tolog('Fail: please check out spare drive quantity\r\n') tolog('Actual:\r\n' + str(checkResult).replace('{', '').replace( 'u', '').replace('}', '\r\n') + '\r\n') # list specific spare drive tolog('list specific spare drive\r\n') spareInfo = json.loads(result["text"]) for spare in spareInfo: tolog('Expect: list spare drive ' + str(spare["id"]) + '\r\n') one = server.webapiurl('get', 'spare', str(spare["id"])) if isinstance(one, str): FailFlag = True tolog('Fail: ' + one + '\r\n') else: oneCheck = json.loads(one["text"])[0] tolog('Actual: ' + json.dumps(oneCheck) + '\r\n') if FailFlag: tolog(Fail) else: tolog(Pass)
def precondition(): try: clean_up_environment() pdId = find_pd_id() # create pool server.webapi('post', 'pool', { "name": "T_lunMap_P0", "pds": pdId[6:9], "raid_level": "raid5" }) # create volume and export it for i in range(3): server.webapi('post', 'volume', { 'pool_id': 0, 'name': 'T_lunMap_V' + str(i), 'capacity': '100GB' }) server.webapi('post', 'volume/' + str(i) + '/export') # create snapshot and export it for i in range(3): server.webapi('post', 'snapshot', { "name": "T_lunMap_SS" + str(i), "type": 'volume', "source_id": 2 }) server.webapi('post', 'snapshot/' + str(i) + '/export') # create clone and export it for i in range(3): server.webapi('post', 'clone', { "name": "T_lunMap_C" + str(i), "source_id": 2 }) server.webapi('post', 'clone/' + str(i) + '/export') # create initiator for i in range(4): server.webapi('post', 'initiator', { 'type': 'iSCSI', 'name': 'T.com' + str(i) }) server.webapi('post', 'initiator', { 'type': 'fc', 'name': '00-11-22-33-00-00-11-1' + str(i) }) except: tolog("precondition is failed\r\n") return
def find_pd_id(physical_capacity=None):
    """Return ids of unconfigured HDDs, tearing down existing config first.

    Stops migrations/replications, deletes pools, spares, read/write cache,
    then collects ids of drives whose cfg_status is 'Unconfigured'.

    :param physical_capacity: None (any size), '2TB' or '4TB'; any other
                              value yields an empty list.
    :return: list of matching physical-drive ids (empty on error).
    """
    pd_id = []
    try:
        pd_request = server.webapi('get', 'phydrv')
        if isinstance(pd_request, dict):
            pd_info = json.loads(pd_request["text"])
            # Cheap containment probe on the stringified drive list to
            # decide which cleanup steps are needed.
            str_pd_info = str(pd_info)
            # delete pool (stop migration/replication first, else the
            # forced pool delete can fail)
            if 'Pool' in str_pd_info:
                vol_request = server.webapi('get',
                                            'volume?page=1&page_size=100')
                if isinstance(vol_request, dict):
                    for vol in json.loads(vol_request["text"]):
                        if 'adv_role' in vol and vol[
                                'adv_role'] == 'Source' and vol[
                                    'adv_type'] == 'Migration':
                            server.webapi(
                                'post', 'migrate/' + str(vol['id']) + '/stop',
                                {"location": 1})
                        if vol['adv_type'] == 'Replication':
                            replica_request = server.webapi('get', 'replica')
                            if isinstance(replica_request, dict):
                                for replica in json.loads(
                                        replica_request["text"]):
                                    server.webapi(
                                        'post', 'replicaloc/' +
                                        str(replica["src_id"]) + '/stop')
                pool_response = server.webapi('get', 'pool')
                if isinstance(pool_response, dict):
                    pool_info = json.loads(pool_response["text"])
                    if len(pool_info) != 0:
                        for pool in pool_info:
                            server.webapiurl('delete', 'pool',
                                             str(pool['id']) + '?force=1')
                else:
                    tolog(str(pool_response))
            # delete spare
            if 'Spare' in str_pd_info:
                spare_response = server.webapi('get', 'spare')
                if isinstance(spare_response, dict):
                    spare_info = json.loads(spare_response["text"])
                    for spare in spare_info:
                        server.webapiurl('delete', 'spare', str(spare["id"]))
                else:
                    tolog(str(spare_response))
            # delete read cache
            if 'ReadCache' in str_pd_info:
                read_ache_response = server.webapi('get', 'rcache')
                if isinstance(read_ache_response, dict):
                    cache_info = json.loads(
                        read_ache_response["text"])[0]["pd_list"][0]
                    sdd_id = cache_info["pd_id"]
                    server.webapi('post', 'rcache/detach',
                                  {"pd_list": [sdd_id]})
                else:
                    tolog(str(read_ache_response))
            # delete write cache
            if 'WriteCache' in str_pd_info:
                server.webapi('post', 'wcache/detach', {"id": 'detach'})
        else:
            tolog(str(pd_request))
        # find pd id (re-read the drive list after cleanup)
        pdResponse = server.webapi('get', 'phydrv')
        if isinstance(pdResponse, dict):
            pdInfo = json.loads(pdResponse["text"])
            if physical_capacity is None:
                for pd in pdInfo:
                    if pd["cfg_status"] == 'Unconfigured' and pd[
                            "media_type"] == 'HDD':
                        pd_id.append(pd["id"])
            elif physical_capacity == '2TB':
                for pd in pdInfo:
                    if pd["cfg_status"] == 'Unconfigured' and pd[
                            "physical_capacity"] == '2 TB' and pd[
                                "media_type"] == 'HDD':
                        pd_id.append(pd["id"])
            elif physical_capacity == '4TB':
                for pd in pdInfo:
                    if pd["cfg_status"] == 'Unconfigured' and pd[
                            "physical_capacity"] == '4 TB' and pd[
                                "media_type"] == 'HDD':
                        pd_id.append(pd["id"])
        else:
            tolog(str(pdResponse))
        # final sweep: force-delete any pool that still exists
        pool_response1 = server.webapi('get', 'pool')
        ed_pool_info = json.loads(pool_response1['text'])
        for ed_pool in ed_pool_info:
            server.webapiurl('delete', 'pool', str(ed_pool['id']) + '?force=1')
    except Exception:
        # was a bare 'except:' — narrowed so Ctrl-C/SystemExit propagate
        tolog('please check out physical drive ConfigStatus\n')
    return pd_id
def invalidSettingVolume():
    """Verify POST /volume rejects each invalid parameter value.

    Each row in settingsList has exactly one invalid field; the API is
    expected to return an error string (webapi returns str on error,
    dict on success — success here is a failure of the test).
    """
    FailFlag = False
    tolog('Verify invalid setting \r\n')
    # field order matches the rows in settingsList
    fields = [
        "name", "pool_id", "capacity", "block", "sector", "compress", "sync",
        "thin_prov", "quantity"
    ]
    # test data
    settingsList = [
        # invalid setting pool_id
        ['legal_name1', 100, '1GB', '512b', '512b', 'off', 'always', 0, 1],
        ['legal_name2', 'test', '1GB', '512b', '512b', 'off', 'always', 0, 1],
        # invalid setting capacity
        ['legal_name3', 0, '1B', '512b', '512b', 'off', 'always', 0, 1],
        ['legal_name4', 0, 0, '512b', '512b', 'off', 'always', 0, 1],
        # invalid setting block
        ['legal_name5', 0, '1GB', '512GB', '512b', 'off', 'always', 0, 1],
        ['legal_name6', 0, '1GB', 0, '512b', 'off', 'always', 0, 1],
        # invalid setting sector
        ['legal_name7', 0, '1GB', '512b', '512GB', 'off', 'always', 0, 1],
        ['legal_name8', 0, '1GB', '512b', 0, 'off', 'always', 0, 1],
        # invalid setting compress
        ['legal_name9', 0, '1GB', '512b', '512b', 'test', 'always', 0, 1],
        ['legal_name10', 0, '1GB', '512b', '512b', 0, 'always', 0, 1],
        # invalid setting sync
        ['legal_name11', 0, '1GB', '512b', '512b', 'off', 'test', 0, 1],
        ['legal_name12', 0, '1GB', '512b', '512b', 'off', 0, 0, 1],
        # invalid setting thin_prov
        ['legal_name13', 0, '1GB', '512b', '512b', 'off', 'always', 2, 1],
        ['legal_name14', 0, '1GB', '512b', '512b', 'off', 'always', 'test', 1],
        # invalid setting quantity
        ['legal_name15', 0, '1GB', '512b', '512b', 'off', 'always', 0, 0],
        ['legal_name16', 0, '1GB', '512b', '512b', 'off', 'always', 0, -1],
        ['legal_name17', 0, '1GB', '512b', '512b', 'off', 'always', 0, 'test']
    ]
    expectResult = [
        'invalid setting pool_id', 'invalid setting pool_id',
        'invalid setting capacity', 'invalid setting capacity',
        'invalid setting block', 'invalid setting block',
        'invalid setting sector', 'invalid setting sector',
        'invalid setting compress', 'invalid setting compress',
        'invalid setting sync', 'invalid setting sync',
        'invalid setting thin_prov', 'invalid setting thin_prov',
        'invalid setting quantity', 'invalid setting quantity',
        'invalid setting quantity'
    ]
    # was an index-based loop rebuilding the dict field-by-field;
    # zip keeps row and expected message paired without indexing
    for row, expected in zip(settingsList, expectResult):
        settings = dict(zip(fields, row))
        tolog('Expect: ' + expected + '\r\n')
        result = server.webapi('post', 'volume', settings)
        if isinstance(result, dict):
            # a dict means the volume was created — invalid input accepted
            FailFlag = True
            tolog("Fail: " + json.dumps(settings) + '\r\n')
        else:
            tolog('Actual: ' + result + '\r\n')
    if FailFlag:
        tolog(Fail)
    else:
        tolog(Pass)
def precondition(): # stop all replication replica_request = server.webapi('get', 'replica') if isinstance(replica_request, dict): try: for replica in json.loads(replica_request["text"]): server.webapi('post', 'replicaloc/' + str(replica["src_id"]) + '/stop') time.sleep(3) except (TypeError, KeyError): tolog('precondition is failed\r\n') else: pdId = find_pd_id() # create pool if len(pdId) > 0: server.webapi( 'post', 'pool', { "name": "T_replication_0", "pds": pdId[:3], "raid_level": "raid5" }) server.webapi( 'post', 'pool', { "name": "T_replication_1", "pds": pdId[3:6], "raid_level": "raid5" }) server.webapi( 'post', 'pool', { "name": "T_replication_2", "pds": [pdId[6]], "raid_level": "raid0" }) server.webapi('post', 'pool', { "name": "T_replication_3", "pds": [15], "raid_level": "raid0" }) else: tolog('precondition is failed\r\n') # create source volume for i in range(6): if i <= 1: server.webapi( 'post', 'volume', { 'pool_id': 2, 'name': 'T_replication_vol_' + str(i), 'capacity': '900GB', 'thin_prov': i }) elif i == 2 or i == 3: server.webapi( 'post', 'volume', { 'pool_id': 0, 'name': 'T_replication_vol_' + str(i), 'capacity': '4GB', 'block': '64kb', 'sector': '4kb', 'compress': 'gzip', 'sync': 'disabled', 'logbias': 'throughput' }) elif i == 4 or i == 5: server.webapi( 'post', 'volume', { 'pool_id': 1, 'name': 'T_replication_vol_' + str(i), 'capacity': '1GB' }) return
def clean_up_environment(): server.webapi('delete', 'pool/0?force=1')
def find_pd_id(physical_capacity=None): pd_id = [] pd_request = server.webapi('get', 'phydrv') if isinstance(pd_request, dict): pd_info = json.loads(pd_request["text"]) str_pd_info = str(pd_info) # delete pool if 'Pool' in str_pd_info: pool_response = server.webapi('get', 'pool') if isinstance(pool_response, dict): pool_info = json.loads(pool_response["text"]) if len(pool_info) != 0: for pool in pool_info: server.webapiurl('delete', 'pool', str(pool['id']) + '?force=1') else: tolog(str(pool_response)) # delete spare if 'Spare' in str_pd_info: spare_response = server.webapi('get', 'spare') if isinstance(spare_response, dict): spare_info = json.loads(spare_response["text"]) for spare in spare_info: server.webapiurl('delete', 'spare', str(spare["id"])) else: tolog(str(spare_response)) # delete read cache if 'ReadCache' in str_pd_info: read_ache_response = server.webapi('get', 'rcache') if isinstance(read_ache_response, dict): cache_info = json.loads(read_ache_response["text"])[0]["pd_list"][0] sdd_id = cache_info["pd_id"] server.webapi('post', 'rcache/detach', {"pd_list": [sdd_id]}) else: tolog(str(read_ache_response)) # delete write cache if 'WriteCache' in str_pd_info: server.webapi('post', 'wcache/detach', {"id": 'detach'}) else: tolog(str(pd_request)) # find pd id pdResponse = server.webapi('get', 'phydrv') if isinstance(pdResponse, dict): pdInfo = json.loads(pdResponse["text"]) if physical_capacity == None: for pd in pdInfo: if pd["cfg_status"] == 'Unconfigured' and pd["media_type"] == 'HDD': pd_id.append(pd["id"]) elif physical_capacity == '2TB': for pd in pdInfo: if pd["cfg_status"] == 'Unconfigured' and pd["physical_capacity"] == '2 TB' and pd["media_type"] == 'HDD': pd_id.append(pd["id"]) elif physical_capacity == '4TB': for pd in pdInfo: if pd["cfg_status"] == 'Unconfigured' and pd["physical_capacity"] == '4 TB' and pd["media_type"] == 'HDD': pd_id.append(pd["id"]) else: tolog(str(pdResponse)) return pd_id
def add_group_and_verify_name_rules(): # precondition: create DSUser for i in range(10): server.webapi('post', 'dsuser', { "id": 'test_group_' + str(i), "password": '******' }) # test data name = ['a', '12', 'N' * 29, 't' * 30] for n in name: # precondition: cancel edit server.webapi('post', 'dsgroup/editcancel') tolog('Expect: add group ' + n + '\r\n') step1 = server.webapi('post', 'dsgroup/editbegin', { "page": 1, "page_size": 20 }) if isinstance(step1, str): result_assert.FailFlag = True tolog("Fail: " + step1 + '\r\n') else: # test data token = json.loads(step1["text"])[0]["token"] get_page_data = json.loads(step1["text"])[0]["page_data"] page_data = [[0, uid["uid"]] for uid in get_page_data] step2 = server.webapi('post', 'dsgroup/editnext', { "page": 1, "page_size": 20, "token": token, "page_data": page_data }) if isinstance(step2, str): result_assert.FailFlag = True tolog("Fail: " + step2 + '\r\n') else: step3 = server.webapi('post', 'dsgroup/editsave', { "id": n, "token": token, "page_data": page_data }) if isinstance(step3, str): result_assert.FailFlag = True tolog("Fail: " + step3 + '\r\n') else: check = server.webapi('get', 'dsgroup/' + n) checkResult = json.loads(check["text"])[0] tolog('Actual: ' + json.dumps(checkResult) + '\r\n') result_assert.result_assert()
def precondition(): pdId = find_pd_id() # create pool server.webapi('post', 'pool', {"name": "test_perf_pool", "pds": pdId[:3], "raid_level": "raid5"}) # create nasShare server.webapi('post', 'nasshare', {'pool_id': 0, 'name': 'test_perf_nas', 'capacity': '2GB'}) # create snapshot of NASShare type server.webapi('post', 'snapshot', {"name": "test_snap_nas_perf", "type": 'nasshare', "source_id": 0}) # create clone of NASShare type server.webapi('post', 'clone', {"source_id": 0, "name": 'test_perf_nas'}) # create volume server.webapi('post', 'volume', {'pool_id': 0, 'name': 'test_perf_volume', 'capacity': '2GB'}) # create snapshot of volume type server.webapi('post', 'snapshot', {"name": "test_snap_vol_perf", "type": 'volume', "source_id": 0}) # create clone of volume type server.webapi('post', 'clone', {"source_id": 1, "name": 'test_perf_vol'})