def doRequest():
    """Fetch two shop snapshots, filter their shelves, and diff the results.

    Pulls snapshot 4332724 (daily) and 4332722 (non-daily) from the chenlie
    gray environment, keeps only the shelves accepted by ShelfFilter, prints
    both filtered snapshots, then prints the json_tools diff between them.
    """
    headers = {
        'Cookie': 'chenlie-gray:g=; operator_ticket=3a14f4467ee84ef2bc2de83b17cf13cb; operator_sign=8cce69c2426b685895ef6aa784b8142b; operator_timestamp=1596765919837; user_name=hang.yu06'
    }
    dailaySnapID = {"snapId": 4332724}
    unDailySanpID = {"snapId": 4332722}
    url = 'https://display-gray.corp.bianlifeng.com/chenlie/api/core/v3/queryShopSnapById'
    request_methode = 'GET'
    dailaySnap = HttpRequestHandel.doRequset(url, request_methode, headers, dailaySnapID)
    unDailySanp = HttpRequestHandel.doRequset(url, request_methode, headers, unDailySanpID)
    dailaySnap = dailaySnap.get("snap")
    unDailySanp = unDailySanp.get("snap")

    def _filter_snap(snap):
        # Apply ShelfFilter exactly once per shelf.  The original called it
        # three times per entry (compute, truth-test, and again on its own
        # result), which wasted work and re-filtered already-filtered data.
        filtered = {}
        for k, v in snap.items():
            newShelf = ShelfFilter(v)
            if newShelf:
                filtered[k] = newShelf
        return filtered

    DailaySnap_9 = _filter_snap(dailaySnap)
    unDailySanp_9 = _filter_snap(unDailySanp)
    print(f'DailaySnap_9长度{len(DailaySnap_9)}')
    print(f'DailaySnap_9{DailaySnap_9}')
    print(f'unDailySanp_9长度{len(unDailySanp_9)}')
    print(f'unDailaySnap_9{unDailySanp_9}')
    # BUG FIX: diff the two *filtered* snapshots.  The original compared the
    # raw dailaySnap against the filtered unDailySanp_9, so every shelf the
    # filter dropped showed up as a spurious difference.
    diffResult = json_tools.diff(DailaySnap_9, unDailySanp_9)
    print(diffResult)
def json_cmp(self, result, prev):
    """Return True when *result* and *prev* are JSON-equal (empty diff)."""
    delta = json_tools.diff(result, prev)
    return not len(delta)
def product_change(product, item):
    """Detect changes to a product against its cached snapshot in Redis.

    Compares the change-relevant fields of *product* (extracted by
    trans_product_change) with the copy stored in the Redis hash
    ``historical_product``.  Unchanged products only get a timestamp bump;
    changed products are queued for update plus a change-history insert;
    unknown products are queued for insert.  The Redis snapshot is refreshed
    in every case.  *item* is forwarded to the sub-product handlers.
    """
    product_change = trans_product_change(product)
    spu = product['spu']
    result = redis_db.hget("historical_product", spu)
    if result is not None:
        # Known product: compare against the cached snapshot.
        historical_product = json.loads(result)
        change = json_tools.diff(dict(product_change), historical_product)
        if len(change) == 0:
            # Unchanged: only bump the update timestamp.
            print('shopee_product update_time')
            cache_product_update_time.put(spu)
        else:
            # Changed: queue a full update plus a change-history record.
            print('shopee_product update,shopee_product_change insert')
            cache_product_update.put(json.dumps(dict(product), cls=DateEnconding))
            cache_product_change_insert.put(json.dumps(dict(product), cls=DateEnconding))
            # Handle the sub-products (variants) of the changed product.
            good_sub_old(item, product)
    else:
        # New product: queue an insert.
        print('shopee_product insert')
        cache_product_insert.put(json.dumps(dict(product), cls=DateEnconding))
        # Handle the sub-products of the new product.
        good_sub_new(item, product)
    # Whether changed or new, sync the latest snapshot back to Redis.
    redis_db.hset('historical_product', spu, json.dumps(dict(product_change), cls=DateEnconding))
def diff():
    """CLI entry point: diff two JSON files and pretty-print the result.

    Exits with -1 when fewer than two file paths are given or a file
    cannot be loaded.
    """
    parser = OptionParser()
    parser.add_option("-c", "--color", dest="colors", action="store_true",
                      help="Colorize the output", default=False)
    options, files = parser.parse_args()
    if len(files) < 2:
        print("Need at least 2 JSON files", file=sys.stderr)
        exit(-1)

    def _load(path, missing_msg, nopath_msg):
        # Load one JSON document, exiting with the caller's message on failure.
        try:
            with open(path) as handle:
                return json.load(handle)
        except IOError:
            print(missing_msg, file=sys.stderr)
            exit(-1)
        except KeyError:
            print(nopath_msg, file=sys.stderr)
            exit(-1)

    local = _load(files[0], 'Local not found', 'Path to file not specified')
    other = _load(files[1], 'Other not found', 'Path to other file not specified')
    res = json_tools.diff(local, other)
    json_tools.print_json(res, "/", options.colors)
def check_fanyilist(self, result, expvalue):
    """Validate the fanyilist result data against the expected data.

    For every module (mokuai) entry, checks that both the result and the
    expected item carry 'mokuai' and 'weidu' fields and that the module
    names match; then every dimension (weidu) dict is compared
    field-by-field -- nested dict fields via json_tools.diff on their JSON
    dumps, scalar fields via equality.  Failures raise AssertionError with
    a Chinese diagnostic; progress is traced through PrintLog.
    (Python 2 code: uses xrange and u-string literals.)
    """
    for i in xrange(len(expvalue)):
        # Both sides of entry i must name their module and carry dimensions.
        assert 'mokuai' in result[i], u'检查fanyilist数据: 结果数据fanyilist中: 第%d项无mokuai字段' % i
        mokuai = result[i]['mokuai']
        assert 'weidu' in result[i], u'检查fanyilist数据: 结果数据fanyilist中: %s模块无weidu字段' % mokuai
        weidu = result[i]['weidu']
        assert 'mokuai' in expvalue[i], u'检查fanyilist数据: 期望数据fanyilist中: 第%d项无mokuai字段' % i
        expmokuai = expvalue[i]['mokuai']
        assert 'weidu' in expvalue[i], u'检查fanyilist数据: 期望数据fanyilist中: %s模块无weidu字段' % expmokuai
        expweidu = expvalue[i]['weidu']
        PrintLog('info', '[%s] 检查fanyilist数据: fanyilist中:第%s项: mokuai: %s\nexpmokuai: %s', threading.currentThread().getName(), i, mokuai, expmokuai)
        assert mokuai == expmokuai, u'检查fanyilist数据: fanyilist中: 第%d项mokuai字段数据与期望数据不一致' % i
        for j in xrange(len(weidu)):
            # Each dimension entry must be a dict with a 'weiduming' name.
            assert type(weidu[j]) is dict, u'检查fanyilist数据: 结果数据fanyilist中: %s模块weidu中: 第%d项格式不正确' % (mokuai, j)
            assert type(expweidu[j]) is dict, u'检查fanyilist数据: 期望数据fanyilist中: %s模块weidu中: 第%d项格式不正确' % (mokuai, j)
            assert 'weiduming' in weidu[j], u'检查fanyilist数据: 结果数据fanyilist中: %s模块weidu中: 第%d项中无weiduming字段' % (mokuai, j)
            assert 'weiduming' in expweidu[j], u'检查fanyilist数据: 期望数据fanyilist中: %s模块weidu中: 第%d项中无weiduming字段' % (mokuai, j)
            weiduming = weidu[j]['weiduming']
            PrintLog('debug', '[%s] 检查fanyilist数据: fanyilist中: %s模块: %s维度: result: %s\nexpvalue: %s', threading.currentThread().getName(), mokuai, weiduming, weidu[j], expweidu[j])
            # Compare every expected field of this dimension.
            for k in expweidu[j]:
                if type(expweidu[j][k]) is dict:
                    PrintLog('info', '[%s] _检查fanyilist数据: fanyilist中: %s模块: %s维度: %s字段: %s\nexpvalue: %s', threading.currentThread().getName(), mokuai, weiduming, k, weidu[j][k], expweidu[j][k])
                    # Nested dicts: compare via json_tools.diff on JSON dumps.
                    assert json_tools.diff(json.dumps(weidu[j][k]), json.dumps(expweidu[j][k])) == [], u'检查fanyilist数据: fanyilist中: mokuai: %s: weidu: %s %s字段数据与期望数据不一致' % (mokuai, weiduming, k)
                else:
                    PrintLog('info', '[%s] 检查fanyilist数据: fanyilist中: %s模块: %s维度: %s字段: %s\nexpvalue: %s', threading.currentThread().getName(), mokuai, weiduming, k, weidu[j][k], expweidu[j][k])
                    # Scalars: plain equality.
                    assert weidu[j][k] == expweidu[j][k], u'检查fanyilist数据: fanyilist中: mokuai: %s: weidu: %s \n%s字段数据与期望数据不一致' % (mokuai, weiduming, k)
def askopenfile(self):
    """Pick a test-case JSON file, replay its request, and show both responses.

    The actual response is rendered in blue in the left text box and the
    expected response in green in the right one; the two JSON bodies are
    then diffed.
    """
    self.file_path = askopenfilename(filetypes=(("Test Cases", "*.json"), ("All Files", "*.*")))
    self.actual_text_box.insert(END, self.file_path)
    tester = Tester()
    test_case_json = tester.read_json(self.file_path)
    test_case = TestCase(test_case_json)
    actual_response = tester.send(test_case.request_url, test_case.request_method, {})
    out_put_text = tester.make_it_pretty(actual_response.json())
    self.actual_text_box.insert(END, out_put_text)
    self.actual_text_box.tag_add("actual", "1.0", str(len(out_put_text)) + ".0")
    self.actual_text_box.tag_config("actual", background="white", foreground="blue")
    expected_response = tester.make_it_pretty(test_case.response_content)
    self.expected_text_box.insert(END, expected_response)
    self.expected_text_box.tag_add("actual", "1.0", str(len(expected_response)) + ".0")
    self.expected_text_box.tag_config("actual", background="white", foreground="green")
    # BUG FIX: the original diffed the prettified *string* against the raw
    # Response *object* (json_tools.diff(expected_response, actual_response)),
    # which never compared the payloads.  Diff the JSON bodies instead.
    diff = json_tools.diff(test_case.response_content, actual_response.json())
def good_sub_old(json_str, product):
    """Sync each sub-product (model/variant) of an existing product.

    For every model of *json_str*, compares the change-relevant fields with
    the cached snapshot in the Redis hash ``historical_product_sub``; queues
    update + change-history work when fields changed, an insert when the
    sub-product is new, and refreshes the Redis snapshot either way.
    """
    for model in json_str['models']:
        sub_product = sub_entity(model, product)
        product_sub_change = trans_product_sub_change(sub_product)
        spu = sub_product['spu']
        sku = sub_product['sku']
        # Redis key is the SPU and SKU concatenated.
        key = '{}{}'.format(spu, sku)
        result = redis_db.hget("historical_product_sub", key)
        if result is not None:
            # Known sub-product: compare against the cached snapshot.
            historical_product_sub = json.loads(result)
            change = json_tools.diff(dict(product_sub_change), historical_product_sub)
            if len(change) == 0:
                # No change: do nothing (placeholder print keeps the branch).
                print()
            else:
                # Changed: update shopee_sub_product and insert one new row
                # into shopee_sub_product_change.
                # status = 1 so that a product that was delisted and later
                # relisted gets its status flipped back on.
                status = 1
                quantity = product['quantity']
                if quantity == 0:
                    status = 0
                sub_product['status'] = status
                cache_sub_product_update.put(json.dumps(dict(sub_product), cls=DateEnconding))
                cache_sub_product_change_insert.put(json.dumps(dict(sub_product), cls=DateEnconding))
        else:
            # New sub-product: queue an insert.
            cache_sub_product_insert.put(json.dumps(dict(sub_product), cls=DateEnconding))
        # Sync the latest snapshot back to Redis in every case.
        redis_db.hset('historical_product_sub', key, json.dumps(dict(product_sub_change), cls=DateEnconding))
def __call__(self, environ, start_response):
    """WSGI entry point: fan the incoming request out to the master and all
    apprentice servers and log any apprentice response that differs from
    the master's.

    NOTE(review): the visible body never calls start_response and has no
    return, and the computed ``diff`` is unused -- this block looks
    truncated; confirm against the original file.
    (Python 2 code: uses ``except ValueError, e`` syntax.)
    """
    incoming = Request(environ)
    all_bodies = []
    # Master first, so master_body is set before any apprentice comparison.
    all_servers = [self._master] + self._apprentices
    master_response = None
    master_body = None
    for server in all_servers:
        req = self.make_request(incoming, server)
        req.send()
        this_body = req.response.content
        try:
            this_response = json.loads(this_body)
        except ValueError, e:
            # Not JSON: fall back to comparing the raw body.
            this_response = this_body
        if server == self._master:
            master_response = req.response
            master_body = this_response
        if not this_response == master_body:
            self._mismatch_log.info('apprentice %s failed fetching %s?%s:\n----\n%s\n---vs---\n%s' % (server, incoming.path, incoming.query_string, this_response, master_body))
            # Structured diff is only meaningful when both sides are dicts.
            if isinstance(this_response, dict) and isinstance(master_body, dict):
                diff = json_tools.diff(this_response, master_body)
        all_bodies.append(this_response)
def shop_change(shop):
    """Detect changes to a shop against its cached snapshot and enqueue a
    goods-list crawl task for it.

    Compares the change-relevant fields of *shop* (via trans_shop_change)
    with the copy stored in the Redis hash ``historical_shops``; queues
    update/insert work accordingly, refreshes the snapshot, then pushes a
    level-1 'goods_list' task so all of the shop's products get traversed.
    """
    shop_change = trans_shop_change(shop)
    shop_id = shop['shopid']
    result = redis_db.hget("historical_shops", shop_id)
    if result is not None:
        # Known shop: compare against the cached snapshot.
        historical_shop = json.loads(result)
        change = json_tools.diff(dict(shop_change), historical_shop)
        if len(change) == 0:
            # Unchanged: only bump the update timestamp.
            print('shopee_shope update time')
            cache_shop_update_time.put(shop_id)
        else:
            # Changed: queue a full update plus a change-history record.
            print('shopee_shope update,hopee_shope_change insert')
            cache_shop_update.put(json.dumps(dict(shop), cls=DateEnconding))
            cache_shop_change_insert.put(json.dumps(dict(shop), cls=DateEnconding))
    else:
        # New shop: queue an insert.
        print('shopee_shope,shopee_shope_change insert')
        cache_shop_insert.put(json.dumps(dict(shop), cls=DateEnconding))
    # Whether changed or new, sync the latest snapshot to Redis.
    redis_db.hset('historical_shops', shop_id, json.dumps(dict(shop_change), cls=DateEnconding))
    # Enqueue a task to start traversing all products of this shop.
    new_task = trans_task(shop)
    new_task['parse_type'] = 'goods_list'
    new_task['level'] = 1
    queue_shopee.put(json.dumps(dict(new_task), cls=DateEnconding))
def are_same(self, actual, expected):
    """Diff *actual* against *expected*; dump the diff to a file and raise
    on any mismatch, otherwise return the (empty) diff."""
    delta = json_tools.diff(expected, actual)
    if delta:
        # Persist the diff next to the other results before failing.
        out_put_diff_name = "result/diff" + "_" + test_case.name.replace(" ", "_") + ".json"
        tester.write_json(out_put_diff_name, delta)
        raise Exception("differs from previous(expected) , %r" % json.dumps(delta))
    return delta
def diff(urls):
    """Fetch each URL from both backends (ppssj and wxbackend) and print the
    json_tools diff of the two responses, one line per URL.

    BUG FIX: the original used a bare ``except: pass`` and then printed
    ``result`` unconditionally -- a NameError on the first failed URL, and a
    stale previous diff on later failures.  Failures are now reported per
    URL and the stale/undefined value is never printed.
    """
    for url in urls:
        try:
            ppssj_res = get_json(ppssj + url)
            onepiece_res = get_json(wxbackend + url)
            result = json_tools.diff(ppssj_res, onepiece_res)
        except Exception as exc:
            # Best-effort, as before -- but say what went wrong and move on.
            print('diff failed for %s: %s' % (url, exc))
            continue
        print(result)
def __diffResp(old_result, new_result):
    """Diff two response payloads.

    Returns ``{"diff_result": <json_tools diff>}`` on success, or 0 when a
    StatusCodeException is raised (the original error contract is kept).
    """
    try:
        diff_result = json_tools.diff(old_result, new_result)
        return {
            "diff_result": diff_result,
        }
    except StatusCodeException as e:
        # BUG FIX: the original called logging.error(e, "执行diff时异常"),
        # which uses the exception as the format string and drops the
        # Chinese message (and crashes if str(e) contains a stray '%s').
        logging.error("执行diff时异常: %s", e)
        return 0
def are_same(self, actual, expected):
    """Compare *actual* to *expected*: return the empty diff when they
    match, otherwise write the diff to a result file and raise."""
    mismatch = json_tools.diff(expected, actual)
    if len(mismatch) == 0:
        return mismatch
    # Record the diff under result/ before signalling the failure.
    name_part = test_case.name.replace(" ", "_")
    out_put_diff_name = "result/diff" + "_" + name_part + ".json"
    tester.write_json(out_put_diff_name, mismatch)
    raise Exception("differs from previous(expected) , %r" % json.dumps(mismatch))
def get_noise(primary, secondary):
    """Collect the 'replace' values ("noise") from the diff of two JSON
    documents.

    Returns a list of every replaced value between *primary* and
    *secondary*; an empty list when the documents match.
    """
    diffs = json_tools.diff(primary, secondary)
    noises = []
    if diffs:
        for diff in diffs:
            noise = diff.get('replace')
            if noise:
                noises.append(noise)
            # BUG FIX: the original *returned* as soon as one diff entry had
            # no truthy 'replace' key (e.g. an add/remove entry), silently
            # dropping every replacement after it.  Skip such entries instead.
    return noises
def __verify(self, hope, res):
    """Compare the expected (*hope*) and actual (*res*) JSON strings.

    Both arguments are JSON text; returns "通过" when the parsed documents
    are identical and "失败" otherwise.
    """
    expected = json.loads(hope)
    actual = json.loads(res)
    mismatches = json_tools.diff(expected, actual)
    return "通过" if mismatches == [] else "失败"
def get_diffs(primary, candidate):
    """Bucket the json_tools diff of two documents into replace/remove lists.

    Returns ``{"replace": [...], "remove": [...]}`` with the corresponding
    values from every diff entry.
    """
    json_diffs = json_tools.diff(primary, candidate)
    diffs = {
        "replace": [],
        "remove": []
    }
    for diff in json_diffs:
        if 'replace' in diff:
            diffs['replace'].append(diff['replace'])
        elif 'remove' in diff:
            diffs['remove'].append(diff['remove'])
        # BUG FIX: the original *returned* on the first entry that was
        # neither a replace nor a remove (e.g. an 'add'), truncating the
        # result.  Such entries are simply skipped now.
    return diffs
def test40DeleteTask(self):
    """DELETE task 3 and verify the API reports success."""
    rest_url = self.base_url + "/3"
    response = requests.delete(rest_url, auth=self.basic_auth)
    target_result = {
        "result": True
    }
    # An empty json_tools diff means the payload matched exactly.
    self.assertEquals([], json_tools.diff(target_result, response.json()))
def verify(self):
    """Check whether the actual result matches the expectation.

    Normalizes the input (XML or JSON) to a comparable form, loads the
    expected value, and diffs the two with json_tools.

    :return: True when identical, False otherwise.

    NOTE(review): the source was flattened to one line, so the grouping of
    the transform calls under the else-branch is reconstructed here --
    confirm against the original file.
    """
    if self.in_format == "xml":
        self.xml_trans()
    else:
        self.json_trans()
    # Convert to JSON form and load the expected value before diffing.
    self.xml_to_json()
    self.get_expected()
    result = json_tools.diff(self.real, self.expected)
    print(result)
    if len(result) == 0:
        return True
    else:
        return False
def test10TaskSpecific(self):
    """GET a single task (id 2) and verify it matches the seeded fixture.

    (Python 2 code: uses the print statement.)
    """
    rest_url = self.base_url + "/2"
    r = requests.get(rest_url, auth=self.basic_auth)
    # Expected payload for the pre-seeded task with id 2.
    target_result = {
        "task": {
            'id': 2,
            'title': u'Learn Python',
            'description': u'Need to find a good Python tutorial on the web',
            'done': False
        }
    }
    print r.content
    real_result = r.json()
    # An empty diff means the response matched the fixture exactly.
    diff_result = json_tools.diff(target_result, real_result)
    self.assertEquals([], diff_result)
def checkBASE64_ExpDict(self, BASE64_ExpDict, unique_id):
    """Verify BASE64-encrypted database fields against expected values.

    *BASE64_ExpDict* maps table name -> {field name: expected value}.  For
    each table the listed fields are selected by UniqueID, BASE64-decoded,
    and compared: dict expectations are parsed as JSON and compared
    key-by-key (nested dicts via json_tools.diff on JSON dumps), scalars
    via equality.  Raises TableNoneError when no row is found and
    AssertionError (Chinese diagnostics) on any mismatch.
    (Python 2 code: uses ``raise AssertionError, msg`` syntax; field order
    relies on keys()/values() iterating consistently.)
    """
    for table in BASE64_ExpDict:
        fields = BASE64_ExpDict[table].keys()
        values = BASE64_ExpDict[table].values()
        if not fields:
            continue
        PrintLog('debug', '[%s] 检查BASE64加密字段数据: 用例中读取的fields: %s\nvalues: %s', threading.currentThread().getName(), fields, values)
        query_where = (unique_id,)
        # Build "SELECT f1, f2, ... FROM table WHERE UniqueID = %s".
        query_fields = ''
        for field in fields:
            query_fields = query_fields + field + ', '
        query_str = 'SELECT ' + query_fields[:-2] + ' FROM ' + table + ' WHERE UniqueID = %s'
        PrintLog('debug', '[%s] 执行SQL查询: query_str: %s %s', threading.currentThread().getName(), query_str, query_where)
        self.curMy.execute(query_str, query_where)
        self.obj.connMy.commit()
        result = self.curMy.fetchone()  # take the first row of the result set
        if result is None:
            raise TableNoneError(u"%s is NONE" % table)
        expvalues = tuple(values)
        for i in range(len(fields)):
            expvalue = expvalues[i]
            # Decode the stored BASE64 value before comparing.
            de_result = EncryptLib.getde_base64(result[i])
            PrintLog('debug', '[%s] 检查BASE64加密字段数据: de_result: %s\nexpvalue: %s', threading.currentThread().getName(), de_result, expvalue)
            if type(expvalue) is dict:
                # Dict expectation: the decoded value must parse as JSON.
                try:
                    de_resultDict = json_tools.loads(de_result)
                    PrintLog('debug', '[%s] 检查BASE64加密字段数据: de_resultDict: %s', threading.currentThread().getName(), de_resultDict)
                except:
                    raise AssertionError, u'_检查BASE64加密字段: %s字段数据与期望数据不一致' % fields[i]
                for key in expvalue:
                    assert key in de_resultDict, u'检查BASE64加密字段: %s字段中无:%s字段' % (fields[i], key)
                    if type(expvalue[key]) is dict:
                        # Nested dicts: compare via json_tools.diff.
                        PrintLog('debug', '[%s] _检查BASE64加密字段数据: de_resultDict[%s]: %s expvalue[%s]: %s', threading.currentThread().getName(), key, de_resultDict[key], key, expvalue[key])
                        assert json_tools.diff(json.dumps(de_resultDict[key]), json.dumps(expvalue[key])) == [], u'_检查BASE64加密字段: %s字段中:%s字段数据与期望数据不一致' % (fields[i], key)
                    else:
                        PrintLog('debug', '[%s] 检查BASE64加密字段数据: de_resultDict[%s]: %s expvalue[%s]: %s', threading.currentThread().getName(), key, de_resultDict[key], key, expvalue[key])
                        assert de_resultDict[key] == expvalue[key], u'检查BASE64加密字段: %s字段中:%s字段数据与期望数据不一致' % (fields[i], key)
            else:
                # Scalar expectation: direct equality on the decoded value.
                PrintLog('debug', '[%s] 检查BASE64加密字段%s数据: de_result: %s expvalue: %s', threading.currentThread().getName(), fields[i], de_result, expvalue)
                assert de_result == expvalue, u'检查BASE64加密字段: %s字段数据与期望数据不一致' % fields[i]
def checkBASE64_ExpDict(self, BASE64_ExpDict, unique_id):
    """Verify BASE64-encrypted database fields against expected values.

    Variant of the sibling implementation that logs at 'info' level instead
    of 'debug'.  *BASE64_ExpDict* maps table name -> {field: expected
    value}; each listed field is selected by UniqueID, BASE64-decoded, and
    compared (dict expectations key-by-key, nested dicts via
    json_tools.diff, scalars via equality).  Raises TableNoneError when no
    row is found and AssertionError on mismatch.
    (Python 2 code: uses ``raise AssertionError, msg`` syntax.)
    """
    for table in BASE64_ExpDict:
        fields = BASE64_ExpDict[table].keys()
        values = BASE64_ExpDict[table].values()
        if not fields:
            continue
        PrintLog('debug', '[%s] 检查BASE64加密字段数据: 用例中读取的fields: %s\nvalues: %s', threading.currentThread().getName(), fields, values)
        query_where = (unique_id,)
        # Build "SELECT f1, f2, ... FROM table WHERE UniqueID = %s".
        query_fields = ''
        for field in fields:
            query_fields = query_fields + field + ', '
        query_str = 'SELECT ' + query_fields[:-2] + ' FROM ' + table + ' WHERE UniqueID = %s'
        PrintLog('info', '[%s] 执行SQL查询: query_str: %s %s', threading.currentThread().getName(), query_str, query_where)
        self.curMy.execute(query_str, query_where)
        self.obj.connMy.commit()
        result = self.curMy.fetchone()  # take the first row of the result set
        if result is None:
            raise TableNoneError(u"%s is NONE" % table)
        expvalues = tuple(values)
        for i in range(len(fields)):
            expvalue = expvalues[i]
            # Decode the stored BASE64 value before comparing.
            de_result = EncryptLib.getde_base64(result[i])
            PrintLog('info', '[%s] 检查BASE64加密字段数据: de_result: %s\nexpvalue: %s', threading.currentThread().getName(), de_result, expvalue)
            if type(expvalue) is dict:
                # Dict expectation: the decoded value must parse as JSON.
                try:
                    de_resultDict = json_tools.loads(de_result)
                    PrintLog('info', '[%s] 检查BASE64加密字段数据: de_resultDict: %s', threading.currentThread().getName(), de_resultDict)
                except:
                    raise AssertionError, u'_检查BASE64加密字段: %s字段数据与期望数据不一致' % fields[i]
                for key in expvalue:
                    assert key in de_resultDict, u'检查BASE64加密字段: %s字段中无:%s字段' % (fields[i], key)
                    if type(expvalue[key]) is dict:
                        # Nested dicts: compare via json_tools.diff.
                        PrintLog('info', '[%s] _检查BASE64加密字段数据: de_resultDict[%s]: %s expvalue[%s]: %s', threading.currentThread().getName(), key, de_resultDict[key], key, expvalue[key])
                        assert json_tools.diff(json.dumps(de_resultDict[key]), json.dumps(expvalue[key])) == [], u'_检查BASE64加密字段: %s字段中:%s字段数据与期望数据不一致' % (fields[i], key)
                    else:
                        PrintLog('info', '[%s] 检查BASE64加密字段数据: de_resultDict[%s]: %s expvalue[%s]: %s', threading.currentThread().getName(), key, de_resultDict[key], key, expvalue[key])
                        assert de_resultDict[key] == expvalue[key], u'检查BASE64加密字段: %s字段中:%s字段数据与期望数据不一致' % (fields[i], key)
            else:
                # Scalar expectation: direct equality on the decoded value.
                PrintLog('info', '[%s] 检查BASE64加密字段%s数据: de_result: %s expvalue: %s', threading.currentThread().getName(), fields[i], de_result, expvalue)
                assert de_result == expvalue, u'检查BASE64加密字段: %s字段数据与期望数据不一致' % fields[i]
def askopenfile(self):
    """Pick a test-case JSON file, replay its request, and show both responses.

    The actual response is rendered in blue in the left text box and the
    expected response in green in the right one; the two JSON bodies are
    then diffed.
    """
    self.file_path = askopenfilename(filetypes=(("Test Cases", "*.json"), ("All Files", "*.*")))
    self.actual_text_box.insert(END, self.file_path)
    tester = Tester()
    test_case_json = tester.read_json(self.file_path)
    test_case = TestCase(test_case_json)
    actual_response = tester.send(test_case.request_url, test_case.request_method, {})
    out_put_text = tester.make_it_pretty(actual_response.json())
    self.actual_text_box.insert(END, out_put_text)
    self.actual_text_box.tag_add("actual", "1.0", str(len(out_put_text))+".0")
    self.actual_text_box.tag_config("actual", background="white", foreground="blue")
    expected_response = tester.make_it_pretty(test_case.response_content)
    self.expected_text_box.insert(END, expected_response)
    self.expected_text_box.tag_add("actual", "1.0", str(len(expected_response))+".0")
    self.expected_text_box.tag_config("actual", background="white", foreground="green")
    # BUG FIX: the original diffed the prettified *string* against the raw
    # Response *object* (json_tools.diff(expected_response, actual_response)),
    # which never compared the payloads.  Diff the JSON bodies instead.
    diff = json_tools.diff(test_case.response_content, actual_response.json())
def test30UpdateTask(self):
    """PUT done=True on task 2, verify the payload, then revert the flag.

    (Python 2 code: uses the print statement.)
    """
    rest_url = self.base_url + "/2"
    headers = {"Content-Type": "application/json"}
    data = {"done": True}
    r = requests.put(rest_url, headers=headers, json=data, auth=self.basic_auth)
    # Expected payload after the update: only 'done' flipped to True.
    target_result = {
        "task": {
            "description": u"Need to find a good Python tutorial on the web",
            "done": True,
            "id": 2,
            "title": u"Learn Python"
        }
    }
    print r.content
    real_result = r.json()
    diff_result = json_tools.diff(target_result, real_result)
    self.assertEquals([], diff_result)
    # Restore the original state so later tests see done=False.
    data = {"done": False}
    r = requests.put(rest_url, headers=headers, json=data, auth=self.basic_auth)
def update(self, request, *args, **kwargs):
    """DRF update handler for Host that also writes an audit Record.

    Serializes the host identified by request.data['hostname'] before the
    update, performs the normal full (non-partial) update on the object
    resolved by get_object(), then stores a Record with the before/after
    payloads and their json_tools diff (with None-entries stripped by
    removeNone).
    """
    hostname = request.data['hostname']
    # Snapshot the host's serialized state before applying the change.
    before_host = Host.objects.get(hostname=hostname)
    before_data = self.get_serializer(before_host, partial=False).data
    host = before_data['hostname']
    instance = self.get_object()
    serializer = self.get_serializer(instance, data=request.data, partial=False)
    serializer.is_valid(raise_exception=True)
    self.perform_update(serializer)
    after_data = serializer.data
    # Audit record: store before/after and the cleaned diff between them.
    diff = removeNone(json_tools.diff(before_data, after_data))
    Record.objects.create(name='hosts', asset=host, method='update', before=before_data, after=after_data, diff=diff, create_user=request.user)
    return Response(serializer.data)
def test20CreateTask(self):
    """POST a new task and verify the created payload (expects id 3).

    (Python 2 code: uses the print statement.)
    """
    rest_url = self.base_url
    headers = {"Content-Type": "application/json"}
    data = {"title": "Read a new Book"}
    r = requests.post(rest_url, headers=headers, json=data, auth=self.basic_auth)
    # Expected payload: the new task gets id 3 after the two seeded tasks.
    target_result = {
        "task": {
            "description": u"",
            "done": False,
            "id": 3,
            "title": u"Read a new Book"
        }
    }
    print r.content
    real_result = r.json()
    # An empty diff means the response matched the expectation exactly.
    diff_result = json_tools.diff(target_result, real_result)
    self.assertEquals([], diff_result)
def diff_json(self, res_data, expect_result):
    """Diff a response body against the expected result and record pass/fail
    in the database.

    NOTE(review): the response is compared as raw text (res_data.text);
    the commented-out line shows .json() was used before -- confirm that
    diffing the expected value against a *string* body is intentional.
    """
    # Convert the response object to its body text.
    # res_data = res_data.json()
    res_data = res_data.text
    # Skip comparison entirely when no expectation was supplied.
    if expect_result != "":
        print("预期结果:", type(expect_result), expect_result)
        print("实际结果:", type(res_data), res_data)
        diff_result = json_tools.diff(expect_result, res_data)
        print("diff_result-------------------->>>", diff_result)
        if diff_result == []:
            # Empty diff: record the pass result.
            print(
                "pass-----------------------------------------------------------"
            )
            self.db.execute(self.result_pass)
        else:
            # Non-empty diff: record the failure.
            print(
                "fail-----------------------------------------------------------"
            )
            self.db.execute(self.result_fail)
def test00Tasklist(self):
    """GET the full task list and verify it matches the two seeded tasks."""
    response = requests.get(self.base_url, auth=self.basic_auth)
    target_result = {
        "tasks": [
            {
                'uri': u"http://localhost:5000/todo/api/v1.0/tasks/1",
                'title': u'Buy groceries',
                'description': u'Milk, Cheese, Pizza, Fruit, Tylenol',
                'done': False
            },
            {
                'uri': u"http://localhost:5000/todo/api/v1.0/tasks/2",
                'title': u'Learn Python',
                'description': u'Need to find a good Python tutorial on the web',
                'done': False
            }
        ]
    }
    # An empty json_tools diff means the payload matched exactly.
    self.assertEquals([], json_tools.diff(target_result, response.json()))
def compare_json(json_1, json_2):
    """Print the json_tools diff between two JSON-compatible documents."""
    print(json_tools.diff(json_1, json_2))
def diffDict(self):
    """Compare self.old and self.new and return their differences as a list
    (empty when the two dictionaries are identical)."""
    return json_tools.diff(self.old, self.new)
def apply_stack(env, template_name, params=None):
    """Create or update the CloudFormation stack for *template_name* in *env*.

    Renders the template class registered under *template_name*, uploads the
    body to S3 when it exceeds the 51200-byte inline limit, shows the
    reduced json_tools diff against the deployed template on updates, asks
    for confirmation, and waits for stack completion.

    BUG FIX: the mutable default argument ``params={}`` was shared across
    calls; it is now ``None`` with an empty dict substituted inside.

    NOTE(review): the flattened source made some nesting ambiguous --
    wait_for_completion is placed at function level here; also note that a
    non-ValidationError ClientError is swallowed, leaving ``response``
    unbound.  Confirm both against the original file.
    """
    params = {} if params is None else params
    cfn_conn = boto3.client('cloudformation', region_name=constants.ENVIRONMENTS[env]['region'])
    s3_conn = boto3.client('s3', region_name=constants.ENVIRONMENTS[env]['region'])
    TemplateClass = TEMPLATES.get(template_name, None)
    if not TemplateClass:
        raise RuntimeError(
            '{} not a valid Template Class'.format(template_name))
    template = TemplateClass(template_name, env, params)
    stack_args = {
        'Capabilities': template.CAPABILITIES,
        'Parameters': [{
            'ParameterKey': k,
            'ParameterValue': v,
            'UsePreviousValue': False
        } for k, v in params.items()],
        'StackName': '{}-{}'.format(env, template_name),
        'Tags': [{
            'Key': '{}:team'.format(constants.TAG),
            'Value': template.TEAM['email']
        }, {
            'Key': '{}:environment'.format(constants.TAG),
            'Value': env
        }],
    }
    # CloudFormation only accepts inline bodies below 51200 bytes; larger
    # templates go through S3 and are referenced by URL.
    if len(template.to_json()) < 51200:
        stack_args['TemplateBody'] = template.to_json()
    else:
        bucket = '{}-{}-infra'.format(constants.TAG, env)
        key = 'cfn/{}/{}-{}'.format(
            env, datetime.datetime.now().strftime('%Y%m%d-%H:%M'), template_name)
        s3_conn.put_object(Body=template.to_json(), Bucket=bucket,
                           ContentType='application/json', Key=key)
        stack_args[
            'TemplateURL'] = 'https://s3.dualstack.{}.amazonaws.com/{}/{}'.format(
                constants.ENVIRONMENTS[env]['region'], bucket, key)
    if template:
        if stack_args['StackName'] in [
                s['StackName'] for s in list_stacks(env)
                if s['StackStatus'] != 'DELETE_COMPLETE'
        ]:
            # Stack exists: update it.
            stack_args.pop('Tags', None)  # update_stack can't take Tags
            # Diff the currently deployed template against the new render.
            old = json.loads(
                json.dumps(
                    cfn_conn.get_template(
                        **{'StackName': stack_args['StackName']})
                    ['TemplateBody']))
            new = json.loads(template.to_json())
            print("Proposed changes:")
            print_reduced(json_tools.diff(old, new))
            try:
                response = confirm_action(cfn_conn.update_stack, **stack_args)
            except botocore.exceptions.ClientError as e:
                # "No updates to be performed" and similar come back as
                # ValidationError -- report and stop cleanly.
                if e.response['Error']['Code'] == 'ValidationError':
                    print(e.response['Error']['Message'])
                    sys.exit(0)
            except:
                raise
        else:
            # Create a new stack.
            print('Creating a new stack: {}'.format(stack_args['StackName']))
            print('Template:')
            print(template.to_json())
            response = confirm_action(cfn_conn.create_stack, **stack_args)
    wait_for_completion(env, response['StackId'])
#coding=utf8 import json_tools print dir(json_tools) a = {"a":{"aa":{"aaa":333,"aaa2":3332},"b":22}} b = {"a":{"b":22, "aa":{"aaa2":339, "aaa":333}}} c = 1 d = 2 e = 'sdfsdf' f = 'sdfsdf' print json_tools.diff(a,b) #print json_tools.diff(c,d) #print json_tools.diff(e,f) data = '''{ "userbasicinfo": [ { "LoginFromOthers": "weixin", "istuandai": null, "name": 123, "bbb": true }, { "LoginFromOthers": "qq", "kong": null,
#!/usr/bin/env python
# coding=utf-8
# author: zengyuetian
import json_tools

if __name__ == '__main__':
    # Two documents with identical content but different key/element order;
    # json_tools.diff compares structurally, so the diff should be empty.
    first = {'left': 1, "right": {"a": [1, 2, {"b": "x", "a": 1}]}}
    second = {'right': {"a": [1, 2, {"a": 1, "b": 'x'}]}, "left": 1}
    print(json_tools.diff(first, second))
# -*- coding:utf-8 -*- #!/usr/bin/env python 3.7 # Python version 2.7.16 or 3.7.6 ''' # FileName: AssertJson.py # Author : MrYu # Desc: PyCharm # Date: 2021/2/27 9:05 ''' # 判断俩json是否相等 import operator import json_tools keyword = {"id": "100", "name": "苹果","info": {"uid":"2020","phoneName":["一代","Mate40"]}} keyword1 = {"id": "100","info": {"uid":"2020","phoneName":["一代","Mate40"]}, "name": "苹果"} # 方法一 调库 若不相等则数组非空 print(json_tools.diff(keyword, keyword1)) # 方法二 递归 def json_clear(keyword,keyword1): if type(keyword)==dict and type(keyword1)==dict: if len(keyword) == len(keyword1): # 优先对比所有的key值是否一致不一致立刻返回结果不走values判断 keys1 = keyword.keys() keys2 = keyword1.keys() equal = [] not_equal = [] if keys1==keys2: list_keys = [i for i in keys1] for i in range(len(list_keys)): if keyword[list_keys[i]]==keyword1[list_keys[i]]: equal.append(list_keys[i]) else:
'''
This program does a "smart" diff of structured files (yaml or json).
- loads the files
- applies a remapping to the first file to avoid false positives
- does a diff between the files
- plucks out uninteresting differences
- displays the resulting diff in the requested format
'''
# (Python 2 code: uses the print statement.)
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Compute a difference between serialized files.')
    parser.add_argument('-f', '--first', required=True, type=argparse.FileType('r'), help='First file to compare')
    # NOTE(review): "compate" is a typo in the user-visible help text; left
    # untouched here because it is a runtime string.
    parser.add_argument('-s', '--second', required=True, type=argparse.FileType('r'), help='Second file to compate')
    parser.add_argument('-r', '--regex', required=False, nargs="+", help='Regular expression to filter on')
    parser.add_argument('-m', '--map', required=False, type=argparse.FileType('r'), help='Remapping file')
    parser.add_argument('-o', '--output', required=False, choices=['json', 'yaml'], default='json', help='Output format')
    args = parser.parse_args()
    first = parse(args.first)
    second = parse(args.second)
    # Optional remapping of the first document to suppress false positives.
    if args.map is not None:
        mapping = parse(args.map)
        first = do_remapping(first, mapping)
    diff = json_tools.diff(first, second)
    # Drop diff entries matching any of the given pluck expressions.
    if args.regex is not None:
        for expr in args.regex:
            diff = filter(pluck(expr), diff)
    # Emit the remaining diff in the requested serialization.
    if args.output == 'yaml':
        print yaml.dump(diff, indent=2, default_flow_style=False)
    else:
        print json.dumps(diff, indent=2, ensure_ascii=True)
'applyReturnNum': None, 'childGoodsQty': None, 'ext01': None, 'ext02': 'UNIT-12', 'ext03': 'EA', 'ext04': '个', 'ext05': '1.00', 'ext06': '0', 'ext07': None, 'ext08': None, 'ext09': None, 'ext10': None, 'ext11': None, 'ext12': None, 'ext13': None, 'ext14': None, 'ext15': None, 'originalGoodsId': None, 'firstBillBomCode': None, 'srcBillBomCode': None }], 'coordinationOrderCode': None, 'isEc': None, 'otherOrders': [] } d = json_tools.diff(exp, res) for i in d: print(i)
def sync_remote_server(request, method):
    """Sync host facts collected via salt into the Host table.

    Queries all minions that are up for a fixed set of grains, then either
    creates missing Host rows (method == 'create') or updates existing ones
    whose facts changed (anything else), writing an audit Record for every
    create/update.  Returns the raw grain data and its count.
    """
    tgt = sapi.minions_status()['up']
    # Grains to collect from every minion.
    arg = [
        'osfinger', 'ipv4', 'cpu_model', 'num_cpus', 'memory_info', 'disk_info'
    ]
    data = sapi.sync_remote_server(tgt=tgt, arg=arg)
    count = len(data)
    update_list = []
    no_update_list = []
    for k, v in data[0].items():
        # Normalize the minion's grains into Host model fields.
        host_info = {
            'hostname': k,
            'os': v['osfinger'],
            'cpu': '{} * {}'.format(v['cpu_model'], v['num_cpus']),
            'memory': v['memory_info'],
            'disk': '|'.join(v['disk_info']),
            'ip': '|'.join(v['ipv4'])
        }
        if method == 'create':
            try:
                obj = Host.objects.get(hostname=k)
            except Host.DoesNotExist:
                # Host missing: create it and record the creation.
                obj = Host(**host_info)
                obj.save()
                # records
                Record.objects.create(name='hosts', asset=k, type=1, method='create', before='{}', after=host_info, create_user='******')
        else:
            try:
                obj = Host.objects.filter(hostname=k)
                # Current DB state, projected onto the same fields.
                obj_info = {
                    'hostname': k,
                    'os': obj[0].os,
                    'cpu': obj[0].cpu,
                    'memory': obj[0].memory,
                    'disk': obj[0].disk,
                    'ip': obj[0].ip
                }
                # Only update when the cleaned diff is non-empty.
                diff = removeNone(json_tools.diff(obj_info, host_info))
                if diff:
                    obj.update(**host_info)
                    # records
                    Record.objects.create(name='hosts', asset=k, type=1, method='update', before=obj_info, after=host_info, diff=diff, create_user='******')
                    update_list.append(k)
                else:
                    no_update_list.append(k)
            except Host.DoesNotExist:
                print("%s is not exist" % k)
    print("update_list: %s" % update_list)
    print("no_update_list: %s" % no_update_list)
    return Response({"results": data, "count": count})
def diffDict(self):
    """Return the json_tools differences between the stored old and new
    dictionaries; an empty list means they are identical."""
    changes = json_tools.diff(self.old, self.new)
    return changes
def jsonDiff(a, b):
    """Diff two JSON documents and print (in Chinese) whether they match,
    along with the diff itself."""
    delta = json_tools.diff(a, b)
    template = "两个数据内容一致:%s" if delta == [] else "两个数据内容存在不一致:%s"
    print(template % delta)