def parse_infos(self, response):
    """Parse a listing detail page and yield a populated ImmobiliareItem.

    Extracts the hydration JSON embedded in the page's ``js-hydration``
    <script> tag, collects image URLs, strips the bulky ``multimedia``
    subtree, and loads the cleaned listing data into the item.
    """
    # chompjs tolerates the relaxed JS-object syntax that json.loads
    # would reject (single quotes, unquoted keys, trailing commas).
    data_string = response.xpath('//script[@id="js-hydration"]/text()').get()
    data = chompjs.parse_js_object(data_string, json_params={'strict': False})
    loader = ItemLoader(ImmobiliareItem(), response)
    # inspect_response(response, self)
    # Collect every 'medium'-sized image URL before the multimedia
    # subtree is deleted below.
    loader.add_value('image_urls', nested_lookup('medium', data))
    data = nested_delete(data, 'multimedia')
    # First 'listing' match, minus keys the item does not need.
    listing = nested_delete(nested_lookup('listing', data)[0], 'type')
    listing = nested_delete(listing, 'title')
    loader.add_value('listing', listing)
    # NOTE(review): field name is 'trovokasa' but the lookup key is
    # 'trovakasa' — spelling mismatch kept as-is; confirm against the
    # ImmobiliareItem field definition.
    loader.add_value('trovokasa', nested_lookup('trovakasa', data))
    yield loader.load_item()
def delete_keys_from_dict(dictionary, keys):
    """Remove every key in *keys*, at any nesting depth, from *dictionary*.

    Returns the document produced by applying ``nested_delete`` once per
    key, in order.
    """
    pruned = dictionary
    for unwanted in keys:
        pruned = nested_delete(pruned, unwanted)
    return pruned
def remove_element_from_json(json_elem, json_file):
    """Delete *json_elem* (at any depth) from the JSON file *json_file*.

    The input file is left untouched; the pruned document is written to a
    sibling file named ``'new_' + json_file``.
    """
    # JSON is UTF-8 by spec — don't rely on the platform default encoding.
    with open(json_file, encoding='utf-8') as f:
        data = json.load(f)
    data = nested_delete(data, json_elem)
    # NOTE(review): the prefix is applied to the whole path, so
    # 'dir/file.json' becomes 'new_dir/file.json' — confirm callers only
    # pass bare filenames.
    with open('new_' + json_file, 'w', encoding='utf-8') as outfile:
        json.dump(data, outfile, ensure_ascii=True, indent=4)
def test_sample_data4(self):
    """Falsy sibling values (False, 0) must survive deletion of other keys."""
    expected_without_cores = {
        "hardware_details": {
            "model_name": 'MacBook Pro',
            "memory": False,
        }
    }
    self.assertEqual(
        expected_without_cores,
        nested_delete(self.sample_data4, 'total_number_of_cores'))

    expected_without_memory = {
        "hardware_details": {
            "model_name": 'MacBook Pro',
            "total_number_of_cores": 0,
        }
    }
    self.assertEqual(
        expected_without_memory,
        nested_delete(self.sample_data4, 'memory'))
def test_nested_delete_in_place_true(self):
    """nested_delete should mutate and return the original document."""
    document = self.sample_data1
    returned = nested_delete(document, "build_version", in_place=True)
    # Identity, not mere equality: the very same object must come back.
    self.assertEqual(id(document), id(returned))
def test_sample_data1(self):
    """Only 'build_version' is removed; sibling keys stay intact."""
    expected = {
        "name": "Test",
        "date": "YYYY-MM-DD HH:MM:SS",
        "os_details": {"product_version": "10.13.6"},
    }
    self.assertEqual(expected, nested_delete(self.sample_data1, "build_version"))
def test_falseish_number(self):
    """Deleting 'key5' must not disturb the falsy-looking siblings."""
    expected = {"key1": {"key2": True, "key3": False, "key4": 5}}
    self.assertEqual(expected, nested_delete(self.sample_data5, "key5"))
def test_false_bool(self):
    """Deleting the False-valued 'key3' leaves 0-valued 'key5' in place."""
    expected = {"key1": {"key2": True, "key4": 5, "key5": 0}}
    self.assertEqual(expected, nested_delete(self.sample_data5, "key3"))
def test_sample_data2(self):
    """The whole 'processor_details' subtree is removed in one call."""
    surviving_hardware = {
        "model_name": "MacBook Pro",
        "total_number_of_cores": "5",
        "memory": "16 GB",
    }
    self.assertEqual(
        {"hardware_details": surviving_hardware},
        nested_delete(self.sample_data2, "processor_details"))
def test_sample_data2(self):
    """Removing 'processor_details' keeps the remaining hardware keys."""
    surviving_hardware = {
        "model_name": 'MacBook Pro',
        "total_number_of_cores": '5',
        "memory": '16 GB',
    }
    expected = {"hardware_details": surviving_hardware}
    self.assertEqual(expected, nested_delete(self.sample_data2, 'processor_details'))
def test_sample_data1(self):
    """Dropping 'build_version' leaves the rest of the document intact."""
    expected = {
        "name": 'Test',
        "date": 'YYYY-MM-DD HH:MM:SS',
        "os_details": {"product_version": '10.13.6'},
    }
    self.assertEqual(expected, nested_delete(self.sample_data1, 'build_version'))
def test_nested_delete_in_place_false(self):
    """With in_place=False a mutated *copy* must be returned, never the
    original document object."""
    document = self.sample_data1
    copied = nested_delete(document, 'build_version', in_place=False)
    # The object identities must differ.
    self.assertNotEqual(id(document), id(copied))
def resolve(self, app_config):
    """Resolve every ``self.key`` reference inside *app_config*.

    Uses JSONPath to find each parent object containing ``self.key``,
    fetches the referenced value through ``self.provider``/``self.fetch``,
    and additively merges the results over the config with the reference
    keys deleted. Returns the config unchanged when nothing matches.
    """
    jsonpath_expr = parse(f'$..{self.key}.`parent`')
    results = jsonpath_expr.find(app_config)
    if not results:
        return app_config
    # Lazy %-style args: formatting is skipped when INFO is disabled.
    logging.info('Needs to resolve %d values by %s module',
                 len(results), self.key)
    provider = self.provider()
    resolved = {}
    # Plain loop: the original list comprehension built a throwaway list
    # purely for merge()'s side effects.
    for match in results:
        fetched = self.fetch(match.value[self.key], provider)
        merge(resolved,
              unflatten({str(match.full_path): fetched}),
              strategy=Strategy.ADDITIVE)
    return merge(nested_delete(app_config, self.key), resolved,
                 strategy=Strategy.ADDITIVE)
def test_sample_data3(self):
    """Deleting 'monitoring_zones' leaves an empty check dict behind."""
    expected = {"values": [{"checks": [{}]}]}
    self.assertEqual(expected, nested_delete(self.sample_data3, "monitoring_zones"))
if currrent_schema_dir == schema_dir.name: schema_path = schema_dir / Path(value) else: schema_dir = schema_dir / currrent_schema_dir schema_path = schema_dir / Path(value).name schema[key] = resolve_file_references(load_json(schema_path), schema_dir) else: resolve_file_references(value, schema_dir) return schema def update_ref_parent(schema): if isinstance(schema, OrderedDict): if "$ref" in schema: schema.update( {key: value for key, value in schema['$ref'].items()}) for child in schema.values(): update_ref_parent(child) if isinstance(schema, list): for item in schema: update_ref_parent(item) return schema in_schema = load_json(in_schema_path) temp_schema = resolve_file_references(in_schema, SCHEMA_BASE_DIR) out_schema = nested_delete(update_ref_parent(temp_schema), '$ref') save_json(out_schema, out_schema_path, sort_keys=False)
def test03(self, steps):
    """Collect post-state ACL data from every device and compare it to the
    pre-state snapshot (``self.pre_dic``) gathered earlier.

    Step 1 learns the 'acl' feature from each device into ``self.post_dic``.
    Step 2 diffs pre vs post (excluding 'statistics'), logs ACLs that were
    removed, added, or modified, and fails the step when any diff exists.

    NOTE(review): this block was reconstructed from whitespace-mangled
    source; statement nesting reflects the most plausible reading — verify
    against the original file.
    """
    with steps.start(f"Re-run test to collect Post-state data",
                     continue_=True) as step:
        # Create empty dictionary for storing all route results
        self.post_dic = {}
        # Loop over device dictionary
        for self.name, self.dev_name in self.devices.items():
            log.info(
                f'******* Learning and Processing details for {self.name} *******'
            )
            # create empty list to store route entries emdeded within complete_dic dictionary
            acl_entries = []
            # create enbedded dictionary entry per device
            self.post_dic.update({self.name: []})
            # learn routes from device
            acls = self.dev_name.learn('acl')
            try:
                acl_entries.append(acls.info)
                # Add group of routes to dictionary per-device
                self.post_dic[self.name] = acl_entries
            except AttributeError:
                # Device returned no ACL info; leave its entry empty.
                pass
    with steps.start(
            f"Compare Pre-state to Post-state to very routes haven't changed",
            continue_=True) as step:
        #Verification
        # perfrom a pre vs post ACL compare
        diff = Diff(self.pre_dic, self.post_dic, exclude="statistics")
        diff.findDiff()
        diff = str(diff)
        if not diff:
            log.info(f'No ACL changes detected - Test Passed')
        else:
            log.info(f'ACL changes detected {diff}')
            for dev in self.devices.keys():
                log.info(f'ACL Change Summary for device - {dev}')
                pre_list_of_acl = self.pre_dic[dev]
                post_list_of_acl = self.post_dic[dev]
                self.pre_acl_names = {}
                self.post_acl_names = {}
                # Start Pre state validation: flatten each pre-state ACL into
                # {name: {'type': ..., 'aces': [...]}} for comparison below.
                for acl_set in pre_list_of_acl:
                    if 'acls' in acl_set:
                        acls = acl_set['acls']
                        for acl in acls.keys():
                            self.pre_aces = []
                            self.acl_type = acls[acl]['type']
                            try:
                                aces = acls[acl]['aces']
                            except KeyError:
                                # ACL exists but carries no ACEs.
                                print(
                                    f"ACL {acl} doesn't have any entries")
                                self.pre_acl_names.update({
                                    acls[acl]['name']: {
                                        'type': acls[acl]['type'],
                                        'aces': None
                                    }
                                })
                                continue
                            for ace in aces.keys():
                                seq = aces[ace]['name']
                                self.pre_aces.append(aces[ace])
                            self.pre_acl_names.update({
                                acls[acl]['name']: {
                                    'type': acls[acl]['type'],
                                    'aces': self.pre_aces
                                }
                            })
                    else:
                        self.pre_acl_names.update({
                            'name': None,
                            'type': None,
                            'aces': None
                        })
                #Start Post state validation (mirror of the pre-state pass).
                for acl_set in post_list_of_acl:
                    if 'acls' in acl_set:
                        acls = acl_set['acls']
                        for acl in acls.keys():
                            self.post_aces = []
                            self.acl_type = acls[acl]['type']
                            try:
                                aces = acls[acl]['aces']
                            except KeyError:
                                print(
                                    f"ACL {acl} doesn't have any entries")
                                self.post_acl_names.update({
                                    acls[acl]['name']: {
                                        'type': acls[acl]['type'],
                                        'aces': None
                                    }
                                })
                                continue
                            for ace in aces.keys():
                                seq = aces[ace]['name']
                                self.post_aces.append(aces[ace])
                            self.post_acl_names.update({
                                acls[acl]['name']: {
                                    'type': acls[acl]['type'],
                                    'aces': self.post_aces
                                }
                            })
                    else:
                        self.post_acl_names.update({
                            'name': None,
                            'type': None,
                            'aces': None
                        })
                # Start comparision
                #List of ACLs that were removed
                missing_acls = {
                    x: y
                    for x, y in self.pre_acl_names.items()
                    if x not in self.post_acl_names.keys()
                }
                if missing_acls:
                    for miss_acl in missing_acls.keys():
                        log.info(
                            f"Hostname: {dev} --- ACL {miss_acl} is missing"
                        )
                else:
                    pass
                # List of ACLs that were added
                added_acls = {
                    x: y
                    for x, y in self.post_acl_names.items()
                    if x not in self.pre_acl_names.keys()
                }
                if added_acls:
                    for add_acl in added_acls.keys():
                        log.info(
                            f" Hostname: {dev} --- ACL {add_acl} was added"
                        )
                else:
                    pass
                #Check for modified ACLs
                #Loop thru pre ACLs as primary
                for pre_acl_name in self.pre_acl_names.keys():
                    try:
                        # process each pre ACE individually and compare to post
                        pre_aces_list = self.pre_acl_names[pre_acl_name][
                            'aces']
                        nested_lookup.nested_delete(pre_aces_list,
                                                    'statistics',
                                                    in_place=True)
                        #use pre-acl name as key to ensure we're comparing the same ACL name
                        post_aces_list = self.post_acl_names[pre_acl_name][
                            'aces']
                        nested_lookup.nested_delete(post_aces_list,
                                                    'statistics',
                                                    in_place=True)
                    #if ACL is removed and empty KeyError is thrown.
                    except KeyError:
                        continue
                    if pre_aces_list and post_aces_list:
                        for pre_acl in pre_aces_list:
                            if pre_acl in post_aces_list:
                                pass
                            else:
                                print((
                                    f"Hostname: {dev} --- ACL {pre_acl_name} seq {pre_acl['name']} has been been modified"
                                ))
                # Check for modified ACLs
                # Loop thru post ACLs as primary
                for post_acl_name in self.post_acl_names.keys():
                    try:
                        # process each pre ACE individually and compare to post
                        post_aces_list = self.post_acl_names[
                            post_acl_name]['aces']
                        nested_lookup.nested_delete(post_aces_list,
                                                    'statistics',
                                                    in_place=True)
                        # use pre-acl name as key to ensure we're comparing the same ACL name
                        pre_aces_list = self.pre_acl_names[post_acl_name][
                            'aces']
                        nested_lookup.nested_delete(pre_aces_list,
                                                    'statistics',
                                                    in_place=True)
                    #If ACL is removed/empty then KeyError is thrown
                    except KeyError:
                        continue
                    if post_aces_list and pre_aces_list:
                        for post_acl in post_aces_list:
                            if post_acl in pre_aces_list:
                                pass
                            else:
                                log.info((
                                    f"Hostname: {dev} --- ACL {post_acl_name} seq {post_acl['name']} has been been modified"
                                ))
            # Any textual diff means the ACL state changed: fail the step.
            step.failed()