def test_cgus(self):
    """CGU formatter.

    For every search result, the conditions formatter must return a list
    whose length matches the raw ``conditions`` attribute of the result.
    """
    # get conditions reformatted
    for result in self.search.results:
        # load result as a Metadata object
        md = Metadata.clean_attributes(result)
        # empty or not, it should work — the original if/else had two
        # byte-identical branches, so no branching is needed (fix)
        cgus_out = self.fmt.conditions(md.conditions)
        # test
        # NOTE(review): sibling tests assert the conditions formatter
        # returns a tuple — confirm whether list is really expected here
        self.assertIsInstance(cgus_out, list)
        self.assertEqual(len(result.get("conditions")), len(cgus_out))
def test_limitations(self):
    """Limitations formatter."""
    # filtered search: only metadatas carrying limitations are checked
    for raw_md in self.search.results:
        md = Metadata.clean_attributes(raw_md)
        if not md.limitations:
            continue
        # get limitations reformatted
        reformatted = self.fmt.limitations(md.limitations)
        self.assertIsInstance(reformatted, tuple)
    # fixtures
    reformatted = self.fmt.limitations(fixture_limitations)
    self.assertIsInstance(reformatted, tuple)
    self.assertEqual(len(reformatted), 10)
    for item in reformatted:
        self.assertIsInstance(item, dict)
        self.assertIn("description", item)
def test_conditions(self):
    """Conditions formatter."""
    # filtered search: only metadatas carrying conditions are checked
    for raw_md in self.search.results:
        md = Metadata.clean_attributes(raw_md)
        if not md.conditions:
            continue
        # get conditions reformatted
        reformatted = self.fmt.conditions(md.conditions)
        self.assertIsInstance(reformatted, tuple)
    # fixtures
    reformatted = self.fmt.conditions(fixture_conditions)
    self.assertIsInstance(reformatted, tuple)
    self.assertEqual(len(reformatted), 6)
    for item in reformatted:
        self.assertIsInstance(item, dict)
        self.assertIn("description", item)
def test_metadata_export(self):
    """Test search results export to Word documents.

    Renders the first 20 metadatas of the fixture search through the
    Word template and saves each one to a temporary ``.docx`` file.
    """
    import os

    # load search fixtures
    with open(self.search_all_includes, "r") as f:
        search = json.loads(f.read())
    # run — the template is reloaded inside the loop for every metadata;
    # the original also loaded it once before the loop, which was dead
    # code (immediately shadowed) and has been removed
    for md in search.get("results")[:20]:
        metadata = Metadata.clean_attributes(md)
        # output path — mkstemp returns (fd, path); close the descriptor
        # to avoid leaking one open fd per iteration (fix)
        fd, tmp_path = mkstemp(prefix="i2o_test_docx_")
        os.close(fd)
        out_docx_path = tmp_path + ".docx"
        # templating
        tpl = DocxTemplate(self.word_template)
        self.to_docx.md2docx(tpl, metadata)
        # save
        tpl.save(out_docx_path)
        del tpl
def test_specifications(self):
    """Specifications formatter."""
    # filtered search
    for raw_md in self.search.results:
        md = Metadata.clean_attributes(raw_md)
        if md.specifications:
            # get the metadata's own specifications reformatted
            reformatted = self.fmt.specifications(md.specifications)
        else:
            # the formatter must also accept an empty list
            reformatted = self.fmt.specifications([])
        self.assertIsInstance(reformatted, tuple)
    # fixtures
    reformatted = self.fmt.specifications(fixture_specifications)
    self.assertIsInstance(reformatted, tuple)
    self.assertEqual(len(reformatted), 2)
    for spec in reformatted:
        self.assertIsInstance(spec, dict)
        for key in ("conformant", "link", "name", "published"):
            self.assertIn(key, spec)
username=environ.get("ISOGEO_USER_NAME"), password=environ.get("ISOGEO_USER_PASSWORD"), ) auth_timer = default_timer() src_md = isogeo.search( group=environ.get("ISOGEO_ORIGIN_WORKGROUP"), whole_results=True, query="catalog:{}".format(src_cat), include="all" ) # listing li_md_to_delete = [] for md in src_md.results: metadata = Metadata.clean_attributes(md) md_cat = [metadata.tags.get(tag) for tag in metadata.tags if tag.startswith("catalog:")] if trg_cat not in md_cat: li_md_to_delete.append(metadata._id) else: pass logger.info("------- {} source metadatas listed gonna be backuped then deleted -------".format(len(li_md_to_delete))) # ################# BACKUP MDs THAT ARE GONNA BE DELETED ####################### # instanciate backup manager backup_path = Path(r"./scripts/dijon/migration/_output/_backup_deleted") backup_mng = BackupManager(api_client=isogeo, output_folder=backup_path) # lauching backup amplitude = 50 bound_range = int(len(li_md_to_delete) / amplitude) li_bound = []
def filter_matching_metadatas(self, isogeo_search_results: list) -> tuple:
    """Filter search results basing on matching patterns.

    For each (attribute, pattern) pair in ``self.attributes_patterns``,
    scan every metadata: string attribute values containing the pattern's
    search token are rewritten through ``self.replacer`` and the updated
    Metadata object is collected. An object matched on several attributes
    is updated in place so replacements accumulate.

    :param MetadataSearch isogeo_search_results: Isogeo search results
        (`MetadataSearch.results`)

    :returns: a tuple of objects with the updated attributes
    :rtype: tuple
    """
    # metadata _id -> Metadata object carrying the updated attributes
    di_out_objects = {}
    # parse attributes to replace
    for attribute, pattern in self.attributes_patterns.items():
        logger.info("Searching into '{}' values...".format(attribute))
        # counters
        empty = 0
        ignored = 0
        matched = 0
        # parse metadatas
        for md in isogeo_search_results:
            # load metadata as object
            metadata = Metadata.clean_attributes(md)
            # get attribute value
            in_value = getattr(metadata, attribute)
            # check if attribute has a value
            if not isinstance(in_value, str):
                empty += 1
                continue
            # special case: a title equal to the technical name is
            # treated as empty
            if attribute == "title" and in_value == metadata.name:
                empty += 1
                continue
            # check if the value matches the search
            if pattern[0] not in str(in_value):
                ignored += 1
                continue
            logger.debug(
                "Value of '{}' to change spotted in {}: '{}'".format(
                    attribute, metadata._id, in_value
                )
            )
            matched += 1
            # reuse the previously-updated object if this metadata was
            # already matched on another attribute (single dict lookup
            # replaces the original duplicated if/else branches)
            target = di_out_objects.get(metadata._id, metadata)
            setattr(target, attribute, self.replacer(in_value, pattern))
            di_out_objects[metadata._id] = target
        # log for this attribute (fix: "do not contains" -> "do not contain")
        logger.info(
            "{} metadatas do not contain a valid {}".format(empty, attribute)
        )
        logger.info(
            "{} metadatas.{} DO NOT MATCH the pattern: {}".format(
                ignored, attribute, pattern[0]
            )
        )
        logger.info(
            "{} metadatas.{} MATCH the pattern: {}".format(
                matched, attribute, pattern[0]
            )
        )
    # return tuple of metadata to be updated
    return tuple(di_out_objects.values())