def test_user_with_no_security_level(self):
    """A user holding no security-level permission only sees unclassified tags."""
    tag = Tag.objects.create()
    tag_version = TagVersion.objects.create(
        tag=tag,
        type=self.component_type,
        elastic_index="component",
        security_level=None,
    )
    Component.from_obj(tag_version).save(refresh='true')

    with self.subTest('no security level'):
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(len(response.data['hits']), 1)
        self.assertEqual(response.data['hits'][0]['_id'], str(tag_version.pk))

    # Every classified level must be hidden from this user.
    for level in self.security_levels[1:]:
        with self.subTest(f'security level {level}'):
            tag_version.security_level = level
            tag_version.save()
            Component.from_obj(tag_version).save(refresh='true')

            response = self.client.get(self.url)
            self.assertEqual(response.status_code, status.HTTP_200_OK)
            self.assertEqual(len(response.data['hits']), 0)
def test_search_component(self):
    """Components are searchable; visibility follows the archive's organization."""
    component_tag = Tag.objects.create()
    component_version = TagVersion.objects.create(
        tag=component_tag,
        type=self.component_type,
        elastic_index="component",
    )
    Component.from_obj(component_version).save(refresh='true')

    with self.subTest('without archive'):
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(len(response.data['hits']), 1)

    # Build an archive from a structure template and hang the component off it.
    structure_type = StructureType.objects.create()
    template = Structure.objects.create(type=structure_type, is_template=True)
    archive_tag = Tag.objects.create()
    archive_version = TagVersion.objects.create(
        tag=archive_tag,
        type=self.archive_type,
        elastic_index="archive",
    )
    self.group1.add_object(archive_version)
    structure, archive_tag_structure = template.create_template_instance(
        archive_tag)
    Archive.from_obj(archive_version).save(refresh='true')
    TagStructure.objects.create(tag=component_tag, parent=archive_tag_structure, structure=structure)
    Component.index_documents(remove_stale=True)

    with self.subTest('with archive'):
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(len(response.data['hits']), 1)
        self.assertEqual(response.data['hits'][0]['_id'], str(component_version.pk))

    with self.subTest('with archive, non-active organization'):
        # Switching the user to another organization hides the component.
        self.user.user_profile.current_organization = self.group2
        self.user.user_profile.save()

        response = self.client.get(self.url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(len(response.data['hits']), 0)
def test_add_results_to_appraisal(self):
    """Search results can be appended to an appraisal job; only tags are added."""
    foo_tag = Tag.objects.create()
    foo_version = TagVersion.objects.create(
        name='foo',
        tag=foo_tag,
        type=self.component_type,
        elastic_index="component",
    )
    Component.from_obj(foo_version).save(refresh='true')

    bar_tag = Tag.objects.create()
    bar_version = TagVersion.objects.create(
        name='bar',
        tag=bar_tag,
        type=self.component_type,
        elastic_index="component",
    )
    Component.from_obj(bar_version).save(refresh='true')

    # test that we don't try to add structure units matched by query to job
    structure = Structure.objects.create(
        type=StructureType.objects.create(), is_template=False)
    unit = StructureUnit.objects.create(
        name='foo',
        structure=structure,
        type=StructureUnitType.objects.create(structure_type=structure.type),
    )
    StructureUnitDocument.from_obj(unit).save(refresh='true')

    job = AppraisalJob.objects.create()

    # A filtered query adds only the matching tag.
    response = self.client.get(self.url, data={'q': 'foo', 'add_to_appraisal': job.pk})
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    self.assertCountEqual(job.tags.all(), [foo_tag])

    # An unfiltered query adds every tag hit.
    response = self.client.get(self.url, data={'add_to_appraisal': job.pk})
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    self.assertCountEqual(job.tags.all(), [foo_tag, bar_tag])
def update(self, instance, validated_data):
    """Update a TagVersion, its Tag relations and placement, then re-index it.

    Relation-specific keys are popped off ``validated_data`` first so the
    remaining keys can be bulk-applied to the TagVersion row at the end.
    """
    structure_unit = validated_data.pop('structure_unit', None)
    parent = validated_data.pop('parent', None)
    structure = validated_data.pop('structure', None)
    notes_data = validated_data.pop('notes', None)
    identifiers_data = validated_data.pop('identifiers', None)
    information_package = validated_data.pop('information_package', None)
    # 'index' is a write-only input, not a model field on TagVersion.
    validated_data.pop('index', None)

    # Replace (not merge) the identifiers when the client sent a list.
    if identifiers_data is not None:
        NodeIdentifier.objects.filter(tag_version=instance).delete()
        self.create_identifiers(instance, identifiers_data)

    # Replace the notes as well, defaulting each note's create_date to now.
    if notes_data is not None:
        NodeNote.objects.filter(tag_version=instance).delete()
        for note in notes_data:
            note.setdefault('create_date', timezone.now())
        self.create_notes(instance, notes_data)

    if structure is not None:
        tag = instance.tag
        if structure_unit is not None:
            # Placing under a structure unit: the tree parent becomes the
            # archive root of that structure.
            archive_structure = structure.tagstructure_set.first(
            ).get_root()
            parent = archive_structure
        elif parent is not None:
            # Placing under another tag: resolve that tag's TagStructure
            # within the requested structure.
            parent_structure = parent.get_structures(structure).get()
            parent = parent_structure
            structure_unit = None
        TagStructure.objects.update_or_create(tag=tag, structure=structure, defaults={
            'parent': parent,
            'structure_unit': structure_unit,
        })

    instance.tag.information_package = information_package
    instance.tag.save()

    # Bulk-update the remaining simple fields, then reload current values.
    TagVersion.objects.filter(pk=instance.pk).update(**validated_data)
    instance.refresh_from_db()

    # NOTE(review): `doc` is only bound for the 'component' and 'document'
    # indexes; any other elastic_index would raise UnboundLocalError at
    # doc.save() — presumably those are the only indexes reaching this
    # serializer; confirm.
    if instance.elastic_index == 'component':
        doc = Component.from_obj(instance)
    elif instance.elastic_index == 'document':
        doc = File.from_obj(instance)
    doc.save()
    return instance
def test_filter_on_archive_agent(self):
    """The `agents` filter matches components whose archive is linked to the agent."""
    agent = self.create_agent()

    component_tag = Tag.objects.create()
    component_version = TagVersion.objects.create(
        tag=component_tag,
        type=self.component_type,
        elastic_index="component",
    )

    # Instantiate an archive from a structure template.
    template = Structure.objects.create(
        type=StructureType.objects.create(), is_template=True)
    archive_tag = Tag.objects.create()
    archive_version = TagVersion.objects.create(
        tag=archive_tag,
        type=self.archive_type,
        elastic_index="archive",
    )
    structure, archive_tag_structure = template.create_template_instance(
        archive_tag)
    Archive.from_obj(archive_version).save(refresh='true')
    TagStructure.objects.create(tag=component_tag, parent=archive_tag_structure, structure=structure)

    # The agent is linked to the archive, not to the component itself.
    AgentTagLink.objects.create(
        agent=agent,
        tag=archive_version,
        type=AgentTagLinkRelationType.objects.create(),
    )
    Component.from_obj(component_version).save(refresh='true')

    response = self.client.get(self.url, {'agents': str(agent.pk)})
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    self.assertEqual(len(response.data['hits']), 1)
    self.assertEqual(response.data['hits'][0]['_id'], str(component_version.pk))
def test_user_with_multiple_security_levels(self):
    """A user with levels 1 and 3 sees unclassified tags plus those two levels only."""
    self.user.user_permissions.add(
        Permission.objects.get(codename='security_level_1'),
        Permission.objects.get(codename='security_level_3'),
    )
    # Re-fetch so the cached permission set on the user object is rebuilt.
    self.user = User.objects.get(pk=self.user.pk)

    tag = Tag.objects.create()
    tag_version = TagVersion.objects.create(
        tag=tag,
        type=self.component_type,
        elastic_index="component",
        security_level=None,
    )
    Component.from_obj(tag_version).save(refresh='true')

    with self.subTest('no security level'):
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(len(response.data['hits']), 1)
        self.assertEqual(response.data['hits'][0]['_id'], str(tag_version.pk))

    for lvl in self.security_levels:
        with self.subTest(f'security level {lvl}'):
            tag_version.security_level = lvl
            tag_version.save()
            Component.from_obj(tag_version).save(refresh='true')

            response = self.client.get(self.url)
            self.assertEqual(response.status_code, status.HTTP_200_OK)
            if lvl in [1, 3]:
                # Granted levels: the tag is visible.
                self.assertEqual(len(response.data['hits']), 1)
                self.assertEqual(response.data['hits'][0]['_id'], str(tag_version.pk))
            else:
                # All other levels stay hidden.
                self.assertEqual(len(response.data['hits']), 0)
def parse_acts(self, ip, rootdir, errand, acts_root, parent, archive):
    """Yield tag tuples for every 'ArkivobjektHandling' (act) under *acts_root*.

    For each act this yields one entry per attached 'Bilaga' document first,
    then the act itself as ``(tag, tag_version, tag_structure, doc_dict)``.
    """
    for act_el in acts_root.xpath("*[local-name()='ArkivobjektHandling']"):
        tag_version = self.parse_act(act_el, errand, ip)
        act = Component.from_obj(tag_version, archive)
        # Place the act beneath the errand's node, in the same structure.
        tag_repr = TagStructure.objects.create(
            tag=tag_version.tag,
            parent=parent,
            structure=parent.structure,
        )
        # Attachments are emitted before the act entry itself.
        for doc_el in act_el.xpath("*[local-name()='Bilaga']"):
            yield self.parse_document(ip, rootdir, doc_el, act, tag_repr, archive)
        yield tag_version.tag, tag_version, tag_repr, act.to_dict(
            include_meta=True)
def update(self, instance: TagVersion, validated_data):
    """Update a TagVersion, its Tag fields and structure placement, then re-index.

    Unlike a plain field update, relation keys are popped off
    ``validated_data`` up front; omitted ``information_package`` /
    ``appraisal_date`` keep their current values on the tag.
    """
    structure_unit = validated_data.pop('structure_unit', None)
    parent = validated_data.pop('parent', None)
    structure = validated_data.pop('structure', None)
    notes_data = validated_data.pop('notes', None)
    identifiers_data = validated_data.pop('identifiers', None)
    information_package = validated_data.pop('information_package', instance.tag.information_package)
    appraisal_date = validated_data.pop('appraisal_date', instance.tag.appraisal_date)
    # 'index' is a write-only input, not a model field on TagVersion.
    validated_data.pop('index', None)

    self.update_identifiers(instance, identifiers_data)
    self.update_notes(instance, notes_data)

    if structure is not None:
        tag = instance.tag
        if structure_unit is not None:
            # Placing under a structure unit: the tree parent becomes the
            # archive root of that structure.
            archive_structure = structure.tagstructure_set.first().get_root()
            parent = archive_structure
        elif parent is not None:
            # Placing under another tag: resolve that tag's TagStructure
            # within the requested structure.
            parent_structure = parent.get_structures(structure).get()
            parent = parent_structure
            structure_unit = None
        # Only touch the placement when there is something to place under.
        if parent or structure_unit:
            TagStructure.objects.update_or_create(tag=tag, structure=structure, defaults={
                'parent': parent,
                'structure_unit': structure_unit,
            })

    instance.tag.information_package = information_package
    instance.tag.appraisal_date = appraisal_date
    instance.tag.save()

    # Bulk-update the remaining simple fields, then reload current values.
    TagVersion.objects.filter(pk=instance.pk).update(**validated_data)
    instance.refresh_from_db()

    # NOTE(review): `doc` is only bound for 'component'/'document' indexes;
    # any other elastic_index would raise UnboundLocalError at doc.save().
    if instance.elastic_index == 'component':
        doc = Component.from_obj(instance)
    elif instance.elastic_index == 'document':
        doc = File.from_obj(instance)
    doc.save()
    return instance
def parse_volym(cls, el, archive_version, parent_tag_structure, structure_unit, agent, task=None, ip=None):
    """Parse a volume ('volym') element into unsaved model instances and an ES doc.

    Returns ``(doc_dict, tag, tag_version, tag_structure, agent_tag_link)``.
    None of the model instances are saved here — presumably the caller
    bulk-creates them; confirm against the caller.
    """
    logger.debug("Parsing volym...")
    ref_code = el.xpath("va:volnr", namespaces=cls.NSMAP)[0].text
    name = el.xpath("va:utseende", namespaces=cls.NSMAP)[0].text
    tag_type = cls.VOLUME_TYPE
    volym_id = uuid.uuid4()

    tag = Tag(information_package=ip, task=task)
    tag_version = TagVersion(
        pk=volym_id,
        tag=tag,
        elastic_index='component',
        reference_code=ref_code,
        name=name,
        create_date=cls.parse_volume_create_date(el),
        revise_date=cls.parse_volume_revise_date(el),
        import_date=timezone.now(),
        type=tag_type,
    )
    # Tree fields are filled with placeholders (lft/rght/level = 0) —
    # presumably the tree is rebuilt after bulk insert; confirm.
    tag_structure = TagStructure(
        tag=tag,
        structure_unit=structure_unit,
        structure=parent_tag_structure.structure,
        parent=parent_tag_structure,
        tree_id=parent_tag_structure.tree_id,
        lft=0,
        rght=0,
        level=0
    )
    agent_tag_link = AgentTagLink(
        agent=agent,
        tag_id=tag_version.id,
        type=cls.AGENT_TAG_LINK_RELATION_TYPE,
    )
    doc = Component.from_obj(tag_version, archive=archive_version)
    doc.agents = [str(agent.pk)]
    logger.debug("Parsed volym: {}".format(tag_version.pk))
    return doc.to_dict(include_meta=True), tag, tag_version, tag_structure, agent_tag_link
def parse_errands(self, ip, rootdir, archive, errands_root):
    """Yield tag tuples for each errand under *errands_root*, acts included.

    For every errand this yields all of its act entries first (via
    ``parse_acts``), then the errand itself as
    ``(tag, tag_version, tag_structure, doc_dict)``.
    """
    archive_structure = archive.get_active_structure()
    structure = archive_structure.structure
    for errand in self.get_arkiv_objekt_arenden(errands_root):
        tag_version, structure_unit = self.parse_errand(
            errand, archive, ip, structure)
        # Place the errand under the archive root, in its structure unit.
        tag_repr = TagStructure.objects.create(
            tag=tag_version.tag,
            structure_unit=structure_unit,
            structure=structure,
            parent=archive_structure,
        )
        component = Component.from_obj(tag_version, archive)
        acts_root = self.get_acts_root(errand)
        if len(acts_root):
            for act in self.parse_acts(ip, rootdir, component, acts_root[0], tag_repr, archive):
                yield act
        yield tag_version.tag, tag_version, tag_repr, component.to_dict(
            include_meta=True)
def parse_act(self, act, errand):
    """Build an (unsaved) Component ES document from an act XML element.

    *errand* is the already-indexed parent errand document; the act links to
    it through a ``Node`` reference (id + index name).
    """
    # Scalar fields; presumably parse_mappings tries list entries in order
    # as fallbacks — confirm against parse_mappings.
    data_mappings = {
        'name': ['Rubrik', 'ArkivobjektID'],
        'status': 'StatusHandling',
        'handlingstyp': 'Handlingstyp',
        'klassreferens': 'KlassReferens',
        'arkivobjekt_id': 'ArkivobjektID',
    }
    data = self.parse_mappings(data_mappings, act)

    # Repeating child elements, each parsed with its dedicated helper.
    data['avsandare'] = []
    for avsandare in act.xpath("*[local-name()='Avsandare']"):
        data['avsandare'].append(self.parse_person(avsandare))
    data['mottagare'] = []
    for mottagare in act.xpath("*[local-name()='Mottagare']"):
        data['mottagare'].append(self.parse_person(mottagare))
    data['agenter'] = []
    for agent in act.xpath("*[local-name()='Agent']"):
        data['agenter'].append(self.parse_agent(agent))
    data['restriktioner'] = []
    for restriktion in act.xpath("*[local-name()='Restriktion']"):
        data['restriktioner'].append(self.parse_restriction(restriktion))
    data['relationer'] = []
    for relation in act.xpath("*[local-name()='HandlingRelation']"):
        data['relationer'].append(self.parse_relation(relation))
    data['extra_ids'] = []
    for extra_id in act.xpath("*[local-name()='ExtraID']"):
        parsed = self.parse_extra_id(extra_id)
        if parsed is not None:
            data['extra_ids'].append(parsed)

    # 'Gallring' (disposal) is optional — absence is simply skipped.
    try:
        data['gallring'] = self.parse_gallring(act.xpath("*[local-name()='Gallring']")[0])
    except IndexError:
        pass

    data['egna_element'] = []
    for egna_element in act.xpath("*[local-name()='EgnaElement']"):
        data['egna_element'].append(self.parse_egna_element(egna_element))

    date_mappings = {
        'dispatch_date': 'Expedierad',
        'arrival_date': 'Inkommen',
        'last_usage_date': 'SistaAnvandandetidpunkt',
        'create_date': 'Skapad',
        'preparation_date': 'Upprattad',
    }
    dates = self.parse_mappings(date_mappings, act)

    # A fresh UUID is used instead of the source system identifier.
    component_id = str(uuid.uuid4())  # act.get("Systemidentifierare")
    reference_code = act.xpath("*[local-name()='ArkivobjektID']")[0].text
    unit_ids = {'id': reference_code}
    # Reference to the parent errand's ES document.
    parent = Node(id=errand.meta.id, index=errand._index._name)
    data.update(dates)
    return Component(
        _id=component_id,
        current_version=True,
        unit_ids=unit_ids,
        task_id=str(self.task.pk),
        parent=parent,
        type='Handling',
        reference_code=reference_code,
        archive=errand.archive,
        ip=errand.ip,
        **data
    )
def parse_volume(self, el, medium_type_logisk, task, ip=None):
    """Parse a volume element, persist its tag/placement, and return the ES doc.

    Resolves the owning agent, archive and series through previously cached
    hashes (populated by earlier parse passes), then creates the Tag,
    TagVersion, AgentTagLink and TagStructure rows.

    Returns ``(doc_dict, tag_version)``.
    """
    logger.debug("Parsing volume...")
    tag_type = self.VOLUME_TYPE
    ref_code = el.xpath("Volume.VolumeCode")[0].text
    name = el.xpath("Volume.Title")[0].text or ""
    date = el.xpath("Volume.Date")[0].text
    # First 4 chars are the start year; last 4 the end year.
    # NOTE(review): the end_date guard is `len(date) == 4` while start_date
    # uses `>= 4`, so longer date strings get no end_date — looks
    # inconsistent; confirm whether `>= 4` was intended.
    start_date = self._parse_year_string(date[:4]) if date and len(date) >= 4 else None
    end_date = self._parse_year_string(date[-4:], end=True) if date and len(date) == 4 else None
    short_name = el.xpath("VolumeType.ShortName")[0].text

    # 'L' marks a logical (non-physical) volume.
    if short_name == 'L':
        medium_type = medium_type_logisk
    else:
        medium_type = None

    volume_id = str(uuid.uuid4())

    # Agent / archive / series were cached by hash in earlier parse steps.
    agent_hash = self.build_agent_hash(
        el.xpath("ArchiveOrig.ArchiveOrigID")[0].text,
        el.xpath("ArchiveOrig.Name")[0].text,
    )
    agent_id = cache.get(agent_hash)

    archive_hash = self.build_archive_hash(
        el.xpath("Archive.ArchiveID")[0].text,
        el.xpath("Archive.Name")[0].text,
        agent_hash,
    )
    archive_tag_id = cache.get(archive_hash)
    archive_tag = Tag.objects.select_related(
        'current_version'
    ).prefetch_related(
        'structures'
    ).get(
        pk=archive_tag_id
    )

    series_hash = self.build_series_hash(
        el.xpath("Series.SeriesID")[0].text,
        el.xpath("Series.Signum")[0].text,
        el.xpath("Series.Title")[0].text,
        archive_hash,
    )
    unit_id = cache.get(series_hash)
    unit = StructureUnit.objects.get(pk=unit_id)

    tag = Tag.objects.create(information_package=ip, task=task)
    tag_version = TagVersion.objects.create(
        pk=volume_id,
        tag=tag,
        elastic_index='component',
        reference_code=ref_code,
        name=name,
        type=tag_type,
        create_date=None,
        import_date=timezone.now(),
        start_date=start_date,
        end_date=end_date,
        medium_type=medium_type,
    )
    AgentTagLink.objects.create(
        agent_id=agent_id,
        tag=tag_version,
        type=self.tag_link_relation_type,
    )

    # A relation reference embedded in the title is stashed in the cache
    # (5 min TTL) for a later pass to resolve.
    related_id_match = self.VOLUME_RELATION_REGEX.search(name)
    if related_id_match:
        relation_cache_key = 'relation_{}'.format(volume_id)
        match_without_whitespace = re.sub(r'\s+', '', related_id_match.group(0))
        cache.set(relation_cache_key, match_without_whitespace, 300)

    TagStructure.objects.create(
        tag=tag,
        structure_unit=unit,
        structure=unit.structure,
        parent=archive_tag.get_active_structure()
    )

    doc = Component.from_obj(tag_version, archive=archive_tag.current_version)
    logger.debug("Parsed volume: {}".format(tag_version.pk))
    return doc.to_dict(include_meta=True), tag_version
def create(self, validated_data):
    """Create a Tag + TagVersion, place it in a structure, and index it.

    Placement: under a structure unit (parent = the structure's archive
    root) when ``structure_unit`` is given, otherwise under ``parent`` in
    the given structure (or the parent's active structure).

    Returns the created Tag. Everything runs in one transaction.
    """
    with transaction.atomic():
        structure_unit = validated_data.pop('structure_unit', None)
        parent = validated_data.pop('parent', None)
        structure = validated_data.pop('structure', None)
        notes_data = validated_data.pop('notes', [])
        identifiers_data = validated_data.pop('identifiers', [])
        information_package = validated_data.pop('information_package', None)
        appraisal_date = validated_data.pop('appraisal_date', None)
        index = validated_data.pop('index')

        tag = Tag.objects.create(
            information_package=information_package,
            appraisal_date=appraisal_date,
        )
        tag_structure = TagStructure(tag=tag)
        if structure_unit is not None:
            # Place directly under a structure unit; the tree parent is
            # that structure's archive root.
            tag_structure.structure_unit = structure_unit
            tag_structure.structure = structure_unit.structure

            archive_structure = TagStructure.objects.filter(structure=structure_unit.structure).first().get_root()
            tag_structure.parent = archive_structure
            tag_structure.save()
        else:
            # Place under another tag, in the requested or active structure.
            if structure is None:
                parent_structure = parent.get_active_structure()
            else:
                parent_structure = parent.get_structures(structure).get()

            tag_structure.parent = parent_structure
            tag_structure.structure = parent_structure.structure
            tag_structure.save()

        tag_version = TagVersion.objects.create(
            tag=tag,
            elastic_index=index,
            **validated_data,
        )
        tag.current_version = tag_version
        tag.save()

        # NOTE(review): tag_version was just created, so this filter is
        # presumably empty here — possibly a leftover copy step; confirm.
        for agent_link in AgentTagLink.objects.filter(tag=tag_version):
            AgentTagLink.objects.create(tag=tag_version, agent=agent_link.agent, type=agent_link.type)

        tag_structure.refresh_from_db()

        # Prel remove code, does not need copy to related structures??
        # if structure_unit is None:
        #    structure_unit = tag_structure.get_ancestors(
        #        include_self=True
        #    ).filter(structure_unit__isnull=False).get().structure_unit

        # related_units = structure_unit.related_structure_units.filter(
        #    structure__is_template=False
        # ).exclude(
        #    structure=tag_structure.structure
        # )

        # logger.debug('ComponentWriteSerializer - create - tag: {tag}, tag_structure: {tag_structure},
        # tag_structure.structure: {tag_structure_structure},
        # tag_structure.structure_unit: {tag_structure_structure_unit},
        # related_units: {related_units}'.format(
        #    tag=tag, tag_structure=tag_structure, tag_structure_structure=tag_structure.structure,
        #    tag_structure_structure_unit=tag_structure.structure_unit, related_units=related_units
        # ))

        # for related in related_units:
        #    new_unit = related if tag_structure.structure_unit is not None else None
        #    logger.debug('ComponentWriteSerializer - create (for related) - related: {related},
        #    new_unit: {new_unit}, related.structure: {related_structure}'.format(
        #        related=related, new_unit=new_unit, related_structure=related.structure
        #    ))
        #    new_tag_structure = tag_structure.copy_to_new_structure(related.structure, new_unit=new_unit)
        #    logger.debug('ComponentWriteSerializer - create (for related 2) - new_tag_structure:
        #    {new_tag_structure}'.format(
        #        new_tag_structure=new_tag_structure
        #    ))

        self.create_identifiers(tag_version, identifiers_data)
        self.create_notes(tag_version, notes_data)

        tag_version.refresh_from_db()
        doc = Component.from_obj(tag_version)
        doc.save()

    return tag
def test_filter_appraisal_date(self):
    """The appraisal_date_after/_before filters behave as an inclusive range."""
    tag = Tag.objects.create(
        appraisal_date=make_aware(datetime(year=2020, month=1, day=1)))
    TagVersion.objects.create(
        tag=tag,
        type=self.component_type,
        elastic_index="component",
    )
    version = tag.versions.first() if hasattr(tag, 'versions') else None
    # Index the single component with appraisal_date 2020-01-01.
    doc = Component.from_obj(TagVersion.objects.get(tag=tag))
    doc.save(refresh='true')

    # (subtest label, query params, expected hit count)
    cases = [
        ('2020-01-01 is after or equal to 2020-01-01',
         {'appraisal_date_after': '2020-01-01'}, 1),
        ('2020-01-01 not after 2020-01-02',
         {'appraisal_date_after': '2020-01-02'}, 0),
        ('2020-01-01 not before 2019-12-31',
         {'appraisal_date_before': '2019-12-31'}, 0),
        ('2020-01-01 between 2019-01-01 and 2020-01-01',
         {'appraisal_date_after': '2019-01-01', 'appraisal_date_before': '2020-01-01'}, 1),
        ('2020-01-01 between 2020-01-01 and 2020-12-31',
         {'appraisal_date_after': '2020-01-01', 'appraisal_date_before': '2020-12-31'}, 1),
        ('2020-01-01 not between 2020-01-02 and 2020-12-31',
         {'appraisal_date_after': '2020-01-02', 'appraisal_date_before': '2020-12-31'}, 0),
        ('2020-01-01 not between 2019-01-01 and 2019-12-31',
         {'appraisal_date_after': '2019-01-01', 'appraisal_date_before': '2019-12-31'}, 0),
    ]
    for label, params, expected in cases:
        with self.subTest(label):
            response = self.client.get(self.url, data=params)
            self.assertEqual(response.status_code, status.HTTP_200_OK)
            self.assertEqual(len(response.data['hits']), expected)

    with self.subTest('invalid range 2020-12-31 - 2020-01-01'):
        response = self.client.get(self.url, data={
            'appraisal_date_after': '2020-12-31',
            'appraisal_date_before': '2020-01-01',
        })
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def parse_errand(self, errand, archive, ip, structure):
    """Build an (unsaved) Component ES document from an errand XML element.

    Returns ``(component, structure_unit)`` where structure_unit is resolved
    from the errand's 'KlassReferens' within *structure*.

    Raises StructureUnit.DoesNotExist (after logging) when the reference
    code has no matching unit.
    """
    # ip may be None or an IP instance; only swallow the AttributeError
    # that comes from None.pk.
    try:
        ip_id = ip.pk
    except AttributeError:
        if ip is not None:
            raise
        ip_id = None

    unit_reference_code = errand.xpath("*[local-name()='KlassReferens']")[0].text
    try:
        structure_unit = StructureUnit.objects.get(structure=structure, reference_code=unit_reference_code)
    except StructureUnit.DoesNotExist:
        logger.exception('Structure unit {} not found in {}'.format(unit_reference_code, structure))
        raise

    # Scalar fields; presumably parse_mappings tries list entries in order
    # as fallbacks — confirm against parse_mappings.
    data_mappings = {
        'name': ['Arendemening', 'Rubrik', 'ArkivobjektID'],
        'status': 'StatusArande',
        'arendetyp': 'ArendeTyp',
        'klassreferens': 'KlassReferens',
        'arkivobjekt_id': 'ArkivobjektID',
    }
    data = self.parse_mappings(data_mappings, errand)

    # Optional counterpart ('Motpart') — absence is simply skipped.
    try:
        motpart = errand.xpath("*[local-name()='Motpart']")[0]
        data['motpart'] = self.parse_person(motpart)
    except IndexError:
        pass

    data['relationer'] = []
    for relation in errand.xpath("*[local-name()='ArendeRelation']"):
        data['relationer'].append(self.parse_relation(relation))
    data['agenter'] = []
    for agent in errand.xpath("*[local-name()='Agent']"):
        data['agenter'].append(self.parse_agent(agent))
    data['restriktioner'] = []
    for restriktion in errand.xpath("*[local-name()='Restriktion']"):
        data['restriktioner'].append(self.parse_restriction(restriktion))
    data['egna_element'] = []
    for egna_element in errand.xpath("*[local-name()='EgnaElement']"):
        data['egna_element'].append(self.parse_egna_element(egna_element))

    date_mappings = {
        'decision_date': 'Beslutat',
        'dispatch_date': 'Expedierad',
        'arrival_date': 'Inkommen',
        'last_usage_date': 'SistaAnvandandetidpunkt',
        'create_date': 'Skapad',
        'preparation_date': 'Upprattad',
        'ended_date': 'Avslutat',
    }
    dates = self.parse_mappings(date_mappings, errand)

    # NOTE(review): both lists below are built but never used in the
    # returned Component — possibly dead code or a planned field; confirm.
    personal_identification_numbers = []
    initiators = []
    for initiator in errand.xpath("*/*[local-name()='EgetElement' and @Namn='Initierare']"):
        initiator_obj = self.parse_initiator(initiator)
        initiators.append(initiator_obj)
        try:
            personal_identification_numbers.append(initiator_obj['personal_identification_number'])
        except KeyError:
            pass

    data.update(dates)

    # A fresh UUID is used instead of the source system identifier.
    component_id = str(uuid.uuid4())  # errand.get("Systemidentifierare")
    reference_code = errand.xpath("*[local-name()='ArkivobjektID']")[0].text
    unit_ids = {'id': reference_code}
    return Component(
        _id=component_id,
        current_version=True,
        unit_ids=unit_ids,
        structure_unit=str(structure_unit.pk),
        type=u'Ärende',
        reference_code=reference_code,
        archive=str(archive.pk),
        ip=ip_id,
        task_id=str(self.task.pk),
        **data
    ), structure_unit
def get_component(self, unitid):
    """Return the TagVersion whose indexed component matches *unitid*."""
    search = Component.search().query("bool", must=self._get_node_query(unitid))
    first_hit = search.execute().hits[0]
    return TagVersion.objects.get(pk=first_hit._id)
def create(self, validated_data):
    """Create a Tag + TagVersion, place it in a structure, and index it.

    Placement: under a structure unit (parent = the structure's archive
    root) when ``structure_unit`` is given, otherwise under ``parent`` in
    the given structure (or the parent's active structure). The placement
    is then mirrored into every related non-template structure.

    Returns the created Tag. Everything runs in one transaction.
    """
    with transaction.atomic():
        structure_unit = validated_data.pop('structure_unit', None)
        parent = validated_data.pop('parent', None)
        structure = validated_data.pop('structure', None)
        notes_data = validated_data.pop('notes', [])
        identifiers_data = validated_data.pop('identifiers', [])
        information_package = validated_data.pop('information_package', None)
        index = validated_data.pop('index')

        tag = Tag.objects.create(information_package=information_package)
        tag_structure = TagStructure(tag=tag)
        if structure_unit is not None:
            # Place directly under a structure unit; the tree parent is
            # that structure's archive root.
            tag_structure.structure_unit = structure_unit
            tag_structure.structure = structure_unit.structure

            archive_structure = TagStructure.objects.filter(structure=structure_unit.structure).first().get_root()
            tag_structure.parent = archive_structure
            tag_structure.save()
        else:
            # Place under another tag, in the requested or active structure.
            if structure is None:
                parent_structure = parent.get_active_structure()
            else:
                parent_structure = parent.get_structures(structure).get()

            tag_structure.parent = parent_structure
            tag_structure.structure = parent_structure.structure
            tag_structure.save()

        tag_version = TagVersion.objects.create(
            tag=tag,
            elastic_index=index,
            **validated_data,
        )
        tag.current_version = tag_version
        tag.save()

        # NOTE(review): tag_version was just created, so this filter is
        # presumably empty here — possibly a leftover copy step; confirm.
        for agent_link in AgentTagLink.objects.filter(tag=tag_version):
            AgentTagLink.objects.create(tag=tag_version, agent=agent_link.agent, type=agent_link.type)

        tag_structure.refresh_from_db()

        if structure_unit is None:
            # Derive the unit from the nearest ancestor that has one.
            structure_unit = tag_structure.get_ancestors(
                include_self=True
            ).filter(structure_unit__isnull=False).get().structure_unit

        # Mirror the placement into every related non-template structure.
        related_units = structure_unit.related_structure_units.filter(
            structure__is_template=False
        ).exclude(
            structure=tag_structure.structure
        )
        for related in related_units:
            new_unit = related if tag_structure.structure_unit is not None else None
            tag_structure.copy_to_new_structure(related.structure, new_unit=new_unit)

        # BUG FIX: identifiers and notes must be attached to the new
        # tag_version, not to the serializer instance (`self`) as before —
        # passing `self` created records pointing at the wrong object.
        self.create_identifiers(tag_version, identifiers_data)
        self.create_notes(tag_version, notes_data)

        doc = Component.from_obj(tag_version)
        doc.save()
    return tag