def test_search_component(self):
    """Search results should only include components whose archive is visible
    to the user's current organization."""
    # A component indexed with no surrounding archive/structure yet.
    component_tag = Tag.objects.create()
    component_tag_version = TagVersion.objects.create(
        tag=component_tag,
        type=self.component_type,
        elastic_index="component",
    )
    # refresh='true' makes the document searchable immediately in the test.
    Component.from_obj(component_tag_version).save(refresh='true')

    with self.subTest('without archive'):
        res = self.client.get(self.url)
        self.assertEqual(res.status_code, status.HTTP_200_OK)
        self.assertEqual(len(res.data['hits']), 1)

    # Build an archive (owned by group1) and place the component beneath it.
    structure_type = StructureType.objects.create()
    structure_template = Structure.objects.create(type=structure_type, is_template=True)
    archive_tag = Tag.objects.create()
    archive_tag_version = TagVersion.objects.create(
        tag=archive_tag,
        type=self.archive_type,
        elastic_index="archive",
    )
    # Grant group1 (the user's current organization) access to the archive.
    self.group1.add_object(archive_tag_version)
    structure, archive_tag_structure = structure_template.create_template_instance(
        archive_tag)
    Archive.from_obj(archive_tag_version).save(refresh='true')

    # Attach the component under the archive within the instantiated structure.
    TagStructure.objects.create(tag=component_tag, parent=archive_tag_structure, structure=structure)
    # Re-index so the component document picks up its new archive ancestor.
    Component.index_documents(remove_stale=True)

    with self.subTest('with archive'):
        res = self.client.get(self.url)
        self.assertEqual(res.status_code, status.HTTP_200_OK)
        self.assertEqual(len(res.data['hits']), 1)
        self.assertEqual(res.data['hits'][0]['_id'], str(component_tag_version.pk))

    with self.subTest('with archive, non-active organization'):
        # Switching to group2 (no access to the archive) must hide the component.
        self.user.user_profile.current_organization = self.group2
        self.user.user_profile.save()

        res = self.client.get(self.url)
        self.assertEqual(res.status_code, status.HTTP_200_OK)
        self.assertEqual(len(res.data['hits']), 0)
def update(self, instance: TagVersion, validated_data):
    """Update an archive tag version.

    Nested identifiers/notes are delegated to the dedicated helpers, new
    structure templates are instantiated for the tag, the appraisal date is
    written to the owning Tag, and the Elasticsearch document is refreshed.
    Returns the updated ``TagVersion`` instance.
    """
    structures = validated_data.pop('structures', [])
    notes_data = validated_data.pop('notes', None)
    identifiers_data = validated_data.pop('identifiers', None)
    # Fall back to the current value so omitting the field keeps it unchanged.
    appraisal_date = validated_data.pop('appraisal_date', instance.tag.appraisal_date)

    self.update_identifiers(instance, identifiers_data)
    self.update_notes(instance, notes_data)

    with transaction.atomic():
        for template in structures:
            # Only instantiate templates the tag is not already part of.
            already_instantiated = TagStructure.objects.filter(
                tag=instance.tag, structure__template=template).exists()
            if already_instantiated:
                continue
            new_instance, _ = template.create_template_instance(instance.tag)
            for unit in new_instance.units.all():
                StructureUnitDocument.from_obj(unit).save()

        instance.tag.appraisal_date = appraisal_date
        instance.tag.save()

        # Bulk update bypasses model save(); refresh to see the new values.
        TagVersion.objects.filter(pk=instance.pk).update(**validated_data)
        instance.refresh_from_db()

        # Keep the search index in sync with the database row.
        Archive.from_obj(instance).save()

    return instance
def update(self, instance, validated_data):
    """Update a tag version and its nested relations.

    Identifiers and notes, when provided, are replaced wholesale
    (delete-then-recreate). New structure templates are instantiated for the
    tag, scalar fields are bulk-updated, and the Elasticsearch archive
    document is re-saved. Returns the updated instance.
    """
    structures = validated_data.pop('structures', [])
    notes_data = validated_data.pop('notes', None)
    identifiers_data = validated_data.pop('identifiers', None)

    if identifiers_data is not None:
        # Full replacement: drop existing identifiers, then recreate.
        NodeIdentifier.objects.filter(tag_version=instance).delete()
        self.create_identifiers(instance, identifiers_data)

    if notes_data is not None:
        # Full replacement: drop existing notes, then recreate.
        NodeNote.objects.filter(tag_version=instance).delete()
        for note in notes_data:
            # Stamp a creation date on notes that did not supply one.
            note.setdefault('create_date', timezone.now())
        self.create_notes(instance, notes_data)

    with transaction.atomic():
        for template in structures:
            already_instantiated = TagStructure.objects.filter(
                tag=instance.tag, structure__template=template).exists()
            if already_instantiated:
                continue
            new_instance, _ = template.create_template_instance(instance.tag)
            for unit in new_instance.units.all():
                StructureUnitDocument.from_obj(unit).save()

        # Bulk update bypasses model save(); refresh to see the new values.
        TagVersion.objects.filter(pk=instance.pk).update(**validated_data)
        instance.refresh_from_db()

        # Keep the search index in sync with the database row.
        Archive.from_obj(instance).save()

    return instance
def parse_arkiv(cls, el, agent, task=None, ip=None):
    """Parse an ``arkiv`` element into tag, version, structure and agent link.

    Creates the Tag/TagVersion rows, a published template structure (plus its
    instance for the tag), indexes the structure units, links the creating
    agent, and returns the archive document dict together with the created
    database objects.
    """
    logger.info("Parsing arkiv...")

    name = el.xpath("va:arkivnamn", namespaces=cls.NSMAP)[0].text
    tag_type = cls.ARCHIVE_TYPE

    # Coverage years: start maps to Jan 1, end to Dec 31 of the given year.
    start_year = el.xpath("va:tidarkivf", namespaces=cls.NSMAP)[0].text
    start_date = (
        datetime(year=int(start_year), month=1, day=1, tzinfo=pytz.UTC)
        if start_year is not None else None
    )

    end_year = el.xpath("va:tidarkivt", namespaces=cls.NSMAP)[0].text
    end_date = (
        datetime(year=int(end_year), month=12, day=31, tzinfo=pytz.UTC)
        if end_year is not None else None
    )

    tag = Tag.objects.create(information_package=ip, task=task)
    tag_version = TagVersion.objects.create(
        tag=tag,
        elastic_index='archive',
        type=tag_type,
        name=name,
        create_date=cls.parse_archive_create_date(el),
        revise_date=cls.parse_archive_revise_date(el),
        import_date=timezone.now(),
        start_date=start_date,
        end_date=end_date,
    )

    # A published template structure for the archive's finding aid.
    template = Structure.objects.create(
        name="Arkivförteckning för {}".format(name),
        type=cls.STRUCTURE_TYPE,
        is_template=True,
        published=True,
        published_date=timezone.now(),
        version='1.0',
        task=task,
    )
    template_instance, tag_structure = template.create_template_instance(tag)
    for unit in template_instance.units.all():
        StructureUnitDocument.from_obj(unit).save()

    agent_tag_link = AgentTagLink.objects.create(
        agent=agent,
        tag_id=tag_version.id,
        type=cls.AGENT_TAG_LINK_RELATION_TYPE,
    )

    doc = Archive.from_obj(tag_version)
    doc.agents = [str(agent.pk)]

    logger.info("Parsed arkiv: {}".format(tag_version.pk))
    return doc.to_dict(include_meta=True), tag, tag_version, tag_structure, agent_tag_link
def get_archive(id):
    """Return the archive document with the given id as a dict.

    The result is cached under ``archive_<id>``; cache hits skip the
    Elasticsearch round-trip entirely.
    """
    cache_key = 'archive_%s' % id
    cached = cache.get(cache_key)
    # Compare against None explicitly: a cached-but-falsy payload (e.g. an
    # empty dict) is still a valid hit and must not trigger a re-fetch.
    if cached is not None:
        return cached

    archive_data = Archive.get(id=id).to_dict()
    cache.set(cache_key, archive_data)
    return archive_data
def test_filter_on_archive_agent(self):
    """Filtering by ``agents`` should match components whose archive is
    linked to the given agent."""
    agent = self.create_agent()

    component_tag = Tag.objects.create()
    component_tag_version = TagVersion.objects.create(
        tag=component_tag,
        type=self.component_type,
        elastic_index="component",
    )

    # Build an archive and place the component beneath it in the structure.
    structure_type = StructureType.objects.create()
    structure_template = Structure.objects.create(type=structure_type, is_template=True)
    archive_tag = Tag.objects.create()
    archive_tag_version = TagVersion.objects.create(
        tag=archive_tag,
        type=self.archive_type,
        elastic_index="archive",
    )
    structure, archive_tag_structure = structure_template.create_template_instance(
        archive_tag)
    # refresh='true' makes the documents searchable immediately in the test.
    Archive.from_obj(archive_tag_version).save(refresh='true')
    TagStructure.objects.create(tag=component_tag, parent=archive_tag_structure, structure=structure)

    # Link the agent to the ARCHIVE (not the component); the filter should
    # still surface the component through its archive ancestor.
    AgentTagLink.objects.create(
        agent=agent,
        tag=archive_tag_version,
        type=AgentTagLinkRelationType.objects.create(),
    )

    # Index the component after the TagStructure exists so the document
    # carries its archive reference.
    Component.from_obj(component_tag_version).save(refresh='true')

    res = self.client.get(self.url, {'agents': str(agent.pk)})
    self.assertEqual(res.status_code, status.HTTP_200_OK)
    self.assertEqual(len(res.data['hits']), 1)
    self.assertEqual(res.data['hits'][0]['_id'], str(component_tag_version.pk))
def create(self, validated_data):
    """Create a new archive: Tag + TagVersion, structure instances, creator
    link, identifiers/notes, and the Elasticsearch document.

    Everything runs in one transaction; returns the created ``Tag``.
    """
    with transaction.atomic():
        agent = validated_data.pop('archive_creator')
        structures = validated_data.pop('structures')
        notes_data = validated_data.pop('notes', [])
        identifiers_data = validated_data.pop('identifiers', [])
        use_uuid_as_refcode = validated_data.pop('use_uuid_as_refcode', False)

        # Generate the pk up-front so it can double as the reference code.
        tag_version_id = uuid.uuid4()
        if use_uuid_as_refcode:
            validated_data['reference_code'] = str(tag_version_id)

        tag = Tag.objects.create()
        tag_version = TagVersion.objects.create(
            pk=tag_version_id,
            tag=tag,
            elastic_index='archive',
            **validated_data,
        )
        tag.current_version = tag_version
        tag.save()

        # Instantiate each chosen structure template and index its units.
        for structure in structures:
            structure_instance, _ = structure.create_template_instance(tag)
            for instance_unit in structure_instance.units.all():
                StructureUnitDocument.from_obj(instance_unit).save()

        # Make the new archive visible to the requesting user's organization.
        org = self.context[
            'request'].user.user_profile.current_organization
        org.add_object(tag)
        org.add_object(tag_version)

        tag_link_type, _ = AgentTagLinkRelationType.objects.get_or_create(
            creator=True,
            defaults={'name': 'creator'})
        AgentTagLink.objects.create(agent=agent, tag=tag_version, type=tag_link_type)

        # Bug fix: the helpers take the tag version, not the serializer —
        # previously `self` was passed, attaching nothing to the new archive.
        self.create_identifiers(tag_version, identifiers_data)
        self.create_notes(tag_version, notes_data)

        doc = Archive.from_obj(tag_version)
        doc.save()

    return tag
def parse_archive(self, el, task=None, ip=None):
    # Parse a Klara archive element: creates the Tag/TagVersion, a Klara
    # identifier, an optional history note, a published template structure
    # (plus its instance), and the agent link; returns the archive document
    # dict and the created database objects.
    name = el.xpath('ObjectParts/General/Archive.Name')[0].text
    orig_name = el.xpath('ObjectParts/General/ArchiveOrig.Name')[0].text
    create_date = self.parse_archive_create_date(el)
    revise_date = self.parse_archive_revise_date(el)
    tag_type = self.ARCHIVE_TYPE

    tag = Tag.objects.create(information_package=ip, task=task)
    archive_id = uuid.uuid4()
    tag_version = TagVersion.objects.create(
        pk=archive_id,
        tag=tag,
        reference_code=el.xpath('ObjectParts/General/Archive.RefCode')[0].text,
        type=tag_type,
        name=name,
        elastic_index='archive',
        create_date=create_date,
        revise_date=revise_date,
        import_date=timezone.now(),
        start_date=self.parse_archive_start_date(el),
        end_date=self.parse_archive_end_date(el),
    )

    # Record the source system's id as "<institution code>/<Klara archive id>".
    inst_code = el.xpath("ObjectParts/General/ArchiveInst.InstCode")[0].text
    archive_klara_id = el.xpath("ObjectParts/General/Archive.ArchiveID")[0].text
    NodeIdentifier.objects.create(
        identifier="{}/{}".format(inst_code, archive_klara_id),
        tag_version=tag_version,
        type=self.node_identifier_type_klara,
    )

    # History text is HTML-escaped in the export; only store non-empty notes.
    history_note_text = el.xpath("ObjectParts/History/Archive.History")[0].text
    if history_note_text:
        NodeNote.objects.create(
            text=html.unescape(history_note_text),
            tag_version=tag_version,
            type=self.node_note_type_historik,
            create_date=timezone.now(),  # TODO: use something else to get the date?
            revise_date=timezone.now(),  # TODO: use something else to get the date?
        )

    rule_convention_type, _ = RuleConventionType.objects.get_or_create(
        name=el.xpath("ObjectParts/General/ArchiveType.Name")[0].text,
    )
    # A published template structure for the archive's finding aid.
    structure = Structure.objects.create(
        name="Arkivförteckning för {}".format(orig_name),
        type=self.STRUCTURE_TYPE,
        is_template=True,
        published=True,
        published_date=timezone.now(),
        version='1.0',
        create_date=create_date,
        rule_convention_type=rule_convention_type,
        task=task,
    )
    structure_instance, tag_structure = structure.create_template_instance(tag)
    for instance_unit in structure_instance.units.all():
        StructureUnitDocument.from_obj(instance_unit).save()

    # The creator agent was imported earlier; its id is looked up from the
    # cache via the same hash used when the agent was parsed.
    agent_hash = self.build_agent_hash(
        el.xpath('ObjectParts/General/Archive.ArchiveOrigID')[0].text,
        orig_name,
    )
    agent_id = cache.get(agent_hash)
    AgentTagLink.objects.create(
        agent_id=agent_id,
        tag=tag_version,
        type=self.tag_link_relation_type,
    )

    doc = Archive.from_obj(tag_version).to_dict(include_meta=True)
    return doc, tag, tag_version, tag_structure, inst_code