def test_deleted_relationships(db, dummy_event):
    """Soft-deleted objects are hidden from collection relationships.

    Deleted sessions/contributions/subcontributions must not show up in
    the parent's collection, but navigating from a deleted child back to
    its parent still works.
    """
    event = dummy_event
    assert not event.contributions
    assert not event.sessions
    sess = Session(event=event, title='s')
    sess_del = Session(event=event, title='sd', is_deleted=True)
    contrib = Contribution(event=event, title='c', session=sess_del, duration=timedelta(minutes=30))
    contrib_del = Contribution(event=event, title='cd', session=sess_del, duration=timedelta(minutes=30),
                               is_deleted=True)
    subcontrib = SubContribution(contribution=contrib, title='sc', duration=timedelta(minutes=10))
    subcontrib_del = SubContribution(contribution=contrib, title='scd', duration=timedelta(minutes=10),
                                     is_deleted=True)
    db.session.flush()
    db.session.expire_all()
    # reload everything from the db so relationship filters actually apply
    event = Event.get(event.id)
    sess, sess_del = Session.get(sess.id), Session.get(sess_del.id)
    contrib, contrib_del = Contribution.get(contrib.id), Contribution.get(contrib_del.id)
    subcontrib, subcontrib_del = SubContribution.get(subcontrib.id), SubContribution.get(subcontrib_del.id)
    # deleted items should not be in the lists
    assert event.sessions == [sess]
    assert event.contributions == [contrib]
    assert sess_del.contributions == [contrib]
    assert contrib.subcontributions == [subcontrib]
    # the other direction should work fine even in case of deletion
    assert sess.event == event
    assert sess_del.event == event
    assert contrib.event == event
    assert contrib_del.event == event
    assert subcontrib.contribution == contrib
    assert subcontrib_del.contribution == contrib
def test_event_export(db, dummy_event, monkeypatch):
    """Exporting an event yields a tarball whose sole member is ``data.yaml``
    matching the reference dump (soft-deleted objects are part of the export).
    """
    monkeypatch.setattr('indico.modules.events.export.now_utc',
                        lambda: as_utc(datetime(2017, 8, 24, 9, 0, 0)))
    f = BytesIO()
    # pin all event timestamps so the dump is reproducible
    dummy_event.created_dt = as_utc(datetime(2017, 8, 24, 0, 0, 0))
    dummy_event.start_dt = as_utc(datetime(2017, 8, 24, 10, 0, 0))
    dummy_event.end_dt = as_utc(datetime(2017, 8, 24, 12, 0, 0))
    s = Session(event=dummy_event, title='sd', is_deleted=True)
    Contribution(event=dummy_event, title='c1', duration=timedelta(minutes=30))
    Contribution(event=dummy_event, title='c2', session=s, duration=timedelta(minutes=30), is_deleted=True)
    db.session.flush()
    export_event(dummy_event, f)
    f.seek(0)
    # open the reference file in binary mode: `TarFile.extractfile(...).read()`
    # returns bytes, so the comparison value must be bytes too (text mode would
    # break the comparison on Python 3 and is newline-fragile on Windows)
    with open(os.path.join(os.path.dirname(__file__), 'export_test_1.yaml'), 'rb') as ref_file:
        data_yaml_content = ref_file.read()
    # check composition of tarfile and data.yaml content
    with tarfile.open(fileobj=f) as tarf:
        assert tarf.getnames() == ['data.yaml']
        assert tarf.extractfile('data.yaml').read() == data_yaml_content
def test_modify_relationship_with_deleted(db, dummy_event):
    """Replacing the relationship list hard-deletes removed visible items
    while soft-deleted items (never part of the list) stay untouched.
    """
    contrib = Contribution(event=dummy_event, title='c', duration=timedelta(minutes=30))
    contrib_del = Contribution(event=dummy_event, title='cd', duration=timedelta(minutes=30), is_deleted=True)
    db.session.flush()
    assert dummy_event.contributions == [contrib]
    replacement = Contribution(title='c2', duration=timedelta(minutes=30))
    # this should hard-delete `contrib` but not touch `contrib_del`,
    # since the latter is not in the relationship
    dummy_event.contributions = [replacement]
    db.session.flush()
    assert set(Contribution.find_all()) == {contrib_del, replacement}
def test_event_attachment_export(db, dummy_event, dummy_attachment):
    """An event export includes attachment metadata and the attached file
    itself, alongside the usual event/session/contribution objects.
    """
    s = Session(event=dummy_event, title='sd', is_deleted=True)
    Contribution(event=dummy_event, title='c1', duration=timedelta(minutes=30))
    Contribution(event=dummy_event, title='c2', session=s, duration=timedelta(minutes=30), is_deleted=True)
    dummy_attachment.folder.event = dummy_event
    dummy_attachment.folder.linked_event = dummy_event
    dummy_attachment.folder.link_type = LinkType.event
    dummy_attachment.file.save(BytesIO(b'hello world'))
    db.session.flush()
    f = BytesIO()
    export_event(dummy_event, f)
    f.seek(0)
    with tarfile.open(fileobj=f) as tarf:
        data_file = tarf.extractfile('data.yaml')
        # trusted data produced by export_event above, so unsafe_load is fine
        data = yaml.unsafe_load(data_file)
        objs = data['objects']
        event_uid = objs[0][1]['id'][1]
        # check that the exported metadata contains all the right objects
        assert [obj[0] for obj in objs] == [
            u'events.events', u'events.sessions', u'events.contributions',
            u'events.contributions', u'attachments.folders', u'attachments.attachments',
            u'attachments.files'
        ]
        # check that the attached file's metadata is included
        assert objs[5][1]['title'] == 'dummy_attachment'
        assert objs[5][1]['folder_id'] is not None
        assert objs[4][1]['title'] == 'dummy_folder'
        assert objs[4][1]['linked_event_id'][1] == event_uid
        file_ = objs[6][1]['__file__'][1]
        assert file_['filename'] == 'dummy_file.txt'
        assert file_['content_type'] == 'text/plain'
        assert file_['size'] == 11
        assert file_['md5'] == '5eb63bbbe01eeed093cb22bb8f5acdc3'
        # check that the file itself was included (and verify content);
        # use a bytes literal since `extractfile(...).read()` returns bytes
        # (matches the b'hello world' written above; identical on Python 2,
        # required for correctness on Python 3)
        assert tarf.getnames() == [
            '00000000-0000-4000-8000-000000000013', 'data.yaml'
        ]
        assert tarf.extractfile('00000000-0000-4000-8000-000000000013').read() == b'hello world'
def _create_new_contribution(self, event, old_contrib, preserve_session=True, excluded_attrs=None):
    """Clone ``old_contrib`` into a fresh ``Contribution`` attached to ``event``.

    Copies the simple column attributes (minus ``abstract_id`` and any
    ``excluded_attrs``), clones subcontributions, ACLs, references, person
    links and field values, remaps the contribution type, and — when
    ``preserve_session`` is set — the session/session block via the
    cloner's mapping dicts.  Returns the new contribution.
    """
    cloned_attrs = (get_simple_column_attrs(Contribution) | {'own_room', 'own_venue'}) - {'abstract_id'}
    if excluded_attrs is not None:
        cloned_attrs -= excluded_attrs
    contrib = Contribution()
    contrib.populate_from_attrs(old_contrib, cloned_attrs)
    contrib.subcontributions = list(self._clone_subcontribs(old_contrib.subcontributions))
    contrib.acl_entries = clone_principals(ContributionPrincipal, old_contrib.acl_entries, self._event_role_map)
    contrib.references = list(self._clone_references(ContributionReference, old_contrib.references))
    contrib.person_links = list(self._clone_person_links(ContributionPersonLink, old_contrib.person_links))
    contrib.field_values = list(self._clone_fields(old_contrib.field_values))
    if old_contrib.type is not None:
        # remap to the equivalent type of the target event
        contrib.type = self._contrib_type_map[old_contrib.type]
    if preserve_session:
        if old_contrib.session is not None:
            contrib.session = self._session_map[old_contrib.session]
        if old_contrib.session_block is not None:
            contrib.session_block = self._session_block_map[old_contrib.session_block]
    event.contributions.append(contrib)
    return contrib
def _process(self):
    """Handle the "add contribution to timetable" form.

    On a valid submit, creates the contribution inside the current
    session block (extending the parent if needed) and returns the new
    timetable entry plus any time-change notifications as JSON;
    otherwise renders the contribution form template.
    """
    defaults = self._get_form_defaults(location_parent=self.session_block)
    form = ContributionEntryForm(obj=defaults, to_schedule=True, **self._get_form_params())
    if form.validate_on_submit():
        contrib = Contribution()
        # track timetable time changes (auto-extending parents) and warn
        # about linked persons who are not registered for the event; the
        # lambda defers reading person_links until after creation
        with track_time_changes(auto_extend=True, user=session.user) as changes:
            with flash_if_unregistered(self.event_new, lambda: contrib.person_links):
                contrib = create_contribution(self.event_new, form.data, session_block=self.session_block,
                                              extend_parent=True)
        entry = contrib.timetable_entry
        notifications = get_time_changes_notifications(changes, tzinfo=self.event_new.tzinfo, entry=entry)
        return jsonify_data(entries=[serialize_entry_update(entry, session_=self.session)],
                            notifications=notifications)
    # invalid form: do not commit the (empty) transaction
    self.commit = False
    return jsonify_template('events/contributions/forms/contribution.html', form=form,
                            fields=form._display_fields)
def _clone_contribs(self, new_event):
    """Clone every contribution of the old event into ``new_event``.

    Eager-loads all related objects up front to avoid per-row queries,
    copies simple columns plus nested objects (subcontributions, ACLs,
    references, person links, field values), remaps type/session/session
    block through the cloner's maps, and records each old -> new pair in
    ``self._contrib_map``.
    """
    cloned_attrs = (get_simple_column_attrs(Contribution) | {'own_room', 'own_venue'}) - {'abstract_id'}
    contribs = (Contribution.query.with_parent(self.old_event)
                .options(undefer('_last_friendly_subcontribution_id'),
                         joinedload('own_venue'),
                         joinedload('own_room').lazyload('*'),
                         joinedload('session'),
                         joinedload('session_block').lazyload('session'),
                         joinedload('type'),
                         subqueryload('acl_entries'),
                         subqueryload('subcontributions').joinedload('references'),
                         subqueryload('references'),
                         subqueryload('person_links'),
                         subqueryload('field_values')))
    for source in contribs:
        clone = Contribution()
        clone.populate_from_attrs(source, cloned_attrs)
        clone.subcontributions = list(self._clone_subcontribs(source.subcontributions))
        clone.acl_entries = clone_principals(ContributionPrincipal, source.acl_entries)
        clone.references = list(self._clone_references(ContributionReference, source.references))
        clone.person_links = list(self._clone_person_links(ContributionPersonLink, source.person_links))
        clone.field_values = list(self._clone_fields(source.field_values))
        if source.type is not None:
            clone.type = self._contrib_type_map[source.type]
        if source.session is not None:
            clone.session = self._session_map[source.session]
        if source.session_block is not None:
            clone.session_block = self._session_block_map[source.session_block]
        new_event.contributions.append(clone)
        self._contrib_map[source] = clone