Example #1
def conference_room_emails():
    start_date, end_date = _get_start_end_date()
    date_filter = db.and_(
        db.cast(Reservation.start_dt, db.Date) >= start_date,
        db.cast(Reservation.start_dt, db.Date) <= end_date)
    start_dt = as_utc(datetime.combine(start_date, time()))
    end_dt = as_utc(datetime.combine(end_date, time()))
    events_by_room = {}
    for room in CERNCronjobsPlugin.settings.get('rooms'):
        query = (Event.query.filter(~Event.is_deleted,
                                    Event.happens_between(start_dt, end_dt),
                                    Event.own_room_id == room.id).order_by(
                                        Event.start_dt))
        events_by_room[room] = _group_by_date(query)

    res_events_by_room = {}
    for room in CERNCronjobsPlugin.settings.get('reservation_rooms'):
        res_events_by_room[room] = _group_by_date(
            _get_reservations_query(date_filter, room_id=room.id))

    category_ids = [
        int(category['id'])
        for category in CERNCronjobsPlugin.settings.get('categories')
    ]
    committees = _get_category_events_query(start_dt, end_dt, category_ids)

    template = get_plugin_template_module(
        'conference_room_email.html',
        events_by_room=events_by_room,
        res_events_by_room=res_events_by_room,
        committees_by_date=_group_by_date(committees))
    recipients = CERNCronjobsPlugin.settings.get('conf_room_recipients')
    if recipients:
        _send_email(recipients, template)
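
Every example on this page funnels naive datetime values through as_utc before comparing or storing them. A minimal sketch of such a helper, assuming it merely attaches the UTC timezone to a naive datetime (the real indico.util.date_time.as_utc may differ), is shown below:

from datetime import datetime

import pytz


def as_utc_sketch(dt):
    """Hypothetical stand-in for as_utc: make a datetime UTC-aware."""
    if dt.tzinfo is not None:
        # Already aware: convert to UTC instead of localizing.
        return dt.astimezone(pytz.utc)
    # Naive values are assumed to already represent UTC wall-clock time.
    return pytz.utc.localize(dt)


# Example: as_utc_sketch(datetime(2017, 8, 24, 10, 0)) -> 2017-08-24 10:00:00+00:00
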
Example #2
def test_import_contributions_changes(db, dummy_event, dummy_user):
    original_start_dt = as_utc(datetime(2017, 11, 27, 8, 0, 0))
    original_end_dt = as_utc(datetime(2017, 11, 27, 12, 0, 0))
    dummy_event.start_dt = original_start_dt
    dummy_event.end_dt = original_end_dt

    # Change of end time
    csv = b'\n'.join([b'2017-11-27T08:00,10,First contribution,,,,',
                      b'2017-11-27T08:10:00,10,Second contribution,John,Doe,ACME Inc.,[email protected]',
                      b'2017-11-27T11:30,60,Third contribution,Guinea Albert,Pig,,[email protected]'])

    contributions, changes = import_contributions_from_csv(dummy_event, BytesIO(csv))
    new_end_dt = as_utc(datetime(2017, 11, 27, 12, 30, 0))
    assert dummy_event.end_dt == new_end_dt
    assert changes == {
        'duration': [(timedelta(hours=4), timedelta(hours=4, minutes=30))],
        'end_dt': [(original_end_dt, new_end_dt)]
    }

    # reset date/time
    dummy_event.start_dt = original_start_dt
    dummy_event.end_dt = original_end_dt

    # Change of start/end date
    csv = b'\n'.join([b'2017-11-26T08:00,10,First contribution,,,,',
                      b'2017-11-27T08:10:00,10,Second contribution,John,Doe,ACME Inc.,[email protected]',
                      b'2017-11-28T11:30,60,Third contribution,Guinea Albert,Pig,,[email protected]'])

    contributions, changes = import_contributions_from_csv(dummy_event, BytesIO(csv))
    new_start_dt = as_utc(datetime(2017, 11, 26, 8, 0, 0))
    new_end_dt = as_utc(datetime(2017, 11, 28, 12, 30, 0))
    assert dummy_event.start_dt == new_start_dt
    assert dummy_event.end_dt == new_end_dt
    assert len(changes) == 3
Example #3
    def _process(self):
        form = RequestListFilterForm(request.args, csrf_enabled=False)
        results = None
        if request.args and form.validate():
            reverse = form.direction.data == 'desc'
            talks = form.granularity.data == 'talks'
            from_dt = as_utc(get_day_start(
                form.start_date.data)) if form.start_date.data else None
            to_dt = as_utc(get_day_end(
                form.end_date.data)) if form.end_date.data else None
            states = {form.state.data} if form.state.data is not None else None
            results = find_requests(talks=talks,
                                    from_dt=from_dt,
                                    to_dt=to_dt,
                                    states=states)
            if not talks:
                results = [(req, req.event, req.event.start_dt)
                           for req in results]
            results = group_list(results,
                                 lambda x: x[2].date(),
                                 itemgetter(2),
                                 sort_reverse=reverse)
            results = OrderedDict(
                sorted(results.viewitems(), key=itemgetter(0),
                       reverse=reverse))

        return WPAudiovisualManagers.render_template('request_list.html',
                                                     form=form,
                                                     results=results)
Example #4
 def _process(self):
     form = RequestListFilterForm(request.args, csrf_enabled=False)
     results = None
     if form.validate_on_submit():
         reverse = form.direction.data == 'desc'
         from_dt = as_utc(get_day_start(
             form.start_date.data)) if form.start_date.data else None
         to_dt = as_utc(get_day_end(
             form.end_date.data)) if form.end_date.data else None
         results = find_requests(from_dt=from_dt, to_dt=to_dt)
         results = [(req, req.event, req.event.start_dt, contribs,
                     session_blocks)
                    for req, contribs, session_blocks in results]
         results = group_list(results,
                              lambda x: x[2].date(),
                              itemgetter(2),
                              sort_reverse=reverse)
         results = OrderedDict(
             sorted(results.viewitems(), key=itemgetter(0),
                    reverse=reverse))
     return WPVCAssistance.render_template(
         'request_list.html',
         form=form,
         results=results,
         action=url_for('.request_list'),
         vc_capable_rooms=get_vc_capable_rooms(),
         within_working_hours=start_time_within_working_hours)
Example #6
def test_event_export(db, dummy_event, monkeypatch):
    monkeypatch.setattr('indico.modules.events.export.now_utc',
                        lambda: as_utc(datetime(2017, 8, 24, 9, 0, 0)))

    f = BytesIO()
    dummy_event.created_dt = as_utc(datetime(2017, 8, 24, 0, 0, 0))
    dummy_event.start_dt = as_utc(datetime(2017, 8, 24, 10, 0, 0))
    dummy_event.end_dt = as_utc(datetime(2017, 8, 24, 12, 0, 0))

    s = Session(event=dummy_event, title='sd', is_deleted=True)
    Contribution(event=dummy_event, title='c1', duration=timedelta(minutes=30))
    Contribution(event=dummy_event,
                 title='c2',
                 session=s,
                 duration=timedelta(minutes=30),
                 is_deleted=True)
    db.session.flush()
    export_event(dummy_event, f)
    f.seek(0)

    with open(os.path.join(os.path.dirname(__file__), 'export_test_1.yaml'),
              'r') as ref_file:
        data_yaml_content = ref_file.read()

    # check composition of tarfile and data.yaml content
    with tarfile.open(fileobj=f) as tarf:
        assert tarf.getnames() == ['data.yaml']
        assert tarf.extractfile('data.yaml').read() == data_yaml_content
Example #7
 def __call__(self, form, field):
     if field.data is None:
         return
     linked_field = form[self.linked_field]
     if linked_field.data is None:
         return
     linked_field_dt = as_utc(linked_field.data)
     field_dt = as_utc(field.data)
     if self.not_before and field_dt < linked_field_dt:
         raise ValidationError(_("{} can't be before than {}").format(field.label, linked_field.label))
     if self.not_after and field_dt > linked_field_dt:
         raise ValidationError(_("{} can't be after than {}").format(field.label, linked_field.label))
Example #8
 def _process(self):
     form = RequestListFilterForm(request.args)
     results = None
     if form.validate_on_submit():
         reverse = form.direction.data == 'desc'
         from_dt = as_utc(get_day_start(form.start_date.data)) if form.start_date.data else None
         to_dt = as_utc(get_day_end(form.end_date.data)) if form.end_date.data else None
         results = _find_requests(from_dt=from_dt, to_dt=to_dt)
         results = group_list(results, lambda req: dateutil.parser.parse(req['requested_at']).date(),
                              sort_reverse=reverse)
         results = OrderedDict(sorted(results.viewitems(), reverse=reverse))
     return WPRoomAssistance.render_template('request_list.html', form=form, results=results,
                                             parse_dt=dateutil.parser.parse)
Example #9
def get_room_events(room, start_dt, end_dt, repeat_frequency, repeat_interval):
    occurrences = ReservationOccurrence.create_series(start_dt, end_dt, (repeat_frequency, repeat_interval))
    excluded_categories = rb_settings.get('excluded_categories')
    return (Event.query
            .filter(~Event.is_deleted,
                    Event.own_room == room,
                    db.or_(Event.happens_between(as_utc(occ.start_dt), as_utc(occ.end_dt)) for occ in occurrences),
                    Event.timezone == config.DEFAULT_TIMEZONE,
                    db.and_(Event.category_id != cat['id'] for cat in excluded_categories),
                    Event.acl_entries.any(db.and_(EventPrincipal.type == PrincipalType.user,
                                                  EventPrincipal.user_id == session.user.id,
                                                  EventPrincipal.full_access)))
            .all())
Example #10
def seminar_emails():
    start_date, end_date = _get_start_end_date()
    start_dt = as_utc(datetime.combine(start_date, time()))
    end_dt = as_utc(datetime.combine(end_date, time()))
    seminar_categories = CERNCronjobsPlugin.settings.get('seminar_categories')
    if not seminar_categories:
        return
    category_ids = [int(category['id']) for category in seminar_categories]
    query = _get_category_events_query(start_dt, end_dt, category_ids)
    template = get_plugin_template_module('seminar_emails.html', events_by_date=_group_by_date(query))
    recipients = CERNCronjobsPlugin.settings.get('seminar_recipients')
    if recipients:
        _send_email(recipients, template)
Example #11
def get_room_events(room, start_dt, end_dt, repeat_frequency, repeat_interval):
    occurrences = ReservationOccurrence.create_series(
        start_dt, end_dt, (repeat_frequency, repeat_interval))
    excluded_categories = rb_settings.get('excluded_categories')
    return (Event.query.filter(
        ~Event.is_deleted, Event.own_room == room,
        db.or_(
            Event.happens_between(as_utc(occ.start_dt), as_utc(occ.end_dt))
            for occ in occurrences), Event.timezone == config.DEFAULT_TIMEZONE,
        db.and_(Event.category_id != cat['id'] for cat in excluded_categories),
        Event.acl_entries.any(
            db.and_(EventPrincipal.type == PrincipalType.user,
                    EventPrincipal.user_id == session.user.id,
                    EventPrincipal.full_access))).all())
Example #12
 def __call__(self, form, field):
     if field.data is None:
         return
     linked_field = form[self.linked_field]
     if linked_field.data is None:
         return
     linked_field_dt = as_utc(linked_field.data)
     field_dt = as_utc(field.data)
     if self.not_before and field_dt < linked_field_dt:
         raise ValidationError(_("{} can't be before {}").format(field.label, linked_field.label))
     if self.not_after and field_dt > linked_field_dt:
         raise ValidationError(_("{} can't be after {}").format(field.label, linked_field.label))
     if self.not_equal and field_dt == linked_field_dt:
         raise ValidationError(_("{} can't be equal to {}").format(field.label, linked_field.label))
Example #13
 def _process(self):
     form = VCRoomListFilterForm(request.args, csrf_enabled=False)
     results = None
     if request.args and form.validate():
         reverse = form.direction.data == 'desc'
         from_dt = as_utc(get_day_start(form.start_date.data)) if form.start_date.data else None
         to_dt = as_utc(get_day_end(form.end_date.data)) if form.end_date.data else None
         results = find_event_vc_rooms(from_dt=from_dt, to_dt=to_dt, distinct=True)
         results = group_list((r for r in results if r.event_new),
                              key=lambda r: r.event_new.start_dt.date(),
                              sort_by=lambda r: r.event_new.start_dt,
                              sort_reverse=reverse)
         results = OrderedDict(sorted(results.viewitems(), key=itemgetter(0), reverse=reverse))
     return WPVCService.render_template('vc_room_list.html', form=form, results=results,
                                        action=url_for('.vc_room_list'))
Example #14
 def _process(self):
     form = VCRoomListFilterForm(request.args)
     results = None
     if request.args and form.validate():
         reverse = form.direction.data == 'desc'
         from_dt = as_utc(get_day_start(form.start_date.data)) if form.start_date.data else None
         to_dt = as_utc(get_day_end(form.end_date.data)) if form.end_date.data else None
         results = find_event_vc_rooms(from_dt=from_dt, to_dt=to_dt, distinct=True)
         results = group_list((r for r in results if r.event),
                              key=lambda r: r.event.getStartDate().date(),
                              sort_by=lambda r: r.event.getStartDate(),
                              sort_reverse=reverse)
         results = OrderedDict(sorted(results.viewitems(), key=itemgetter(0), reverse=reverse))
     return WPVCService.render_template('vc_room_list.html', form=form, results=results,
                                        action=url_for('.vc_room_list'))
Example #15
 def get_latest(self, form, field):
     latest = self.latest(form, field) if callable(
         self.latest) else self.latest
     if latest == 'now':
         self.latest_now = True
         return now_utc().replace(second=59, microsecond=999)
     return as_utc(latest) if latest else latest
Example #16
 def get_earliest(self, form, field):
     earliest = self.earliest(form, field) if callable(
         self.earliest) else self.earliest
     if earliest == 'now':
         self.earliest_now = True
         return now_utc().replace(second=0, microsecond=0)
     return as_utc(earliest) if earliest else earliest
Example #17
 def __call__(self, form, field):
     if field.data is None:
         return
     field_dt = as_utc(field.data)
     earliest_dt = self.get_earliest(form, field)
     latest_dt = self.get_latest(form, field)
     if field_dt != field.object_data:
         if earliest_dt and field_dt < earliest_dt:
             if self.earliest_now:
                 msg = _("'{}' can't be in the past ({})").format(
                     field.label, field.timezone)
             else:
                 dt = format_datetime(earliest_dt, timezone=field.timezone)
                 msg = _("'{}' can't be before {} ({})").format(
                     field.label, dt, field.timezone)
             raise ValidationError(msg)
         if latest_dt and field_dt > latest_dt:
             if self.latest_now:
                 msg = _("'{}' can't be in the future ({})").format(
                     field.label, field.timezone)
             else:
                 dt = format_datetime(latest_dt, timezone=field.timezone)
                 msg = _("'{}' can't be after {} ({})").format(
                     field.label, dt, field.timezone)
             raise ValidationError(msg)
Example #21
 def __init__(self, result_id, title, location, start_date, materials, authors, description):
     self.id = result_id
     self.title = title
     self.location = location
     self._start_date = as_utc(start_date) if start_date else None
     self.materials = materials
     self.authors = authors
     self.description = description
Example #23
def get_matching_events(start_dt, end_dt, repeat_frequency, repeat_interval):
    """Get events suitable for booking linking.

    This finds events that overlap with an occurrence of a booking
    with the given dates where the user is a manager.
    """
    occurrences = ReservationOccurrence.create_series(start_dt, end_dt, (repeat_frequency, repeat_interval))
    excluded_categories = rb_settings.get('excluded_categories')
    return (Event.query
            .filter(~Event.is_deleted,
                    ~Event.room_reservation_links.any(ReservationLink.reservation.has(Reservation.is_accepted)),
                    db.or_(Event.happens_between(as_utc(occ.start_dt), as_utc(occ.end_dt)) for occ in occurrences),
                    Event.timezone == config.DEFAULT_TIMEZONE,
                    db.and_(Event.category_id != cat.id for cat in excluded_categories),
                    Event.acl_entries.any(db.and_(EventPrincipal.type == PrincipalType.user,
                                                  EventPrincipal.user_id == session.user.id,
                                                  EventPrincipal.full_access)))
            .all())
Example #24
def test_import_contributions_errors(db, dummy_event):
    original_start_dt = as_utc(datetime(2017, 11, 27, 8, 0, 0))
    original_end_dt = as_utc(datetime(2017, 11, 27, 12, 0, 0))
    dummy_event.start_dt = original_start_dt
    dummy_event.end_dt = original_end_dt

    e = _check_importer_exception(dummy_event, b',,Test,,,,,')
    assert 'malformed' in e.message
    assert 'Row 1' in e.message

    e = _check_importer_exception(dummy_event, b',,,,,,')
    assert 'title' in e.message

    e = _check_importer_exception(dummy_event, b'2010-23-02T00:00:00,,Test,,,,')
    assert 'parse date' in e.message

    e = _check_importer_exception(dummy_event, b'2010-02-23T00:00:00,15min,Test,,,,')
    assert 'parse duration' in e.message
Example #26
def test_import_contributions(dummy_event, dummy_user):
    dummy_event.start_dt = as_utc(datetime(2017, 11, 27, 8, 0, 0))
    dummy_event.end_dt = as_utc(datetime(2017, 11, 27, 12, 0, 0))

    csv = b'\n'.join([
        b'2017-11-27T08:00,10,First contribution,,,,',
        b',,Second contribution,John,Doe,ACME Inc.,[email protected]',
        b'2017-11-27T08:30,15,Third contribution,Guinea Albert,Pig,,[email protected]'
    ])

    contributions, changes = import_contributions_from_csv(
        dummy_event, BytesIO(csv))
    assert len(contributions) == 3

    assert contributions[0].start_dt == dummy_event.start_dt
    assert contributions[0].duration == timedelta(minutes=10)
    assert contributions[0].title == 'First contribution'
    assert len(contributions[0].speakers) == 0

    assert contributions[1].start_dt is None
    assert contributions[1].duration == timedelta(minutes=20)
    assert contributions[1].title == 'Second contribution'
    speakers = contributions[1].speakers
    assert len(speakers) == 1
    assert speakers[0].full_name == 'John Doe'
    assert speakers[0].affiliation == 'ACME Inc.'
    assert speakers[0].email == '*****@*****.**'

    assert contributions[2].start_dt == as_utc(datetime(
        2017, 11, 27, 8, 30, 0))
    assert contributions[2].duration == timedelta(minutes=15)
    assert contributions[2].title == 'Third contribution'
    speakers = contributions[2].speakers
    assert len(speakers) == 1
    # name comes from PersonLink, not user
    assert speakers[0].full_name == 'Guinea Albert Pig'
    assert not speakers[0].affiliation
    assert speakers[0].email == '*****@*****.**'
    assert speakers[0].person.user == dummy_user

    assert not changes
Example #27
 def _move_entry(self, data):
     rv = {}
     if data.get('parent_id'):
         rv['old'] = self.serializer.serialize_timetable_entry(self.entry)
         parent_timetable_entry = self.event.timetable_entries.filter_by(id=data['parent_id']).one()
         move_timetable_entry(self.entry, parent=parent_timetable_entry)
         rv['session'] = rv['slotEntry'] = self.serializer.serialize_session_block_entry(parent_timetable_entry)
     elif data.get('day'):
         rv['old'] = self.serializer.serialize_timetable_entry(self.entry)
         new_date = as_utc(dateutil.parser.parse(data['day']))
         move_timetable_entry(self.entry, day=new_date)
     return rv
Example #28
 def _move_entry(self, data):
     rv = {}
     if data.get('parent_id'):
         rv['old'] = self.serializer.serialize_timetable_entry(self.entry)
         parent_timetable_entry = self.event_new.timetable_entries.filter_by(id=data['parent_id']).one()
         move_timetable_entry(self.entry, parent=parent_timetable_entry)
         rv['session'] = rv['slotEntry'] = self.serializer.serialize_session_block_entry(parent_timetable_entry)
     elif data.get('day'):
         rv['old'] = self.serializer.serialize_timetable_entry(self.entry)
         new_date = as_utc(dateutil.parser.parse(data['day']))
         move_timetable_entry(self.entry, day=new_date)
     return rv
Example #29
def test_event_export(db, dummy_event, monkeypatch):
    monkeypatch.setattr('indico.modules.events.export.now_utc', lambda: as_utc(datetime(2017, 8, 24, 9, 0, 0)))

    f = BytesIO()
    dummy_event.created_dt = as_utc(datetime(2017, 8, 24, 0, 0, 0))
    dummy_event.start_dt = as_utc(datetime(2017, 8, 24, 10, 0, 0))
    dummy_event.end_dt = as_utc(datetime(2017, 8, 24, 12, 0, 0))

    s = Session(event=dummy_event, title='sd', is_deleted=True)
    Contribution(event=dummy_event, title='c1', duration=timedelta(minutes=30))
    Contribution(event=dummy_event, title='c2', session=s, duration=timedelta(minutes=30), is_deleted=True)
    db.session.flush()
    export_event(dummy_event, f)
    f.seek(0)

    with open(os.path.join(os.path.dirname(__file__), 'export_test_1.yaml'), 'r') as ref_file:
        data_yaml_content = ref_file.read()

    # check composition of tarfile and data.yaml content
    with tarfile.open(fileobj=f) as tarf:
        assert tarf.getnames() == ['data.yaml']
        assert tarf.extractfile('data.yaml').read() == data_yaml_content
Example #30
def test_event_import(db, dummy_user):
    with open(os.path.join(os.path.dirname(__file__), 'export_test_2.yaml'),
              'r') as ref_file:
        data_yaml_content = ref_file.read().replace('{version}',
                                                    indico.__version__)

    data_yaml = BytesIO(data_yaml_content.encode('utf-8'))
    tar_buffer = BytesIO()

    # User should be matched by e-mail
    dummy_user.email = '*****@*****.**'
    db.session.flush()

    # create a tar file artificially, using the provided YAML
    with tarfile.open(mode='w', fileobj=tar_buffer) as tarf:
        tar_info = tarfile.TarInfo('data.yaml')
        tar_info.size = len(data_yaml_content)
        tarf.addfile(tar_info, data_yaml)
        tar_info = tarfile.TarInfo('00000000-0000-4000-8000-00000000001c')
        tar_info.size = 11
        tarf.addfile(tar_info, BytesIO(b'hello world'))

    tar_buffer.seek(0)
    e = import_event(tar_buffer, create_users=False)
    # Check that event metadata is fine
    assert e.title == 'dummy#0'
    assert e.creator == dummy_user
    assert e.created_dt == as_utc(datetime(2017, 8, 24, 15, 28, 42, 652626))
    assert e.start_dt == as_utc(datetime(2017, 8, 24, 10, 0, 0))
    assert e.end_dt == as_utc(datetime(2017, 8, 24, 12, 0, 0))
    # Check that attachment metadata is fine
    assert get_attached_items(e)['files'] == []
    folder = get_attached_items(e)['folders'][0]
    assert folder.title == 'dummy_folder'
    attachment = folder.attachments[0]
    assert attachment.title == 'dummy_attachment'
    # Check that the actual file is accessible
    assert attachment.file.open().read() == 'hello world'
Example #31
def test_import_contributions(dummy_event, dummy_user):
    dummy_event.start_dt = as_utc(datetime(2017, 11, 27, 8, 0, 0))
    dummy_event.end_dt = as_utc(datetime(2017, 11, 27, 12, 0, 0))

    csv = b'\n'.join([b'2017-11-27T08:00,10,First contribution,,,,',
                      b',,Second contribution,John,Doe,ACME Inc.,[email protected]',
                      b'2017-11-27T08:30,15,Third contribution,Guinea Albert,Pig,,[email protected]'])

    contributions, changes = import_contributions_from_csv(dummy_event, BytesIO(csv))
    assert len(contributions) == 3

    assert contributions[0].start_dt == dummy_event.start_dt
    assert contributions[0].duration == timedelta(minutes=10)
    assert contributions[0].title == 'First contribution'
    assert len(contributions[0].speakers) == 0

    assert contributions[1].start_dt is None
    assert contributions[1].duration == timedelta(minutes=20)
    assert contributions[1].title == 'Second contribution'
    speakers = contributions[1].speakers
    assert len(speakers) == 1
    assert speakers[0].full_name == 'John Doe'
    assert speakers[0].affiliation == 'ACME Inc.'
    assert speakers[0].email == '*****@*****.**'

    assert contributions[2].start_dt == as_utc(datetime(2017, 11, 27, 8, 30, 0))
    assert contributions[2].duration == timedelta(minutes=15)
    assert contributions[2].title == 'Third contribution'
    speakers = contributions[2].speakers
    assert len(speakers) == 1
    # name comes from PersonLink, not user
    assert speakers[0].full_name == 'Guinea Albert Pig'
    assert not speakers[0].affiliation
    assert speakers[0].email == '*****@*****.**'
    assert speakers[0].person.user == dummy_user

    assert not changes
Example #32
    def migrate_blockings(self):
        state_map = {
            None: BlockedRoom.State.pending,
            False: BlockedRoom.State.rejected,
            True: BlockedRoom.State.accepted
        }

        print cformat('%{white!}migrating blockings')
        for old_blocking_id, old_blocking in self.rb_root['RoomBlocking'][
                'Blockings'].iteritems():
            b = Blocking(id=old_blocking.id,
                         created_by_id=self.merged_avatars.get(
                             old_blocking._createdBy, old_blocking._createdBy),
                         created_dt=as_utc(old_blocking._utcCreatedDT),
                         start_date=old_blocking.startDate,
                         end_date=old_blocking.endDate,
                         reason=convert_to_unicode(old_blocking.message))

            print cformat(u'- %{cyan}{}').format(b.reason)
            for old_blocked_room in old_blocking.blockedRooms:
                br = BlockedRoom(
                    state=state_map[old_blocked_room.active],
                    rejected_by=old_blocked_room.rejectedBy,
                    rejection_reason=convert_to_unicode(
                        old_blocked_room.rejectionReason),
                )
                room = Room.get(get_room_id(old_blocked_room.roomGUID))
                room.blocked_rooms.append(br)
                b.blocked_rooms.append(br)
                print cformat(u'  %{blue!}Room:%{reset} {} ({})').format(
                    room.full_name,
                    BlockedRoom.State(br.state).title)

            for old_principal in old_blocking.allowed:
                principal_id = old_principal._id
                if old_principal._type == 'Avatar':
                    principal_id = int(
                        self.merged_avatars.get(old_principal._id,
                                                old_principal._id))
                    principal_type = 'User'
                else:
                    principal_type = 'Group'
                bp = BlockingPrincipal(
                    _principal=[principal_type, principal_id])
                b._allowed.add(bp)
                print cformat(u'  %{blue!}Allowed:%{reset} {}({})').format(
                    bp.entity_type, bp.entity_id)
            db.session.add(b)
        db.session.commit()
Example #33
def test_event_import(db, dummy_user):
    with open(os.path.join(os.path.dirname(__file__), 'export_test_2.yaml'), 'r') as ref_file:
        data_yaml_content = ref_file.read()

    data_yaml = BytesIO(data_yaml_content.encode('utf-8'))
    tar_buffer = BytesIO()

    # User should be matched by e-mail
    dummy_user.email = '*****@*****.**'
    db.session.flush()

    # create a tar file artificially, using the provided YAML
    with tarfile.open(mode='w', fileobj=tar_buffer) as tarf:
        tar_info = tarfile.TarInfo('data.yaml')
        tar_info.size = len(data_yaml_content)
        tarf.addfile(tar_info, data_yaml)
        tar_info = tarfile.TarInfo('00000000-0000-4000-8000-00000000001c')
        tar_info.size = 11
        tarf.addfile(tar_info, BytesIO(b'hello world'))

    tar_buffer.seek(0)
    e = import_event(tar_buffer, create_users=False)
    # Check that event metadata is fine
    assert e.title == 'dummy#0'
    assert e.creator == dummy_user
    assert e.created_dt == as_utc(datetime(2017, 8, 24, 15, 28, 42, 652626))
    assert e.start_dt == as_utc(datetime(2017, 8, 24, 10, 0, 0))
    assert e.end_dt == as_utc(datetime(2017, 8, 24, 12, 0, 0))
    # Check that attachment metadata is fine
    assert get_attached_items(e)['files'] == []
    folder = get_attached_items(e)['folders'][0]
    assert folder.title == 'dummy_folder'
    attachment = folder.attachments[0]
    assert attachment.title == 'dummy_attachment'
    # Check that the actual file is accessible
    assert attachment.file.open().read() == 'hello world'
Example #34
    def migrate_blockings(self):
        state_map = {
            None: BlockedRoom.State.pending,
            False: BlockedRoom.State.rejected,
            True: BlockedRoom.State.accepted,
        }

        print cformat("%{white!}migrating blockings")
        for old_blocking_id, old_blocking in self.rb_root["RoomBlocking"]["Blockings"].iteritems():
            b = Blocking(
                id=old_blocking.id,
                created_by_id=self.merged_avatars.get(old_blocking._createdBy, old_blocking._createdBy),
                created_dt=as_utc(old_blocking._utcCreatedDT),
                start_date=old_blocking.startDate,
                end_date=old_blocking.endDate,
                reason=convert_to_unicode(old_blocking.message),
            )

            print cformat(u"- %{cyan}{}").format(b.reason)
            for old_blocked_room in old_blocking.blockedRooms:
                br = BlockedRoom(
                    state=state_map[old_blocked_room.active],
                    rejected_by=old_blocked_room.rejectedBy,
                    rejection_reason=convert_to_unicode(old_blocked_room.rejectionReason),
                )
                room = Room.get(get_room_id(old_blocked_room.roomGUID))
                room.blocked_rooms.append(br)
                b.blocked_rooms.append(br)
                print cformat(u"  %{blue!}Room:%{reset} {} ({})").format(
                    room.full_name, BlockedRoom.State(br.state).title
                )

            for old_principal in old_blocking.allowed:
                principal_id = old_principal._id
                if old_principal._type == "Avatar":
                    principal_id = int(self.merged_avatars.get(old_principal._id, old_principal._id))
                    principal_type = "User"
                else:
                    principal_type = "Group"
                bp = BlockingPrincipal(_principal=[principal_type, principal_id])
                b._allowed.add(bp)
                print cformat(u"  %{blue!}Allowed:%{reset} {}({})").format(bp.entity_type, bp.entity_id)
            db.session.add(b)
        db.session.commit()
Example #35
def migrate_blockings(rb_root, avatar_id_map):
    state_map = {
        None: BlockedRoom.State.pending,
        False: BlockedRoom.State.rejected,
        True: BlockedRoom.State.accepted
    }

    print cformat('%{white!}migrating blockings')
    for old_blocking_id, old_blocking in rb_root['RoomBlocking']['Blockings'].iteritems():
        b = Blocking(
            id=old_blocking.id,
            created_by_id=avatar_id_map.get(old_blocking._createdBy, old_blocking._createdBy),
            created_dt=as_utc(old_blocking._utcCreatedDT),
            start_date=old_blocking.startDate,
            end_date=old_blocking.endDate,
            reason=convert_to_unicode(old_blocking.message)
        )

        print cformat(u'- %{cyan}{}').format(b.reason)
        for old_blocked_room in old_blocking.blockedRooms:
            br = BlockedRoom(
                state=state_map[old_blocked_room.active],
                rejected_by=old_blocked_room.rejectedBy,
                rejection_reason=convert_to_unicode(old_blocked_room.rejectionReason),
            )
            room = Room.get(get_room_id(old_blocked_room.roomGUID))
            room.blocked_rooms.append(br)
            b.blocked_rooms.append(br)
            print cformat(u'  %{blue!}Room:%{reset} {} ({})').format(room.full_name,
                                                                     BlockedRoom.State(br.state).title)

        for old_principal in old_blocking.allowed:
            principal_id = old_principal._id
            if old_principal._type == 'Avatar':
                principal_id = avatar_id_map.get(old_principal._id, old_principal._id)
            bp = BlockingPrincipal(
                entity_type=old_principal._type,
                entity_id=principal_id
            )
            b.allowed.append(bp)
            print cformat(u'  %{blue!}Allowed:%{reset} {}({})').format(bp.entity_type, bp.entity_id)
        db.session.add(b)
    db.session.commit()
Example #36
 def __init__(self, *args, **kwargs):
     self.event = kwargs.pop('event')
     super().__init__(*args, **kwargs)
     contrib_choices = [
         (contrib.id, '{} (#{}, {})'.format(
             contrib.title, contrib.friendly_id,
             format_datetime(contrib.start_dt, timezone=self.event.tzinfo)))
         for contrib in sorted(self.event.contributions,
                               key=lambda c: (c.title, c.start_dt or as_utc(
                                   datetime(1970, 1, 1))))
         if contrib.start_dt is not None
     ]
     blocks = (SessionBlock.query.filter(
         SessionBlock.session.has((Session.event == self.event)
                                  & ~Session.is_deleted)).all())
     block_choices = [(block.id, '{} ({})'.format(
         block.full_title,
         format_datetime(block.start_dt, timezone=self.event.tzinfo)))
                      for block in sorted(
                          blocks, key=attrgetter('full_title', 'start_dt'))]
     self.contribution.choices = [('', _("Please select a contribution"))
                                  ] + contrib_choices
     self.block.choices = [('', _("Please select a session block"))
                           ] + block_choices
Example #37
    def _migrate_abstract(self, old_abstract):
        submitter = self.user_from_legacy(old_abstract._submitter._user,
                                          system_user=True)
        submitted_dt = old_abstract._submissionDate
        modified_dt = (old_abstract._modificationDate if
                       (submitted_dt - old_abstract._modificationDate) >
                       timedelta(seconds=10) else None)
        description = getattr(old_abstract, '_fields', {}).get('content', '')
        description = convert_to_unicode(
            getattr(description, 'value',
                    description))  # str or AbstractFieldContent

        type_ = old_abstract._contribTypes[0]
        type_id = None
        try:
            type_id = self.event_ns.legacy_contribution_type_map[
                type_].id if type_ else None
        except KeyError:
            self.print_warning(
                'Abstract {} - invalid contrib type {}, setting to None'.
                format(old_abstract._id,
                       convert_to_unicode(getattr(type_, '_name',
                                                  str(type_)))))

        abstract = Abstract(friendly_id=int(old_abstract._id),
                            title=convert_to_unicode(old_abstract._title),
                            description=description,
                            submitter=submitter,
                            submitted_dt=submitted_dt,
                            submitted_contrib_type_id=type_id,
                            submission_comment=convert_to_unicode(
                                old_abstract._comments),
                            modified_dt=modified_dt)
        self.print_info('%[white!]Abstract %[cyan]{}%[reset]: {}'.format(
            abstract.friendly_id, abstract.title))
        self.event.abstracts.append(abstract)
        self.event_ns.abstract_map[old_abstract] = abstract

        accepted_type_id = None
        accepted_track_id = None

        old_contribution = getattr(old_abstract, '_contribution', None)
        if old_contribution:
            assert old_contribution.__class__.__name__ == 'AcceptedContribution'
            if old_abstract._currentStatus.__class__.__name__ == 'AbstractStatusAccepted':
                old_contrib_type = old_abstract._currentStatus._contribType
                try:
                    accepted_type_id = (
                        self.event_ns.
                        legacy_contribution_type_map[old_contrib_type].id
                        if old_contrib_type else None)
                except KeyError:
                    self.print_warning(
                        '%[yellow!]Contribution {} - invalid contrib type {}, setting to None'
                        .format(old_contribution.id,
                                convert_to_unicode(old_contrib_type._name)))

                old_accepted_track = old_abstract._currentStatus._track
                accepted_track_id = int(
                    old_accepted_track.id) if old_accepted_track else None

        if old_contribution and old_contribution.id is not None:
            self.event_ns.legacy_contribution_abstracts[
                old_contribution] = abstract

        try:
            accepted_track = (
                self.event_ns.track_map_by_id.get(accepted_track_id)
                if accepted_track_id is not None else None)
        except KeyError:
            self.print_error(
                '%[yellow!]Abstract #{} accepted in invalid track #{}'.format(
                    abstract.friendly_id, accepted_track_id))
            accepted_track = None

        # state
        old_state = old_abstract._currentStatus
        old_state_name = old_state.__class__.__name__
        self.event_ns.old_abstract_state_map[abstract] = old_state
        abstract.state = self.STATE_MAP[old_state_name]

        if abstract.state == AbstractState.accepted:
            abstract.accepted_contrib_type_id = accepted_type_id
            abstract.accepted_track = accepted_track

        if abstract.state in self.JUDGED_STATES:
            abstract.judge = self.user_from_legacy(old_state._responsible,
                                                   system_user=True)
            abstract.judgment_dt = as_utc(old_state._date)

        # files
        for old_attachment in getattr(old_abstract, '_attachments',
                                      {}).itervalues():
            storage_backend, storage_path, size, md5 = self._get_local_file_info(
                old_attachment)
            if storage_path is None:
                self.print_error(
                    '%[red!]File not found on disk; skipping it [{}]'.format(
                        convert_to_unicode(old_attachment.fileName)))
                continue
            content_type = mimetypes.guess_type(
                old_attachment.fileName)[0] or 'application/octet-stream'
            filename = secure_filename(
                convert_to_unicode(old_attachment.fileName), 'attachment')
            attachment = AbstractFile(filename=filename,
                                      content_type=content_type,
                                      size=size,
                                      md5=md5,
                                      storage_backend=storage_backend,
                                      storage_file_id=storage_path)
            abstract.files.append(attachment)

        # internal comments
        for old_comment in old_abstract._intComments:
            comment = AbstractComment(
                user=self.user_from_legacy(old_comment._responsible,
                                           system_user=True),
                text=convert_to_unicode(old_comment._content),
                created_dt=old_comment._creationDate,
                modified_dt=old_comment._modificationDate)
            abstract.comments.append(comment)

        # tracks
        reallocated = set(r._track for r in getattr(
            old_abstract, '_trackReallocations', {}).itervalues())
        for old_track in old_abstract._tracks.values():
            abstract.reviewed_for_tracks.add(
                self.event_ns.track_map.get(old_track))
            if old_track not in reallocated:
                abstract.submitted_for_tracks.add(
                    self.event_ns.track_map.get(old_track))

        # reviews/judgments
        self._migrate_abstract_reviews(abstract, old_abstract)
        # persons
        self._migrate_abstract_persons(abstract, old_abstract)
        # email log
        self._migrate_abstract_email_log(abstract, old_abstract)

        # contribution/abstract fields
        abstract.field_values = list(
            self._migrate_abstract_field_values(old_abstract))
        return abstract
Example #38
    def _migrate_abstract_reviews(self, abstract, old_abstract):
        if not hasattr(old_abstract, '_trackJudgementsHistorical'):
            self.print_warning(
                '%[blue!]Abstract {} %[yellow]had no judgment history!%[reset]'
                .format(old_abstract._id))
            return

        history = old_abstract._trackJudgementsHistorical
        if not hasattr(history, 'iteritems'):
            self.print_warning(
                'Abstract {} had corrupt judgment history ({}).'.format(
                    old_abstract._id, history))
            return
        for track_id, old_judgments in history.iteritems():
            seen_judges = set()
            for old_judgment in old_judgments:
                if old_judgment is None:
                    continue
                if old_judgment.__class__.__name__ == 'AbstractUnMarkedAsDuplicated':
                    # we don't have "unmarked as duplicate" anymore
                    continue

                try:
                    track = self.event_ns.track_map_by_id[int(
                        old_judgment._track.id)]
                except KeyError:
                    self.print_warning(
                        '%[blue!]Abstract {} %[yellow]judged in invalid track {}%[reset]'
                        .format(old_abstract._id, int(old_judgment._track.id)))
                    continue

                judge = (self.global_ns.avatar_merged_user.get(
                    old_judgment._responsible.id)
                         if old_judgment._responsible else None)
                if not judge:
                    self.print_warning(
                        '%[blue!]Abstract {} %[yellow]had an empty judge ({})!%[reset]'
                        .format(old_abstract._id, old_judgment))
                    continue
                elif judge in seen_judges:
                    self.print_warning(
                        "%[blue!]Abstract {}: %[yellow]judge '{}' seen more than once ({})!%[reset]"
                        .format(old_abstract._id, judge, old_judgment))
                    continue

                seen_judges.add(judge)

                try:
                    created_dt = as_utc(old_judgment._date)
                except AttributeError:
                    created_dt = self.event.start_dt
                review = AbstractReview(created_dt=created_dt,
                                        proposed_action=self.ACTION_MAP[
                                            old_judgment.__class__.__name__],
                                        comment=convert_to_unicode(
                                            old_judgment._comment))
                if review.proposed_action == AbstractAction.accept:
                    review.proposed_contribution_type = self.event_ns.legacy_contribution_type_map.get(
                        old_judgment._contribType)
                elif review.proposed_action == AbstractAction.change_tracks:
                    review.proposed_tracks = {
                        self.event_ns.track_map[t]
                        for t in old_judgment._proposedTracks
                        if t in self.event_ns.track_map
                    }
                elif review.proposed_action == AbstractAction.mark_as_duplicate:
                    self.event_ns.as_duplicate_reviews[
                        review] = old_judgment._originalAbst

                review.user = judge
                review.track = track

                answered_questions = set()
                for old_answer in getattr(old_judgment, '_answers', []):
                    if old_answer._question in answered_questions:
                        self.print_warning(
                            "%[blue!]Abstract {}: %[yellow]question answered more than once!"
                            .format(abstract.friendly_id))
                        continue
                    try:
                        question = self.question_map[old_answer._question]
                    except KeyError:
                        question = self._migrate_question(old_answer._question,
                                                          is_deleted=True)
                        self.print_warning(
                            "%[blue!]Abstract {}: %[yellow]answer for deleted question"
                            .format(abstract.friendly_id))
                    rating = AbstractReviewRating(
                        question=question,
                        value=self._convert_scale(old_answer))
                    review.ratings.append(rating)
                    answered_questions.add(old_answer._question)

                abstract.reviews.append(review)
Example #39
 def safe_last_login_dt(self):
     """last_login_dt that is safe for sorting (no None values)"""
     return self.last_login_dt or as_utc(datetime(1970, 1, 1))
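
A short usage sketch of why the epoch fallback above matters when sorting: users who have never logged in compare cleanly instead of raising a TypeError on None. The _StubUser class is a placeholder standing in for the real model:

from datetime import datetime
from operator import attrgetter

import pytz


class _StubUser(object):
    """Placeholder with the same safe_last_login_dt contract as the property above."""

    def __init__(self, name, last_login_dt=None):
        self.name = name
        self.last_login_dt = last_login_dt

    @property
    def safe_last_login_dt(self):
        return self.last_login_dt or pytz.utc.localize(datetime(1970, 1, 1))


users = [_StubUser('alice', pytz.utc.localize(datetime(2021, 2, 8))), _StubUser('bob')]
# 'bob' (never logged in) sorts last rather than breaking the comparison with None.
ordered = sorted(users, key=attrgetter('safe_last_login_dt'), reverse=True)
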
Example #40
def test_format_skeleton(skeleton, expected):
    dt = as_utc(datetime(2021, 2, 8)).astimezone(timezone('Europe/Zurich'))
    assert format_skeleton(dt, skeleton, 'en_GB', 'Europe/Zurich') == expected
Example #41
    def migrate_reservations(self):
        print cformat("%{white!}migrating reservations")
        i = 1
        for rid, v in self.rb_root["Reservations"].iteritems():
            room = Room.get(v.room.id)
            if room is None:
                print cformat("  %{red!}skipping resv for dead room {0.room.id}: {0.id} ({0._utcCreatedDT})").format(v)
                continue

            repeat_frequency, repeat_interval = RepeatMapping.convert_legacy_repeatability(v.repeatability)
            booked_for_id = getattr(v, "bookedForId", None)

            r = Reservation(
                id=v.id,
                created_dt=as_utc(v._utcCreatedDT),
                start_dt=utc_to_local(v._utcStartDT),
                end_dt=utc_to_local(v._utcEndDT),
                booked_for_id=self.merged_avatars.get(booked_for_id, booked_for_id) or None,
                booked_for_name=convert_to_unicode(v.bookedForName),
                contact_email=convert_to_unicode(v.contactEmail),
                contact_phone=convert_to_unicode(getattr(v, "contactPhone", None)),
                created_by_id=self.merged_avatars.get(v.createdBy, v.createdBy) or None,
                is_cancelled=v.isCancelled,
                is_accepted=v.isConfirmed,
                is_rejected=v.isRejected,
                booking_reason=convert_to_unicode(v.reason),
                rejection_reason=convert_to_unicode(getattr(v, "rejectionReason", None)),
                repeat_frequency=repeat_frequency,
                repeat_interval=repeat_interval,
                uses_vc=getattr(v, "usesAVC", False),
                needs_vc_assistance=getattr(v, "needsAVCSupport", False),
                needs_assistance=getattr(v, "needsAssistance", False),
            )

            for eq_name in getattr(v, "useVC", []):
                eq = room.location.get_equipment_by_name(eq_name)
                if eq:
                    r.used_equipment.append(eq)

            occurrence_rejection_reasons = {}
            if getattr(v, "resvHistory", None):
                for h in reversed(v.resvHistory._entries):
                    ts = as_utc(parse_dt_string(h._timestamp))

                    if len(h._info) == 2:
                        possible_rejection_date, possible_rejection_reason = h._info
                        m = re.match(
                            r"Booking occurrence of the (\d{1,2} \w{3} \d{4}) rejected", possible_rejection_reason
                        )
                        if m:
                            d = datetime.strptime(m.group(1), "%d %b %Y")
                            occurrence_rejection_reasons[d] = possible_rejection_reason[9:].strip("'")

                    el = ReservationEditLog(
                        timestamp=ts, user_name=h._responsibleUser, info=map(convert_to_unicode, h._info)
                    )
                    r.edit_logs.append(el)

            notifications = getattr(v, "startEndNotification", []) or []
            excluded_days = getattr(v, "_excludedDays", []) or []
            ReservationOccurrence.create_series_for_reservation(r)
            for occ in r.occurrences:
                occ.notification_sent = occ.date in notifications
                occ.is_rejected = r.is_rejected
                occ.is_cancelled = r.is_cancelled or occ.date in excluded_days
                occ.rejection_reason = (
                    convert_to_unicode(occurrence_rejection_reasons[occ.date])
                    if occ.date in occurrence_rejection_reasons
                    else None
                )

            event_id = getattr(v, "_ReservationBase__owner", None)
            if hasattr(event_id, "_Impersistant__obj"):  # Impersistant object
                event_id = event_id._Impersistant__obj
            if event_id is not None:
                event = self.zodb_root["conferences"].get(event_id)
                if event:
                    # For some stupid reason there are bookings in the database which have a completely unrelated parent
                    guids = getattr(event, "_Conference__roomBookingGuids", [])
                    if any(int(x.id) == v.id for x in guids if x.id is not None):
                        r.event_id = int(event_id)
                    else:
                        print cformat("  %{red}event {} does not contain booking {}").format(event_id, v.id)

            print cformat("- [%{cyan}{}%{reset}/%{green!}{}%{reset}]  %{grey!}{}%{reset}  {}").format(
                room.location_name, room.name, r.id, r.created_dt.date()
            )

            room.reservations.append(r)
            db.session.add(room)
            i = (i + 1) % 1000
            if not i:
                db.session.commit()
        db.session.commit()
Example #42
 def getTimestamp(self):
     if isinstance(self._timestamp, (int, float)):
         return as_utc(datetime.fromtimestamp(self._timestamp))
     return self._timestamp
Example #43
    def _migrate_abstracts(self):
        old_by_id = {oa.friendly_id: oa for oa in self.event.old_abstracts}
        abstract_map = {}
        old_abstract_state_map = {}
        as_duplicate_reviews = set()
        for zodb_abstract in self.amgr._abstracts.itervalues():
            old_abstract = old_by_id[int(zodb_abstract._id)]
            submitter = self._user_from_legacy(zodb_abstract._submitter._user, janitor=True)
            submitted_dt = zodb_abstract._submissionDate
            modified_dt = (zodb_abstract._modificationDate
                           if (submitted_dt - zodb_abstract._modificationDate) > timedelta(seconds=10)
                           else None)
            try:
                accepted_track = (self.track_map_by_id[old_abstract.accepted_track_id]
                                  if old_abstract.accepted_track_id is not None
                                  else None)
            except KeyError:
                self.importer.print_error(cformat('%{yellow!}Abstract #{} accepted in invalid track #{}')
                                          .format(old_abstract.friendly_id, old_abstract.accepted_track_id),
                                          event_id=self.event.id)
                accepted_track = None
            abstract = Abstract(id=old_abstract.id,
                                friendly_id=old_abstract.friendly_id,
                                title=convert_to_unicode(zodb_abstract._title),
                                description=old_abstract.description,
                                submitter=submitter,
                                submitted_dt=submitted_dt,
                                submitted_contrib_type_id=old_abstract.type_id,
                                submission_comment=convert_to_unicode(zodb_abstract._comments),
                                modified_dt=modified_dt)
            self.importer.print_info(cformat('%{white!}Abstract:%{reset} {}').format(abstract.title))
            self.event.abstracts.append(abstract)
            abstract_map[zodb_abstract] = abstract

            # files
            for old_attachment in getattr(zodb_abstract, '_attachments', {}).itervalues():
                storage_backend, storage_path, size = self.importer._get_local_file_info(old_attachment)
                if storage_path is None:
                    self.importer.print_error(cformat('%{red!}File not found on disk; skipping it [{}]')
                                              .format(convert_to_unicode(old_attachment.fileName)),
                                              event_id=self.event.id)
                    continue
                content_type = mimetypes.guess_type(old_attachment.fileName)[0] or 'application/octet-stream'
                filename = secure_filename(convert_to_unicode(old_attachment.fileName), 'attachment')
                attachment = AbstractFile(filename=filename, content_type=content_type, size=size,
                                          storage_backend=storage_backend, storage_file_id=storage_path)
                abstract.files.append(attachment)

            # internal comments
            for old_comment in zodb_abstract._intComments:
                comment = AbstractComment(user=self._user_from_legacy(old_comment._responsible),
                                          text=convert_to_unicode(old_comment._content),
                                          created_dt=old_comment._creationDate,
                                          modified_dt=old_comment._modificationDate)
                abstract.comments.append(comment)

            # state
            old_state = zodb_abstract._currentStatus
            old_state_name = old_state.__class__.__name__
            old_abstract_state_map[abstract] = old_state
            abstract.state = self.STATE_MAP[old_state_name]
            if abstract.state == AbstractState.accepted:
                abstract.accepted_contrib_type_id = old_abstract.accepted_type_id
                abstract.accepted_track = accepted_track

            if abstract.state in self.JUDGED_STATES:
                abstract.judge = self._user_from_legacy(old_state._responsible, janitor=True)
                abstract.judgment_dt = as_utc(old_state._date)

            # tracks
            reallocated = set(r._track for r in getattr(zodb_abstract, '_trackReallocations', {}).itervalues())
            for old_track in zodb_abstract._tracks.values():
                abstract.reviewed_for_tracks.add(self.track_map[old_track])
                if old_track not in reallocated:
                    abstract.submitted_for_tracks.add(self.track_map[old_track])

            # judgments (reviews)
            self._migrate_abstract_reviews(abstract, zodb_abstract, old_abstract, as_duplicate_reviews)
            # persons
            self._migrate_abstract_persons(abstract, zodb_abstract)
            # email log
            self._migrate_abstract_email_log(abstract, zodb_abstract)

        # merges/duplicates
        for abstract in self.event.abstracts:
            old_state = old_abstract_state_map[abstract]
            if abstract.state == AbstractState.merged:
                abstract.merged_into = abstract_map[old_state._target]
            elif abstract.state == AbstractState.duplicate:
                abstract.duplicate_of = abstract_map[old_state._original]

        # mark-as-duplicate judgments
        for review, old_abstract in as_duplicate_reviews:
            try:
                review.proposed_related_abstract = abstract_map[old_abstract]
            except KeyError:
                self.importer.print_error(cformat('%{yellow!}Abstract #{} marked as duplicate of invalid abstract #{}')
                                          .format(review.abstract.friendly_id, old_abstract._id),
                                          event_id=self.event.id)
                # delete the review; it would violate our CHECKs
                review.abstract = None
                # not needed but avoids some warnings about the object not in the session
                review.track = None
                review.user = None
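The attachment block in the abstract migration pairs MIME-type guessing with a sanitised filename and conservative fallbacks. The same pattern in isolation (a sketch using Werkzeug's secure_filename as a stand-in for Indico's helper, which takes a fallback argument instead):

import mimetypes
from werkzeug.utils import secure_filename

def describe_upload(file_name, fallback='attachment'):
    """Guess a content type and sanitise the filename, falling back to safe defaults."""
    content_type = mimetypes.guess_type(file_name)[0] or 'application/octet-stream'
    filename = secure_filename(file_name) or fallback
    return filename, content_type

assert describe_upload('final paper.PDF') == ('final_paper.PDF', 'application/pdf')
assert describe_upload('????') == ('attachment', 'application/octet-stream')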
Exemplo n.º 44
0
def ensure_tzinfo(dt):
    return as_utc(dt) if dt.tzinfo is None else dt
Exemplo n.º 45
0
    def _migrate_abstract_reviews(self, abstract, zodb_abstract, old_abstract, as_duplicate_reviews):
        old_judgments = {(j.track_id, j.judge): j for j in old_abstract.judgments}
        for old_track_id, zodb_judgments in getattr(zodb_abstract, '_trackJudgementsHistorical', {}).iteritems():
            seen_judges = set()
            for zodb_judgment in zodb_judgments:
                if zodb_judgment is None:
                    continue
                if zodb_judgment.__class__.__name__ == 'AbstractUnMarkedAsDuplicated':
                    # we don't have "unmarked as duplicate" anymore
                    continue
                try:
                    track = self.track_map_by_id[int(zodb_judgment._track.id)]
                except KeyError:
                    self.importer.print_warning(
                        cformat('%{blue!}Abstract {} %{yellow}judged in invalid track {}%{reset}').format(
                            zodb_abstract._id, int(zodb_judgment._track.id)), event_id=self.event.id)
                    continue
                judge = self._user_from_legacy(zodb_judgment._responsible)
                if not judge:
                    # self.importer.print_warning(
                    #     cformat('%{blue!}Abstract {} {yellow}had an empty judge ({})!%{reset}').format(
                    #         zodb_abstract._id, zodb_judgment), event_id=self.event.id)
                    continue
                elif judge in seen_judges:
                    # self.importer.print_warning(
                    #     cformat("%{blue!}Abstract {}: {yellow}judge '{}' seen more than once ({})!%{reset}")
                    #         .format(zodb_abstract._id, judge, zodb_judgment), event_id=self.event.id)
                    continue

                seen_judges.add(judge)
                try:
                    created_dt = as_utc(zodb_judgment._date)
                except AttributeError:
                    created_dt = self.event.start_dt
                review = AbstractReview(created_dt=created_dt,
                                        proposed_action=self.ACTION_MAP[zodb_judgment.__class__.__name__],
                                        comment=convert_to_unicode(zodb_judgment._comment))
                if review.proposed_action == AbstractAction.accept:
                    try:
                        old_judgment = old_judgments[int(old_track_id), judge]
                    except KeyError:
                        self.importer.print_error(cformat('%{yellow!}Abstract #{} has no new judgment for {} / {}')
                                                  .format(abstract.friendly_id, int(old_track_id), judge),
                                                  event_id=self.event.id)
                        continue
                    review.proposed_contribution_type = old_judgment.accepted_type
                    review.proposed_track = self.track_map_by_id[old_judgment.track_id]
                elif review.proposed_action == AbstractAction.change_tracks:
                    review.proposed_tracks = {self.track_map[t] for t in zodb_judgment._proposedTracks}
                elif review.proposed_action == AbstractAction.mark_as_duplicate:
                    as_duplicate_reviews.add((review, zodb_judgment._originalAbst))

                review.user = judge
                review.track = track

                answered_questions = set()
                for old_answer in getattr(zodb_judgment, '_answers', []):
                    if old_answer._question in answered_questions:
                        self.importer.print_warning(
                            cformat("%{blue!}Abstract {}: {yellow}question answered more than once!").format(
                                abstract.friendly_id), event_id=self.event.id)
                        continue
                    try:
                        question = self.question_map[old_answer._question]
                    except KeyError:
                        question = self._migrate_question(old_answer._question, is_deleted=True)
                        self.importer.print_warning(
                            cformat("%{blue!}Abstract {}: {yellow}answer for deleted question").format(
                                abstract.friendly_id), event_id=self.event.id)
                    rating = AbstractReviewRating(question=question, value=self._convert_scale(old_answer))
                    review.ratings.append(rating)
                    answered_questions.add(old_answer._question)

                abstract.reviews.append(review)
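The judgment-history loop above skips repeated judges with a seen set, so at most one review per judge and track gets created. The deduplication idiom on its own, with invented sample data:

def first_entry_per_key(entries, key):
    """Yield each entry whose key has not been seen before; later duplicates are skipped."""
    seen = set()
    for entry in entries:
        k = key(entry)
        if k is None or k in seen:
            continue
        seen.add(k)
        yield entry

history = [('alice', 'accept'), ('bob', 'reject'), ('alice', 'withdraw')]  # invented sample
assert list(first_entry_per_key(history, key=lambda e: e[0])) == [('alice', 'accept'), ('bob', 'reject')]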
Exemplo n.º 46
0
    def migrate_reservations(self):
        print cformat('%{white!}migrating reservations')
        i = 1
        for rid, v in self.rb_root['Reservations'].iteritems():
            room = Room.get(v.room.id)
            if room is None:
                print cformat(
                    '  %{red!}skipping resv for dead room {0.room.id}: {0.id} ({0._utcCreatedDT})'
                ).format(v)
                continue

            repeat_frequency, repeat_interval = RepeatMapping.convert_legacy_repeatability(
                v.repeatability)
            booked_for_id = getattr(v, 'bookedForId', None)

            r = Reservation(
                id=v.id,
                created_dt=as_utc(v._utcCreatedDT),
                start_dt=utc_to_local(v._utcStartDT),
                end_dt=utc_to_local(v._utcEndDT),
                booked_for_id=self.merged_avatars.get(booked_for_id,
                                                      booked_for_id) or None,
                booked_for_name=convert_to_unicode(v.bookedForName),
                contact_email=convert_to_unicode(v.contactEmail),
                contact_phone=convert_to_unicode(
                    getattr(v, 'contactPhone', None)),
                created_by_id=self.merged_avatars.get(v.createdBy, v.createdBy)
                or None,
                is_cancelled=v.isCancelled,
                is_accepted=v.isConfirmed,
                is_rejected=v.isRejected,
                booking_reason=convert_to_unicode(v.reason),
                rejection_reason=convert_to_unicode(
                    getattr(v, 'rejectionReason', None)),
                repeat_frequency=repeat_frequency,
                repeat_interval=repeat_interval,
                uses_vc=getattr(v, 'usesAVC', False),
                needs_vc_assistance=getattr(v, 'needsAVCSupport', False),
                needs_assistance=getattr(v, 'needsAssistance', False))

            for eq_name in getattr(v, 'useVC', []):
                eq = room.location.get_equipment_by_name(eq_name)
                if eq:
                    r.used_equipment.append(eq)

            occurrence_rejection_reasons = {}
            if getattr(v, 'resvHistory', None):
                for h in reversed(v.resvHistory._entries):
                    ts = as_utc(parse_dt_string(h._timestamp))

                    if len(h._info) == 2:
                        possible_rejection_date, possible_rejection_reason = h._info
                        m = re.match(
                            r'Booking occurrence of the (\d{1,2} \w{3} \d{4}) rejected',
                            possible_rejection_reason)
                        if m:
                            d = datetime.strptime(m.group(1), '%d %b %Y')
                            occurrence_rejection_reasons[
                                d] = possible_rejection_reason[9:].strip('\'')

                    el = ReservationEditLog(timestamp=ts,
                                            user_name=h._responsibleUser,
                                            info=map(convert_to_unicode,
                                                     h._info))
                    r.edit_logs.append(el)

            notifications = getattr(v, 'startEndNotification', []) or []
            excluded_days = getattr(v, '_excludedDays', []) or []
            ReservationOccurrence.create_series_for_reservation(r)
            for occ in r.occurrences:
                occ.notification_sent = occ.date in notifications
                occ.is_rejected = r.is_rejected
                occ.is_cancelled = r.is_cancelled or occ.date in excluded_days
                occ.rejection_reason = (
                    convert_to_unicode(occurrence_rejection_reasons[occ.date])
                    if occ.date in occurrence_rejection_reasons else None)

            event_id = getattr(v, '_ReservationBase__owner', None)
            if hasattr(event_id, '_Impersistant__obj'):  # Impersistant object
                event_id = event_id._Impersistant__obj
            if event_id is not None:
                event = self.zodb_root['conferences'].get(event_id)
                if event:
                    # For some stupid reason there are bookings in the database which have a completely unrelated parent
                    guids = getattr(event, '_Conference__roomBookingGuids', [])
                    if any(
                            int(x.id) == v.id for x in guids
                            if x.id is not None):
                        r.event_id = int(event_id)
                    else:
                        print cformat(
                            '  %{red}event {} does not contain booking {}'
                        ).format(event_id, v.id)

            print cformat(
                '- [%{cyan}{}%{reset}/%{green!}{}%{reset}]  %{grey!}{}%{reset}  {}'
            ).format(room.location_name, room.name, r.id, r.created_dt.date())

            room.reservations.append(r)
            db.session.add(room)
            i = (i + 1) % 1000
            if not i:
                db.session.commit()
        db.session.commit()
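migrate_reservations commits in batches of 1000 rather than once per reservation to keep transactions small. A generic version of that batching pattern, demonstrated with a fake session object (the names below are illustrative, not part of the migration code):

def migrate_in_batches(items, handle, session, batch_size=1000):
    """Process items one by one and commit the session after every `batch_size` items."""
    n = 0
    for n, item in enumerate(items, 1):
        handle(item)
        if n % batch_size == 0:
            session.commit()
    session.commit()  # flush whatever is left in the final partial batch

class _FakeSession(object):
    def __init__(self):
        self.commits = 0
    def commit(self):
        self.commits += 1

s = _FakeSession()
migrate_in_batches(range(2500), handle=lambda item: None, session=s)
assert s.commits == 3  # two full batches plus the final commit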
Exemplo n.º 47
0
@pytest.mark.parametrize(('dt', 'fmt', 'expected'), (
    (datetime(2015, 11, 12, 17, 30), '%Y-%m-%d', '2015-11-12'),
    (datetime(1015, 11, 12, 17, 30), '%Y-%m-%d %H:%M', '1015-11-12 17:30'),
))
def test_strftime_all_years(dt, fmt, expected):
    assert strftime_all_years(dt, fmt) == expected


dt = datetime
tz = timezone('Europe/Zurich')
iterdays_test_data = (
    (dt(2015, 1, 1, 10, 30).date(), dt(2015, 1, 1, 12, 30), True, None, None, 1),
    (dt(2015, 1, 1, 10, 30), dt(2014, 1, 1, 12, 30), True, None, None, 0),
    (dt(2015, 1, 1, 10, 30), dt(2015, 1, 1, 12, 30), True, None, None, 1),
    (dt(2017, 10, 13), dt(2017, 10, 19), True, None, None, 5),
    (dt(2017, 10, 13), dt(2017, 10, 19), False, None, None, 7),
    (dt(2017, 10, 13), dt(2017, 10, 19), True, [dt(2017, 10, 17).date()], None, 1),
    (dt(2017, 10, 13), dt(2017, 10, 19), True, [dt(2017, 10, 14).date()], None, 0),
    (dt(2017, 10, 13), dt(2017, 10, 19), False, [dt(2017, 10, 14).date()], None, 1),
    (dt(2017, 10, 13), dt(2017, 10, 19), False, None, [dt(2017, 10, 14).date(), dt(2017, 10, 16).date()], 5),
    (dt(2017, 10, 13), dt(2017, 10, 19), False, [dt(2017, 10, 15).date()], [dt(2017, 10, 14).date()], 1),
    (dt(2017, 10, 28, 10, 30), dt(2017, 10, 31, 12, 30), True, None, [dt(2017, 10, 28, 10, 30)], 2),
    (as_utc(dt(2017, 10, 28)).astimezone(tz), as_utc(dt(2017, 10, 31)).astimezone(tz), True, None, None, 2),
    (as_utc(dt(2017, 3, 26)).astimezone(tz), as_utc(dt(2017, 3, 28)).astimezone(tz), True, None, None, 2),
)


@pytest.mark.parametrize(('from_', 'to', 'skip_weekends', 'day_whitelist', 'day_blacklist', 'expected'),
                         iterdays_test_data)
def test_iterdays(from_, to, skip_weekends, day_whitelist, day_blacklist, expected):
    assert len(list(iterdays(from_, to, skip_weekends, day_whitelist, day_blacklist))) == expected
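The iterdays test data above exercises weekend skipping plus whitelist/blacklist filters. A simplified stand-in that reproduces the date-counting behaviour these tests check (it yields plain dates; the real iterdays may yield richer objects and handle timezones differently):

from datetime import date, datetime, timedelta

def iterdays_sketch(from_, to, skip_weekends=False, day_whitelist=None, day_blacklist=None):
    """Yield each calendar date from `from_` to `to` (inclusive), honouring the filters."""
    start = from_.date() if isinstance(from_, datetime) else from_
    end = to.date() if isinstance(to, datetime) else to
    day = start
    while day <= end:
        if ((not skip_weekends or day.weekday() < 5)
                and (day_whitelist is None or day in day_whitelist)
                and (day_blacklist is None or day not in day_blacklist)):
            yield day
        day += timedelta(days=1)

days = list(iterdays_sketch(date(2017, 10, 13), date(2017, 10, 19), skip_weekends=True))
assert len(days) == 5  # Friday plus Monday-Thursday, as in the test data above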
Exemplo n.º 48
0
    def _migrate_abstract_reviews(self, abstract, zodb_abstract, old_abstract,
                                  as_duplicate_reviews):
        old_judgments = {(j.track_id, j.judge): j
                         for j in old_abstract.judgments}
        for old_track_id, zodb_judgments in getattr(
                zodb_abstract, '_trackJudgementsHistorical', {}).iteritems():
            seen_judges = set()
            for zodb_judgment in zodb_judgments:
                if zodb_judgment is None:
                    continue
                if zodb_judgment.__class__.__name__ == 'AbstractUnMarkedAsDuplicated':
                    # we don't have "unmarked as duplicate" anymore
                    continue
                try:
                    track = self.track_map_by_id[int(zodb_judgment._track.id)]
                except KeyError:
                    self.importer.print_warning(cformat(
                        '%{blue!}Abstract {} %{yellow}judged in invalid track {}%{reset}'
                    ).format(zodb_abstract._id, int(zodb_judgment._track.id)),
                                                event_id=self.event.id)
                    continue
                judge = self._user_from_legacy(zodb_judgment._responsible)
                if not judge:
                    # self.importer.print_warning(
                    #     cformat('%{blue!}Abstract {} {yellow}had an empty judge ({})!%{reset}').format(
                    #         zodb_abstract._id, zodb_judgment), event_id=self.event.id)
                    continue
                elif judge in seen_judges:
                    # self.importer.print_warning(
                    #     cformat("%{blue!}Abstract {}: {yellow}judge '{}' seen more than once ({})!%{reset}")
                    #         .format(zodb_abstract._id, judge, zodb_judgment), event_id=self.event.id)
                    continue

                seen_judges.add(judge)
                try:
                    created_dt = as_utc(zodb_judgment._date)
                except AttributeError:
                    created_dt = self.event.start_dt
                review = AbstractReview(created_dt=created_dt,
                                        proposed_action=self.ACTION_MAP[
                                            zodb_judgment.__class__.__name__],
                                        comment=convert_to_unicode(
                                            zodb_judgment._comment))
                if review.proposed_action == AbstractAction.accept:
                    try:
                        old_judgment = old_judgments[int(old_track_id), judge]
                    except KeyError:
                        self.importer.print_error(cformat(
                            '%{yellow!}Abstract #{} has no new judgment for {} / {}'
                        ).format(abstract.friendly_id, int(old_track_id),
                                 judge),
                                                  event_id=self.event.id)
                        continue
                    review.proposed_contribution_type = old_judgment.accepted_type
                    review.proposed_track = self.track_map_by_id[
                        old_judgment.track_id]
                elif review.proposed_action == AbstractAction.change_tracks:
                    review.proposed_tracks = {
                        self.track_map[t]
                        for t in zodb_judgment._proposedTracks
                    }
                elif review.proposed_action == AbstractAction.mark_as_duplicate:
                    as_duplicate_reviews.add(
                        (review, zodb_judgment._originalAbst))

                review.user = judge
                review.track = track

                answered_questions = set()
                for old_answer in getattr(zodb_judgment, '_answers', []):
                    if old_answer._question in answered_questions:
                        self.importer.print_warning(cformat(
                            "%{blue!}Abstract {}: {yellow}question answered more than once!"
                        ).format(abstract.friendly_id),
                                                    event_id=self.event.id)
                        continue
                    try:
                        question = self.question_map[old_answer._question]
                    except KeyError:
                        question = self._migrate_question(old_answer._question,
                                                          is_deleted=True)
                        self.importer.print_warning(cformat(
                            "%{blue!}Abstract {}: {yellow}answer for deleted question"
                        ).format(abstract.friendly_id),
                                                    event_id=self.event.id)
                    rating = AbstractReviewRating(
                        question=question,
                        value=self._convert_scale(old_answer))
                    review.ratings.append(rating)
                    answered_questions.add(old_answer._question)

                abstract.reviews.append(review)
Exemplo n.º 49
0
@pytest.mark.parametrize(('dt', 'fmt', 'expected'), (
    (datetime(2015, 11, 12, 17, 30), '%Y-%m-%d', '2015-11-12'),
    (datetime(1015, 11, 12, 17, 30), '%Y-%m-%d %H:%M', '1015-11-12 17:30'),
))
def test_strftime_all_years(dt, fmt, expected):
    assert strftime_all_years(dt, fmt) == expected


dt = datetime
tz = timezone('Europe/Zurich')
iterdays_test_data = (
    (dt(2015, 1, 1, 10, 30).date(), dt(2015, 1, 1, 12, 30), True, None, None, 1),
    (dt(2015, 1, 1, 10, 30), dt(2014, 1, 1, 12, 30), True, None, None, 0),
    (dt(2015, 1, 1, 10, 30), dt(2015, 1, 1, 12, 30), True, None, None, 1),
    (dt(2017, 10, 13), dt(2017, 10, 19), True, None, None, 5),
    (dt(2017, 10, 13), dt(2017, 10, 19), False, None, None, 7),
    (dt(2017, 10, 13), dt(2017, 10, 19), True, [dt(2017, 10, 17).date()], None, 1),
    (dt(2017, 10, 13), dt(2017, 10, 19), True, [dt(2017, 10, 14).date()], None, 0),
    (dt(2017, 10, 13), dt(2017, 10, 19), False, [dt(2017, 10, 14).date()], None, 1),
    (dt(2017, 10, 13), dt(2017, 10, 19), False, None, [dt(2017, 10, 14).date(), dt(2017, 10, 16).date()], 5),
    (dt(2017, 10, 13), dt(2017, 10, 19), False, [dt(2017, 10, 15).date()], [dt(2017, 10, 14).date()], 1),
    (dt(2017, 10, 28, 10, 30), dt(2017, 10, 31, 12, 30), True, None, [dt(2017, 10, 28, 10, 30)], 2),
    (as_utc(dt(2017, 10, 28)).astimezone(tz), as_utc(dt(2017, 10, 31)).astimezone(tz), True, None, None, 2),
    (as_utc(dt(2017, 3, 26)).astimezone(tz), as_utc(dt(2017, 3, 28)).astimezone(tz), True, None, None, 2),
)


@pytest.mark.parametrize(('from_', 'to', 'skip_weekends', 'day_whitelist', 'day_blacklist', 'expected'),
                         iterdays_test_data)
def test_iterdays(from_, to, skip_weekends, day_whitelist, day_blacklist, expected):
    assert len(list(iterdays(from_, to, skip_weekends, day_whitelist, day_blacklist))) == expected
Exemplo n.º 50
0
def ensure_tzinfo(dt):
    return as_utc(dt) if dt.tzinfo is None else dt
Exemplo n.º 51
0
    def getTimestamp(self):
        if isinstance(self._timestamp, (int, float)):
            return as_utc(datetime.fromtimestamp(self._timestamp))
        return self._timestamp
Exemplo n.º 52
0
    def safe_last_login_dt(self):
        """last_login_dt that is safe for sorting (no None values)"""
        return self.last_login_dt or as_utc(datetime(1970, 1, 1))
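The epoch fallback lets accounts that have never logged in participate in sorting without special-casing None. A small illustration with a hypothetical user stand-in (not the real model; the names and dates are invented):

from datetime import datetime
from pytz import utc

class _UserStub(object):  # hypothetical stand-in for illustration only
    def __init__(self, name, last_login_dt=None):
        self.name = name
        self.last_login_dt = last_login_dt

    @property
    def safe_last_login_dt(self):
        """last_login_dt that is safe for sorting (no None values)"""
        return self.last_login_dt or utc.localize(datetime(1970, 1, 1))

users = [_UserStub('a', utc.localize(datetime(2020, 5, 1))),
         _UserStub('b'),
         _UserStub('c', utc.localize(datetime(2019, 1, 1)))]
assert [u.name for u in sorted(users, key=lambda u: u.safe_last_login_dt)] == ['b', 'c', 'a']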