예제 #1
0
    def test_cleanup_directory(self):
        """A cleanup pass over the directory only reports on a dryrun, then
        removes stale previews and past events on the real run, archiving
        published past events along the way.

        """
        # a preview whose modification lies just past the two day threshold
        preview = self.create_event()
        preview.modification_date = DateTime(
            datetime.utcnow() - timedelta(days=2, microseconds=1)
        )
        preview.reindexObject(idxs=["modified"])

        # a published event which ended three days ago
        published = self.create_event()
        published.submit()
        published.publish()
        published.start = to_utc(datetime.utcnow() - timedelta(days=3))
        published.end = to_utc(datetime.utcnow() - timedelta(days=3))
        published.reindexObject(idxs=["start", "end"])

        # an archived event which ended a month ago
        archived = self.create_event()
        archived.submit()
        archived.publish()
        archived.archive()
        archived.start = to_utc(datetime.utcnow() - timedelta(days=31))
        archived.end = to_utc(datetime.utcnow() - timedelta(days=31))
        archived.reindexObject(idxs=["start", "end"])

        # the dryrun must leave the catalog untouched
        cleanup_scheduler.cleanup_directory(self.directory, dryrun=True)
        self.assertEqual(len(self.catalog.catalog()), 4)

        # the real run archives the published event and removes the rest
        cleanup_scheduler.cleanup_directory(self.directory, dryrun=False)
        self.assertEqual(published.state, "archived")
        self.assertEqual(len(self.catalog.catalog()), 2)
    def test_remove_archived_events(self):
        """maintenance.remove_archived_events only reports events which are
        both archived and more than 30 days past their end.

        """

        # PEP 8 (E731): use a def instead of assigning a lambda to a name
        def run():
            # dryrun -> returns the ids that would be removed
            return maintenance.remove_archived_events(
                self.directory, dryrun=True
            )

        archived = self.create_event()
        self.assertEqual(run(), [])

        archived.submit()
        self.assertEqual(run(), [])

        archived.publish()
        self.assertEqual(run(), [])

        # old enough, but not yet archived -> kept
        archived.start = to_utc(datetime.utcnow() - timedelta(days=31))
        archived.end = to_utc(datetime.utcnow() - timedelta(days=31))
        archived.reindexObject(idxs=['start', 'end'])
        self.assertEqual(run(), [])

        # archived and old enough -> reported
        archived.archive()
        self.assertEqual(run(), [archived.id])

        # archived, but no longer old enough -> kept
        archived.start = to_utc(datetime.utcnow() - timedelta(days=10))
        archived.end = to_utc(datetime.utcnow() - timedelta(days=10))
        archived.reindexObject(idxs=['start', 'end'])

        self.assertEqual(run(), [])
예제 #3
0
    def test_cleanup_directory(self):
        """The dryrun of cleanup_directory changes nothing; the real run
        archives past published events and deletes stale/old content.

        """
        # stale preview, just over the two day modification threshold
        stale = self.create_event()
        stale.modification_date = DateTime(
            datetime.utcnow() - timedelta(days=2, microseconds=1)
        )
        stale.reindexObject(idxs=['modified'])

        # published event, over for three days
        past = self.create_event()
        past.submit()
        past.publish()
        past.start = to_utc(datetime.utcnow() - timedelta(days=3))
        past.end = to_utc(datetime.utcnow() - timedelta(days=3))
        past.reindexObject(idxs=['start', 'end'])

        # archived event, over for a month
        old = self.create_event()
        old.submit()
        old.publish()
        old.archive()
        old.start = to_utc(datetime.utcnow() - timedelta(days=31))
        old.end = to_utc(datetime.utcnow() - timedelta(days=31))
        old.reindexObject(idxs=['start', 'end'])

        # dryrun -> catalog unchanged
        cleanup_scheduler.cleanup_directory(self.directory, dryrun=True)
        self.assertEqual(len(self.catalog.catalog()), 4)

        # real run -> past event archived, stale/old content removed
        cleanup_scheduler.cleanup_directory(self.directory, dryrun=False)
        self.assertEqual(past.state, 'archived')
        self.assertEqual(len(self.catalog.catalog()), 2)
예제 #4
0
    def event_by_id_and_date(self, id, date):
        """Return the (possibly spawned) occurrence of the event with the
        given catalog id whose normalized utc start matches `date`.

        Raises AssertionError when no occurrence matches.

        """
        real = self.catalog.catalog(
            path={'query': self.catalog.path, 'depth': 1},
            object_provides=IEventsDirectoryItem.__identifier__,
            review_state=self.state,
            id=id
        )[0].getObject()

        # there is currently no way to easily look up the event by date
        # if it has been split over dates already (which is what happens
        # when the events are indexed)

        # therefore we need to currently loop over all events to find the
        # right one. certainly this can be optimized.

        # however, tests on a 10k sites with 60% of all events being recurrent
        # indicate that it's not that big of a problem. spawning events is
        # quite fast and it only happens for 10 items per request

        # still, I would prefer some kind of lookup here

        min_date = date - timedelta(days=1)
        max_date = date + timedelta(days=1)

        for item in self.spawn_events([real], min_date, max_date):
            start = dates.to_utc(
                dates.delete_timezone(item.local_start)
            ).replace(second=0, microsecond=0)
            if start == date:
                return item

        # raise explicitly instead of `assert False` so the failure is not
        # silently stripped when running under `python -O`
        raise AssertionError("lookup for %s failed" % id)
예제 #5
0
    def event_by_id_and_date(self, id, date):
        """Return the (possibly spawned) occurrence of the event with the
        given catalog id whose normalized utc start matches `date`.

        Raises AssertionError when no occurrence matches.

        """
        real = self.catalog.catalog(
            path={
                'query': self.catalog.path,
                'depth': 1
            },
            object_provides=IEventsDirectoryItem.__identifier__,
            review_state=self.state,
            id=id)[0].getObject()

        # there is currently no way to easily look up the event by date
        # if it has been split over dates already (which is what happens
        # when the events are indexed)

        # therefore we need to currently loop over all events to find the
        # right one. certainly this can be optimized.

        # however, tests on a 10k sites with 60% of all events being recurrent
        # indicate that it's not that big of a problem. spawning events is
        # quite fast and it only happens for 10 items per request

        # still, I would prefer some kind of lookup here

        min_date = date - timedelta(days=1)
        max_date = date + timedelta(days=1)

        for item in self.spawn_events([real], min_date, max_date):
            start = dates.to_utc(dates.delete_timezone(
                item.local_start)).replace(second=0, microsecond=0)
            if start == date:
                return item

        # raise explicitly instead of `assert False` so the failure is not
        # silently stripped when running under `python -O`
        raise AssertionError("lookup for %s failed" % id)
예제 #6
0
    def __init__(self, start, end, recurrence="", timezone='Europe/Zurich',
                 whole_day=False):
        """Store an event's start/end as utc datetimes, interpreting the
        given naive datetimes in the given timezone.

        """
        # whole-day events span midnight to one second before midnight,
        # which is what plone.app.event does on the dexterity type
        if whole_day:
            start = datetime(start.year, start.month, start.day)
            end = datetime(end.year, end.month, end.day, 23, 59, 59)

        # enforce the implicit timezone first, then convert to utc as
        # that is what an EventItem actually stores
        local_start = dates.as_timezone(start, timezone)
        local_end = dates.as_timezone(end, timezone)

        self.start = dates.to_utc(local_start)
        self.end = dates.to_utc(local_end)

        self.recurrence = recurrence
        self.timezone = timezone
        self.whole_day = whole_day
예제 #7
0
def occurrences(item, min_date, max_date):
    """ Returns the occurrences for item between min and max date.
    Will return a list with a single item if the given item has no recurrence.

    """

    def ensure_datetime(value):
        # catalog values may be timestamps rather than datetimes
        if isinstance(value, datetime):
            return value
        return dates.to_utc(datetime.utcfromtimestamp(value))

    item_start = ensure_datetime(item.start)
    item_end = ensure_datetime(item.end)

    # a non-recurring event yields at most one occurrence
    if not item.recurrence:
        if overlaps(min_date, max_date, item_start, item_end):
            return [Occurrence(item, item_start, item_end)]
        return []

    local_start = pytz.timezone(item.timezone).normalize(item_start)

    sequence = recurrence_sequence_ical(
        start=local_start,
        recrule=item.recurrence,
        from_=min_date,
        until=max_date
    )

    duration = item_end - item_start

    # each occurrence keeps the duration of the original event
    starts = (
        utcoffset_normalize(s, dstmode=DSTADJUST) for s in sequence
    )
    return [Occurrence(item, s, s + duration) for s in starts]
예제 #8
0
    def test_remove_past_imported_events(self):
        """Past imported (external) events are removed whether they are
        published or hidden; current imports are kept.

        """

        # PEP 8 (E731): use defs instead of assigning lambdas to names
        def run_dry():
            return cleanup_scheduler.remove_past_imported_events(
                self.directory, dryrun=True
            )

        def run():
            return cleanup_scheduler.remove_past_imported_events(
                self.directory, dryrun=False
            )

        imported = self.create_event()
        imported.submit()
        imported.publish()
        alsoProvides(imported, IExternalEvent)
        imported.reindexObject()
        self.assertEqual(run(), [])

        hidden = self.create_event()
        hidden.submit()
        hidden.publish()
        alsoProvides(hidden, IExternalEvent)
        hidden.hide()
        hidden.reindexObject()
        self.assertEqual(run(), [])

        # Age events
        imported.start = to_utc(datetime.utcnow() - timedelta(days=10))
        imported.end = to_utc(datetime.utcnow() - timedelta(days=10))
        imported.reindexObject(idxs=['start', 'end'])
        hidden.start = to_utc(datetime.utcnow() - timedelta(days=10))
        hidden.end = to_utc(datetime.utcnow() - timedelta(days=10))
        hidden.reindexObject(idxs=['start', 'end'])

        # Test dryruns
        ids = run_dry()
        self.assertEqual(len(ids), 2)
        self.assertTrue(imported.id in ids)
        self.assertTrue(hidden.id in ids)

        # Test run
        self.assertEqual(len(self.catalog.catalog()), 3)
        self.assertEqual(run(), [])
        self.assertEqual(len(self.catalog.catalog()), 1)
예제 #9
0
def occurrences(item, min_date, max_date):
    """ Returns the occurrences for item between min and max date.
    Will return a list with a single item if the given item has no recurrence.

    """

    # the start/end may arrive as timestamps instead of datetimes
    if isinstance(item.start, datetime):
        item_start = item.start
    else:
        item_start = dates.to_utc(datetime.utcfromtimestamp(item.start))

    if isinstance(item.end, datetime):
        item_end = item.end
    else:
        item_end = dates.to_utc(datetime.utcfromtimestamp(item.end))

    # without a recurrence there is at most one occurrence
    if not item.recurrence:
        if not overlaps(min_date, max_date, item_start, item_end):
            return []
        return [Occurrence(item, item_start, item_end)]

    tz = pytz.timezone(item.timezone)
    local_start = tz.normalize(item_start)

    sequence = recurrence_sequence_ical(start=local_start,
                                        recrule=item.recurrence,
                                        from_=min_date,
                                        until=max_date)

    # every occurrence keeps the original event's duration
    duration = item_end - item_start

    result = []
    for occurrence_start in sequence:
        occurrence_start = utcoffset_normalize(
            occurrence_start, dstmode=DSTADJUST
        )
        result.append(
            Occurrence(item, occurrence_start, occurrence_start + duration)
        )

    return result
예제 #10
0
    def __init__(self,
                 start,
                 end,
                 recurrence="",
                 timezone='Europe/Zurich',
                 whole_day=False):
        """Keep an event's data, normalizing start/end to utc datetimes
        interpreted in the given timezone.

        """
        # a whole-day event covers midnight to 23:59:59, matching what
        # plone.app.event does on the dexterity type
        if whole_day:
            start = datetime(start.year, start.month, start.day)
            end = datetime(end.year, end.month, end.day, 23, 59, 59)

        # the naive datetimes implicitly belong to `timezone`; make that
        # explicit, then store as utc like an EventItem would
        self.start = dates.to_utc(dates.as_timezone(start, timezone))
        self.end = dates.to_utc(dates.as_timezone(end, timezone))

        self.recurrence = recurrence
        self.timezone = timezone
        self.whole_day = whole_day
예제 #11
0
    def test_has_future_occurrences(self):
        """has_future_occurrences respects the event end and, for recurring
        events, the rrule's UNTIL date (with or without the trailing 'Z').

        """
        def check(item, day, expected):
            # reference dates are utc days in january 2013
            reference = dates.to_utc(datetime(2013, 1, day))
            outcome = recurrence.has_future_occurrences(item, reference)
            if expected:
                self.assertTrue(outcome)
            else:
                self.assertFalse(outcome)

        single = Item(
            datetime(2013, 1, 1), datetime(2013, 1, 2), timezone='utc'
        )
        check(single, 3, False)
        check(single, 2, False)
        check(single, 1, True)

        daily = Item(
            datetime(2013, 1, 1), datetime(2013, 1, 2), timezone='utc',
            recurrence='RRULE:FREQ=DAILY;UNTIL=20130105T000000Z'
        )
        check(daily, 4, True)
        check(daily, 5, True)
        check(daily, 6, False)

        # the recurrence rule may come without a timezone (missing Z)
        naive = Item(
            datetime(2013, 1, 1), datetime(2013, 1, 2), timezone='utc',
            recurrence='RRULE:FREQ=DAILY;UNTIL=20130105T000000'
        )
        check(naive, 4, True)
        check(naive, 5, True)
        check(naive, 6, False)
예제 #12
0
    def test_remove_past_imported_events(self):
        """Imported (external) events which ended in the past are removed,
        whether published or hidden; current ones are kept.

        """

        # PEP 8 (E731): defs instead of lambda assignments; also keeps the
        # lines within the conventional length limit
        def run_dry():
            return cleanup_scheduler.remove_past_imported_events(
                self.directory, dryrun=True
            )

        def run():
            return cleanup_scheduler.remove_past_imported_events(
                self.directory, dryrun=False
            )

        imported = self.create_event()
        imported.submit()
        imported.publish()
        alsoProvides(imported, IExternalEvent)
        imported.reindexObject()
        self.assertEqual(run(), [])

        hidden = self.create_event()
        hidden.submit()
        hidden.publish()
        alsoProvides(hidden, IExternalEvent)
        hidden.hide()
        hidden.reindexObject()
        self.assertEqual(run(), [])

        # Age events
        imported.start = to_utc(datetime.utcnow() - timedelta(days=10))
        imported.end = to_utc(datetime.utcnow() - timedelta(days=10))
        imported.reindexObject(idxs=["start", "end"])
        hidden.start = to_utc(datetime.utcnow() - timedelta(days=10))
        hidden.end = to_utc(datetime.utcnow() - timedelta(days=10))
        hidden.reindexObject(idxs=["start", "end"])

        # Test dryruns
        ids = run_dry()
        self.assertEqual(len(ids), 2)
        self.assertTrue(imported.id in ids)
        self.assertTrue(hidden.id in ids)

        # Test run
        self.assertEqual(len(self.catalog.catalog()), 3)
        self.assertEqual(run(), [])
        self.assertEqual(len(self.catalog.catalog()), 1)
예제 #13
0
    def test_keep_permanently_archived_events(self):
        """Permanently archived events survive remove_archived_events even
        when they are old enough to otherwise qualify for removal.

        """

        # PEP 8 (E731): def instead of a lambda assignment
        def run():
            return cleanup_scheduler.remove_archived_events(
                self.directory, dryrun=False
            )

        archived = self.create_event()
        archived.submit()
        archived.publish()
        archived.archive()
        archived.archive_permanently()

        # Age event past the removal threshold (the original duplicated
        # this block verbatim; once is enough)
        archived.start = to_utc(datetime.utcnow() - timedelta(days=31))
        archived.end = to_utc(datetime.utcnow() - timedelta(days=31))
        archived.reindexObject(idxs=["start", "end"])

        # Test run - the event must neither be reported nor removed
        self.assertEqual(len(self.catalog.catalog()), 2)
        self.assertEqual(run(), [])
        self.assertEqual(len(self.catalog.catalog()), 2)
예제 #14
0
    def test_has_future_occurrences(self):
        """Single events stop having future occurrences at their end;
        recurring events at the rrule's UNTIL date, with or without 'Z'.

        """
        one_off = Item(datetime(2013, 1, 1), datetime(2013, 1, 2),
                       timezone='utc')

        for day, expect in ((3, False), (2, False), (1, True)):
            reference = dates.to_utc(datetime(2013, 1, day))
            outcome = recurrence.has_future_occurrences(one_off, reference)
            if expect:
                self.assertTrue(outcome)
            else:
                self.assertFalse(outcome)

        daily = Item(datetime(2013, 1, 1),
                     datetime(2013, 1, 2),
                     timezone='utc',
                     recurrence='RRULE:FREQ=DAILY;UNTIL=20130105T000000Z')

        for day, expect in ((4, True), (5, True), (6, False)):
            reference = dates.to_utc(datetime(2013, 1, day))
            outcome = recurrence.has_future_occurrences(daily, reference)
            if expect:
                self.assertTrue(outcome)
            else:
                self.assertFalse(outcome)

        # the recurrence rule may come without a timezone (missing Z)

        naive = Item(datetime(2013, 1, 1),
                     datetime(2013, 1, 2),
                     timezone='utc',
                     recurrence='RRULE:FREQ=DAILY;UNTIL=20130105T000000')

        for day, expect in ((4, True), (5, True), (6, False)):
            reference = dates.to_utc(datetime(2013, 1, day))
            outcome = recurrence.has_future_occurrences(naive, reference)
            if expect:
                self.assertTrue(outcome)
            else:
                self.assertFalse(outcome)
예제 #15
0
    def archive_past_events(self, directory, dryrun=False):
        """Archive published events of `directory` which ended more than
        two days ago and have no future occurrences. Returns the ids of
        the affected events; with dryrun=True nothing is changed.

        """
        catalog = IDirectoryCatalog(directory)

        log.info("archiving past events (> 2 days old)")

        # events are in the past if they have been over for two days
        # (not one, to ensure that they are really over in all timezones)
        cutoff = to_utc(datetime.utcnow() - timedelta(days=2))

        candidates = catalog.catalog(
            path={"query": directory.getPhysicalPath(), "depth": 2},
            object_provides=IEventsDirectoryItem.__identifier__,
            review_state=("published",),
            start={"query": cutoff, "range": "max"},
            end={"query": cutoff, "range": "max"},
        )

        past_events = []

        for brain in candidates:
            event = brain.getObject()

            assert event.start < cutoff
            assert event.end < cutoff

            # recurring events may be in the past with one of
            # their occurrences in the future
            if has_future_occurrences(event, cutoff):
                continue

            # published events may be imported events - those are
            # handled elsewhere
            if IExternalEvent.providedBy(event):
                continue

            past_events.append(event)

        ids = [event.id for event in past_events]

        if not past_events:
            log.info("no past events found")
            return ids

        log.info("archiving past events -> %s" % str(ids))

        if not dryrun:
            for event in past_events:
                event.archive()

        return ids
예제 #16
0
    def archive_past_events(self, directory, dryrun=False):
        """Archive published events of `directory` which ended more than
        two days ago and have no future occurrences. Returns the affected
        ids; with dryrun=True the events are only reported, not archived.

        """
        query = IDirectoryCatalog(directory).catalog

        log.info('archiving past events (> 2 days old)')

        # two days instead of one, to ensure events are really over in
        # all timezones
        past = to_utc(datetime.utcnow() - timedelta(days=2))

        published_events = query(
            path={'query': directory.getPhysicalPath(), 'depth': 2},
            object_provides=IEventsDirectoryItem.__identifier__,
            review_state=('published', ),
            start={'query': past, 'range': 'max'},
            end={'query': past, 'range': 'max'}
        )

        past_events = []
        for brain in published_events:
            event = brain.getObject()

            assert event.start < past
            assert event.end < past

            # skip recurring events with occurrences in the future and
            # imported events (which are handled elsewhere)
            if has_future_occurrences(event, past):
                continue
            if IExternalEvent.providedBy(event):
                continue

            past_events.append(event)

        ids = [event.id for event in past_events]

        if not past_events:
            log.info('no past events found')
            return ids

        log.info('archiving past events -> %s' % str(ids))

        if not dryrun:
            for event in past_events:
                event.archive()

        return ids
예제 #17
0
def archive_past_events(directory, dryrun=False):
    """Archive published events of `directory` which ended more than two
    days ago and have no future occurrences. Returns the ids of the
    affected events; with dryrun=True nothing is changed.

    """
    query = IDirectoryCatalog(directory).catalog

    log.info('archiving past events')

    # two days rather than one, to ensure the events are really over in
    # all timezones
    past = to_utc(datetime.utcnow() - timedelta(days=2))

    published_events = query(
        path={'query': directory.getPhysicalPath(), 'depth': 2},
        object_provides=IEventsDirectoryItem.__identifier__,
        review_state=('published', ),
        start={'query': past, 'range': 'max'},
        end={'query': past, 'range': 'max'}
    )

    past_events = []
    for brain in published_events:
        event = brain.getObject()

        assert event.start < past
        assert event.end < past

        # recurring events may be in the past with one of
        # their occurrences in the future
        if has_future_occurrences(event, past):
            continue

        past_events.append(event)

    ids = [event.id for event in past_events]

    if not past_events:
        log.info('no past events found')
        return ids

    log.info('archiving past events -> %s' % str(ids))

    if not dryrun:
        for event in past_events:
            event.archive()

    return ids
예제 #18
0
    def remove_past_imported_events(self, directory, dryrun=False):
        """Delete imported (external) events of `directory` which ended
        more than two days ago and have no future occurrences. Returns
        the ids of the removed events; with dryrun=True nothing is
        deleted.

        """
        catalog = IDirectoryCatalog(directory)
        query = catalog.catalog

        log.info('remove past imported events (> 2 days old)')

        # events are in the past if they have been over for two days
        # (not one, to ensure that they are really over in all timezones)
        past = to_utc(datetime.utcnow() - timedelta(days=2))
        imported_events = query(
            path={'query': directory.getPhysicalPath(), 'depth': 2},
            object_provides=IExternalEvent.__identifier__,
            start={'query': past, 'range': 'max'},
            end={'query': past, 'range': 'max'}
        )

        past_events = []

        for event in imported_events:
            event = event.getObject()

            assert event.start < past
            assert event.end < past

            # recurring events may be in the past with one of
            # their occurrences in the future
            if not has_future_occurrences(event, past):
                past_events.append(event)

        # keep a separate name for the ids instead of rebinding
        # `past_events`, matching the sibling methods
        ids = [p.id for p in past_events]

        if ids:
            log.info('removing past imported events -> %s' % str(ids))

            if not dryrun:
                # (a stray `pass` used to follow this call)
                directory.manage_delObjects(ids)
        else:
            log.info('no past imported events found')

        return ids
예제 #19
0
    def test_archive_past_events(self):
        """archive_past_events archives published events which ended over
        two days ago, unless a recurrence still has future occurrences.

        """

        # PEP 8 (E731): defs instead of lambda assignments
        def run_dry():
            return cleanup_scheduler.archive_past_events(
                self.directory, dryrun=True
            )

        def run():
            return cleanup_scheduler.archive_past_events(
                self.directory, dryrun=False
            )

        published = self.create_event()
        self.assertEqual(run(), [])

        published.submit()
        self.assertEqual(run(), [])

        published.publish()
        self.assertEqual(run(), [])

        # Age event
        published.start = to_utc(datetime.utcnow() - timedelta(days=3))
        published.end = to_utc(datetime.utcnow() - timedelta(days=3))
        published.reindexObject(idxs=['start', 'end'])

        # Test dryruns
        self.assertEqual(run_dry(), [published.id])

        published.start += timedelta(days=2)
        published.end += timedelta(days=2)
        published.reindexObject(idxs=['start', 'end'])
        self.assertEqual(run_dry(), [])

        published.start = to_utc(datetime.utcnow() - timedelta(days=3))
        published.end = to_utc(datetime.utcnow() - timedelta(days=3))
        published.reindexObject(idxs=['start', 'end'])
        self.assertEqual(run_dry(), [published.id])

        # a recurrence with future occurrences protects the event
        published.recurrence = 'RRULE:FREQ=WEEKLY;COUNT=10'
        self.assertEqual(run_dry(), [])

        # a recurrence which is itself over does not
        published.start = to_utc(datetime.utcnow() - timedelta(days=100))
        published.end = to_utc(datetime.utcnow() - timedelta(days=100))
        published.recurrence = 'RRULE:FREQ=WEEKLY;COUNT=3'
        published.reindexObject(idxs=['start', 'end'])

        self.assertEqual(run_dry(), [published.id])

        # Test run
        self.assertEqual(run(), [published.id])
        self.assertEqual(published.state, 'archived')
예제 #20
0
    def test_archive_past_events(self):
        """Published events over two days past their end are archived,
        unless a recurrence still yields future occurrences.

        """

        # PEP 8 (E731): defs instead of lambda assignments; also keeps
        # the lines within the conventional length limit
        def run_dry():
            return cleanup_scheduler.archive_past_events(
                self.directory, dryrun=True
            )

        def run():
            return cleanup_scheduler.archive_past_events(
                self.directory, dryrun=False
            )

        published = self.create_event()
        self.assertEqual(run(), [])

        published.submit()
        self.assertEqual(run(), [])

        published.publish()
        self.assertEqual(run(), [])

        # Age event
        published.start = to_utc(datetime.utcnow() - timedelta(days=3))
        published.end = to_utc(datetime.utcnow() - timedelta(days=3))
        published.reindexObject(idxs=["start", "end"])

        # Test dryruns
        self.assertEqual(run_dry(), [published.id])

        published.start += timedelta(days=2)
        published.end += timedelta(days=2)
        published.reindexObject(idxs=["start", "end"])
        self.assertEqual(run_dry(), [])

        published.start = to_utc(datetime.utcnow() - timedelta(days=3))
        published.end = to_utc(datetime.utcnow() - timedelta(days=3))
        published.reindexObject(idxs=["start", "end"])
        self.assertEqual(run_dry(), [published.id])

        # future occurrences protect the event from archiving
        published.recurrence = "RRULE:FREQ=WEEKLY;COUNT=10"
        self.assertEqual(run_dry(), [])

        # an exhausted recurrence does not
        published.start = to_utc(datetime.utcnow() - timedelta(days=100))
        published.end = to_utc(datetime.utcnow() - timedelta(days=100))
        published.recurrence = "RRULE:FREQ=WEEKLY;COUNT=3"
        published.reindexObject(idxs=["start", "end"])

        self.assertEqual(run_dry(), [published.id])

        # Test run
        self.assertEqual(run(), [published.id])
        self.assertEqual(published.state, "archived")
예제 #21
0
    def remove_stale_previews(self, directory, dryrun=False):
        """Delete events of `directory` which linger in the 'preview'
        state and were last modified more than two days ago. Returns the
        ids of the removed previews; with dryrun=True nothing is deleted.

        """
        catalog = IDirectoryCatalog(directory)
        query = catalog.catalog.unrestrictedSearchResults

        log.info('searching for stale previews (> 2 days old)')

        past = to_utc(datetime.utcnow() - timedelta(days=2))
        stale_previews = query(
            path={'query': directory.getPhysicalPath(), 'depth': 2},
            object_provides=IEventsDirectoryItem.__identifier__,
            # ('preview') was a plain string, not a tuple - add the
            # missing comma so it matches queries like ('published', )
            review_state=('preview', ),
            modified={'query': past, 'range': 'max'}
        )
        stale_previews = [p.id for p in stale_previews]

        if stale_previews:
            log.info('deleting stale previews -> %s' % str(stale_previews))
            if not dryrun:
                directory.manage_delObjects(stale_previews)
        else:
            log.info('no stale previews found')

        return stale_previews
예제 #22
0
    def remove_stale_previews(self, directory, dryrun=False):
        """Delete events of `directory` which linger in the 'preview'
        state and were last modified more than two days ago. Returns the
        ids of the removed previews; with dryrun=True nothing is deleted.

        """
        catalog = IDirectoryCatalog(directory)
        query = catalog.catalog.unrestrictedSearchResults

        log.info("searching for stale previews (> 2 days old)")

        past = to_utc(datetime.utcnow() - timedelta(days=2))
        stale_previews = query(
            path={"query": directory.getPhysicalPath(), "depth": 2},
            object_provides=IEventsDirectoryItem.__identifier__,
            # ("preview") was a plain string, not a tuple - add the
            # missing comma so it matches queries like ("published",)
            review_state=("preview",),
            modified={"query": past, "range": "max"},
        )
        stale_previews = [p.id for p in stale_previews]

        if stale_previews:
            log.info("deleting stale previews -> %s" % str(stale_previews))
            if not dryrun:
                directory.manage_delObjects(stale_previews)
        else:
            log.info("no stale previews found")

        return stale_previews
예제 #23
0
    def test_remove_archived_events(self):
        """remove_archived_events removes archived events which ended more
        than 30 days ago; younger or non-archived events are kept.

        """

        # PEP 8 (E731): defs instead of lambda assignments
        def run_dry():
            return cleanup_scheduler.remove_archived_events(
                self.directory, dryrun=True
            )

        def run():
            return cleanup_scheduler.remove_archived_events(
                self.directory, dryrun=False
            )

        archived = self.create_event()
        self.assertEqual(run(), [])

        archived.submit()
        self.assertEqual(run(), [])

        archived.publish()
        self.assertEqual(run(), [])

        # Age event
        archived.start = to_utc(datetime.utcnow() - timedelta(days=31))
        archived.end = to_utc(datetime.utcnow() - timedelta(days=31))
        archived.reindexObject(idxs=['start', 'end'])

        # Test dryruns
        self.assertEqual(run_dry(), [])

        archived.archive()
        self.assertEqual(run_dry(), [archived.id])

        archived.start = to_utc(datetime.utcnow() - timedelta(days=10))
        archived.end = to_utc(datetime.utcnow() - timedelta(days=10))
        archived.reindexObject(idxs=['start', 'end'])

        self.assertEqual(run_dry(), [])

        # Test run
        archived.start = to_utc(datetime.utcnow() - timedelta(days=31))
        archived.end = to_utc(datetime.utcnow() - timedelta(days=31))
        archived.reindexObject(idxs=['start', 'end'])

        self.assertEqual(len(self.catalog.catalog()), 2)
        self.assertEqual(run(), [])
        self.assertEqual(len(self.catalog.catalog()), 1)
    def test_archive_past_events(self):
        """maintenance.archive_past_events (dryrun) reports published
        events over two days past their end without future occurrences.

        """

        # PEP 8 (E731): def instead of a lambda assignment
        def run():
            return maintenance.archive_past_events(
                self.directory, dryrun=True
            )

        published = self.create_event()
        self.assertEqual(run(), [])

        published.submit()
        self.assertEqual(run(), [])

        published.publish()
        self.assertEqual(run(), [])

        # old enough -> reported
        published.start = to_utc(datetime.utcnow() - timedelta(days=3))
        published.end = to_utc(datetime.utcnow() - timedelta(days=3))
        published.reindexObject(idxs=['start', 'end'])

        self.assertEqual(run(), [published.id])

        # moved closer to now -> kept
        published.start += timedelta(days=2)
        published.end += timedelta(days=2)
        published.reindexObject(idxs=['start', 'end'])
        self.assertEqual(run(), [])

        published.start = to_utc(datetime.utcnow() - timedelta(days=3))
        published.end = to_utc(datetime.utcnow() - timedelta(days=3))
        published.reindexObject(idxs=['start', 'end'])
        self.assertEqual(run(), [published.id])

        # a recurrence with future occurrences protects the event
        published.recurrence = 'RRULE:FREQ=WEEKLY;COUNT=10'
        self.assertEqual(run(), [])

        # an exhausted recurrence does not
        published.start = to_utc(datetime.utcnow() - timedelta(days=100))
        published.end = to_utc(datetime.utcnow() - timedelta(days=100))
        published.recurrence = 'RRULE:FREQ=WEEKLY;COUNT=3'
        published.reindexObject(idxs=['start', 'end'])

        self.assertEqual(run(), [published.id])
예제 #25
0
 def identity_date(self, identity):
     """Parse the leading 'yy.mm.dd-HH:MM' stamp of `identity` into a
     utc datetime.

     """
     stamp = identity[:14]
     return dates.to_utc(datetime.strptime(stamp, '%y.%m.%d-%H:%M'))
예제 #26
0
 def identity_date(self, identity):
     """Return the utc datetime encoded in the first 14 characters of
     `identity` ('yy.mm.dd-HH:MM').

     """
     parsed = datetime.strptime(identity[:14], '%y.%m.%d-%H:%M')
     return dates.to_utc(parsed)