def cleanup():
        Log.Debug('Cleaning up stale or invalid sessions')

        # Copy the items so entries can be deleted safely while iterating
        for key, session in list(Dict['nowPlaying'].items()):
            delete = False

            # Destroy invalid sessions
            if type(session) is not dict:
                delete = True
            elif 'update_required' not in session:
                delete = True
            elif 'last_updated' not in session:
                delete = True
            elif type(session['last_updated']) is not datetime:
                delete = True
            elif total_seconds(datetime.now() - session['last_updated']) / 60 / 60 > 24:
                # Destroy sessions last updated over 24 hours ago
                Log.Debug('Session %s was last updated over 24 hours ago, queued for deletion', key)
                delete = True

            # Delete session or flag for update
            if delete:
                Log.Info('Session %s looks stale or invalid, deleting it now', key)
                del Dict['nowPlaying'][key]
            elif not session['update_required']:
                Log.Info('Queueing session %s for update', key)
                session['update_required'] = True

                # Update session in storage
                Dict['nowPlaying'][key] = session

        Log.Debug('Finished cleaning up')
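
Every example in this listing goes through a total_seconds helper instead of calling timedelta.total_seconds() directly, presumably for Python 2.6 compatibility (the method only appeared in 2.7). A minimal sketch, assuming the helper mirrors the equivalent formula from the CPython documentation:

def total_seconds(td):
    # Equivalent of timedelta.total_seconds() for Python < 2.7, using the
    # formula given in the CPython documentation
    return (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10 ** 6) / 10.0 ** 6
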
    def cleanup():
        log.debug('Cleaning up stale or invalid sessions')

        sessions = WatchSession.all()

        if not sessions:
            return

        for key, ws in sessions:
            delete = False

            # Destroy invalid sessions
            if ws is None:
                delete = True
            elif not ws.last_updated or type(ws.last_updated) is not datetime:
                delete = True
            elif total_seconds(datetime.now() - ws.last_updated) / 60 / 60 > 24:
                # Destroy sessions last updated over 24 hours ago
                log.debug('Session %s was last updated over 24 hours ago, queued for deletion', key)
                delete = True

            # Delete session or flag for update
            if delete:
                log.info('Session %s looks stale or invalid, deleting it now', key)
                WatchSession.delete(key)
            elif not ws.update_required:
                log.info('Queueing session %s for update', key)
                ws.update_required = True
                ws.save()

        log.debug('Finished cleaning up')
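
Both cleanup variants apply the same 24-hour staleness rule before deleting a session. Factored into a standalone predicate for clarity (a sketch; is_stale is not part of the original code):

def is_stale(last_updated, max_age_hours=24):
    # Anything without a valid datetime, or older than `max_age_hours`,
    # is treated as stale
    if type(last_updated) is not datetime:
        return True

    return total_seconds(datetime.now() - last_updated) / 60 / 60 > max_age_hours
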
Example #4
        def get_merged(cls, media, watched=True, ratings=False, collected=False, extended=None, retry=True, cache_id=None):
            start = datetime.utcnow()

            # Merge data
            items = {}

            params = {
                'authenticate': True,
                'retry': retry,
                'cache_id': cache_id
            }

            # Merge watched library
            if watched and not Trakt.merge_watched(items, media, extended, **params):
                log.warn('Failed to merge watched library')
                return None

            # Merge ratings
            if ratings and not Trakt.merge_ratings(items, media, **params):
                log.warn('Failed to merge ratings')
                return None

            # Merge collected library
            if collected and not Trakt.merge_collected(items, media, extended, **params):
                log.warn('Failed to merge collected library')
                return None

            # Generate entries table with alternative keys
            table = items.copy()

            # Copy so alternative keys can be added while iterating
            for key, item in list(table.items()):
                # Skip first key (because it's the root_key)
                for alt_key in item.keys[1:]:
                    table[alt_key] = item

            # Calculate elapsed time
            elapsed = datetime.utcnow() - start

            log.debug(
                'get_merged returned dictionary with %s keys for %s items in %s seconds',
                len(table), len(items), total_seconds(elapsed))

            return items, table
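
The (items, table) pair returned above separates the canonical item set from a lookup table that also indexes every alternative key, so a caller can resolve an item through whichever identifier it happens to hold. A hedged usage sketch (the exact key format and the owning class are assumptions, not confirmed by this code):

result = Trakt.get_merged('shows', ratings=True)

if result:
    items, table = result

    # Any of an item's keys resolves to the same object, whether it is
    # the root_key or an alternative (tuple key format is assumed)
    item = table.get(('tvdb', '121361'))
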
Example #6
    def check_schedule(cls):
        interval = INTERVAL_MAP.get(Prefs['sync_run_interval'])
        if not interval:
            return False

        status = cls.get_status('synchronize')
        if not status.previous_timestamp:
            return False

        since_run = total_seconds(datetime.utcnow() - status.previous_timestamp) / 60
        if since_run < interval:
            return False

        return cls.trigger_synchronize()
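
check_schedule compares the minutes elapsed since the previous run against an interval resolved from the sync_run_interval preference; a falsy interval disables scheduling entirely. INTERVAL_MAP is not shown in this listing, so here is a hypothetical sketch of its shape, in minutes to match the / 60 above:

# Hypothetical mapping of preference labels to minutes between runs;
# the real labels and values are not shown in this listing
INTERVAL_MAP = {
    'Disabled': None,
    'Every hour': 60,
    'Every 6 hours': 6 * 60,
    'Every 12 hours': 12 * 60,
    'Every 24 hours': 24 * 60,
}
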
    def calculate_timing(self, stat, cur_progress):
        if not stat.last_update:
            return

        progress_delta = cur_progress - (stat.progress or 0)

        # Guard against division by zero when no progress has been made
        if progress_delta <= 0:
            return

        delta_seconds = total_seconds(datetime.utcnow() - stat.last_update)

        # Calculate current pace (in seconds per percentage point of progress)
        cur_speed = delta_seconds / (progress_delta * 100)

        if stat.per_perc is None:
            # Start initially at first speed value
            stat.per_perc = cur_speed
        else:
            # Calculate EMA speed
            stat.per_perc = ema(cur_speed, stat.per_perc)

        # Calculate estimated time remaining
        stat.seconds_remaining = ((1 - cur_progress) * 100) * stat.per_perc
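
ema smooths the per-percent pace so a single fast or slow stretch does not whipsaw the time-remaining estimate. A minimal sketch of an exponential moving average; the smoothing factor is an assumption:

def ema(value, previous, smoothing=0.2):
    # Weight the newest sample by `smoothing` and the running average by
    # the remainder (the 0.2 factor is assumed, not taken from the source)
    return (smoothing * value) + ((1 - smoothing) * previous)
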
    def merged(cls, media, watched=True, ratings=False, collected=False, extended='min'):
        cached = cls.merged_cache.get(media)

        # Check if the cached library is valid
        if cached and cached['cache_id'] == cls.get_cache_id():
            items, table = cached['result']

            log.debug(
                'merged() returned cached %s library with %s keys for %s items',
                media, len(table), len(items)
            )

            return items, table

        # Start building merged library
        start = datetime.utcnow()

        # Merge data
        items = {}

        # Merge watched library
        if watched and Trakt['user/library/%s' % media].watched(extended=extended, store=items) is None:
            log.warn('Unable to fetch watched library')
            return None, None

        # Merge ratings
        if ratings:
            if Trakt['user/ratings'].get(media, extended=extended, store=items) is None:
                log.warn('Unable to fetch ratings')
                return None, None

            # Fetch episode ratings (if we are fetching shows)
            if media == 'shows' and Trakt['user/ratings'].get('episodes', extended=extended, store=items) is None:
                log.warn('Unable to fetch episode ratings')
                return None, None

        # Merge collected library
        if collected and Trakt['user/library/%s' % media].collection(extended=extended, store=items) is None:
            log.warn('Unable to fetch collected library')
            return None, None

        # Generate item table with alternative keys
        table = items.copy()

        # Copy so alternative keys can be added while iterating
        for key, item in list(table.items()):
            # Skip first key (because it's the root_key)
            for alt_key in item.keys[1:]:
                table[alt_key] = item

        # Calculate elapsed time
        elapsed = datetime.utcnow() - start

        log.debug(
            'merged() built %s library with %s keys for %s items in %s seconds',
            media, len(table), len(items), total_seconds(elapsed)
        )

        # Cache for future calls
        cls.merged_cache[media] = {
            'cache_id': cls.get_cache_id(),
            'result': (items, table)
        }

        return items, table
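
The alternative-key table built here (and in every merged/get_merged variant above) is just the item dict re-indexed under each item's remaining keys, so every identifier lands on the same object. A self-contained toy illustration (the Item stub is hypothetical):

class Item(object):
    # Hypothetical stub; real items come from the Trakt responses
    def __init__(self, keys):
        self.keys = keys  # keys[0] is the root_key

items = {
    ('imdb', 'tt0068646'): Item([('imdb', 'tt0068646'), ('tmdb', '238')])
}

table = items.copy()

for key, item in list(table.items()):
    # Skip first key (because it's the root_key)
    for alt_key in item.keys[1:]:
        table[alt_key] = item

assert table[('imdb', 'tt0068646')] is table[('tmdb', '238')]
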
    def merged(cls, media, watched=True, ratings=False, collected=False, exceptions=True):
        cached = cls.merged_cache.get(media)

        # Check if the cached library is valid
        if cached and cached['sid'] == cls.get_sid():
            items, table = cached['result']

            log.debug(
                'merged() returned cached %s library with %s keys for %s items',
                media, len(table), len(items)
            )

            return items, table

        # Start building merged library
        start = datetime.utcnow()

        # Merge data
        items = {}

        params = {
            'store': items,
            'exceptions': exceptions
        }

        # Merge watched library
        if watched and Trakt['sync/watched'].get(media, **params) is None:
            log.warn('Unable to fetch watched items')
            return None, None

        # Merge ratings
        if ratings:
            if Trakt['sync/ratings'].get(media, **params) is None:
                log.warn('Unable to fetch ratings')
                return None, None

            # Fetch episode ratings (if we are fetching shows)
            if media == 'shows' and Trakt['sync/ratings'].get('episodes', **params) is None:
                log.warn('Unable to fetch episode ratings')
                return None, None

        # Merge collected library
        if collected and Trakt['sync/collection'].get(media, **params) is None:
            log.warn('Unable to fetch collected items')
            return None, None

        # Generate item table with alternative keys
        table = items.copy()

        # Copy so alternative keys can be added while iterating
        for key, item in list(table.items()):
            # Skip first key (because it's the root_key)
            for alt_key in item.keys[1:]:
                table[alt_key] = item

        # Calculate elapsed time
        elapsed = datetime.utcnow() - start

        log.debug(
            'merged() built %s library with %s keys for %s items in %s seconds',
            media, len(table), len(items), total_seconds(elapsed)
        )

        cls.merged_cache[media] = {
            'sid': cls.get_sid(),
            'result': (items, table)
        }

        # TODO Run asynchronously?
        ModuleManager['backup'].run(media, items)

        return items, table
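
Compared with the previous variant, this one validates its cache against a sync id (sid) rather than a cache_id, fetches through the newer sync/* endpoints, and hands the merged items to a backup module before returning. A hedged call-site sketch (the owning class name is an assumption):

# Hypothetical call site; `Library` stands in for whichever class
# actually owns merged() in this plugin
items, table = Library.merged('movies', ratings=True, collected=True)

if items is None:
    log.warn('Merge failed, skipping this sync run')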