Example #1
0
def login(app, name='TestUser'):
    """Log *name* into the community interface through a mocked social-login provider."""
    token_record = DBSession.query(ComunityUser).filter(
        ComunityUser.name == name).one().tokens[0]

    class MockLoginProvider(ILoginProvider):
        # this token should match [email protected]
        provider_token = ProviderToken(
            token_record.provider, token=token_record.token, response={})

        @property
        def html_include(self):
            return """<script>console.log('MockLoginProvider html_include');</script>"""

        def verify_cridentials(self, request):
            assert request.params.get('token') == 'token'
            return self.provider_token

        def aquire_additional_user_details(self, provider_token):
            assert provider_token == self.provider_token
            return token_record.data

    from karakara.views.comunity_login import social_login
    # Temporarily register the mock under 'test_provider' for the duration of the request.
    mock_providers = {'test_provider': MockLoginProvider()}
    with patch.dict(social_login.login_providers, mock_providers):
        response = app.get('/comunity/login?token=token')
        assert name in response.text
        assert 'MockLoginProvider' in response.text
Example #2
0
File: misc.py  Project: shish/KaraKara
def random_images(request):
    """
    Serve a shuffled selection of attachment thumbnails for the player titlescreen.

    This is a showcase feature; it is rarely called, so the query is not optimised.
    """
    import random
    from karakara.model import DBSession
    from karakara.model.model_tracks import Attachment

    rows = DBSession.query(Attachment.location).filter(Attachment.type == 'thumbnail').all()
    random.shuffle(rows)
    locations = [location for (location,) in rows]
    # Client may cap the result count; default to 100.
    limit = int(request.params.get('count', 0) or 100)
    return action_ok(data={'thumbnails': locations[:limit]})
 def consume(self):
     """Mark this user's currently-valid priority token as used.

     Returns True when a token was found and consumed, False when the
     user holds no live token.  Also clears the 'priority_token' cookie.
     """
     live_token_query = (
         DBSession.query(PriorityToken)
         .filter(PriorityToken.used == False)
         .filter(PriorityToken.session_owner == self.user_id)
         .filter(PriorityToken.valid_start <= now(), PriorityToken.valid_end > now())
     )
     try:
         token = live_token_query.one()
     except NoResultFound:
         return False
     token.used = True
     self.request.response.delete_cookie('priority_token')
     log.debug('priority_token consumed')
     return True
Example #4
0
File: misc.py  Project: richlanc/KaraKara
def random_images(request):
    """
    Return a shuffled set of attachment images for the player titlescreen.

    Rarely called, so the query is deliberately left unoptimised.
    """
    import random
    from karakara.model import DBSession
    from karakara.model.model_tracks import Attachment
    rows = DBSession.query(Attachment.location).filter(Attachment.type == 'image').all()
    # TODO: use search.restrict_tags to get the images for the current event
    random.shuffle(rows)
    locations = [row[0] for row in rows]
    count = int(request.params.get('count', 0) or 100)
    return action_ok(data={'images': locations[:count]})
Example #5
0
def random_images(request):
    """
    Populate the player titlescreen with a random selection of images.

    Showcase feature; rarely called, so no optimisation is attempted.
    """
    import random
    from karakara.model import DBSession
    from karakara.model.model_tracks import Attachment

    location_query = DBSession.query(Attachment.location).filter(Attachment.type == 'image')
    locations = [location for (location,) in location_query.all()]
    # TODO: use search.restrict_tags to get the images for the current event
    random.shuffle(locations)
    limit = int(request.params.get('count', 0) or 100)
    return action_ok(data={'images': locations[:limit]})
Example #6
0
def import_media(**kwargs):
    """
    Synchronise the track db with the processed media metadata.

    kwargs are forwarded to MetaManagerExtended (paths to meta / processed data).

    Returns a stats dict describing the run:
        db_start / db_end: source filenames in the db before / after the run
        meta_set: meta entries that have a source_hash
        meta_imported: tracks newly imported this session
        meta_hash_matched_db_hash: meta entries whose hash already matched the db
        meta_unprocessed: meta exists but processed data has not been encoded yet
        missing_processed_deleted: db tracks deleted because their processed files were missing
        missing_processed_aborted: imports skipped because processed files were missing
        db_removed: tracks pruned because no processed meta matched them
    """
    stats = dict(meta_set=set(), meta_imported=set(), meta_unprocessed=set(), db_removed=list(), missing_processed_deleted=set(), missing_processed_aborted=set(), db_start=set(), meta_hash_matched_db_hash=set())

    def get_db_track_names():
        # Snapshot of the source filenames currently in the db.
        return set(t.source_filename for t in DBSession.query(Track.source_filename))

    meta_manager = MetaManagerExtended(**kwargs)
    importer = TrackImporter(meta_manager=meta_manager)
    stats['db_start'] = get_db_track_names()

    meta_manager.load_all()  # mtime=epoc(last_update())

    meta_processed_track_ids = set(meta_manager.source_hashs)
    stats['meta_set'] = set(m.name for m in meta_manager.meta_items if m.source_hash)

    for name in progress_bar(meta_manager.meta.keys()):
        try:
            if importer.import_track(name):
                stats['meta_imported'].add(name)
            else:
                # Hash already present in the db - nothing to do for this track.
                stats['meta_hash_matched_db_hash'].add(name)
        except TrackNotProcesedException:
            log.debug('Unprocessed (no source_hash): %s', name)
            stats['meta_unprocessed'].add(name)
        except TrackMissingProcessedFiles as ex:
            if ex.id:
                # Track exists in the db but its processed files are gone - remove it.
                log.warning('Missing (processed files) delete existing: %s', name)
                delete_track(ex.id)
                commit()
                stats['missing_processed_deleted'].add(name)
            else:
                log.warning('Missing (processed files) abort import: %s', name)
                stats['missing_processed_aborted'].add(name)

    # Prune db tracks that no longer have matching processed meta.
    for unneeded_track_id in importer.exisiting_track_ids - meta_processed_track_ids:
        log.warning('Remove: %s', unneeded_track_id)
        stats['db_removed'].append(DBSession.query(Track).get(unneeded_track_id).source_filename or unneeded_track_id)
        delete_track(unneeded_track_id)
        commit()

    stats['db_end'] = get_db_track_names()

    #assert stats['db_end'] == stats['meta_hash_matched_db_hash'] | stats['meta_imported']  # TODO! Reinstate this

    return stats
Example #7
0
def login(app, name='TestUser'):
    """Authenticate *name* against the community login view using a stub provider."""
    user = DBSession.query(ComunityUser).filter(ComunityUser.name == name).one()
    stored_token = user.tokens[0]

    class MockLoginProvider(ILoginProvider):
        # this token should match [email protected]
        provider_token = ProviderToken(
            stored_token.provider, token=stored_token.token, response={})

        @property
        def html_include(self):
            return """<script>console.log('MockLoginProvider html_include');</script>"""

        def verify_cridentials(self, request):
            assert request.params.get('token') == 'token'
            return self.provider_token

        def aquire_additional_user_details(self, provider_token):
            assert provider_token == self.provider_token
            return stored_token.data

    from karakara.views.comunity_login import social_login
    with patch.dict(social_login.login_providers, {'test_provider': MockLoginProvider()}):
        response = app.get('/comunity/login?token=token')
        assert name in response.text
        assert 'MockLoginProvider' in response.text
Example #8
0
 def _get_attachment(attachment):
     """Resolve *attachment* to an Attachment row.

     Objects that already look like attachments (duck-typed on having
     'location' and 'type') are wrapped directly; anything else is
     treated as a filename fragment and looked up in the db.
     """
     if not (hasattr(attachment, 'location') and hasattr(attachment, 'type')):
         pattern = '%%{0}%%'.format(attachment)
         return DBSession.query(Attachment).filter(Attachment.location.like(pattern)).one()
     return create_attachment(attachment)
Example #9
0
def _existing_tracks_dict():
    """Map every track id currently in the db to its source filename."""
    rows = DBSession.query(Track.id, Track.source_filename)
    return dict((row.id, row.source_filename) for row in rows)
Example #10
0
 def del_track(track_id):
     """Delete the track row matching *track_id* and persist immediately."""
     matching = DBSession.query(Track).filter(Track.id == track_id)
     matching.delete()
     commit()
Example #11
0
def _existing_tracks_dict():
    """Return {track_id: source_filename} for every track currently in the db."""
    return {
        track.id: track.source_filename
        for track in DBSession.query(Track.id, Track.source_filename)
    }
Example #12
0
 def __init__(self, meta_manager=None, path_meta=None, path_processed=None, **kwargs):
     """Set up the importer with a meta manager and a processed-files manager.

     When no meta_manager is supplied, one is built from *path_meta*.
     """
     if not meta_manager:
         meta_manager = MetaManager(path_meta)
     self.meta = meta_manager
     self.processed_files_manager = ProcessedFilesManager(path_processed)
     # Cache the ids of tracks already present so imports can skip/prune them.
     self.exisiting_track_ids = {track.id for track in DBSession.query(Track.id)}
Example #13
0
 def _get_attachment(attachment):
     """Return an Attachment for *attachment* (attachment-like object or filename fragment)."""
     is_attachment_like = hasattr(attachment, 'location') and hasattr(attachment, 'type')
     if is_attachment_like:
         return create_attachment(attachment)
     like_pattern = '%%{0}%%'.format(attachment)
     return DBSession.query(Attachment).filter(Attachment.location.like(like_pattern)).one()
Example #14
0
File: tracks.py  Project: shish/KaraKara
 def _get_attachment(filename):
     """Look up the single Attachment whose location contains *filename*."""
     pattern = '%%{0}%%'.format(filename)
     return DBSession.query(Attachment).filter(Attachment.location.like(pattern)).one()
Example #15
0
 def get_db_track_names():
     """Return the set of source filenames currently stored in the db."""
     return {track.source_filename for track in DBSession.query(Track.source_filename)}
Example #16
0
    def issue(self):
        """Issue a new priority token to the current user.

        Returns the new PriorityToken on success, or a TOKEN_ISSUE_ERROR
        member describing why one could not be issued.  Side effects: adds
        the token to the db session and sets a 'priority_token' cookie on
        the response.
        """
        priority_window = self.settings.get(
            'karakara.queue.add.limit.priority_window')
        # TODO: Deprecate this priority_window setting
        #  This can be auto-calculated from the current average track length in the queue

        # Acquire the most recent unused priority token; if none exists or it
        # ends in the past, the new token's window starts from "now".
        try:
            latest_token = DBSession.query(PriorityToken).filter(
                PriorityToken.used == False).order_by(
                    PriorityToken.valid_end.desc()).limit(1).one()
            latest_token_end = latest_token.valid_end
        except NoResultFound:
            # No outstanding tokens at all.
            latest_token_end = None
        if not latest_token_end or latest_token_end < now():
            # When issuing the first priority token
            latest_token_end = now(
            ) + priority_window  # get_queue_duration(request) # Adding entire queue here was unnecessary.

        # Do not issue tokens past the end of the event
        event_end = self.settings.get('karakara.event.end')
        if event_end and latest_token_end > event_end:
            # Unable to issue token as event end
            log.debug('priority_token rejected - event end')
            return TOKEN_ISSUE_ERROR.EVENT_END

        # TODO: possibly deprecate this - we can just keep stacking tokens until the end of the event
        priority_token_limit = self.settings.get(
            'karakara.queue.add.limit.priority_token')
        if priority_token_limit and latest_token_end > now(
        ) + priority_token_limit:
            # Unable to issue token as priority tokens are time limited
            log.debug('priority_token rejected - token limit')
            return TOKEN_ISSUE_ERROR.TOKEN_LIMIT

        # TODO: Look at the last priority tokens created and raise a warning if the token is likely to pass beyond the end of the event.

        # Do not issue another priority_token if the current user already has
        # an unused, unexpired one.
        try:
            priority_token = DBSession.query(PriorityToken) \
                                .filter(PriorityToken.used==False) \
                                .filter(PriorityToken.session_owner==self.user_id) \
                                .filter(PriorityToken.valid_end>now()) \
                                .one()
            if priority_token:
                log.debug('priority_token rejected - existing token')
                return TOKEN_ISSUE_ERROR.TOKEN_ISSUED
        except NoResultFound:
            pass

        # Issue the new token, valid immediately after the latest outstanding token.
        priority_token = PriorityToken()
        priority_token.session_owner = self.user_id
        priority_token.valid_start = latest_token_end
        priority_token.valid_end = latest_token_end + priority_window
        DBSession.add(priority_token)

        # TODO: replace with new one in lib
        #request.response.set_cookie('priority_token', json_cookie);  # WebOb.set_cookie mangles the cookie with m.serialize() - so I rolled my own set_cookie
        priority_token_dict = priority_token.to_dict()
        priority_token_dict.update({
            'server_datetime': now(
            ),  # The client datetime and server datetime may be out. we need to return the server time so the client can calculate the difference
        })
        json_cookie = json.dumps(priority_token_dict,
                                 default=json_object_handler)
        # Hand-rolled Set-Cookie header; see the TODO above for why set_cookie is avoided.
        self.request.response.headerlist.append(
            ('Set-Cookie', 'priority_token={0}; Path=/'.format(json_cookie)))
        #self.request.response.set_cookie(name='priority_token', value=json_cookie, path='/', overwrite=True)  # This method butchers the json and cannot be used

        log.debug('priority_token issued')
        return priority_token
Example #17
0
 def __init__(self, meta_manager=None):  # , path_meta=None, path_processed=None, **kwargs
     """Cache existing track ids and processed-file paths for the import run."""
     self.meta_manager = meta_manager  # or MetaManager(path_meta)
     self.exisiting_track_ids = {track.id for track in DBSession.query(Track.id)}
     processed_path = self.meta_manager.processed_files_manager.path
     self.existing_files_lookup = {f.relative for f in fast_scan(processed_path)}
Example #18
0
 def del_track(track_id):
     """Remove the Track row identified by *track_id*, committing at once."""
     track_query = DBSession.query(Track).filter(Track.id == track_id)
     track_query.delete()
     commit()
Example #19
0
def import_media(**kwargs):
    """
    Synchronise the track db with the processed media metadata:
     - hash and identify primary key for track
     - import tags
     - import subtitles
     - cleanup db - any sources we don't have the actual processed files for - prune and remove from db
       - check this removes unneeded attachments

    Requires kwargs 'path_meta' and 'path_processed'.

    stats description:
        meta_imported: number of tracks imported this session
        meta_set: the total number of tracks in the processed meta dataset
        db_start: the number of tracks in the db before this import operation was performed
        meta_unprocessed: meta exists, but the processed data has not been encoded yet
        missing_processed_deleted/aborted: some source files were missing, making it impossible to use
        db_removed: no matching processed meta paired with db entry at all
        db_end: the total tracks in the db at the end of this import operation
        meta_hash_matched_db_hash: The number of meta tracks that matched existing hash in the db
    """
    stats = dict(meta_set=set(), meta_imported=set(), meta_unprocessed=set(), db_removed=list(), missing_processed_deleted=set(), missing_processed_aborted=set(), db_start=set(), meta_hash_matched_db_hash=set())

    def get_db_track_names():
        # Snapshot of the source filenames currently in the db.
        return set(t.source_filename for t in DBSession.query(Track.source_filename))

    meta = MetaManager(kwargs['path_meta'])
    importer = TrackImporter(meta_manager=meta, path_processed=kwargs['path_processed'])
    stats['db_start'] = get_db_track_names()

    meta.load_all()  # mtime=epoc(last_update())

    meta_processed_track_ids = set(m.source_hash for m in meta.meta.values() if m.source_hash)
    stats['meta_set'] = set(m.name for m in meta.meta.values() if m.source_hash)

    for name in meta.meta.keys():
        try:
            if importer.import_track(name):
                stats['meta_imported'].add(name)
            else:
                # Hash already present in the db - nothing to do for this track.
                stats['meta_hash_matched_db_hash'].add(name)
        except TrackNotProcesedException:
            log.debug('Unprocessed (no source_hash): %s', name)
            stats['meta_unprocessed'].add(name)
        except TrackMissingProcessedFiles as ex:
            if ex.id:
                # Track exists in the db but its processed files are gone - remove it.
                # log.warning: log.warn is a deprecated alias (removed in Python 3.13).
                log.warning('Missing (processed files) delete existing: %s', name)
                delete_track(ex.id)
                stats['missing_processed_deleted'].add(name)
            else:
                log.warning('Missing (processed files) abort import: %s', name)
                stats['missing_processed_aborted'].add(name)

    # Prune db tracks whose processed meta no longer exists.
    for unneeded_track_id in importer.exisiting_track_ids - meta_processed_track_ids:
        log.warning('Remove: %s', unneeded_track_id)
        stats['db_removed'].append(DBSession.query(Track).get(unneeded_track_id).source_filename or unneeded_track_id)
        delete_track(unneeded_track_id)
    commit()

    stats['db_end'] = get_db_track_names()

    assert stats['db_end'] == stats['meta_hash_matched_db_hash'] | stats['meta_imported']

    return stats