Example #1
class ThumbnailProcessorGM(IThumbnailProcessor):
    '''
    Implementation for @see: IThumbnailProcessor
    '''

    command_transform = '"%(gm)s" convert "%(source)s" "%(destination)s"'; wire.config('command_transform', doc='''
    The command used to transform the thumbnails''')
    command_resize = '"%(gm)s" convert "%(source)s" -resize %(width)ix%(height)i  "%(destination)s"'
    wire.config('command_resize', doc='''The command used to resize the thumbnails''')
    command_scale_to_height = '"%(gm)s" convert "%(source)s" -resize x%(height)i  "%(destination)s"'
    wire.config('command_scale_to_height', doc='''The command used to resize the thumbnails to specific heights''')
    gm_path = join('workspace', 'tools', 'gm', 'bin', 'gm.exe'); wire.config('gm_path', doc='''
    The path where the gm is found''')

    def __init__(self):
        assert isinstance(self.command_transform, str), 'Invalid command transform %s' % self.command_transform
        assert isinstance(self.command_resize, str), 'Invalid command resize %s' % self.command_resize
        assert isinstance(self.command_scale_to_height, str), 'Invalid command resize to height %s' % self.command_scale_to_height
        assert isinstance(self.gm_path, str), 'Invalid gm path %s' % self.gm_path

    def processThumbnail(self, source, destination, width=None, height=None):
        '''
        @see: IThumbnailProcessor.processThumbnail
        '''
        assert isinstance(source, str), 'Invalid source path %s' % source
        assert isinstance(destination, str), 'Invalid destination path %s' % destination

        params = dict(gm=abspath(self.gm_path), source=source, destination=destination)
        if width and height:
            assert isinstance(width, int), 'Invalid width %s' % width
            assert isinstance(height, int), 'Invalid height %s' % height

            params.update(width=width, height=height)
            command = self.command_resize % params

        elif height:
            assert isinstance(height, int), 'Invalid height %s' % height

            params.update(height=height)
            command = self.command_scale_to_height % params

        else: command = self.command_transform % params

        destDir = dirname(destination)
        if not exists(destDir): makedirs(destDir)
        try:
            p = Popen(shlex.split(command), stdin=PIPE, stdout=PIPE, stderr=PIPE)
            error = p.wait() != 0
        except Exception as e:
            log.exception('Problems while executing command:\n%s \n%s' % (command, e))
            error = True

        if error:
            if exists(destination): os.remove(destination)
            raise IOError('Cannot process thumbnail from \'%s\' to \'%s\'' % (source, destination))
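
A minimal standalone sketch (not part of the class above; the gm path and file names are placeholder values) of the technique ThumbnailProcessorGM relies on: expand the command template with a dict of named parameters, split it with shlex so quoted paths survive, and keep the output only if the spawned process exits with code 0.

import shlex

# same template style as command_resize above; all values below are placeholders
command_resize = '"%(gm)s" convert "%(source)s" -resize %(width)ix%(height)i "%(destination)s"'

params = dict(gm='/usr/bin/gm', source='in.png', destination='thumb/out.png',
              width=128, height=128)
command = command_resize % params   # expand the named placeholders
args = shlex.split(command)         # quoted paths stay intact as single arguments
print(args)
# ['/usr/bin/gm', 'convert', 'in.png', '-resize', '128x128', 'thumb/out.png']

# processThumbnail then runs the command and checks the exit code:
#   p = Popen(args, stdin=PIPE, stdout=PIPE, stderr=PIPE)
#   error = p.wait() != 0
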
Example #2
class ThumbnailProcessorAVConv(IThumbnailProcessor):
    '''
    Implementation for @see: IThumbnailProcessor
    '''

    command_transform = '"%(avconv)s" -i "%(source)s" "%(destination)s"'; wire.config('command_transform', doc='''
    The command used to transform the thumbnails''')
    command_resize = '"%(avconv)s" -i "%(source)s" -s %(width)ix%(height)i "%(destination)s"'
    wire.config('command_resize', doc='''The command used to resize the thumbnails''')
    avconv_dir_path = join('workspace', 'tools', 'avconv'); wire.config('avconv_dir_path', doc='''
    The path where avconv is placed in order to be used; if empty, the bundled avconv will not be deployed''')
    avconv_path = join(avconv_dir_path, 'bin', 'avconv'); wire.config('avconv_path', doc='''
    The path where the avconv is found''')

    def __init__(self):
        assert isinstance(self.command_transform, str), 'Invalid command transform %s' % self.command_transform
        assert isinstance(self.command_resize, str), 'Invalid command resize %s' % self.command_resize
        assert isinstance(self.avconv_dir_path, str), 'Invalid avconv directory %s' % self.avconv_dir_path
        assert isinstance(self.avconv_path, str), 'Invalid avconv path %s' % self.avconv_path

        if self.avconv_dir_path: synchronizeURIToDir(join(pythonPath(), 'resources', 'avconv'), self.avconv_dir_path)

    def processThumbnail(self, source, destination, width=None, height=None):
        '''
        @see: IThumbnailProcessor.processThumbnail
        '''
        assert isinstance(source, str), 'Invalid source path %s' % source
        assert isinstance(destination, str), 'Invalid destination path %s' % destination

        params = dict(avconv=abspath(self.avconv_path), source=source, destination=destination)
        if width and height:
            assert isinstance(width, int), 'Invalid width %s' % width
            assert isinstance(height, int), 'Invalid height %s' % height

            params.update(width=width, height=height)
            command = self.command_resize % params
        else: command = self.command_transform % params

        destDir = dirname(destination)
        if not exists(destDir): makedirs(destDir)
        try:
            p = Popen(shlex.split(command), stdin=PIPE, stdout=PIPE, stderr=PIPE)
            error = p.wait() != 0
        except Exception:
            log.exception('Problems while executing command:\n%s', command)
            error = True

        if error:
            if exists(destination): os.remove(destination)
            raise IOError('Cannot process thumbnail from \'%s\' to \'%s\'' % (source, destination))
Example #3
class BlogCollaboratorFilterServiceAlchemy(BlogFilterServiceAlchemyBase, IBlogCollaboratorFilterService):
    '''
    Implementation for @see: IBlogCollaboratorFilterService
    '''
    
    collaborator_types = ['Administrator', 'Collaborator']; wire.config('collaborator_types', doc='''
    The collaborator type(s) name associated with the collaborator filter.
    ''')
    
    def __init__(self): super().__init__()
Example #4
class PostServiceAlchemy(EntityGetServiceAlchemy, IPostService):
    '''
    Implementation for @see: IPostService
    '''
    default_source_name = 'internal'
    wire.config('default_source_name',
                doc='''
    The default source name used when a source was not supplied''')

    def __init__(self):
        '''
        Construct the post service.
        '''
        EntityGetServiceAlchemy.__init__(self, PostMapped)

    def getUnpublished(self,
                       creatorId=None,
                       authorId=None,
                       offset=None,
                       limit=None,
                       detailed=False,
                       q=None):
        '''
        @see: IPostService.getUnpublished
        '''
        assert q is None or isinstance(
            q, QPostUnpublished), 'Invalid query %s' % q
        sql = self._buildQuery(creatorId, authorId, q)
        sql = sql.filter(PostMapped.PublishedOn == None)

        sqlLimit = buildLimits(sql, offset, limit)
        if detailed:
            return IterPart(sqlLimit.all(), sql.count(), offset, limit)
        return sqlLimit.all()

    def getPublished(self,
                     creatorId=None,
                     authorId=None,
                     offset=None,
                     limit=None,
                     detailed=False,
                     q=None):
        '''
        @see: IPostService.getPublished
        '''
        assert q is None or isinstance(q, QPost), 'Invalid query %s' % q
        sql = self._buildQuery(creatorId, authorId, q)
        sql = sql.filter(PostMapped.PublishedOn != None)

        sqlLimit = buildLimits(sql, offset, limit)
        if detailed:
            return IterPart(sqlLimit.all(), sql.count(), offset, limit)
        return sqlLimit.all()

    def getAll(self,
               creatorId=None,
               authorId=None,
               offset=None,
               limit=None,
               detailed=False,
               q=None):
        '''
        @see: IPostService.getAll
        '''
        assert q is None or isinstance(q, QPost), 'Invalid query %s' % q
        sql = self._buildQuery(creatorId, authorId, q)
        sqlLimit = buildLimits(sql, offset, limit)
        if detailed:
            return IterPart(sqlLimit.all(), sql.count(), offset, limit)
        return sqlLimit.all()

    def getUnpublishedBySource(self,
                               sourceId,
                               offset=None,
                               limit=None,
                               detailed=False,
                               q=None):
        '''
        @see: IPostService.getUnpublishedBySource
        '''
        assert q is None or isinstance(
            q, QPostUnpublished), 'Invalid query %s' % q

        sql = self._buildQueryBySource(sourceId)
        sql = sql.filter(PostMapped.PublishedOn == None)

        sql = self._buildQueryWithCId(q, sql)
        sqlLimit = buildLimits(sql, offset, limit)
        if detailed:
            return IterPart(sqlLimit.all(), sql.count(), offset, limit)
        return sqlLimit.all()

    def getUnpublishedBySourceType(self,
                                   sourceTypeKey,
                                   offset=None,
                                   limit=None,
                                   detailed=False,
                                   q=None):
        '''
        @see: IPostService.getUnpublishedBySourceType
        '''
        assert q is None or isinstance(
            q, QPostUnpublished), 'Invalid query %s' % q

        sql = self._buildQueryBySourceType(sourceTypeKey)
        sql = sql.filter(PostMapped.PublishedOn == None)

        sql = self._buildQueryWithCId(q, sql)
        sqlLimit = buildLimits(sql, offset, limit)
        if detailed:
            return IterPart(sqlLimit.all(), sql.count(), offset, limit)
        return sqlLimit.all()

    def getPublishedBySource(self,
                             sourceId,
                             offset=None,
                             limit=None,
                             detailed=False,
                             q=None):
        '''
        @see: IPostService.getPublishedBySource
        '''
        assert q is None or isinstance(q,
                                       QPostPublished), 'Invalid query %s' % q

        sql = self._buildQueryBySource(sourceId)
        sql = sql.filter(PostMapped.PublishedOn != None)

        sql = self._buildQueryWithCId(q, sql)
        sqlLimit = buildLimits(sql, offset, limit)
        if detailed:
            return IterPart(sqlLimit.all(), sql.count(), offset, limit)
        return sqlLimit.all()

    def getPublishedBySourceType(self,
                                 sourceTypeKey,
                                 offset=None,
                                 limit=None,
                                 detailed=False,
                                 q=None):
        '''
        @see: IPostService.getPublishedBySourceType
        '''
        assert q is None or isinstance(q,
                                       QPostPublished), 'Invalid query %s' % q

        sql = self._buildQueryBySourceType(sourceTypeKey)
        sql = sql.filter(PostMapped.PublishedOn != None)

        sql = self._buildQueryWithCId(q, sql)
        sqlLimit = buildLimits(sql, offset, limit)
        if detailed:
            return IterPart(sqlLimit.all(), sql.count(), offset, limit)
        return sqlLimit.all()

    def getAllBySource(self,
                       sourceId,
                       offset=None,
                       limit=None,
                       detailed=False,
                       q=None):
        '''
        @see: IPostService.getAllBySource
        '''
        assert q is None or isinstance(q, QPost), 'Invalid query %s' % q

        sql = self._buildQueryBySource(sourceId)

        sql = self._buildQueryWithCId(q, sql)
        sqlLimit = buildLimits(sql, offset, limit)
        if detailed:
            return IterPart(sqlLimit.all(), sql.count(), offset, limit)
        return sqlLimit.all()

    def getAllBySourceType(self,
                           sourceTypeKey,
                           offset=None,
                           limit=None,
                           detailed=False,
                           q=None):
        '''
        @see: IPostService.getAllBySourceType
        '''
        assert q is None or isinstance(q, QPost), 'Invalid query %s' % q

        sql = self._buildQueryBySourceType(sourceTypeKey)

        sql = self._buildQueryWithCId(q, sql)
        sqlLimit = buildLimits(sql, offset, limit)
        if detailed:
            return IterPart(sqlLimit.all(), sql.count(), offset, limit)
        return sqlLimit.all()

    def insert(self, post):
        '''
        @see: IPostService.insert
        '''
        assert isinstance(post, Post), 'Invalid post %s' % post
        postDb = PostMapped()
        copy(post, postDb, exclude=COPY_EXCLUDE)
        postDb.typeId = self._typeId(post.Type)

        # TODO: implement the proper fix using SQLAlchemy compilation rules
        nohigh = {i: None for i in range(0x10000, 0x110000)}
        if postDb.Meta: postDb.Meta = postDb.Meta.translate(nohigh)
        if postDb.Content: postDb.Content = postDb.Content.translate(nohigh)
        if postDb.ContentPlain:
            postDb.ContentPlain = postDb.ContentPlain.translate(nohigh)

        if post.CreatedOn is None: postDb.CreatedOn = current_timestamp()
        if not postDb.Author:
            colls = self.session().query(CollaboratorMapped).filter(
                CollaboratorMapped.User == postDb.Creator).all()
            if not colls:
                coll = CollaboratorMapped()
                coll.User = postDb.Creator
                src = self.session().query(SourceMapped).filter(
                    SourceMapped.Name ==
                    PostServiceAlchemy.default_source_name).one()
                coll.Source = src.Id
                self.session().add(coll)
                self.session().flush((coll, ))
                colls = (coll, )
            postDb.Author = colls[0].Id

        self.session().add(postDb)
        self.session().flush((postDb, ))
        post.Id = postDb.Id
        return post.Id

    def update(self, post):
        '''
        @see: IPostService.update
        '''
        assert isinstance(post, Post), 'Invalid post %s' % post
        postDb = self.session().query(PostMapped).get(post.Id)
        if not postDb or postDb.DeletedOn is not None:
            raise InputError(Ref(_('Unknown post id'), ref=Post.Id))

        if Post.Type in post: postDb.typeId = self._typeId(post.Type)
        if post.UpdatedOn is None: postDb.UpdatedOn = current_timestamp()

        self.session().flush((copy(post, postDb, exclude=COPY_EXCLUDE), ))

    def delete(self, id):
        '''
        @see: IPostService.delete
        '''
        postDb = self.session().query(PostMapped).get(id)
        if not postDb or postDb.DeletedOn is not None: return False

        postDb.DeletedOn = current_timestamp()
        self.session().flush((postDb, ))
        return True

    # ----------------------------------------------------------------

    def _buildQuery(self, creatorId=None, authorId=None, q=None):
        '''
        Builds the general query for posts.
        '''
        sql = self.session().query(PostMapped)
        if creatorId: sql = sql.filter(PostMapped.Creator == creatorId)
        if authorId: sql = sql.filter(PostMapped.Author == authorId)
        addDeleted = False
        if q:
            sql = buildQuery(sql, q, PostMapped)
            addDeleted = QPostUnpublished.deletedOn in q
        if not addDeleted: sql = sql.filter(PostMapped.DeletedOn == None)
        return sql

    def _typeId(self, key):
        '''
        Provides the post type id that has the provided key.
        '''
        try:
            sql = self.session().query(
                PostTypeMapped.id).filter(PostTypeMapped.Key == key)
            return sql.one()[0]
        except NoResultFound:
            raise InputError(
                Ref(_('Invalid post type %(type)s') % dict(type=key),
                    ref=Post.Type))

    def _buildQueryBySource(self, sourceId):
        sql = self.session().query(PostMapped)
        sql = sql.join(CollaboratorMapped,
                       PostMapped.Author == CollaboratorMapped.Id)
        sql = sql.filter(CollaboratorMapped.Source == sourceId)
        return sql

    def _buildQueryBySourceType(self, sourceTypeKey):
        sql = self.session().query(PostMapped)
        sql = sql.join(CollaboratorMapped,
                       PostMapped.Author == CollaboratorMapped.Id)
        sql = sql.join(SourceMapped,
                       CollaboratorMapped.Source == SourceMapped.Id)
        sql = sql.join(SourceTypeMapped,
                       SourceMapped.typeId == SourceTypeMapped.id)
        sql = sql.filter(SourceTypeMapped.Key == sourceTypeKey)
        return sql

    def _buildQueryWithCId(self, q, sql):
        if q:
            if QWithCId.cId in q and q.cId:
                if AsRange.start in q.cId:
                    sql = sql.filter(PostMapped.Id >= q.cId.start)
                if AsRange.since in q.cId:
                    sql = sql.filter(PostMapped.Id > q.cId.since)
                if AsRange.end in q.cId:
                    sql = sql.filter(PostMapped.Id <= q.cId.end)
                if AsRange.until in q.cId:
                    sql = sql.filter(PostMapped.Id < q.cId.until)
            sql = buildQuery(sql, q, PostMapped)
        return sql
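
A small self-contained illustration (separate from the service) of the nohigh translation table used in insert() above: mapping every supplementary-plane code point (U+10000..U+10FFFF) to None makes str.translate() drop characters such as emoji, presumably so the stored text avoids sequences the backing storage cannot hold; the TODO notes that the proper fix belongs in SQLAlchemy compilation rules.

# build the table once: every code point above the Basic Multilingual Plane maps to None
nohigh = {i: None for i in range(0x10000, 0x110000)}

content = 'breaking news \U0001F600 from the field'
print(content.translate(nohigh))
# -> 'breaking news  from the field'  (the emoji is dropped, BMP text is untouched)
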
Example #5
class ChainedSyncProcess:
    '''
    Chained sync process.
    '''

    blogSyncService = IBlogSyncService
    wire.entity('blogSyncService')
    # blog sync service used to retrieve blogs set on auto publishing

    sourceService = ISourceService
    wire.entity('sourceService')
    # source service used to retrieve source data

    blogPostService = IBlogPostService
    wire.entity('blogPostService')
    # blog post service used to insert blog posts

    postService = IPostService
    wire.entity('postService')
    # post service used to insert/update posts

    collaboratorService = ICollaboratorService
    wire.entity('collaboratorService')
    # collaborator service used to retrieve collaborators

    userService = IUserService
    wire.entity('userService')
    # user service used to insert/update the chained users

    metaDataService = IMetaDataUploadService
    wire.entity('metaDataService')
    # meta data service used to upload the icon content for chained users

    metaInfoService = IMetaInfoService
    wire.entity('metaInfoService')
    # meta info service used for the icon meta info

    personIconService = IPersonIconService
    wire.entity('personIconService')
    # person icon service used to synchronize the user icons

    syncThreads = {}
    # dictionary of threads that perform synchronization

    sync_interval = 53
    wire.config('sync_interval',
                doc='''
    The number of seconds to perform sync for blogs.''')

    timeout_inteval = 4  #; wire.config('timeout_interval', doc='''
    #The number of seconds after which the sync ownership can be taken.''')

    published_posts_path = 'Post/Published'
    wire.config('published_posts_path',
                doc='''
    The partial path used to construct the URL for published posts retrieval'''
                )

    user_type_key = 'chained blog'
    wire.config('user_type_key',
                doc='''
    The user type that is used for the anonymous users of chained blog posts'''
                )

    blog_provider_type = 'blog provider'
    wire.config('blog_provider_type',
                doc='''
    Key of the source type for blog providers''')

    acceptType = 'text/json'
    # mime type accepted for response from remote blog
    encodingType = 'UTF-8'
    # character encoding type accepted for response from remote blog

    @app.deploy
    def startChainSyncThread(self):
        '''
        Starts the chain sync thread.
        '''
        schedule = scheduler(time.time, time.sleep)

        def syncChains():
            self.syncChains()
            schedule.enter(self.sync_interval, 1, syncChains, ())

        schedule.enter(self.sync_interval, 1, syncChains, ())
        scheduleRunner = Thread(name='chained sync', target=schedule.run)
        scheduleRunner.daemon = True
        scheduleRunner.start()
        log.info('Started the chained blogs automatic synchronization.')

    def syncChains(self):
        '''
        Read all chained blog sync entries and sync with the corresponding blogs.
        '''
        log.info('Start chained blog synchronization')
        for blogSync in self.blogSyncService.getBySourceType(
                self.blog_provider_type):
            assert isinstance(blogSync, BlogSync)
            key = (blogSync.Blog, blogSync.Source)
            thread = self.syncThreads.get(key)
            if thread:
                assert isinstance(thread, Thread), 'Invalid thread %s' % thread
                if thread.is_alive():
                    log.info('Chained thread for blog %d is alive',
                             blogSync.Blog)
                    continue

                if not self.blogSyncService.checkTimeout(
                        blogSync.Id,
                        self.timeout_inteval * self.sync_interval):
                    log.info('Chained thread for blog %d is already taken',
                             blogSync.Blog)
                    continue

            self.syncThreads[key] = Thread(name='blog %d sync' % blogSync.Blog,
                                           target=self._syncChain,
                                           args=(blogSync, ))
            self.syncThreads[key].daemon = True
            self.syncThreads[key].start()
            log.info('Chained thread started for blog id %d and source id %d',
                     blogSync.Blog, blogSync.Source)

        log.info('End chained blog synchronization')

    def _syncChain(self, blogSync):
        '''
        Synchronize the blog for the given sync entry.

        @param blogSync: BlogSync
            The blog sync entry declaring the blog and source from which the blog
            has to be updated.
        '''
        assert isinstance(blogSync,
                          BlogSync), 'Invalid blog sync %s' % blogSync
        source = self.sourceService.getById(blogSync.Source)

        log.info('_syncChain blogId=%d, sourceId=%d', blogSync.Blog,
                 blogSync.Source)

        assert isinstance(source, Source)
        (scheme, netloc, path, params, query, fragment) = urlparse(source.URI)

        if not scheme: scheme = 'http'

        q = parse_qsl(query, keep_blank_values=True)
        q.append(('asc', 'cId'))
        q.append(
            ('cId.since', blogSync.CId if blogSync.CId is not None else 0))

        url = urlunparse(
            (scheme, netloc, path + '/' + self.published_posts_path, params,
             urlencode(q), fragment))
        req = Request(url,
                      headers={
                          'Accept': self.acceptType,
                          'Accept-Charset': self.encodingType,
                          'X-Filter':
                          '*,Creator.*,Author.User.*,Author.Source.*',
                          'User-Agent': 'Magic Browser'
                      })

        try:
            resp = urlopen(req)
        except (HTTPError, socket.error) as e:
            log.error('Read error on %s: %s' % (source.URI, e))
            blogSync.LastActivity = None
            self.blogSyncService.update(blogSync)
            return

        if str(resp.status) != '200':
            log.error('Read problem on %s, status: %s' %
                      (source.URI, resp.status))
            blogSync.LastActivity = None
            self.blogSyncService.update(blogSync)
            return

        try:
            msg = json.load(codecs.getreader(self.encodingType)(resp))
        except ValueError as e:
            log.error('Invalid JSON data %s' % e)
            blogSync.LastActivity = None
            self.blogSyncService.update(blogSync)
            return

        usersForIcons = {}
        for post in msg['PostList']:
            try:
                if post['IsPublished'] != 'True': continue

                insert = False
                if 'Uuid' in post:
                    uuid = post['Uuid']
                    localPost = self.postService.getByUuidAndSource(
                        uuid, source.Id)
                else:
                    #To support old instances that don't have Uuid attribute
                    uuid = str(uuid4().hex)
                    localPost = None

                if localPost is None:
                    if 'DeletedOn' in post: continue
                    localPost = Post()
                    localPost.Uuid = uuid
                    insert = True

                if 'DeletedOn' not in post:
                    #TODO: workaround, read again the Author because sometimes we get access denied
                    post['Author'] = self._readAuthor(post['Author']['href'])
                    post['Creator'] = self._readCreator(
                        post['Creator']['href'])

                    # if it exists locally, update it; otherwise continue with the original insert
                    localPost.Type = post['Type']['Key']
                    localPost.Author, localPost.Creator, needUpdate, isAuthor = self._getCollaboratorForAuthor(
                        post['Author'], post['Creator'], source)
                    localPost.Feed = source.Id
                    localPost.Meta = post['Meta'] if 'Meta' in post else None
                    localPost.ContentPlain = post[
                        'ContentPlain'] if 'ContentPlain' in post else None
                    localPost.Content = post[
                        'Content'] if 'Content' in post else None
                    localPost.Order = post['Order'] if 'Order' in post else None
                    localPost.CreatedOn = current_timestamp()
                    if blogSync.Auto:
                        localPost.PublishedOn = current_timestamp()
                        localPost.WasPublished = True

                    log.info("received post: %s", str(localPost))

                    if localPost.Creator and (
                            localPost.Creator
                            not in usersForIcons) and needUpdate:
                        try:
                            if isAuthor:
                                usersForIcons[
                                    localPost.Creator] = post['Author']['User']
                            else:
                                usersForIcons[
                                    localPost.Creator] = post['Creator']
                        except KeyError:
                            pass

                else:
                    localPost.DeletedOn = datetime.strptime(
                        post['DeletedOn'], '%m/%d/%y %I:%M %p')

                # prepare the blog sync model to update the change identifier
                blogSync.CId = int(post['CId']) if blogSync.CId is None or int(
                    post['CId']) > blogSync.CId else blogSync.CId

                if insert:
                    self.blogPostService.insert(blogSync.Blog, localPost)
                else:
                    self.blogPostService.update(blogSync.Blog, localPost)

                # update blog sync entry
                blogSync.LastActivity = datetime.now().replace(microsecond=0)
                self.blogSyncService.update(blogSync)

            except KeyError as e:
                log.error('Post from source %s is missing attribute %s' %
                          (source.URI, e))
            except Exception as e:
                log.error('Error in source %s post: %s' % (source.URI, e))

        self._updateIcons(usersForIcons)

        blogSync.LastActivity = None
        self.blogSyncService.update(blogSync)

    def _getCollaboratorForAuthor(self, author, creator, source):
        '''
        Returns a collaborator identifier for the user/source defined in the post.
        If the post was not created by a user (i.e. it is a twitter, facebook, etc. post)
        it returns a collaborator for the user that has added the post.

        @param author: dict
            The author data in JSON decoded format
        @param creator: dict
            The creator data in JSON decoded format
        @param source: Source
            The source from which the blog synchronization is done
        @return: list
            [collaborator identifier, user identifier, need update flag, is author flag]
        '''
        assert isinstance(source, Source)

        user = User()

        isAuthor = False

        if 'User' in author:
            userJSON = author['User']
            isAuthor = True
        else:
            userJSON = creator

        #To support old instances that don't have Uuid attribute
        if 'Uuid' in userJSON: user.Uuid = userJSON.get('Uuid', '')
        else: user.Uuid = str(uuid4().hex)

        if 'Cid' in userJSON: cid = int(userJSON.get('Cid', ''))
        else: cid = None

        user.Name = user.Uuid
        user.FirstName, user.LastName = userJSON.get('FirstName',
                                                     ''), userJSON.get(
                                                         'LastName', '')
        user.Address, user.PhoneNumber = userJSON.get('Address',
                                                      ''), userJSON.get(
                                                          'PhoneNumber', '')
        user.EMail, user.Password = userJSON.get('EMail', ''), '~'
        user.Type = self.user_type_key

        needUpdate = True
        try:
            userId = self.userService.insert(user)
        except InputError:
            localUser = self.userService.getByUuid(user.Uuid)
            userId = localUser.Id
            if localUser.Type == self.user_type_key and (cid is None or
                                                         localUser.Cid < cid):
                user.Id = localUser.Id
                user.Type = localUser.Type
                user.Cid = cid
                self.userService.update(user)
            else:
                needUpdate = False

        collaborator = Collaborator()
        collaborator.User, collaborator.Source = userId, source.Id
        try:
            collaboratorId = self.collaboratorService.insert(collaborator)
        except InputError:
            collaborators = self.collaboratorService.getAll(userId, source.Id)
            collaboratorId = collaborators[0].Id

        if isAuthor:
            return [collaboratorId, userId, needUpdate, isAuthor]
        else:
            q = QSource(name=author['Source']['Name'], isModifiable=False)
            sources = self.sourceService.getAll(q=q)
            if not sources: raise Exception('Invalid source %s' % q.name)
            collaborators = self.collaboratorService.getAll(
                userId=None, sourceId=sources[0].Id)
            if collaborators:
                return [collaborators[0].Id, userId, needUpdate, isAuthor]
            else:
                collaborator = Collaborator()
                collaborator.Source = sources[0].Id
                return [
                    self.collaboratorService.insert(collaborator), userId,
                    needUpdate, isAuthor
                ]

    def _updateIcons(self, usersData):
        '''
        Set the icons for the given users.
        '''
        userIcons = {}
        for userId in usersData:
            userJSON = usersData[userId]
            userIcons[userId] = {'url': None, 'name': None}

            try:
                metaDataIconJSON = userJSON['MetaDataIcon']
                metaDataIconURL = metaDataIconJSON.get('href', '')
                if not metaDataIconURL:
                    continue

                (scheme, netloc, path, params, query,
                 fragment) = urlparse(metaDataIconURL)
                if not scheme:
                    metaDataIconURL = urlunparse(
                        ('http', netloc, path, params, query, fragment))

                req = Request(metaDataIconURL,
                              headers={
                                  'Accept': self.acceptType,
                                  'Accept-Charset': self.encodingType,
                                  'User-Agent': 'Magic Browser'
                              })
                try:
                    resp = urlopen(req)
                except (HTTPError, socket.error) as e:
                    continue
                if str(resp.status) != '200':
                    continue

                try:
                    msg = json.load(codecs.getreader(self.encodingType)(resp))
                except ValueError as e:
                    log.error('Invalid JSON data %s' % e)
                    continue

                userIcons[userId]['url'] = msg['Content'].get('href', None)

                if userIcons[userId]['url']:
                    iconFileName = userIcons[userId]['url'].split('/')[-1]
                    if iconFileName:
                        iconFileName = '_' + iconFileName
                    userIcons[userId]['name'] = 'icon_' + str(
                        userId) + iconFileName

            except KeyError:
                continue

        for userId in userIcons:
            iconInfo = userIcons[userId]
            self._synchronizeIcon(userId, iconInfo)

    def _synchronizeIcon(self, userId, iconInfo):
        '''
        Synchronize the local icon with the remote one.
        '''
        if not userId:
            return

        shouldRemoveOld = False
        needToUploadNew = False

        try:
            metaDataLocal = self.personIconService.getByPersonId(
                userId, 'http')
        except InputError:
            metaDataLocal = None

        if metaDataLocal:
            localId = metaDataLocal.Id
            localName = metaDataLocal.Name
        else:
            localId = None
            localName = None

        if not localId:
            if iconInfo['url']:
                needToUploadNew = True

        else:
            if iconInfo['url']:
                #on changed avatar the name of the file is changed
                if (not iconInfo['name']) or (not localName) or (
                        localName != iconInfo['name']):
                    shouldRemoveOld = True
                    needToUploadNew = True
            else:
                shouldRemoveOld = True

        if shouldRemoveOld:
            try:
                self.personIconService.detachIcon(userId)
                #self.metaInfoService.delete(localId)
            except InputError:
                log.error('Can not remove old icon for chained user %s' %
                          userId)

        if needToUploadNew:
            try:
                iconContent = ChainedIconContent(iconInfo['url'],
                                                 iconInfo['name'])
                imageData = self.metaDataService.insert(
                    userId, iconContent, 'http')
                if (not imageData) or (not imageData.Id):
                    return
                self.personIconService.setIcon(userId, imageData.Id, False)
            except InputError:
                log.error('Can not upload icon for chained user %s' % userId)

    def _readAuthor(self, url):

        (scheme, netloc, path, params, query, fragment) = urlparse(url)
        if not scheme:
            url = urlunparse(('http', netloc, path, params, query, fragment))

        request = Request(url,
                          headers={
                              'Accept': self.acceptType,
                              'Accept-Charset': self.encodingType,
                              'User-Agent': 'Magic Browser',
                              'X-Filter': '*,User.*,Source.*'
                          })

        try:
            response = urlopen(request)
        except (HTTPError, socket.error) as e:
            return None

        if str(response.status) != '200':
            return None

        try:
            return json.load(codecs.getreader(self.encodingType)(response))
        except ValueError as e:
            log.error('Invalid JSON data %s' % e)
            return None

    def _readCreator(self, url):

        (scheme, netloc, path, params, query, fragment) = urlparse(url)
        if not scheme:
            url = urlunparse(('http', netloc, path, params, query, fragment))

        request = Request(url,
                          headers={
                              'Accept': self.acceptType,
                              'Accept-Charset': self.encodingType,
                              'User-Agent': 'Magic Browser',
                              'X-Filter': '*'
                          })

        try:
            response = urlopen(request)
        except (HTTPError, socket.error) as e:
            return None

        if str(response.status) != '200':
            return None

        try:
            return json.load(codecs.getreader(self.encodingType)(response))
        except ValueError as e:
            log.error('Invalid JSON data %s' % e)
            return None
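
The recurring-sync pattern used by startChainSyncThread can be reduced to the standalone sketch below: a sched.scheduler whose callback re-enters itself, run on a daemon thread so it never blocks shutdown. The interval and the job body are placeholders.

import time
from sched import scheduler
from threading import Thread

sync_interval = 53              # seconds between runs (placeholder value)

def do_sync():
    print('synchronizing...')   # stands in for syncChains()

schedule = scheduler(time.time, time.sleep)

def periodic():
    do_sync()
    schedule.enter(sync_interval, 1, periodic, ())   # re-arm for the next run

schedule.enter(sync_interval, 1, periodic, ())
runner = Thread(name='chained sync', target=schedule.run)
runner.daemon = True
runner.start()
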
Example #6
class AudioPersistanceAlchemy(SessionSupport, IMetaDataHandler):
    '''
    Provides the service that handles the audio persistence @see: IAudioPersistanceService.
    '''

    format_file_name = '%(id)s.%(file)s'
    wire.config('format_file_name',
                doc='''
    The format for the audios file names in the media archive''')
    default_format_thumbnail = '%(size)s/audio.jpg'
    wire.config('default_format_thumbnail',
                doc='''
    The format for the audio thumbnails in the media archive''')
    format_thumbnail = '%(size)s/%(id)s.%(name)s.jpg'
    wire.config('format_thumbnail',
                doc='''
    The format for the audio thumbnails in the media archive''')
    ffmpeg_path = join('/', 'usr', 'bin', 'ffmpeg')
    wire.config('ffmpeg_path',
                doc='''
    The path where the ffmpeg is found''')
    ffmpeg_tmp_path = join('workspace', 'tools', 'ffmpeg', 'tmp')
    wire.config('ffmpeg_tmp_path',
                doc='''
    The path where ffmpeg writes temp data''')

    audio_supported_files = '3gp, act, AIFF, ALAC, Au, flac, gsm, m4a, m4p, mp3, ogg, ram, raw, vox, wav, wma'

    thumbnailManager = IThumbnailManager
    wire.entity('thumbnailManager')

    # Provides the thumbnail referencer

    def __init__(self):
        assert isinstance(
            self.format_file_name,
            str), 'Invalid format file name %s' % self.format_file_name
        assert isinstance(
            self.default_format_thumbnail,
            str), 'Invalid format thumbnail %s' % self.default_format_thumbnail
        assert isinstance(
            self.format_thumbnail,
            str), 'Invalid format thumbnail %s' % self.format_thumbnail
        assert isinstance(
            self.audio_supported_files,
            str), 'Invalid supported files %s' % self.audio_supported_files
        assert isinstance(self.ffmpeg_path,
                          str), 'Invalid ffmpeg path %s' % self.ffmpeg_path
        assert isinstance(
            self.ffmpeg_tmp_path,
            str), 'Invalid ffmpeg tmp path %s' % self.ffmpeg_tmp_path

        self.audioSupportedFiles = set(
            re.split('[\\s]*\\,[\\s]*', self.audio_supported_files))
        self._defaultThumbnailFormatId = self._thumbnailFormatId = self._metaTypeId = None

        if not path.exists(self.ffmpeg_tmp_path):
            makedirs(self.ffmpeg_tmp_path)

    def addMetaInfo(self, metaDataMapped, languageId):
        audioInfoMapped = AudioInfoMapped()
        audioInfoMapped.MetaData = metaDataMapped.Id
        audioInfoMapped.Language = languageId
        try:
            self.session().add(audioInfoMapped)
            self.session().flush((audioInfoMapped, ))
        except SQLAlchemyError as e:
            handle(e, audioInfoMapped)
        return audioInfoMapped

    def processByInfo(self, metaDataMapped, contentPath, contentType):
        '''
        @see: IMetaDataHandler.processByInfo
        '''
        if contentType is not None and contentType.startswith(META_TYPE_KEY):
            return self.process(metaDataMapped, contentPath)

        extension = splitext(metaDataMapped.Name)[1][1:]
        if extension in self.audioSupportedFiles:
            return self.process(metaDataMapped, contentPath)

        return False

    def process(self, metaDataMapped, contentPath):
        '''
        @see: IMetaDataHandler.process
        '''
        assert isinstance(
            metaDataMapped,
            MetaDataMapped), 'Invalid meta data mapped %s' % metaDataMapped

        # extract the metadata to a temporary file so that ffmpeg has an output parameter
        # (without one it exits with code 1); the generated metadata file is deleted afterwards
        tmpFile = join(self.ffmpeg_tmp_path, str(metaDataMapped.Id))

        if exists(tmpFile): remove(tmpFile)
        p = Popen(
            (self.ffmpeg_path, '-i', contentPath, '-f', 'ffmetadata', tmpFile),
            stdin=PIPE,
            stdout=PIPE,
            stderr=STDOUT)
        result = p.wait()
        if exists(tmpFile): remove(tmpFile)
        if result != 0: return False

        audioDataEntry = AudioDataEntry()
        audioDataEntry.Id = metaDataMapped.Id
        metadata = False

        while True:
            line = p.stdout.readline()
            if not line: break
            line = str(line, 'utf-8')
            if line.find('misdetection possible!') != -1: return False

            if metadata:
                property = self.extractProperty(line)

                if property is None:
                    metadata = False
                else:
                    try:
                        if property == 'title':
                            audioDataEntry.Title = self.extractString(line)
                        elif property == 'artist':
                            audioDataEntry.Artist = self.extractString(line)
                        elif property == 'track':
                            audioDataEntry.Track = self.extractNumber(line)
                        elif property == 'album':
                            audioDataEntry.Album = self.extractString(line)
                        elif property == 'genre':
                            audioDataEntry.Genre = self.extractString(line)
                        elif property == 'TCMP':
                            audioDataEntry.Tcmp = self.extractNumber(line)
                        elif property == 'album_artist':
                            audioDataEntry.AlbumArtist = self.extractString(
                                line)
                        elif property == 'date':
                            audioDataEntry.Year = self.extractNumber(line)
                        elif property == 'disc':
                            audioDataEntry.Disk = self.extractNumber(line)
                        elif property == 'TBPM':
                            audioDataEntry.Tbpm = self.extractNumber(line)
                        elif property == 'composer':
                            audioDataEntry.Composer = self.extractString(line)
                        elif property == 'Duration':
                            # Metadata section is finished
                            metadata = False
                    except:
                        #skip if not able to extract data
                        pass

                if metadata: continue
            elif line.find('Metadata') != -1:
                metadata = True
                continue

            if line.find('Stream') != -1 and line.find('Audio') != -1:
                try:
                    values = self.extractAudio(line)
                    audioDataEntry.AudioEncoding = values[0]
                    audioDataEntry.SampleRate = values[1]
                    audioDataEntry.Channels = values[2]
                    audioDataEntry.AudioBitrate = values[3]
                except:
                    pass
            elif line.find('Duration') != -1 and line.find('start') != -1:
                try:
                    values = self.extractDuration(line)
                    audioDataEntry.Length = values[0]
                    audioDataEntry.AudioBitrate = values[1]
                except:
                    pass
            elif line.find('Output #0') != -1:
                break

        path = self.format_file_name % {
            'id': metaDataMapped.Id,
            'file': metaDataMapped.Name
        }
        path = ''.join((META_TYPE_KEY, '/',
                        self.generateIdPath(metaDataMapped.Id), '/', path))

        metaDataMapped.content = path
        metaDataMapped.Type = META_TYPE_KEY
        metaDataMapped.typeId = self.metaTypeId()
        metaDataMapped.thumbnailFormatId = self.defaultThumbnailFormatId()
        metaDataMapped.IsAvailable = True

        try:
            self.session().add(audioDataEntry)
            self.session().flush((audioDataEntry, ))
        except SQLAlchemyError as e:
            metaDataMapped.IsAvailable = False
            handle(e, AudioDataEntry)

        return True

    # ----------------------------------------------------------------

    @app.populate
    def populateThumbnail(self):
        '''
        Populates the thumbnail for audio.
        '''
        self.thumbnailManager.putThumbnail(
            self.defaultThumbnailFormatId(),
            abspath(join(pythonPath(), 'resources', 'audio.jpg')))

    # ----------------------------------------------------------------

    def metaTypeId(self):
        '''
        Provides the meta type id.
        '''
        if self._metaTypeId is None:
            self._metaTypeId = metaTypeFor(self.session(), META_TYPE_KEY).Id
        return self._metaTypeId

    def defaultThumbnailFormatId(self):
        '''
        Provides the thumbnail format id.
        '''
        if not self._defaultThumbnailFormatId:
            self._defaultThumbnailFormatId = thumbnailFormatFor(
                self.session(), self.default_format_thumbnail).id
        return self._defaultThumbnailFormatId

    def thumbnailFormatId(self):
        '''
        Provides the thumbnail format id.
        '''
        if not self._thumbnailFormatId:
            self._thumbnailFormatId = thumbnailFormatFor(
                self.session(), self.format_thumbnail).id
        return self._thumbnailFormatId

    def extractDuration(self, line):
        # Duration: 00:00:30.06, start: 0.000000, bitrate: 585 kb/s
        properties = line.split(',')

        length = properties[0].partition(':')[2]
        length = length.strip().split(':')
        length = int(length[0]) * 3600 + int(length[1]) * 60 + int(float(length[2]))

        bitrate = properties[2]
        bitrate = bitrate.partition(':')[2]
        bitrate = bitrate.strip().partition(' ')
        if bitrate[2] == 'kb/s':
            bitrate = int(float(bitrate[0]))
        else:
            bitrate = None

        return (length, bitrate)

    def extractAudio(self, line):
        # Stream #0.1(eng): Audio: aac, 44100 Hz, stereo, s16, 61 kb/s
        properties = (line.rpartition(':')[2]).split(',')

        index = 0
        encoding = properties[index].strip()

        index += 1
        sampleRate = properties[index].strip().partition(' ')
        if sampleRate[2] == 'Hz':
            sampleRate = int(float(sampleRate[0]))
        else:
            sampleRate = None

        index += 1
        channels = properties[index].strip()

        index += 2
        bitrate = properties[index].strip().partition(' ')
        if bitrate[2] == 'kb/s':
            bitrate = int(float(bitrate[0]))
        else:
            bitrate = None

        return (encoding, sampleRate, channels, bitrate)

    # ----------------------------------------------------------------

    def extractProperty(self, line):
        # metadata lines look like '    key : value'; anything else ends the metadata section
        if ':' not in line: return None
        return line.partition(':')[0].strip()

    # ----------------------------------------------------------------

    def extractNumber(self, line):
        return int(float(line.partition(':')[2].strip()))

    # ----------------------------------------------------------------

    def extractString(self, line):
        return line.partition(':')[2].strip()

    # ----------------------------------------------------------------

    def generateIdPath(self, id):
        return "{0:03d}".format((id // 1000) % 1000)
Example #7
class BlogSourceServiceAlchemy(EntityCRUDServiceAlchemy, IBlogSourceService):
    '''
    Implementation for @see: IBlogSourceService
    '''
    sources_auto_delete = [
        'chained blog',
    ]
    wire.config('sources_auto_delete',
                doc='''
    List of source types whose sources should be deleted once all of their usages have been removed'''
                )

    sourceService = ISourceService
    wire.entity('sourceService')

    # The source service used to manage all operations on sources

    def __init__(self):
        '''
        Construct the blog source service.
        '''

    def getSource(self, blogId, sourceId):
        '''
        @see: IBlogSourceService.getSource
        '''
        source = self.session().query(SourceMapped).get(sourceId)
        if not source:
            raise InputError(Ref(_('Unknown source'), ))
        sql = self.session().query(BlogSourceDB)
        sql = sql.filter(BlogSourceDB.blog == blogId).filter(
            BlogSourceDB.source == sourceId)
        if not sql.count():
            raise InputError(Ref(_('Source is not used by the blog'), ))
        return source

    def getSources(self, blogId):
        '''
        @see: IBlogSourceService.getSources
        '''
        sql = self.session().query(SourceMapped)
        sql = sql.join(BlogSourceDB, SourceMapped.Id == BlogSourceDB.source)
        sql = sql.join(
            BlogMapped,
            BlogMapped.Id == BlogSourceDB.blog).filter(BlogMapped.Id == blogId)
        return sql.all()

    def addSource(self, blogId, source):
        '''
        @see: IBlogSourceService.addSource
        NB: The source must have the correct type set in.
            This way, we can reuse it for other purposes, apart from the chained blogs.
        '''
        assert isinstance(blogId, int), 'Invalid blog identifier %s' % blogId
        assert isinstance(source, Source), 'Invalid source %s' % source

        # insert source if it didn't exist yet
        q = QSource(name=source.Name)
        sources = self.sourceService.getAll(typeKey=source.Type, q=q)
        if not sources: sourceId = self.sourceService.insert(source)
        else: sourceId = sources[0].Id

        ent = BlogSourceDB()
        ent.blog = blogId
        ent.source = sourceId
        try:
            self.session().add(ent)
            self.session().flush((ent, ))
        except SQLAlchemyError as e:
            handle(e, ent)
        return sourceId

    def deleteSource(self, blogId, sourceId):
        '''
        @see: IBlogSourceService.deleteSource
        '''
        assert isinstance(blogId, int), 'Invalid blog identifier %s' % blogId
        assert isinstance(sourceId,
                          int), 'Invalid source identifier %s' % sourceId
        try:
            res = self.session().query(BlogSourceDB).filter(
                BlogSourceDB.blog == blogId).filter(
                    BlogSourceDB.source == sourceId).delete() > 0
            if res:
                sourceTypeKey, = self.session().query(
                    SourceTypeMapped.Key).join(
                        SourceMapped,
                        SourceTypeMapped.id == SourceMapped.typeId).filter(
                            SourceMapped.Id == sourceId).one()
                if sourceTypeKey in self.sources_auto_delete:
                    self.sourceService.delete(sourceId)
            return res
        except OperationalError:
            assert log.debug(
                'Could not delete blog source with blog id \'%s\' and source id \'%s\'',
                blogId,
                sourceId,
                exc_info=True) or True
            raise InputError(Ref(_('Cannot delete because it is in use'), ))

    def getChainedPosts(self,
                        blogId,
                        sourceTypeKey,
                        offset=None,
                        limit=None,
                        detailed=False,
                        q=None):
        '''
        @see: IBlogSourceService.getChainedPosts
        '''
        sql = self.session().query(PostMapped)
        sql = sql.join(CollaboratorMapped).join(SourceMapped).join(
            SourceTypeMapped)
        sql = sql.filter(SourceTypeMapped.Key == sourceTypeKey)
        sql = sql.join(BlogSourceDB,
                       SourceMapped.Id == BlogSourceDB.source).filter(
                           BlogSourceDB.blog == blogId)

        if q:
            assert isinstance(q, QPostWithPublished), 'Invalid query %s' % q
            sql = buildQuery(sql, q, PostMapped)

            if q and QPostWithPublished.isPublished in q:
                if q.isPublished.value:
                    sql = sql.filter(PostMapped.PublishedOn != None)
                else:
                    sql = sql.filter(PostMapped.PublishedOn == None)

        sqlLimit = buildLimits(sql, offset, limit)
        if detailed:
            return IterPart(sqlLimit.all(), sql.count(), offset, limit)
        return sqlLimit.all()
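
The deleteSource() pattern above, a filtered bulk delete whose row count doubles as the "was anything removed" flag, is shown in isolation below against a hypothetical BlogSource table on an in-memory SQLite database (assumes SQLAlchemy 1.4+).

from sqlalchemy import Column, Integer, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class BlogSource(Base):
    __tablename__ = 'blog_source'
    id = Column(Integer, primary_key=True)
    blog = Column(Integer, nullable=False)
    source = Column(Integer, nullable=False)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(BlogSource(blog=1, source=7))
    session.commit()

    # the bulk delete returns the number of affected rows
    removed = session.query(BlogSource) \
        .filter(BlogSource.blog == 1) \
        .filter(BlogSource.source == 7) \
        .delete() > 0
    session.commit()
    print(removed)   # True
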
Example #8
class SeoSyncProcess:
    '''
    Seo sync process.
    '''

    blogSeoService = IBlogSeoService; wire.entity('blogSeoService')
    # blog seo service used to retrieve blogs set on auto publishing

    blogService = IBlogService; wire.entity('blogService')
    # blog service used to get the blog name
    
    blogThemeService = IBlogThemeService; wire.entity('blogThemeService')
    # blog theme service used to get the theme name
    
    languageService = ILanguageService; wire.entity('languageService')
    # blog language service used to get the language code
    
    htmlCDM = ICDM; wire.entity('htmlCDM')
    # cdm service used to store the generated HTML files
    
    syncThreads = {}
    # dictionary of threads that perform synchronization

    sync_interval = 59; wire.config('sync_interval', doc='''
    The number of seconds to perform sync for seo blogs.''')
    
    timeout_inteval = 4#; wire.config('timeout_interval', doc='''
    #The number of seconds after which the sync ownership can be taken.''')
    
    html_generation_server = 'http://nodejs-dev.sourcefabric.org/'; wire.config('html_generation_server', doc='''
    The partial path used to construct the URL for blog html generation''')
    
    acceptType = 'text/json'
    # mime type accepted for response from remote blog
    
    encodingType = 'UTF-8'
    # character encoding type accepted for response from remote blog
    
    format_file_name = '%(blog_id)s.html'
    #default file format

    @app.deploy
    def startSeoSyncThread(self):
        '''
        Starts the seo sync thread.
        '''
        schedule = scheduler(time.time, time.sleep)
        def syncSeoBlogs():
            self.syncSeoBlogs()
            schedule.enter(self.sync_interval, 1, syncSeoBlogs, ())
        schedule.enter(self.sync_interval, 1, syncSeoBlogs, ())
        scheduleRunner = Thread(name='blog html for seo', target=schedule.run)
        scheduleRunner.daemon = True
        scheduleRunner.start()
        log.info('Started the seo html synchronization.')

    def syncSeoBlogs(self):
        '''
        Read all blog seo entries and regenerate the HTML for the corresponding blogs.
        '''
        log.info('Start seo blog synchronization')
        
        sleep_time = randint(0, 1000) * 0.001
        time.sleep(sleep_time)
        
        crtTime = datetime.datetime.now().replace(microsecond=0) 
        
        q = QBlogSeo(refreshActive=True)
        q.nextSync.until = crtTime
        
        for blogSeo in self.blogSeoService.getAll(q=q): 
            assert isinstance(blogSeo, BlogSeo)
            
            nextSync = crtTime + datetime.timedelta(seconds=blogSeo.RefreshInterval)
            self.blogSeoService.updateNextSync(blogSeo.Id, nextSync) 
            
            existsChanges = self.blogSeoService.existsChanges(blogSeo.Blog, blogSeo.LastCId)
            
            if blogSeo.LastSync is not None and not existsChanges: 
                log.info('Skip blog seo %d for blog %d', blogSeo.Id, blogSeo.Blog)
                continue
            
            key = blogSeo.Id
            thread = self.syncThreads.get(key)
            if thread:
                assert isinstance(thread, Thread), 'Invalid thread %s' % thread
                if thread.is_alive(): continue

            if not self.blogSeoService.checkTimeout(blogSeo.Id, self.timeout_inteval * self.sync_interval): continue

            self.syncThreads[key] = Thread(name='blog seo %d for blog %d' % (blogSeo.Id, blogSeo.Blog),
                                           target=self._syncSeoBlog, args=(blogSeo,))
            self.syncThreads[key].daemon = True
            self.syncThreads[key].start()
            log.info('Seo thread started for blog seo %d, blog %d and theme %d', blogSeo.Id, blogSeo.Blog, blogSeo.BlogTheme)   
        log.info('End seo blog synchronization')

    def _syncSeoBlog(self, blogSeo):
        '''
        Synchronize the seo HTML for the given blog seo entry.

        @param blogSeo: BlogSeo
            The blog seo entry declaring the blog and theme for which the HTML
            has to be generated.
        '''
        assert isinstance(blogSeo, BlogSeo), 'Invalid blog seo %s' % blogSeo
        
        (scheme, netloc, path, params, query, fragment) = urlparse(self.host_url)
        if not scheme: scheme = 'http'
        if not netloc: 
            netloc = path
        host_url = urlunparse((scheme, netloc, '', '', '', ''))
        
        lastCId = blogSeo.LastCId
        self.blogSeoService.getLastCId(blogSeo)
        blog = self.blogService.getBlog(blogSeo.Blog)
        theme = self.blogThemeService.getById(blogSeo.BlogTheme)
        language = self.languageService.getById(blog.Language, ())
                   
        (scheme, netloc, path, params, query, fragment) = urlparse(self.html_generation_server)

        q = parse_qsl(query, keep_blank_values=True)
        q.append(('liveblog[id]', blogSeo.Blog))
        q.append(('liveblog[theme]', theme.Name))
        q.append(('liveblog[servers][rest]', host_url))
        q.append(('liveblog[fallback][language]', language.Code))
        if blogSeo.MaxPosts is not None:
            q.append(('liveblog[limit]', blogSeo.MaxPosts))

        url = urlunparse((scheme, netloc, path, params, urlencode(q), fragment))
        req = Request(url, headers={'Accept' : self.acceptType, 'Accept-Charset' : self.encodingType,
                                    'User-Agent' : 'LiveBlog REST'})
        
        try: resp = urlopen(req)
        except HTTPError as e:
            blogSeo.CallbackStatus = e.read().decode(encoding='UTF-8')
            blogSeo.LastBlocked = None 
            blogSeo.LastCId = lastCId
            self.blogSeoService.update(blogSeo)
            log.error('Read problem on %s, error message: %s' % (str(url), blogSeo.CallbackStatus))
            return
        except Exception as e:  
            blogSeo.CallbackStatus = 'Can\'t access the HTML generation server: ' + self.html_generation_server
            blogSeo.LastBlocked = None 
            blogSeo.LastCId = lastCId
            self.blogSeoService.update(blogSeo)
            log.error('Read problem on accessing %s' % (self.html_generation_server, ))
            return
 
        try: 
            baseContent = self.htmlCDM.getURI('')
            path = blogSeo.HtmlURL[len(baseContent):]
            self.htmlCDM.publishContent(path, resp)
            
            default_name = self.format_file_name % {'blog_id': blogSeo.Blog}
            if not path.endswith('/' + default_name) and self.blogSeoService.isFirstSEO(blogSeo.Id, blogSeo.Blog):                   
                url = host_url + blogSeo.HtmlURL          
                req = Request(url, headers={'Accept' : self.acceptType, 'Accept-Charset' : self.encodingType,
                                    'User-Agent' : 'LiveBlog REST'})
                resp = urlopen(req)
                path = dirname(path) + '/' + default_name
                self.htmlCDM.publishContent(path, resp) 
        except ValueError as e:
            log.error('Failed to publish the HTML file on the CDM: %s' % e)
            blogSeo.CallbackStatus = 'Failed to publish the HTML file on the CDM'
            blogSeo.LastBlocked = None 
            blogSeo.LastCId = lastCId
            self.blogSeoService.update(blogSeo)
            return
        
        blogSeo.CallbackStatus = None  

        if blogSeo.CallbackActive and blogSeo.CallbackURL:
            (scheme, netloc, path, params, query, fragment) = urlparse(blogSeo.CallbackURL)
            
            if not scheme: scheme = 'http'
            if not netloc: 
                netloc = path
                path = ''
    
            q = parse_qsl(query, keep_blank_values=True)
            q.append(('blogId', blogSeo.Blog))
            q.append(('blogTitle', blog.Title))
            q.append(('theme', theme.Name))
            q.append(('htmlFile', host_url + blogSeo.HtmlURL))
                
            url = urlunparse((scheme, netloc, path, params, urlencode(q), fragment))
            req = Request(url, headers={'Accept' : self.acceptType, 'Accept-Charset' : self.encodingType,
                                        'User-Agent' : 'Magic Browser'})
            
            try: resp = urlopen(req)
            except HTTPError as e:
                log.error('Error opening URL %s; error status: %s' % (blogSeo.CallbackURL, e.code))
                blogSeo.CallbackStatus = 'Error opening callback URL: ' + blogSeo.CallbackURL + '; error status: ' + str(e.code)
            except Exception as e:
                log.error('Error opening URL %s: %s' % (blogSeo.CallbackURL, e))
                blogSeo.CallbackStatus = 'Error opening callback URL: ' + blogSeo.CallbackURL
            else: 
                blogSeo.CallbackStatus = None    
        
        blogSeo.LastSync = datetime.datetime.now().replace(microsecond=0) 
        blogSeo.LastBlocked = None 
        self.blogSeoService.update(blogSeo)
Example #9
0
class RegisterDefaultGateways(HandlerProcessorProceed):
    '''
    Provides the handler that populates default gateways.
    '''

    default_gateways = []
    wire.config('default_gateways',
                doc='''
    The default gateways that are available for any unauthorized access. This is a list of dictionaries that allow
    the following keys:
        Pattern -   a string value:
                    contains the regex that needs to match the requested URI. The pattern needs to produce, if that is the
                    case, capturing groups that can be used by the Filters or Navigate.
        Headers -   a list of strings:
                    the headers to be filtered in order to validate the navigation. Even though this might look specific to
                    HTTP, they can actually be used for any meta data that accompanies a request; it depends mostly on the
                    gateway interpretation. The headers are provided as regexes that need to be matched. For headers
                    that are paired as name and value the regex will receive the matching string as 'Name:Value'; the name
                    is not allowed to contain ':'. At least one header needs to match to consider the navigation valid.
        Methods -   a list of strings:
                    the list of allowed methods for the request, if no method is provided then all methods are considered
                    valid. At least one method needs to match to consider the navigation valid.
        Filters -   a list of strings:
                    contains a list of URIs that need to be called in order to allow the gateway Navigate. The filters are
                    allowed to have place holders of form '{1}' or '{2}' ... '{n}' where n is the number of groups obtained
                    from the Pattern, the place holders will be replaced with their respective group value. All filters
                    need to return a True value in order to allow the gateway Navigate.
        Errors -    a list of integers:
                    the list of error codes that are considered to be handled by this Gateway entry; if no error is provided
                    then it means the entry is not solving any error navigation. At least one error needs to match in order
                    to consider the navigation valid.
        Host -      a string value:
                    the host where the request needs to be resolved, if not provided the request will be delegated to the
                    default host.
        Protocol -  a string value:
                    the protocol to be used in the communication with the server that handles the request, if not provided
                    the request will be delegated using the default protocol.
        Navigate -  a string value:
                    a pattern-like string of the form '*', 'resources/*' or 'redirect/Model/{1}'. The pattern is allowed to
                    have place holders and also the '*', which stands for the actually called URI; parameters are also allowed
                    for the navigate URI and will override the actual parameters.
        PutHeaders -the headers to be put on the forwarded requests. The values are provided as 'Name:Value'; the name is
                    not allowed to contain ':'.
    ''')
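    # A hypothetical sketch of a single default_gateways entry using the keys documented above
    # (illustrative only, not taken from any actual configuration):
    #
    # default_gateways = [{
    #     'Pattern': '(?:^|/)resources/([0-9]+)(?:/|$)',
    #     'Methods': ['GET'],
    #     'Filters': ['Gateway/Filter/{1}'],
    #     'Navigate': 'resources/*',
    #     'PutHeaders': ['X-Forwarded-For:gateway'],
    # }]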

    def __init__(self):
        '''
        Construct the default gateways register.
        '''
        assert isinstance(
            self.default_gateways,
            list), 'Invalid default gateways %s' % self.default_gateways
        super().__init__()

        self._gateways = []
        for config in self.default_gateways:
            self._gateways.append(gatewayFrom(config))

    def process(self, reply: Reply, **keyargs):
        '''
        @see: HandlerProcessorProceed.process
        
        Adds the default gateways.
        '''
        assert isinstance(reply, Reply), 'Invalid reply %s' % reply

        if reply.gateways is not None:
            reply.gateways = chain(self._gateways, reply.gateways)
        else:
            reply.gateways = self._gateways
Example #10
0
class BlogCollaboratorGroupService(SessionSupport, IBlogCollaboratorGroupService, IBlogCollaboratorGroupCleanupService):
    '''
    Implementation for @see: IBlogCollaboratorGroupService
    '''
    
    group_timeout = 3600; wire.config('group_timeout', doc='''
    The number of seconds after which the blog collaborators group expires.
    ''')
    
    # ----------------------------------------------------------------

    def __init__(self):
        '''
        Construct the blog collaborators group service.
        '''
        assert isinstance(self.group_timeout, int), 'Invalid blog collaborators group timeout %s' % self.group_timeout
        self._group_timeout = timedelta(seconds=self.group_timeout)

    # ----------------------------------------------------------------

    def getById(self, groupId):
        '''
        @see IBlogCollaboratorGroupService.getById
        '''
        sql = self.session().query(BlogCollaboratorGroupMapped)
        sql = sql.filter(BlogCollaboratorGroupMapped.Id == groupId)

        try: 
            group = sql.one()
            return group
        except NoResultFound: raise InputError(Ref(_('No collaborator group'), ref=BlogCollaboratorGroupMapped.Id))

    # ----------------------------------------------------------------
            
    def getAllMembers(self, groupId):
        '''
        @see IBlogCollaboratorGroupService.getAllMembers
        '''
        
        sql = self.session().query(BlogCollaboratorGroupMemberMapped).filter(BlogCollaboratorGroupMemberMapped.Group == groupId)
        
        return sql.all()

    # ----------------------------------------------------------------
    
    def insert(self, collaboratorGroup):
        '''
        @see IBlogCollaboratorGroupService.insert
        '''
        
        group = BlogCollaboratorGroupMapped()
        group.Blog = collaboratorGroup.Blog
        group.LastAccessOn = current_timestamp() 
        
        self.session().add(group)
        self.session().flush((group,))
              
        insert = InsertFromSelect(tableFor(BlogCollaboratorGroupMemberMapped), 'fk_group_id, fk_collaborator_id',
                                  select([group.Id, BlogCollaboratorMapped.blogCollaboratorId]).where(BlogCollaboratorMapped.Blog == group.Blog))
        self.session().execute(insert) 
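        # The InsertFromSelect statement above is expected to render roughly to (a sketch; the
        # exact table and column names beyond fk_group_id/fk_collaborator_id are assumptions
        # based on the mappings):
        #   INSERT INTO blog_collaborator_group_member (fk_group_id, fk_collaborator_id)
        #   SELECT <group.Id>, blog_collaborator_id FROM blog_collaborator WHERE fk_blog_id = <group.Blog>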
        
        return group.Id  

    # ----------------------------------------------------------------
    
    def delete(self, groupId):
        '''
        @see IBlogCollaboratorGroupService.delete
        '''
        
        self.session().query(BlogCollaboratorGroupMemberMapped).filter(BlogCollaboratorGroupMemberMapped.Group == groupId).delete()
        self.session().query(BlogCollaboratorGroupMapped).filter(BlogCollaboratorGroupMapped.Id == groupId).delete()
        
        return True

    # ----------------------------------------------------------------
        
    def addCollaborator(self, groupId, collaboratorId):
        '''
        @see IBlogCollaboratorGroupService.addCollaborator
        '''
        
        updateLastAccessOn(self.session(), groupId) 
        
        sql = self.session().query(BlogCollaboratorGroupMemberMapped)
        sql = sql.filter(BlogCollaboratorGroupMemberMapped.Group == groupId)
        sql = sql.filter(BlogCollaboratorGroupMemberMapped.BlogCollaborator == collaboratorId)
        if sql.count() == 1: return True
        
        member = BlogCollaboratorGroupMemberMapped()
        member.Group = groupId
        member.BlogCollaborator = collaboratorId
        
        self.session().add(member)
        self.session().flush((member,))
        
        return True
            
    # ----------------------------------------------------------------        
    
    def removeCollaborator(self, groupId, collaboratorId):
        '''
        @see IBlogCollaboratorGroupService.removeCollaborator
        '''
        updateLastAccessOn(self.session(), groupId)
        sql = self.session().query(BlogCollaboratorGroupMemberMapped)
        sql = sql.filter(BlogCollaboratorGroupMemberMapped.Group == groupId)
        sql = sql.filter(BlogCollaboratorGroupMemberMapped.BlogCollaborator == collaboratorId)
        sql.delete()
        
        return True
    
    # ----------------------------------------------------------------

    def cleanExpired(self):
        '''
        @see: ICleanupService.cleanExpired
        '''
        olderThan = self.session().query(current_timestamp()).scalar()

        # Cleaning expired blog collaborators groups
        sqlIn = self.session().query(BlogCollaboratorGroupMapped.Id)
        sqlIn = sqlIn.filter(BlogCollaboratorGroupMapped.LastAccessOn <= olderThan - self._group_timeout)
        
        sql = self.session().query(BlogCollaboratorGroupMemberMapped)
        sql = sql.filter(BlogCollaboratorGroupMemberMapped.Group.in_(sqlIn))
        sql.delete(synchronize_session='fetch')
        
        sql = self.session().query(BlogCollaboratorGroupMapped)
        sql = sql.filter(BlogCollaboratorGroupMapped.LastAccessOn <= olderThan - self._group_timeout)
        deleted = sql.delete(synchronize_session='fetch')
        
        assert log.debug('Cleaned \'%s\' expired blog collaborator groups', deleted) or True
Example #11
0
class BlogCommentServiceAlchemy(EntityServiceAlchemy, IBlogCommentService):
    '''
    Implementation for @see: IBlogCommentService
    '''
    blog_config_name = 'Comments'
    wire.config('blog_config_name',
                doc='''
    Name of the blog-specific comments permission configuration''')
    source_type_key = 'comment'
    wire.config('source_type_key',
                doc='''
    Type of the sources for blog comments''')
    source_name_default = 'embed'
    wire.config('source_name_default',
                doc='''
    Default name of the sources for blog comments''')
    post_type_key = 'normal'
    wire.config('post_type_key',
                doc='''
    Type of the posts created on the comment that come via blog comments''')
    user_last_name = 'commentator'
    wire.config('user_last_name',
                doc='''
    The name that is used as LastName for the anonymous users of blog comment posts'''
                )
    user_type_key = 'commentator'
    wire.config('user_type_key',
                doc='''
    The user type that is used for the anonymous users of blog comment posts'''
                )

    blogPostService = IBlogPostService
    wire.entity('blogPostService')
    sourceService = ISourceService
    wire.entity('sourceService')
    collaboratorService = ICollaboratorService
    wire.entity('collaboratorService')
    userService = IUserService
    wire.entity('userService')

    def __init__(self):
        '''
        Construct the blog comment service.
        '''
        assert isinstance(
            self.blogPostService, IBlogPostService
        ), 'Invalid blog post service %s' % self.blogPostService
        assert isinstance(
            self.sourceService,
            ISourceService), 'Invalid source service %s' % self.sourceService
        assert isinstance(
            self.collaboratorService, ICollaboratorService
        ), 'Invalid collaborator service %s' % self.collaboratorService
        assert isinstance(
            self.userService,
            IUserService), 'Invalid user service %s' % self.userService

    def getComments(self,
                    blogId,
                    offset=None,
                    limit=None,
                    detailed=False,
                    q=None):
        '''
        @see: IBlogCommentService.getComments
        '''
        sql = self.session().query(BlogPostMapped).filter(
            BlogPostMapped.Blog == blogId)
        sql = sql.join(CollaboratorMapped).join(SourceMapped).join(
            SourceTypeMapped)
        sql = sql.filter(SourceTypeMapped.Key == self.source_type_key)
        if q:
            assert isinstance(q, QBlogPost), 'Invalid query %s' % q
            sql = buildQuery(sql, q, BlogPostMapped)

        sqlLimit = buildLimits(sql, offset, limit)
        if detailed:
            return IterPart(sqlLimit.all(), sql.count(), offset, limit)
        return sqlLimit.all()

    def getOriginalComments(self,
                            blogId,
                            offset=None,
                            limit=None,
                            detailed=False,
                            q=None):
        '''
        @see: IBlogCommentService.getOriginalComments
        TODO: this is just for enabling the comment-post URL in the resources
        '''
        return ()

    def addComment(self, blogId, comment):
        '''
        @see: IBlogCommentService.addComment
        '''
        # checking if the blog exists
        # checking whether comments are allowed shall be done in gateway
        if not self.session().query(
                exists().where(BlogMapped.Id == blogId)).scalar():
            raise InputError(Ref(_('Specified blog does not exist'), ))

        userName = comment.UserName
        commentText = comment.CommentText
        commentSource = comment.CommentSource if comment.CommentSource else self.source_name_default

        # checking the necessary info: user name and comment text
        if not userName:
            raise InputError(Ref(_('No value for the mandatory UserName'), ))
        if not commentText:
            raise InputError(Ref(
                _('No value for the mandatory CommentText'), ))

        # take (or make) the user (for user name) part of creator and collaborator
        userTypeId, = self.session().query(UserTypeMapped.id).filter(
            UserTypeMapped.Key == self.user_type_key).one()
        try:
            sql = self.session().query(UserMapped.userId, UserMapped.Active)
            sql = sql.filter(UserMapped.typeId == userTypeId)
            sql = sql.filter(UserMapped.FirstName == userName)
            userId, isActive = sql.one()
            if not isActive:
                raise InputError(
                    Ref(_('The commentator user was inactivated'), ))
        except NoResultFound:
            user = User()
            user.FirstName = userName
            user.LastName = self.user_last_name
            user.Name = self._freeCommentUserName()
            user.Password = binascii.b2a_hex(os.urandom(32)).decode()
            user.Type = self.user_type_key
            userId = self.userService.insert(user)

        # make the source (for inlet type) part of collaborator
        try:
            sql = self.session().query(SourceMapped.Id).join(SourceTypeMapped)
            sql = sql.filter(
                SourceTypeMapped.Key == self.source_type_key).filter(
                    SourceMapped.Name == commentSource)
            sourceId, = sql.one()
        except NoResultFound:
            source = Source()
            source.Type = self.source_type_key
            source.Name = commentSource
            source.URI = ''
            source.IsModifiable = True
            sourceId = self.sourceService.insert(source)

        # make the collaborator
        sql = self.session().query(CollaboratorMapped.Id)
        sql = sql.filter(CollaboratorMapped.Source == sourceId)
        sql = sql.filter(CollaboratorMapped.User == userId)
        try:
            collabId, = sql.one()
        except NoResultFound:
            collab = Collaborator()
            collab.Source = sourceId
            collab.User = userId
            collabId = self.collaboratorService.insert(collab)

        # create post request
        post = Post()
        post.Type = self.post_type_key
        post.Creator = userId
        post.Author = collabId
        post.Content = commentText
        post.CreatedOn = datetime.now()

        # insert the blog post
        postId = self.blogPostService.insert(blogId, post)

        return postId
        #return (self.blogPostService.getById(blogId, postId),)

    # ------------------------------------------------------------------
    def _freeCommentUserName(self):
        while True:
            userName = '******' + binascii.b2a_hex(os.urandom(8)).decode()
            try:
                self.session().query(UserMapped).filter(
                    UserMapped.Name == userName).one()
            except NoResultFound:
                return userName
Example #12
0
class SolrArticleSearchProvider(IArticleSearchProvider):
    '''
    Implementation for @see: IArticleSearchProvider
    '''

    solr_server_url = 'localhost:8983/solr/'
    wire.config('solr_server_url', doc='''The Solr server address
    ''')

    def __init__(self):
        assert isinstance(
            self.solr_server_url,
            str), 'Invalid solr server url %s' % self.solr_server_url

    # ----------------------------------------------------------------

    def update(self, article):
        '''
        @see: IArticleSearchProvider.update()
        '''

        return

#        si = SolrInterface('http://%s%s' % (self.solr_server_url, metaData.Type))
#
#        document = dict()
#
#        document["MetaInfoId"] = metaInfo.Id
#        document["MetaDataId"] = metaData.Id
#        document["languageId"] = metaInfo.Language
#
#        # custom processing on some fields
#        field = 'CreationDate'
#        if hasattr(metaInfo, field) and getattr(metaInfo, field):
#            document['CreationData_Year'] = getattr(metaInfo, field).year
#
#        for field in si.schema.fields:
#            if hasattr(metaInfo, field) and getattr(metaInfo, field):
#                document[field] = getattr(metaInfo, field)
#            elif hasattr(metaData, field) and getattr(metaData, field):
#                document[field] = getattr(metaData, field)
#
#        si.add(document)
#        si.commit()

# ----------------------------------------------------------------

    def delete(self, id):
        '''
        @see: IArticleSearchProvider.delete()
        '''
        si = SolrInterface('http://%s%s' % (self.solr_server_url, 'article'))
        si.delete(str(id))
        si.commit()

    # ----------------------------------------------------------------

#    def processQuery(self, session, scheme, q=None):
#        '''
#        Creates the solr query based on received REST queries
#        '''
#
#        si = SolrInterface('http://%sother' % self.solr_server_url)
#        types = [self.queryIndexer.typesByMetaData[key] for key in self.queryIndexer.typesByMetaData.keys()]
#
#        solrQuery = None
#        orClauses = []
#
#        if qa is not None:
#            assert isinstance(qa, QMetaDataInfo), 'Invalid query %s' % qa
#            solrQuery = buildSolrQuery(si, solrQuery, qa, orClauses)
#            if QMetaDataInfo.type in qa: types = qa.type.values
#
#        if qi is not None:
#            solrQuery = buildSolrQuery(si, solrQuery, qi, orClauses)
#
#        if qd is not None:
#            solrQuery = buildSolrQuery(si, solrQuery, qd, orClauses)
#
#        if orClauses:
#            extend = None
#            for clause in orClauses:
#                if extend: extend = extend | clause
#                else: extend = clause
#
#            if solrQuery is None: solrQuery = si.query(extend)
#            else: solrQuery = solrQuery.query(extend)
#
#        if solrQuery is None: solrQuery = si.query()
#        solrQuery = buildShards(solrQuery, self.solr_server_url, types)
#
#        return solrQuery

# ----------------------------------------------------------------

    def buildQuery(self, session, scheme, offset=None, limit=1000, q=None):
        '''
        @see: IArticleSearchProvider.buildQuery()

        Creates the solr query and executes it against the Solr server. Then builds an SQL query that will return
        the data found by Solr.
        '''

        sql = session.query(Article)

        if q:
            sql = buildQuery(sql, q, Article)
        sql = buildLimits(sql, offset, limit)
        return sql

#        solrQuery = self.processQuery(session, scheme, q)
#        solrQuery = buildLimits(solrQuery, offset, limit)
#
#        response = solrQuery.execute()
#        if response.status != 0:
#            return None
#
#        count = response.result.numFound
#        sql = session.query(MetaDataMapped, MetaInfoMapped)
#        sql = sql.join(MetaInfoMapped, MetaDataMapped.Id == MetaInfoMapped.MetaData)
#
#        idList = []
#        for metaDataInfo in response:
#            print(metaDataInfo)
#            idList.append(metaDataInfo["MetaDataId"])
#
#        if idList:
#            sql = sql.filter(MetaInfoMapped.Id.in_(idList))
#
#        # TODO: test
#        self.buildFacetsQuery(session, scheme, qa=None, qi=None, qd=None)
#
#        return (sql, count)

# ----------------------------------------------------------------

    def buildFacetsQuery(self, session, scheme, qa=None, qi=None, qd=None):
        '''
        @see: ISearchProvider.getFacets()

        Creates the solr facets query and then return the list of facets
        '''

        facets = []

        solrQuery = self.processQuery(session, scheme, qa, qi, qd)

        # construct the facets query
        solrQuery = solrQuery.facet_by("Type")
        solrQuery = solrQuery.facet_by("AudioEncoding")
        solrQuery = solrQuery.facet_by("SampleRate")
        solrQuery = solrQuery.facet_by("AudioBitrate")
        solrQuery = solrQuery.facet_by("Genre")
        solrQuery = solrQuery.facet_by("Year")
        solrQuery = solrQuery.facet_by("CameraMake")
        solrQuery = solrQuery.facet_by("VideoEncoding")
        solrQuery = solrQuery.facet_by("VideoBitrate")

        response = solrQuery.execute()
        if response.status != 0:
            return None

        count = response.result.numFound

        # init the list of facets
        print(response.facet_counts.facet_fields)

        return IterPart(facets, count, 0, count)
Example #13
0
class POFileService(IPOFileService):
    '''
    Implementation for @see: IPOFileService
    '''

    default_charset = 'UTF-8'
    wire.config('default_charset',
                doc='''
    The default character set to use whenever a PO file is uploaded and the character
    set of the content is not specified''')

    poFileManager = IPOFileManager
    wire.entity('poFileManager')
    cdmLocale = ICDM
    wire.entity('cdmLocale')
    pluginService = IPluginService
    wire.entity('pluginService')
    componentService = IComponentService
    wire.entity('componentService')

    def __init__(self):
        assert isinstance(
            self.default_charset,
            str), 'Invalid default charset %s' % self.default_charset
        assert isinstance(
            self.poFileManager,
            IPOFileManager), 'Invalid PO file manager %s' % self.poFileManager
        assert isinstance(self.cdmLocale,
                          ICDM), 'Invalid PO CDM %s' % self.cdmLocale
        assert isinstance(
            self.pluginService,
            IPluginService), 'Invalid plugin service %s' % self.pluginService
        assert isinstance(
            self.componentService, IComponentService
        ), 'Invalid component service %s' % self.componentService

    def getGlobalPOFile(self, locale, scheme):
        '''
        @see: IPOFileService.getGlobalPOFile
        '''
        path = self._cdmPath(locale)
        try:
            try:
                cdmFileTimestamp = self.cdmLocale.getTimestamp(path)
            except PathNotFound:
                republish = True
            else:
                mngFileTimestamp = self.poFileManager.getGlobalPOTimestamp(
                    locale)
                republish = False if mngFileTimestamp is None else cdmFileTimestamp < mngFileTimestamp

            if republish:
                self.cdmLocale.publishFromFile(
                    path, self.poFileManager.getGlobalPOFile(locale))
        except InvalidLocaleError:
            raise InputError(
                _('Invalid locale %(locale)s') % dict(locale=locale))
        return self.cdmLocale.getURI(path, scheme)

    def getComponentPOFile(self, component, locale, scheme):
        '''
        @see: IPOFileService.getComponentPOFile
        '''
        self.componentService.getById(component)
        path = self._cdmPath(locale, component=component)
        try:
            try:
                cdmFileTimestamp = self.cdmLocale.getTimestamp(path)
            except PathNotFound:
                republish = True
            else:
                mngFileTimestamp = max(
                    self.poFileManager.getGlobalPOTimestamp(locale)
                    or datetime.min,
                    self.poFileManager.getComponentPOTimestamp(
                        component, locale) or datetime.min)
                republish = False if mngFileTimestamp is None else cdmFileTimestamp < mngFileTimestamp

            if republish:
                self.cdmLocale.publishFromFile(
                    path,
                    self.poFileManager.getComponentPOFile(component, locale))
        except InvalidLocaleError:
            raise InputError(
                _('Invalid locale %(locale)s') % dict(locale=locale))
        return self.cdmLocale.getURI(path, scheme)

    def getPluginPOFile(self, plugin, locale, scheme):
        '''
        @see: IPOFileService.getPluginPOFile
        '''
        pluginObj = self.pluginService.getById(plugin)
        assert isinstance(pluginObj, Plugin)
        if pluginObj.Component:
            return self.getComponentPOFile(pluginObj.Component, locale, scheme)

        path = self._cdmPath(locale, plugin=plugin)
        try:
            try:
                cdmFileTimestamp = self.cdmLocale.getTimestamp(path)
            except PathNotFound:
                republish = True
            else:
                mngFileTimestamp = max(
                    self.poFileManager.getGlobalPOTimestamp(locale)
                    or datetime.min,
                    self.poFileManager.getPluginPOTimestamp(plugin, locale)
                    or datetime.min)
                republish = False if mngFileTimestamp is None else cdmFileTimestamp < mngFileTimestamp

            if republish:
                self.cdmLocale.publishFromFile(
                    path, self.poFileManager.getPluginPOFile(plugin, locale))
        except InvalidLocaleError:
            raise InputError(
                _('Invalid locale %(locale)s') % dict(locale=locale))
        return self.cdmLocale.getURI(path, scheme)

    # ----------------------------------------------------------------

    def updateGlobalPOFile(self, locale, poFile):
        '''
        @see: IPOFileService.updateGlobalPOFile
        '''
        assert isinstance(poFile, Content), 'Invalid PO content %s' % poFile
        # Convert the byte file to text file
        poFile = codecs.getreader(poFile.charSet
                                  or self.default_charset)(poFile)
        try:
            self.poFileManager.updateGlobalPOFile(locale, poFile)
        except UnicodeDecodeError:
            raise InvalidPOFile(poFile)
        if poFile.next(): raise ToManyFiles()

    def updateComponentPOFile(self, component, locale, poFile):
        '''
        @see: IPOFileService.updateComponentPOFile
        '''
        self.componentService.getById(component)
        assert isinstance(poFile, Content), 'Invalid PO content %s' % poFile
        # Convert the byte file to text file
        poFile = codecs.getreader(poFile.charSet
                                  or self.default_charset)(poFile)
        try:
            self.poFileManager.updateComponentPOFile(component, locale, poFile)
        except UnicodeDecodeError:
            raise InvalidPOFile(poFile)
        if poFile.next(): raise ToManyFiles()

    def updatePluginPOFile(self, plugin, locale, poFile):
        '''
        @see: IPOFileService.updatePluginPOFile
        '''
        assert isinstance(poFile, Content), 'Invalid PO content %s' % poFile
        pluginObj = self.pluginService.getById(plugin)
        assert isinstance(pluginObj, Plugin)
        if pluginObj.Component:
            return self.updateComponentPOFile(pluginObj.Component, locale,
                                              poFile)
        # Convert the byte file to text file
        poFile = codecs.getreader(poFile.charSet
                                  or self.default_charset)(poFile)
        try:
            self.poFileManager.updatePluginPOFile(plugin, locale, poFile)
        except UnicodeDecodeError:
            raise InvalidPOFile(poFile)
        if poFile.next(): raise ToManyFiles()

    # ----------------------------------------------------------------

    def _cdmPath(self, locale, component=None, plugin=None):
        '''
        Returns the path to the CDM PO file corresponding to the given locale and / or
        component / plugin. If no component or plugin was specified it returns the
        name of the global PO file.
        
        @param locale: string
            The locale.
        @param component: string
            The component id.
        @param plugin: string
            The plugin id.
        @return: string
            The file path.
        '''
        assert isinstance(locale, str), 'Invalid locale %s' % locale

        path = []
        if component:
            path.append('component')
            path.append(component)
        elif plugin:
            path.append('plugin')
            path.append(plugin)
        else:
            path.append('global')
        path.append(locale)
        return '%s.po' % '-'.join(path)
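        # For illustration (hypothetical ids): _cdmPath('en') yields 'global-en.po',
        # _cdmPath('en', component='media-archive') yields 'component-media-archive-en.po' and
        # _cdmPath('en', plugin='superdesk-user') yields 'plugin-superdesk-user-en.po'.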
Example #14
0
class UserServiceAlchemy(SessionSupport, IUserService):
    '''
    @see: IUserService
    '''
    default_user_type_key = 'standard'
    wire.config('default_user_type_key',
                doc='''
    Default user type for users that do not have a user type key specified''')

    def __init__(self):
        '''
        Construct the service
        '''

    def getById(self, id):
        '''
        @see: IUserService.getById
        '''
        user = self.session().query(UserMapped).get(id)
        if not user: raise InputError(Ref(_('Unknown user id'), ref=User.Id))
        assert isinstance(user, UserMapped), 'Invalid user %s' % user
        return user

    def getByUuid(self, uuid):
        '''
        @see: IUserService.getByUuid
        '''
        sql = self.session().query(UserMapped)
        sql = sql.filter(UserMapped.Uuid == uuid)
        user = sql.one()

        assert isinstance(user, UserMapped), 'Invalid user %s' % user
        return user

    def getAll(self, offset=None, limit=None, detailed=False, q=None):
        '''
        @see: IUserService.getAll
        '''
        if limit == 0: entities = ()
        else: entities = None
        if detailed or entities is None:
            sql = self.session().query(UserMapped)

            activeUsers = True
            if q:
                assert isinstance(q, QUser), 'Invalid query %s' % q
                sql = buildQuery(sql, q, UserMapped)
                if QUser.all in q:
                    filter = None
                    if AsLike.like in q.all:
                        for col in ALL_NAMES:
                            filter = col.like(
                                q.all.like
                            ) if filter is None else filter | col.like(
                                q.all.like)
                    elif AsLike.ilike in q.all:
                        for col in ALL_NAMES:
                            filter = col.ilike(
                                q.all.ilike
                            ) if filter is None else filter | col.ilike(
                                q.all.ilike)
                    sql = sql.filter(filter)

                if (QUser.inactive in q) and (AsBoolean.value in q.inactive):
                    activeUsers = not q.inactive.value

            sql = sql.filter(UserMapped.Active == activeUsers)
            sql = sql.filter(UserMapped.Type == self.default_user_type_key)

            if entities is None:
                entities = buildLimits(sql, offset, limit).all()
            if detailed: return IterPart(entities, sql.count(), offset, limit)
        return entities

    def insert(self, user):
        '''
        @see: IUserService.insert
        '''
        assert isinstance(user, User), 'Invalid user %s' % user

        if user.Uuid is None: user.Uuid = str(uuid4().hex)
        if user.Cid is None: user.Cid = 0

        userDb = UserMapped()
        userDb.password = user.Password
        userDb.CreatedOn = current_timestamp()
        userDb.typeId = self._userTypeId(user.Type)
        try:
            self.session().add(copy(user, userDb, exclude=('Type', )))
            self.session().flush((userDb, ))
        except SQLAlchemyError as e:
            handle(e, userDb)
        user.Id = userDb.Id
        return user.Id

    def update(self, user):
        '''
        @see: IUserService.update
        Shouldn't this be handled automatically via the entity service?
        '''
        assert isinstance(user, User), 'Invalid user %s' % user

        userDb = self.session().query(UserMapped).get(user.Id)
        if not userDb:
            raise InputError(Ref(_('Unknown user id'), ref=User.Id))
        assert isinstance(userDb, UserMapped), 'Invalid user %s' % userDb
        try:
            if user.Type: userDb.typeId = self._userTypeId(user.Type)
            userDb.Cid = userDb.Cid if userDb.Cid else 0
            userDb.Cid = user.Cid if user.Cid else userDb.Cid + 1
            self.session().flush((copy(user, userDb,
                                       exclude=('Type', 'CId')), ))
        except SQLAlchemyError as e:
            handle(e, userDb)

    def delete(self, id):
        '''
        @see: IUserService.delete
        '''
        userDb = self.session().query(UserMapped).get(id)
        if not userDb or not userDb.Active: return False
        assert isinstance(userDb, UserMapped), 'Invalid user %s' % userDb
        userDb.Active = False
        self.session().merge(userDb)
        return True

    def changePassword(self, id, password):
        '''
        @see: IUserService.changePassword
        '''
        assert isinstance(password,
                          Password), 'Invalid password change %s' % password
        try:
            userDb = self.session().query(UserMapped).filter(
                UserMapped.Id == id).one(
                )  #.filter(UserMapped.password == password.OldPassword).one()
        except NoResultFound:
            userDb = None

        if not userDb:
            raise InputError(
                Ref(_('Invalid user id or old password'), ref=User.Id))
        assert isinstance(userDb, UserMapped), 'Invalid user %s' % userDb

        try:
            userDb.password = password.NewPassword
            self.session().flush((userDb, ))
        except SQLAlchemyError as e:
            handle(e, userDb)

    # ----------------------------------------------------------------

    def _userTypeId(self, key):
        '''
        Provides the user type id that has the provided key.
        '''
        if not key: key = self.default_user_type_key

        try:
            sql = self.session().query(
                UserTypeMapped.id).filter(UserTypeMapped.Key == key)
            return sql.one()[0]
        except NoResultFound:
            raise InputError(
                Ref(_('Invalid user type %(userType)s') % dict(userType=key),
                    ref=User.Type))
Example #15
0
class RegisterMethodOverride(HandlerProcessorProceed):
    '''
    Provides the method override gateways, basically support for @see: MethodOverrideHandler.
    '''
    pattern_xmethod_override = r'(?i)X\-HTTP\-Method\-Override\:[\s]*%s[\s]*'; wire.config('pattern_xmethod_override', doc='''
    The header pattern for the method override, needs to contain '%s' where the value will be placed.
    ''')
    methods_override = {
                        HTTP_DELETE: [HTTP_GET],
                        HTTP_PUT: [HTTP_POST],
                        }; wire.config('methods_override', doc='''
    A dictionary containing as key the overriding method and as value the methods that it overrides.
    ''')
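    # As an illustration of the two configurations above (a sketch, assuming the HTTP_DELETE and
    # HTTP_GET constants render as 'DELETE' and 'GET'): for a gateway whose Methods contain
    # HTTP_DELETE, register() below yields an extra gateway whose Methods are the carrier methods
    # [HTTP_GET] and whose Headers include the rendered pattern
    # '(?i)X\-HTTP\-Method\-Override\:[\s]*DELETE[\s]*', so a GET request carrying the
    # 'X-HTTP-Method-Override: DELETE' header is treated as a DELETE.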
    
    def __init__(self):
        '''
        Construct the populate method override filter.
        '''
        assert isinstance(self.pattern_xmethod_override, str), \
        'Invalid method override pattern %s' % self.pattern_xmethod_override
        assert isinstance(self.methods_override, dict), 'Invalid methods override %s' % self.methods_override
        super().__init__()
    
    def process(self, reply:Reply, **keyargs):
        '''
        @see: HandlerProcessorProceed.process
        
        Adds the default gateways.
        '''
        assert isinstance(reply, Reply), 'Invalid reply %s' % reply
        if reply.gateways is None: return
        
        reply.gateways = self.register(reply.gateways)
            
    # ----------------------------------------------------------------
            
    def register(self, gateways):
        '''
        Register the method override gateways based on the provided gateways.
        '''
        assert isinstance(gateways, Iterable), 'Invalid gateways %s' % gateways
        for gateway in gateways:
            assert isinstance(gateway, Gateway), 'Invalid gateway %s' % gateway
            yield gateway
            if not gateway.Methods: continue
            
            methods, overrides = set(), set()
            for method in gateway.Methods:
                override = self.methods_override.get(method)
                if override:
                    methods.add(method)
                    overrides.update(override)
            
            # If the override methods are already declared as methods we don't need to declare them anymore
            if methods.union(overrides).issubset(gateway.Methods): continue
                
            ogateway = Gateway()
            copy(gateway, ogateway, exclude=('Methods',))
            ogateway.Methods = list(overrides)
            if Gateway.Headers not in ogateway: ogateway.Headers = []
            for method in methods:
                ogateway.Headers.append(self.pattern_xmethod_override % method)
            yield ogateway
Example #16
0
class VideoPersistanceAlchemy(SessionSupport, IMetaDataHandler):
    '''
    Provides the service that handles the video persistence @see: IVideoPersistanceService.
    '''

    format_file_name = '%(id)s.%(file)s'
    wire.config('format_file_name',
                doc='''
    The format for the videos file names in the media archive''')
    default_format_thumbnail = '%(size)s/video.jpg'
    wire.config('default_format_thumbnail',
                doc='''
    The format for the default video thumbnail in the media archive''')
    format_thumbnail = '%(size)s/%(id)s.%(name)s.jpg'
    wire.config('format_thumbnail',
                doc='''
    The format for the video thumbnails in the media archive''')
    ffmpeg_path = join('/', 'usr', 'bin', 'ffmpeg')
    wire.config('ffmpeg_path',
                doc='''
    The path where the ffmpeg is found''')

    video_supported_files = 'flv, avi, mov, mp4, mpg, wmv, 3gp, asf, rm, swf'

    thumbnailManager = IThumbnailManager
    wire.entity('thumbnailManager')

    # Provides the thumbnail referencer

    def __init__(self):
        assert isinstance(
            self.format_file_name,
            str), 'Invalid format file name %s' % self.format_file_name
        assert isinstance(
            self.default_format_thumbnail,
            str), 'Invalid format thumbnail %s' % self.default_format_thumbnail
        assert isinstance(
            self.format_thumbnail,
            str), 'Invalid format thumbnail %s' % self.format_thumbnail
        assert isinstance(
            self.video_supported_files,
            str), 'Invalid supported files %s' % self.video_supported_files
        assert isinstance(self.ffmpeg_path,
                          str), 'Invalid ffmpeg path %s' % self.ffmpeg_path
        assert isinstance(
            self.thumbnailManager, IThumbnailManager
        ), 'Invalid thumbnail manager %s' % self.thumbnailManager

        self.videoSupportedFiles = set(
            re.split('[\\s]*\\,[\\s]*', self.video_supported_files))
        self._defaultThumbnailFormatId = self._thumbnailFormatId = self._metaTypeId = None

    def addMetaInfo(self, metaDataMapped, languageId):
        videoInfoMapped = VideoInfoMapped()
        videoInfoMapped.MetaData = metaDataMapped.Id
        videoInfoMapped.Language = languageId
        try:
            self.session().add(videoInfoMapped)
            self.session().flush((videoInfoMapped, ))
        except SQLAlchemyError as e:
            handle(e, videoInfoMapped)
        return videoInfoMapped

    def processByInfo(self, metaDataMapped, contentPath, contentType):
        '''
        @see: IMetaDataHandler.processByInfo
        '''
        if contentType is not None and contentType.startswith(META_TYPE_KEY):
            return self.process(metaDataMapped, contentPath)

        extension = splitext(metaDataMapped.Name)[1][1:]
        if extension in self.videoSupportedFiles:
            return self.process(metaDataMapped, contentPath)

        return False

    def process(self, metaDataMapped, contentPath):
        '''
        @see: IMetaDataHandler.process
        '''
        assert isinstance(
            metaDataMapped,
            MetaDataMapped), 'Invalid meta data mapped %s' % metaDataMapped

        thumbnailPath = contentPath + '.jpg'
        p = Popen((self.ffmpeg_path, '-i', contentPath, '-vframes', '1', '-an',
                   '-ss', '2', thumbnailPath),
                  stdin=PIPE,
                  stdout=PIPE,
                  stderr=STDOUT)
        if p.wait() != 0: return False
        if not exists(thumbnailPath): return False

        videoDataEntry = VideoDataEntry()
        videoDataEntry.Id = metaDataMapped.Id
        while True:
            line = p.stdout.readline()
            if not line: break
            line = str(line, 'utf-8')
            if line.find('misdetection possible!') != -1: return False

            if line.find('Video') != -1 and line.find('Stream') != -1:
                try:
                    values = self.extractVideo(line)
                    videoDataEntry.VideoEncoding = values[0]
                    videoDataEntry.Width = values[1]
                    videoDataEntry.Height = values[2]
                    if values[3]: videoDataEntry.VideoBitrate = values[3]
                    videoDataEntry.Fps = values[4]
                except:
                    pass
            elif line.find('Audio') != -1 and line.find('Stream') != -1:
                try:
                    values = self.extractAudio(line)
                    videoDataEntry.AudioEncoding = values[0]
                    videoDataEntry.SampleRate = values[1]
                    videoDataEntry.Channels = values[2]
                    videoDataEntry.AudioBitrate = values[3]
                except:
                    pass
            elif line.find('Duration') != -1 and line.find('start') != -1:
                try:
                    values = self.extractDuration(line)
                    videoDataEntry.Length = values[0]
                    videoDataEntry.VideoBitrate = values[1]
                except:
                    pass
            elif line.find('Output #0') != -1:
                break

        path = self.format_file_name % {
            'id': metaDataMapped.Id,
            'file': metaDataMapped.Name
        }
        path = ''.join((META_TYPE_KEY, '/',
                        self.generateIdPath(metaDataMapped.Id), '/', path))

        metaDataMapped.content = path
        metaDataMapped.typeId = self.metaTypeId()
        metaDataMapped.Type = META_TYPE_KEY
        metaDataMapped.thumbnailFormatId = self.thumbnailFormatId()
        metaDataMapped.IsAvailable = True

        self.thumbnailManager.putThumbnail(self.thumbnailFormatId(),
                                           thumbnailPath, metaDataMapped)
        remove(thumbnailPath)

        try:
            self.session().add(videoDataEntry)
            self.session().flush((videoDataEntry, ))
        except SQLAlchemyError:
            metaDataMapped.IsAvailable = False
            raise InputError(Ref(_('Cannot save the video data'), ))

        return True

    # ----------------------------------------------------------------

    @app.populate
    def populateThumbnail(self):
        '''
        Populates the thumbnail for videos.
        '''
        self.thumbnailManager.putThumbnail(
            self.defaultThumbnailFormatId(),
            abspath(join(pythonPath(), 'resources', 'video.jpg')))

    # ----------------------------------------------------------------

    def metaTypeId(self):
        '''
        Provides the meta type id.
        '''
        if self._metaTypeId is None:
            self._metaTypeId = metaTypeFor(self.session(), META_TYPE_KEY).Id
        return self._metaTypeId

    def defaultThumbnailFormatId(self):
        '''
        Provides the thumbnail format id.
        '''
        if not self._defaultThumbnailFormatId:
            self._defaultThumbnailFormatId = thumbnailFormatFor(
                self.session(), self.default_format_thumbnail).id
        return self._defaultThumbnailFormatId

    def thumbnailFormatId(self):
        '''
        Provides the thumbnail format id.
        '''
        if not self._thumbnailFormatId:
            self._thumbnailFormatId = thumbnailFormatFor(
                self.session(), self.format_thumbnail).id
        return self._thumbnailFormatId

    def extractDuration(self, line):
        # Duration: 00:00:30.06, start: 0.000000, bitrate: 585 kb/s
        properties = line.split(',')

        length = properties[0].partition(':')[2]
        length = length.strip().split(':')
        length = int(length[0]) * 3600 + int(length[1]) * 60 + int(
            float(length[2]))

        bitrate = properties[2]
        bitrate = bitrate.partition(':')[2]
        bitrate = bitrate.strip().partition(' ')
        if bitrate[2] == 'kb/s':
            bitrate = int(float(bitrate[0]))
        else:
            bitrate = None

        return (length, bitrate)
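        # Worked example: for the sample line in the comment above, the duration part
        # '00:00:30.06' gives 0 * 3600 + 0 * 60 + 30 = 30 seconds and the bitrate part
        # '585 kb/s' gives 585, so the method returns (30, 585).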

    def extractVideo(self, line):
        # Stream #0.0(eng): Video: h264 (Constrained Baseline), yuv420p, 416x240, 518 kb/s, 29.97 fps, 29.97 tbr, 2997 tbn, 59.94 tbc
        properties = (line.rpartition('Video:')[2]).split(',')

        index = 0
        encoding = properties[index].strip()

        index += 2
        size = (properties[index].strip()).partition('x')
        width = int(size[0])
        height = int(size[2])

        index += 1
        bitrate = properties[index].strip().partition(' ')
        if bitrate[2] == 'kb/s':
            bitrate = int(float(bitrate[0]))
            index += 1
        else:
            bitrate = None

        fps = properties[index].strip().partition(' ')
        if fps[2] == 'fps' or fps[2] == 'tbr':
            fps = int(float(fps[0]))
        else:
            fps = None

        return (encoding, width, height, bitrate, fps)
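        # Worked example: for the sample line in the comment above the method returns
        # ('h264 (Constrained Baseline)', 416, 240, 518, 29); the fps value is truncated to an int.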

    def extractAudio(self, line):
        # Stream #0.1(eng): Audio: aac, 44100 Hz, stereo, s16, 61 kb/s
        properties = (line.rpartition(':')[2]).split(',')

        index = 0
        encoding = properties[index].strip()

        index += 1
        sampleRate = properties[index].strip().partition(' ')
        if sampleRate[2] == 'Hz':
            sampleRate = int(float(sampleRate[0]))
        else:
            sampleRate = None

        index += 1
        channels = properties[index].strip()

        index += 2
        bitrate = properties[4].strip().partition(' ')
        if bitrate[2] == 'kb/s':
            bitrate = int(float(bitrate[0]))
        else:
            bitrate = None

        return (encoding, sampleRate, channels, bitrate)
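        # Worked example: for the sample line in the comment above the method returns
        # ('aac', 44100, 'stereo', 61).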

    # ----------------------------------------------------------------

    def generateIdPath(self, id):
        return "{0:03d}".format((id // 1000) % 1000)
Example #17
0
class POFileManager(IPOFileManager):
    '''
    Implementation for @see: IPOFileManager
    '''

    locale_dir_path = join('workspace', 'shared', 'locale')
    wire.config('locale_dir_path', doc='''
    The locale repository path''')
    catalog_config = {
        'header_comment': '''\
# Translations template for PROJECT.
# Copyright (C) YEAR ORGANIZATION
# This file is distributed under the same license as the PROJECT project.
# Gabriel Nistor <*****@*****.**>, YEAR.
#''',
        'project': 'Sourcefabric',
        'version': '1.0',
        'copyright_holder': 'Sourcefabric o.p.s.',
        'msgid_bugs_address': '*****@*****.**',
        'last_translator': 'Automatic',
        'language_team': 'Automatic',
        'fuzzy': False,
    }
    wire.config('catalog_config',
                doc='''
    The global catalog default configuration for templates.

    :param header_comment: the header comment as string, or `None` for the default header
    :param project: the project's name
    :param version: the project's version
    :param copyright_holder: the copyright holder of the catalog
    :param msgid_bugs_address: the email address or URL to submit bug reports to
    :param creation_date: the date the catalog was created
    :param revision_date: the date the catalog was revised
    :param last_translator: the name and email of the last translator
    :param language_team: the name and email of the language team
    :param charset: the encoding to use in the output
    :param fuzzy: the fuzzy bit on the catalog header
    ''')
    write_po_config = {
        'no_location': False,
        'omit_header': False,
        'sort_output': True,
        'sort_by_file': True,
        'ignore_obsolete': True,
        'include_previous': False,
    }
    wire.config('write_po_config',
                doc='''
    The configurations used when writing the PO files.

    :param width: the maximum line width for the generated output; use `None`, 0, or a negative number to
                  completely disable line wrapping
    :param no_location: do not emit a location comment for every message
    :param omit_header: do not include the ``msgid ""`` entry at the top of the output
    :param sort_output: whether to sort the messages in the output by msgid
    :param sort_by_file: whether to sort the messages in the output by their locations
    :param ignore_obsolete: whether to ignore obsolete messages and not include them in the output; by default
                            they are included as comments
    :param include_previous: include the old msgid as a comment when updating the catalog
    ''')
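    # These configurations presumably map onto babel's PO handling: write_po_config as keyword
    # arguments to babel.messages.pofile.write_po (e.g. write_po(buffer, catalog, **self.write_po_config))
    # and catalog_config onto the babel.messages.catalog.Catalog constructor.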

    messageService = IMessageService
    wire.entity('messageService')
    sourceService = ISourceService
    wire.entity('sourceService')

    def __init__(self):
        assert isinstance(
            self.locale_dir_path,
            str), 'Invalid locale directory %s' % self.locale_dir_path
        assert isinstance(
            self.catalog_config,
            dict), 'Invalid catalog configurations %s' % self.catalog_config
        assert isinstance(
            self.write_po_config,
            dict), 'Invalid write PO configurations %s' % self.write_po_config
        assert isinstance(self.messageService, IMessageService
                          ), 'Invalid message service %s' % self.messageService
        assert isinstance(
            self.sourceService, ISourceService
        ), 'Invalid source file service %s' % self.sourceService

        if not os.path.exists(self.locale_dir_path):
            os.makedirs(self.locale_dir_path)
        if not isdir(self.locale_dir_path) or not os.access(
                self.locale_dir_path, os.W_OK):
            raise IOError('Unable to access the locale directory %s' %
                          self.locale_dir_path)

    def getGlobalPOTimestamp(self, locale):
        '''
        @see: IPOFileManager.getGlobalPOTimestamp
        '''
        try:
            locale = Locale.parse(locale)
        except UnknownLocaleError:
            raise InvalidLocaleError(locale)
        return self._lastModified(locale)

    def getComponentPOTimestamp(self, component, locale):
        '''
        @see: IPOFileManager.getComponentPOTimestamp
        '''
        assert isinstance(component,
                          str), 'Invalid component id %s' % component
        try:
            locale = Locale.parse(locale)
        except UnknownLocaleError:
            raise InvalidLocaleError(locale)
        return self._lastModified(locale, component=component)

    def getPluginPOTimestamp(self, plugin, locale):
        '''
        @see: IPOFileManager.getPluginPOTimestamp
        '''
        assert isinstance(plugin, str), 'Invalid plugin id %s' % plugin
        try:
            locale = Locale.parse(locale)
        except UnknownLocaleError:
            raise InvalidLocaleError(locale)
        return self._lastModified(locale, plugin=plugin)

    # --------------------------------------------------------------------

    def getGlobalPOFile(self, locale):
        '''
        @see: IPOFileManager.getGlobalPOFile
        '''
        try:
            locale = Locale.parse(locale)
        except UnknownLocaleError:
            raise InvalidLocaleError(locale)
        catalog = self._build(locale, self.messageService.getMessages(),
                              self._filePath(locale))
        return self._toPOFile(catalog)

    def getGlobalAsDict(self, locale):
        '''
        @see: IPOFileManager.getGlobalAsDict
        '''
        try:
            locale = Locale.parse(locale)
        except UnknownLocaleError:
            raise InvalidLocaleError(locale)

        messages = self.messageService.getMessages()
        catalog = self._build(locale, messages, self._filePath(locale))
        return self._toDict('', catalog)

    def getComponentPOFile(self, component, locale):
        '''
        @see: IPOFileManager.getComponentPOFile
        '''
        try:
            locale = Locale.parse(locale)
        except UnknownLocaleError:
            raise InvalidLocaleError(locale)
        catalog = self._build(
            locale, self.messageService.getComponentMessages(component),
            self._filePath(locale, component=component),
            self._filePath(locale))
        return self._toPOFile(catalog)

    def getComponentAsDict(self, component, locale):
        '''
        @see: IPOFileManager.getComponentAsDict
        '''
        try:
            locale = Locale.parse(locale)
        except UnknownLocaleError:
            raise InvalidLocaleError(locale)
        messages = self.messageService.getComponentMessages(component)
        catalog = self._build(locale, messages,
                              self._filePath(locale, component=component),
                              self._filePath(locale))
        return self._toDict(component, catalog)

    def getPluginPOFile(self, plugin, locale):
        '''
        @see: IPOFileManager.getPluginPOFile
        '''
        try:
            locale = Locale.parse(locale)
        except UnknownLocaleError:
            raise InvalidLocaleError(locale)
        catalog = self._build(locale,
                              self.messageService.getPluginMessages(plugin),
                              self._filePath(locale, plugin=plugin),
                              self._filePath(locale))
        return self._toPOFile(catalog)

    def getPluginAsDict(self, plugin, locale):
        '''
        @see: IPOFileManager.getPluginAsDict
        '''
        try:
            locale = Locale.parse(locale)
        except UnknownLocaleError:
            raise InvalidLocaleError(locale)
        messages = self.messageService.getPluginMessages(plugin)
        catalog = self._build(locale, messages,
                              self._filePath(locale, plugin=plugin),
                              self._filePath(locale))
        return self._toDict(plugin, catalog)

    def updateGlobalPOFile(self, locale, poFile):
        '''
        @see: IPOFileManager.updateGlobalPOFile
        '''
        try:
            locale = Locale.parse(locale)
        except UnknownLocaleError:
            raise InvalidLocaleError(locale)
        assert isinstance(poFile,
                          IInputStream), 'Invalid file object %s' % poFile

        return self._update(locale, self.messageService.getMessages(), poFile,
                            self._filePath(locale),
                            self._filePath(locale, format=FORMAT_MO))

    def updateComponentPOFile(self, component, locale, poFile):
        '''
        @see: IPOFileManager.updateComponentPOFile
        '''
        try:
            locale = Locale.parse(locale)
        except UnknownLocaleError:
            raise InvalidLocaleError(locale)
        assert isinstance(poFile,
                          IInputStream), 'Invalid file object %s' % poFile

        return self._update(
            locale, self.messageService.getComponentMessages(component),
            poFile, self._filePath(locale, component=component),
            self._filePath(locale, component=component,
                           format=FORMAT_MO), False)

    def updatePluginPOFile(self, plugin, locale, poFile):
        '''
        @see: IPOFileManager.updatePluginPOFile
        '''
        try:
            locale = Locale.parse(locale)
        except UnknownLocaleError:
            raise InvalidLocaleError(locale)
        assert isinstance(poFile,
                          IInputStream), 'Invalid file object %s' % poFile

        return self._update(
            locale, self.messageService.getPluginMessages(plugin), poFile,
            self._filePath(locale, plugin=plugin),
            self._filePath(locale, plugin=plugin, format=FORMAT_MO), False)

    # --------------------------------------------------------------------

    def _filePath(self, locale, component=None, plugin=None, format=FORMAT_PO):
        '''
        Returns the path to the internal PO file corresponding to the given locale and / or
        component / plugin. If no component or plugin is specified it returns the
        path of the global PO file.

        @param locale: Locale
            The locale.
        @param component: string
            The component id.
        @param plugin: string
            The plugin id.
        @param format: string
            The format pattern for the file, the default is the PO file.
        @return: string
            The file path.
        '''
        assert isinstance(locale, Locale), 'Invalid locale %s' % locale
        assert component is None or isinstance(
            component, str), 'Invalid component %s' % component
        assert plugin is None or isinstance(plugin,
                                            str), 'Invalid plugin %s' % plugin
        assert not (
            component and plugin
        ), 'Cannot process a component id %s and a plugin id %s' % (component,
                                                                    plugin)

        path = [self.locale_dir_path]
        if component:
            path.append('component')
            name = component
        elif plugin:
            path.append('plugin')
            name = plugin
        else:
            name = 'global'

        path.append(format % (name, locale))

        return join(*path)

    def _lastModified(self, locale, component=None, plugin=None):
        '''
        Provides the last modification time stamp for the provided locale. You can specify the component id in order to
        get the last modification for the component domain, the plugin id for the plugin domain, or neither to get the
        global domain modification.

        @param locale: Locale
            The locale to get the last modification for.
        @param component: string|None
            The component id to get the last modification for.
        @param plugin: string|None
            The plugin id to get the last modification for.
        @return: datetime|None
            The last modification time stamp, None if there is no such time stamp available.
        '''
        assert isinstance(locale, Locale), 'Invalid locale %s' % locale
        assert not (
            component and plugin
        ), 'Cannot process a component id %s and a plugin id %s' % (component,
                                                                    plugin)

        q = QSource()
        q.lastModified.orderDesc()
        if component: q.component = component
        elif plugin: q.plugin = plugin
        sources = self.sourceService.getAll(0, 1, q=q)
        try:
            lastModified = next(iter(sources)).LastModified
        except StopIteration:
            lastModified = None

        path = self._filePath(locale, component, plugin)
        if isfile(path):
            fileModified = datetime.fromtimestamp(os.stat(path).st_mtime)
            # max() with None raises a TypeError, so only compare when a source time stamp exists.
            lastModified = fileModified if lastModified is None else max(lastModified, fileModified)
        return lastModified

    def _processCatalog(self, catalog, messages, fallBack=None):
        '''
        Processes a catalog based on the given messages list: the catalog is brought in sync with the list of
        messages.

        @param catalog: Catalog
            The catalog to keep in sync.
        @param messages: Iterable
            The messages to update the catalog with.
        @param fallBack: Catalog
            The fall back catalog to get the missing catalog messages.
        @return: Catalog
            The same catalog
        '''
        assert isinstance(catalog, Catalog), 'Invalid catalog %s' % catalog
        assert isinstance(messages,
                          Iterable), 'Invalid messages list %s' % messages

        for msg in catalog:
            msg.locations = []

        for msg in messages:
            assert isinstance(msg, Message)
            id = msg.Singular if not msg.Plural else (msg.Singular, ) + tuple(
                msg.Plural)
            src = self.sourceService.getById(msg.Source)
            context = msg.Context if msg.Context != '' else None
            msgC = catalog.get(msg.Singular, context)
            if msgC is None and fallBack is not None:
                assert isinstance(
                    fallBack,
                    Catalog), 'Invalid fall back catalog %s' % fallBack
                msgC = fallBack.get(msg.Singular, context)
                if msgC is not None:
                    msgC.locations = []
                    catalog[msg.Singular] = msgC
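            # Keep a copy of the current message: when the message gains plural forms, the add below replaces its
            # translation, so the saved copy is used to restore it afterwards.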
            msgCOrig = copy(msgC)
            catalog.add(id,
                        context=msg.Context if msg.Context != '' else None,
                        locations=((src.Path, msg.LineNumber), ),
                        user_comments=(msg.Comments if msg.Comments else '', ))
            if msgC: fixBabelCatalogAddBug(msgC, catalog.num_plurals)
            if msg.Plural and msgC and msgCOrig and isinstance(
                    msgCOrig.string, str) and msgCOrig.string != '':
                copyTranslation(msgCOrig, msgC)

        creationDate = catalog.creation_date  # We need to make sure that the catalog keeps its creation date.
        catalog.creation_date = creationDate
        return catalog

    def _toPOFile(self, catalog):
        '''
        Convert the catalog to a PO file like object.

        @param catalog: Catalog
            The catalog to convert to a file.
        @return: file read object
            A file like object to read the PO file from.
        '''
        assert isinstance(catalog, Catalog), 'Invalid catalog %s' % catalog

        fileObj = BytesIO()
        write_po(fileObj, catalog, **self.write_po_config)
        fileObj.seek(0)
        return fileObj

    def _toDict(self, domain, catalog):
        '''
        Convert the catalog to a dictionary.
        Format description: @see IPOFileManager.getGlobalAsDict

        @param catalog: Catalog
            The catalog to convert to a dictionary.
        @return: dict
            The dictionary in the format specified above.
        '''
        assert isinstance(catalog, Catalog), 'Invalid catalog %s' % catalog

        d = {}
        d[''] = {
            'lang': catalog.locale.language,
            'plural-forms': catalog.plural_forms
        }
        for msg in catalog:
            if not msg or msg.id == '': continue
            if isinstance(msg.id, (list, tuple)):
                key, key_plural = msg.id
                singular, plural = msg.string[0], msg.string[1]
            else:
                key, key_plural = msg.id, ''
                singular, plural = msg.string, ''
            singular = singular if singular is not None else ''
            plural = plural if plural is not None else ''
            key = key if not msg.context else "%s:%s" % (msg.context, key)
            d[key] = [key_plural, singular, plural]
        return {domain: d}

    def _build(self, locale, messages, path, pathGlobal=None):
        '''
        Builds a catalog based on the provided locale paths. The path is used as the main source; any messages that
        are part of the messages list but are not found in the path locale will be extracted, when possible, from the
        global path locale.

        @param locale: Locale
            The locale.
        @param messages: Iterable(Message)
            The messages to build the PO file on.
        @param path: string
            The path of the targeted PO file from the locale repository.
        @param pathGlobal: string|None
            The path of the global PO file from the locale repository.
        @return: Catalog
            The catalog built from the messages and the targeted / global PO files.
        '''
        assert isinstance(locale, Locale), 'Invalid locale %s' % locale
        assert isinstance(messages, Iterable), 'Invalid messages %s' % messages
        assert isinstance(path, str), 'Invalid path %s' % path
        assert pathGlobal is None or isinstance(
            pathGlobal, str), 'Invalid global path %s' % pathGlobal
        if isfile(path):
            with open(path) as fObj:
                catalog = read_po(fObj, locale)
        else:
            catalog = Catalog(locale,
                              creation_date=datetime.now(),
                              **self.catalog_config)
        if pathGlobal and isfile(pathGlobal):
            with open(pathGlobal) as fObj:
                catalogGlobal = read_po(fObj, locale)
        else:
            catalogGlobal = None

        self._processCatalog(catalog, messages, fallBack=catalogGlobal)
        catalog.revision_date = datetime.now()

        return catalog

    def _update(self, locale, messages, poFile, path, pathMO, isGlobal=True):
        assert isinstance(locale, Locale), 'Invalid locale %s' % locale
        assert isinstance(messages, Iterable), 'Invalid messages %s' % messages
        assert isinstance(poFile,
                          IInputStream), 'Invalid file object %s' % poFile
        assert isinstance(path, str), 'Invalid path %s' % path
        assert isinstance(pathMO, str), 'Invalid path MO %s' % pathMO
        assert isinstance(isGlobal,
                          bool), 'Invalid is global flag %s' % isGlobal

        catalog = read_po(poFile, locale=locale)
        assert isinstance(catalog, Catalog), 'Invalid catalog %s' % catalog
        if not catalog:
            # The catalog has no messages, no need for updating.
            return

        if not isGlobal:
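            # For component or plugin updates also bring the global catalog in sync so translations can be promoted
            # to it; if there is no global PO file yet, switch to updating the global catalog directly.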
            pathGlobal = self._filePath(locale)
            pathGlobalMO = self._filePath(locale, format=FORMAT_MO)
            if isfile(pathGlobal):
                with open(pathGlobal) as fObj:
                    catalogGlobal = read_po(fObj, locale)
                self._processCatalog(catalogGlobal,
                                     self.messageService.getMessages())
            else:
                isGlobal, path, pathMO = True, pathGlobal, pathGlobalMO
                messages = self.messageService.getMessages()
        self._processCatalog(catalog, messages)

        if isfile(path):
            with open(path) as fObj:
                catalogOld = read_po(fObj, locale)
            for msg in catalog:
                msgO = catalogOld.get(msgId(msg), msg.context)
                if not isMsgTranslated(msg) and msgO and isMsgTranslated(msgO):
                    msg.string = msgO.string
            catalog.creation_date = catalogOld.creation_date
        else:
            pathDir = dirname(path)
            if not isdir(pathDir): os.makedirs(pathDir)
            catalog.creation_date = datetime.now()

        if not isGlobal:
            # We remove all the messages that are not translated, have the same translation as in the global locale,
            # or are used in the global only by this plugin (in which case the translation is moved to the global).
            updatedGlobal = False
            for msg in list(catalog):
                id = msgId(msg)
                if not id: continue
                if not isMsgTranslated(msg):
                    catalog.delete(id, msg.context)
                else:
                    msgG = catalogGlobal.get(id, msg.context)
                    if not msgG or msgG.string == msg.string:
                        catalog.delete(id, msg.context)
                    elif not isMsgTranslated(
                            msgG) or msgG.locations == msg.locations:
                        copyTranslation(msg, msgG)
                        catalog.delete(id, msg.context)
                        updatedGlobal = True

            if updatedGlobal:
                # We remove all the messages that are not translated.
                for msg in list(catalogGlobal):
                    if not isMsgTranslated(msg):
                        catalogGlobal.delete(msgId(msg), msg.context)

                catalogGlobal.revision_date = datetime.now()
                os.makedirs(dirname(pathGlobal), exist_ok=True)
                with open(pathGlobal, 'wb') as fObj:
                    write_po(fObj, catalogGlobal, **self.write_po_config)
                os.makedirs(dirname(pathGlobalMO), exist_ok=True)
                with open(pathGlobalMO, 'wb') as fObj:
                    write_mo(fObj, catalogGlobal)
        else:
            # We remove all the messages that are not translated.
            for msg in list(catalog):
                if not isMsgTranslated(msg):
                    catalog.delete(msgId(msg), msg.context)

        catalog.revision_date = datetime.now()
        os.makedirs(dirname(path), exist_ok=True)
        with open(path, 'wb') as fObj:
            write_po(fObj, catalog, **self.write_po_config)
        os.makedirs(dirname(pathMO), exist_ok=True)
        with open(pathMO, 'wb') as fObj:
            write_mo(fObj, catalog)
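
The _filePath helper above composes the repository path from the locale and an optional component or plugin id. The
following is a minimal, self-contained sketch of that composition with illustrative arguments; FORMAT_PO here is an
assumed pattern such as '%s-%s.po' (the real constants are defined elsewhere in the code base):

# Sketch of the path composition performed by POFileManager._filePath.
# FORMAT_PO is an assumption for illustration; the actual pattern lives outside this snippet.
from os.path import join

FORMAT_PO = '%s-%s.po'

def file_path(locale_dir, locale, component=None, plugin=None, format=FORMAT_PO):
    path, name = [locale_dir], 'global'
    if component:
        path.append('component')
        name = component
    elif plugin:
        path.append('plugin')
        name = plugin
    path.append(format % (name, locale))
    return join(*path)

# file_path('workspace/shared/locale', 'en', component='ally-core')
# -> 'workspace/shared/locale/component/ally-core-en.po' (with a POSIX path join)
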
Example #18
class AuthenticationServiceAlchemy(SessionSupport, IAuthenticationService,
                                   ICleanupService):
    '''
    The service implementation that provides the authentication.
    '''

    acl = Acl
    wire.entity('acl')
    # The acl repository.
    assemblyGateways = Assembly
    wire.entity('assemblyGateways')
    # The assembly to be used for generating gateways

    authentication_token_size = 5
    wire.config('authentication_token_size',
                doc='''
    The number of characters that the authentication token should have.
    ''')
    session_token_size = 5
    wire.config('session_token_size',
                doc='''
    The number of characters that the session token should have.
    ''')
    authentication_timeout = 10
    wire.config('authentication_timeout',
                doc='''
    The number of seconds after which the login token expires.
    ''')
    session_timeout = 3600
    wire.config('session_timeout',
                doc='''
    The number of seconds after which the session expires.
    ''')

    def __init__(self):
        '''
        Construct the authentication service.
        '''
        assert isinstance(self.acl,
                          Acl), 'Invalid acl repository %s' % self.acl
        assert isinstance(
            self.assemblyGateways,
            Assembly), 'Invalid assembly gateways %s' % self.assemblyGateways
        assert isinstance(
            self.authentication_token_size,
            int), 'Invalid token size %s' % self.authentication_token_size
        assert isinstance(
            self.session_token_size,
            int), 'Invalid session token size %s' % self.session_token_size
        assert isinstance(self.authentication_timeout, int), \
        'Invalid authentication timeout %s' % self.authentication_timeout
        assert isinstance(
            self.session_timeout,
            int), 'Invalid session timeout %s' % self.session_timeout

        self._authenticationTimeOut = timedelta(
            seconds=self.authentication_timeout)
        self._sessionTimeOut = timedelta(seconds=self.session_timeout)
        self._processing = self.assemblyGateways.create(
            solicitation=Solicitation, reply=Reply)

    def authenticate(self, session):
        '''
        @see: IAuthenticationService.authenticate
        '''
        olderThan = self.session().query(current_timestamp()).scalar()
        olderThan -= self._sessionTimeOut
        sql = self.session().query(LoginMapped)
        sql = sql.filter(LoginMapped.Session == session)
        sql = sql.filter(LoginMapped.AccessedOn > olderThan)
        try:
            login = sql.one()
        except NoResultFound:
            raise InputError(Ref(_('Invalid session'), ref=Login.Session))
        assert isinstance(login, LoginMapped), 'Invalid login %s' % login
        login.AccessedOn = current_timestamp()
        self.session().flush((login, ))
        self.session().expunge(login)
        # We need to force the commit now because if an exception occurs while processing the request we still need
        # to make sure that the last access has been updated.
        commitNow()

        proc = self._processing
        assert isinstance(proc, Processing), 'Invalid processing %s' % proc

        solicitation = proc.ctx.solicitation()
        assert isinstance(
            solicitation,
            Solicitation), 'Invalid solicitation %s' % solicitation
        solicitation.userId = login.User
        solicitation.types = self.acl.types

        chain = Chain(proc)
        chain.process(**proc.fillIn(solicitation=solicitation,
                                    reply=proc.ctx.reply())).doAll()

        reply = chain.arg.reply
        assert isinstance(reply, Reply), 'Invalid reply %s' % reply
        if reply.gateways is None: return ()

        return sorted(reply.gateways,
                      key=lambda gateway: (gateway.Pattern, gateway.Methods))

    def requestLogin(self):
        '''
        @see: IAuthenticationService.requestLogin
        '''
        hash = hashlib.sha512()
        hash.update(urandom(self.authentication_token_size))

        token = TokenMapped()
        token.Token = hash.hexdigest()
        token.requestedOn = current_timestamp()

        try:
            self.session().add(token)
        except SQLAlchemyError as e:
            handle(e, token)

        return token

    def performLogin(self, authentication):
        '''
        @see: IAuthenticationService.performLogin
        '''
        assert isinstance(
            authentication,
            Authentication), 'Invalid authentication %s' % authentication

        if authentication.Token is None:
            raise InputError(
                Ref(_('The login token is required'),
                    ref=Authentication.Token))
        if authentication.HashedToken is None:
            raise InputError(
                Ref(_('The hashed login token is required'),
                    ref=Authentication.HashedToken))
        if authentication.UserName is None:
            raise InputError(
                Ref(_('A user name is required for authentication'),
                    ref=Authentication.UserName))

        olderThan = self.session().query(current_timestamp()).scalar()
        olderThan -= self._authenticationTimeOut
        sql = self.session().query(TokenMapped)
        sql = sql.filter(TokenMapped.Token == authentication.Token)
        sql = sql.filter(TokenMapped.requestedOn > olderThan)
        if sql.delete() > 0:
            commitNow()  # We make sure that the delete has been performed

            try:
                user = self.session().query(UserMapped).filter(
                    UserMapped.Name == authentication.UserName).filter(
                        UserMapped.DeletedOn == None).one()
            except NoResultFound:
                user = None

            if user is not None:
                assert isinstance(user, UserMapped), 'Invalid user %s' % user

                hashedToken = hmac.new(bytes(user.Name, 'utf8'),
                                       bytes(user.password, 'utf8'),
                                       hashlib.sha512).hexdigest()
                hashedToken = hmac.new(bytes(hashedToken, 'utf8'),
                                       bytes(authentication.Token, 'utf8'),
                                       hashlib.sha512).hexdigest()

                if authentication.HashedToken == hashedToken:
                    hash = hashlib.sha512()
                    hash.update(urandom(self.authentication_token_size))

                    login = LoginMapped()
                    login.Session = hash.hexdigest()
                    login.User = user.Id
                    login.CreatedOn = login.AccessedOn = current_timestamp()

                    try:
                        self.session().add(login)
                    except SQLAlchemyError as e:
                        handle(e, login)

                    return login

        raise InputError(_('Invalid credentials'))

    # ----------------------------------------------------------------

    def cleanExpired(self):
        '''
        @see: ICleanupService.cleanExpired
        '''
        olderThan = self.session().query(current_timestamp()).scalar()

        # Cleaning the expired tokens.
        sql = self.session().query(TokenMapped)
        sql = sql.filter(
            TokenMapped.requestedOn <= olderThan - self._authenticationTimeOut)
        deleted = sql.delete()
        assert log.debug('Cleaned \'%s\' expired authentication requests',
                         deleted) or True

        # Cleaning the expired sessions.
        sql = self.session().query(LoginMapped)
        sql = sql.filter(
            LoginMapped.AccessedOn <= olderThan - self._sessionTimeOut)
        deleted = sql.delete()
        assert log.debug('Cleaned \'%s\' expired sessions', deleted) or True
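
For reference, the HashedToken that performLogin verifies is a chained HMAC-SHA512: the password is hashed keyed with
the user name, and that digest is then used as the key for hashing the login token. The sketch below mirrors the two
hmac.new(...) calls from the service; it is a hypothetical client-side helper inferred from the verification code, not
an API provided by the service:

import hashlib
import hmac

def hashed_token(user_name, password, login_token):
    # First stage: HMAC-SHA512 over the password, keyed with the user name.
    stage_one = hmac.new(bytes(user_name, 'utf8'),
                         bytes(password, 'utf8'), hashlib.sha512).hexdigest()
    # Second stage: HMAC-SHA512 over the login token, keyed with the first digest.
    return hmac.new(bytes(stage_one, 'utf8'),
                    bytes(login_token, 'utf8'), hashlib.sha512).hexdigest()
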
Example #19
class IndexAccessHandler(HandlerProcessor):
    '''
    Implementation for a processor that indexes the access invokers by name.
    '''

    input_methods = [HTTP_POST, HTTP_PUT]
    wire.config('input_methods',
                doc='''
    @rtype: list[string]
    The HTTP method names that can have an input model in order to be processed by ACL.
    ''')
    excludable_methods = [HTTP_GET]
    wire.config('excludable_methods',
                doc='''
    @rtype: list[string]
    The HTTP method names that can have excludable names to be processed by ACL.
    ''')
    accessService = IAccessService
    wire.entity('accessService')

    def __init__(self):
        assert isinstance(
            self.input_methods,
            list), 'Invalid input methods %s' % self.input_methods
        assert isinstance(
            self.excludable_methods,
            list), 'Invalid excludable methods %s' % self.excludable_methods
        assert isinstance(
            self.accessService,
            IAccessService), 'Invalid access service %s' % self.accessService
        super().__init__(Invoker=Invoker, Element=Element)

        self.input_methods = set(self.input_methods)
        self.excludable_methods = set(self.excludable_methods)

    def process(self, chain, register: Register, **keyargs):
        '''
        @see: HandlerProcessor.process
        
        Merge the access invokers.
        '''
        assert isinstance(register, Register), 'Invalid register %s' % register
        if not register.invokers: return  # No root to process

        shadows = []
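        # Shadow invokers are merged last so that the invokers they shadow already have their access entry
        # positions assigned when the shadow entries are mapped.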
        for invoker in register.invokers:
            assert isinstance(invoker, Invoker), 'Invalid invoker %s' % invoker
            if invoker.filterName is not None: continue
            if invoker.shadowing:
                shadows.append(invoker)
                continue

            self.mergeAccess(invoker)

        for invoker in shadows:
            self.mergeAccess(invoker)

    # ----------------------------------------------------------------

    def mergeAccess(self, invoker):
        '''
        Creates and persists the access for the provided invoker.
        '''
        assert isinstance(invoker, Invoker), 'Invalid invoker %s' % invoker

        access = AccessCreate()
        access.Method = invoker.methodHTTP
        access.Output = signature(
            Non if invoker.output is None else invoker.output)

        # Process the path and types.
        position, items = 1, []
        for el in invoker.path:
            assert isinstance(el, Element), 'Invalid element %s' % el
            if el.property:
                if el.shadowing:
                    assert isinstance(
                        el.shadowing,
                        Element), 'Invalid element %s' % el.shadowing
                    assert isinstance(el.shadowing.accessEntryPosition, int), \
                    'Invalid element position %s' % el.shadowing.accessEntryPosition
                    if access.EntriesShadowing is None:
                        access.EntriesShadowing = {}
                    access.EntriesShadowing[
                        position] = el.shadowing.accessEntryPosition
                elif el.shadowed:
                    assert isinstance(
                        el.shadowed,
                        Element), 'Invalid element %s' % el.shadowed
                    assert isinstance(el.shadowed.accessEntryPosition, int), \
                    'Invalid element position %s' % el.shadowed.accessEntryPosition
                    if access.EntriesShadowed is None:
                        access.EntriesShadowed = {}
                    access.EntriesShadowed[
                        position] = el.shadowed.accessEntryPosition
                else:
                    if access.Entries is None: access.Entries = {}
                    access.Entries[position] = signature(el.property)
                    el.accessEntryPosition = position
                items.append('*')
                position += 1
            else:
                assert isinstance(el.name,
                                  str), 'Invalid element name %s' % el.name
                items.append(el.name)
        access.Path = '/'.join(items)

        # Associate the shadows.
        if invoker.shadowing:
            spath = '/'.join('*' if el.property else el.name
                             for el in invoker.shadowing.path)
            access.Shadowing = generateId(spath, invoker.shadowing.methodHTTP)
            spath = '/'.join('*' if el.property else el.name
                             for el in invoker.shadowed.path)
            access.Shadowed = generateId(spath, invoker.shadowed.methodHTTP)

        # Associate the input model properties.
        if invoker.methodHTTP in self.input_methods and invoker.modelInput:
            assert isinstance(invoker.modelInput,
                              Input), 'Invalid input %s' % invoker.modelInput
            assert isinstance(
                invoker.modelInput.type,
                TypeModel), 'Invalid model %s' % invoker.modelInput.type

            for name, prop in invoker.modelInput.type.properties.items():
                if not isinstance(prop, TypePropertyContainer): continue
                assert isinstance(prop, TypePropertyContainer)
                if access.Properties is None: access.Properties = {}
                access.Properties[name] = signature(prop.type)

        try:
            present = self.accessService.getById(
                generateId(access.Path, access.Method))
        except:
            assert log.debug('There is no access for \'%s\' with %s',
                             access.Path, access.Method) or True
        else:
            assert isinstance(present, Access), 'Invalid access %s' % present
            assert present.Path == access.Path, \
            'Problems with hashing, hash %s it is the same for \'%s\' and \'%s\'' % (access.Id, present.Path, access.Path)
            if present.Hash == generateHash(access): return
            log.info(
                'Removing access %s since it is not compatible with the current structure',
                present)
            self.accessService.delete(present.Id)

        # TODO: Gabriel: After refactoring the encoder on the new structure you can have access to the encoded names,
        # maybe by indexes rather than doEncode; this remains to be seen. After the encoder we can also implement
        # the excludable properties and then convince the Gateway service to do some assemblage and remove unwanted
        # indexes.

        self.accessService.insert(access)
        assert log.debug('Added access %s', access) or True
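
The access pattern persisted by mergeAccess replaces every property element of the invoker path with '*' and keeps
plain names as they are; the access id is then generated from that pattern and the HTTP method. A rough illustration
of the pattern building, using a simplified stand-in for the context Element objects (only the attributes the loop
reads are modeled):

# Simplified illustration of how an invoker path becomes an access path pattern.
class SimpleElement:
    '''Stand-in for the context Element; only name and property are modeled.'''
    def __init__(self, name=None, property=None):
        self.name = name
        self.property = property

def access_path(elements):
    # Property elements become '*' placeholders, plain elements keep their names.
    return '/'.join('*' if el.property else el.name for el in elements)

# access_path([SimpleElement('User'), SimpleElement(property='User.Id')]) -> 'User/*'
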
Example #20
class PostServiceAlchemy(EntityGetServiceAlchemy, IPostService):
    '''
    Implementation for @see: IPostService
    '''
    default_source_name = 'internal'
    wire.config('default_source_name',
                doc='''
    The default source name used when a source was not supplied''')

    meta_max_size = 65535
    wire.config('meta_max_size',
                doc='''
    The maximal size for the meta part of a post; limited only by db system if zero.'''
                )
    content_max_size = 65535
    wire.config('content_max_size',
                doc='''
    The maximal size for the content part of a post; limited only by db system if zero.'''
                )
    content_plain_max_size = 65535
    wire.config('content_plain_max_size',
                doc='''
    The maximal size for the content plain part of a post; limited only by db system if zero.'''
                )

    postVerificationService = IPostVerificationService
    wire.entity('postVerificationService')

    # post verification service used to insert post verification

    def __init__(self):
        '''
        Construct the post service.
        '''
        EntityGetServiceAlchemy.__init__(self, PostMapped)

    def getByUuidAndSource(self, uuid, sourceId):
        '''
        @see: IPostService.getByUuidAndSource
        '''

        sql = self.session().query(PostMapped)
        sql = sql.filter(PostMapped.Feed == sourceId)
        sql = sql.filter(PostMapped.Uuid == uuid)

        try:
            post = sql.distinct().one()
        except Exception:
            post = None

        return post

    def getUnpublished(self,
                       creatorId=None,
                       authorId=None,
                       offset=None,
                       limit=None,
                       detailed=False,
                       q=None):
        '''
        @see: IPostService.getUnpublished
        '''
        assert q is None or isinstance(
            q, QPostUnpublished), 'Invalid query %s' % q
        sql = self._buildQuery(creatorId, authorId, q)
        sql = sql.filter(PostMapped.PublishedOn == None)

        sqlLimit = buildLimits(sql, offset, limit)
        if detailed:
            return IterPart(sqlLimit.all(), sql.count(), offset, limit)
        return sqlLimit.all()

    def getPublished(self,
                     creatorId=None,
                     authorId=None,
                     offset=None,
                     limit=None,
                     detailed=False,
                     q=None):
        '''
        @see: IPostService.getPublished
        '''
        assert q is None or isinstance(q, QPost), 'Invalid query %s' % q
        sql = self._buildQuery(creatorId, authorId, q)
        sql = sql.filter(PostMapped.PublishedOn != None)

        sqlLimit = buildLimits(sql, offset, limit)
        if detailed:
            return IterPart(sqlLimit.all(), sql.count(), offset, limit)
        return sqlLimit.all()

    def getAll(self,
               creatorId=None,
               authorId=None,
               offset=None,
               limit=None,
               detailed=False,
               q=None):
        '''
        @see: IPostService.getAll
        '''
        assert q is None or isinstance(q, QPost), 'Invalid query %s' % q
        sql = self._buildQuery(creatorId, authorId, q)
        sqlLimit = buildLimits(sql, offset, limit)
        if detailed:
            return IterPart(sqlLimit.all(), sql.count(), offset, limit)
        return sqlLimit.all()

    def getUnpublishedBySource(self,
                               sourceId,
                               offset=None,
                               limit=None,
                               detailed=False,
                               q=None):
        '''
        @see: IPostService.getUnpublishedBySource
        '''
        assert q is None or isinstance(
            q, QPostUnpublished), 'Invalid query %s' % q

        sql = self._buildQueryBySource(sourceId)
        sql = sql.filter(PostMapped.PublishedOn == None)

        sql = self._buildQueryWithCId(q, sql)
        sqlLimit = buildLimits(sql, offset, limit)
        if detailed:
            return IterPart(sqlLimit.all(), sql.count(), offset, limit)
        return sqlLimit.all()

    def getUnpublishedBySourceType(self,
                                   sourceTypeKey,
                                   offset=None,
                                   limit=None,
                                   detailed=False,
                                   q=None):
        '''
        @see: IPostService.getUnpublishedBySourceType
        '''
        assert q is None or isinstance(
            q, QPostUnpublished), 'Invalid query %s' % q

        sql = self._buildQueryBySourceType(sourceTypeKey)
        sql = sql.filter(PostMapped.PublishedOn == None)

        sql = self._buildQueryWithCId(q, sql)
        sqlLimit = buildLimits(sql, offset, limit)
        if detailed:
            return IterPart(sqlLimit.all(), sql.count(), offset, limit)
        return sqlLimit.all()

    def getPublishedBySource(self,
                             sourceId,
                             offset=None,
                             limit=None,
                             detailed=False,
                             q=None):
        '''
        @see: IPostService.getPublishedBySource
        '''
        assert q is None or isinstance(q,
                                       QPostPublished), 'Invalid query %s' % q

        sql = self._buildQueryBySource(sourceId)
        sql = sql.filter(PostMapped.PublishedOn != None)

        sql = self._buildQueryWithCId(q, sql)
        sqlLimit = buildLimits(sql, offset, limit)
        if detailed:
            return IterPart(sqlLimit.all(), sql.count(), offset, limit)
        return sqlLimit.all()

    def getPublishedBySourceType(self,
                                 sourceTypeKey,
                                 offset=None,
                                 limit=None,
                                 detailed=False,
                                 q=None):
        '''
        @see: IPostService.getPublishedBySourceType
        '''
        assert q is None or isinstance(q,
                                       QPostPublished), 'Invalid query %s' % q

        sql = self._buildQueryBySourceType(sourceTypeKey)
        sql = sql.filter(PostMapped.PublishedOn != None)

        sql = self._buildQueryWithCId(q, sql)
        sqlLimit = buildLimits(sql, offset, limit)
        if detailed:
            return IterPart(sqlLimit.all(), sql.count(), offset, limit)
        return sqlLimit.all()

    def getAllBySource(self,
                       sourceId,
                       offset=None,
                       limit=None,
                       detailed=False,
                       q=None):
        '''
        @see: IPostService.getAllBySource
        '''
        assert q is None or isinstance(q, QPost), 'Invalid query %s' % q

        sql = self._buildQueryBySource(sourceId)

        sql = self._buildQueryWithCId(q, sql)
        sqlLimit = buildLimits(sql, offset, limit)
        if detailed:
            return IterPart(sqlLimit.distinct(),
                            sql.distinct().count(), offset, limit)
        return sqlLimit.distinct()

    def getAllBySourceType(self,
                           sourceTypeKey,
                           offset=None,
                           limit=None,
                           detailed=False,
                           q=None):
        '''
        @see: IPostService.getAllBySourceType
        '''
        assert q is None or isinstance(q, QPost), 'Invalid query %s' % q

        sql = self._buildQueryBySourceType(sourceTypeKey)

        sql = self._buildQueryWithCId(q, sql)
        sqlLimit = buildLimits(sql, offset, limit)
        if detailed:
            return IterPart(sqlLimit.all(), sql.count(), offset, limit)
        return sqlLimit.all()

    def insert(self, post):
        '''
        @see: IPostService.insert
        '''
        assert isinstance(post, Post), 'Invalid post %s' % post

        if post.Uuid is None:
            post.Uuid = str(uuid4().hex)

        if post.WasPublished is None:
            if post.PublishedOn is None:
                post.WasPublished = 0
            else:
                post.WasPublished = 1

        postDb = PostMapped()
        copy(post, postDb, exclude=COPY_EXCLUDE)
        postDb.typeId = self._typeId(post.Type)

        postDb = self._adjustTexts(postDb)

        if post.CreatedOn is None: postDb.CreatedOn = current_timestamp()
        if not postDb.Author:
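            # No author provided: fall back to a collaborator of the creator, creating one against the default
            # (internal) source if the creator has no collaborator yet.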
            colls = self.session().query(CollaboratorMapped).filter(
                CollaboratorMapped.User == postDb.Creator).all()
            if not colls:
                coll = CollaboratorMapped()
                coll.User = postDb.Creator
                src = self.session().query(SourceMapped).filter(
                    SourceMapped.Name ==
                    PostServiceAlchemy.default_source_name).one()
                coll.Source = src.Id
                self.session().add(coll)
                self.session().flush((coll, ))
                colls = (coll, )
            postDb.Author = colls[0].Id

        self.session().add(postDb)
        self.session().flush((postDb, ))
        post.Id = postDb.Id

        postVerification = PostVerification()
        postVerification.Id = post.Id
        self.postVerificationService.insert(postVerification)

        return post.Id

    def update(self, post):
        '''
        @see: IPostService.update
        '''
        assert isinstance(post, Post), 'Invalid post %s' % post
        postDb = self.session().query(PostMapped).get(post.Id)
        if not postDb: raise InputError(Ref(_('Unknown post id'), ref=Post.Id))

        if Post.Type in post: postDb.typeId = self._typeId(post.Type)
        if post.UpdatedOn is None: postDb.UpdatedOn = current_timestamp()

        copy(post, postDb, exclude=COPY_EXCLUDE)
        postDb = self._adjustTexts(postDb)
        self.session().flush((postDb, ))

    def delete(self, id):
        '''
        @see: IPostService.delete
        '''
        postDb = self.session().query(PostMapped).get(id)
        if not postDb or postDb.DeletedOn is not None: return False

        postDb.DeletedOn = current_timestamp()
        self.session().flush((postDb, ))
        return True

    # ----------------------------------------------------------------

    def _buildQuery(self, creatorId=None, authorId=None, q=None):
        '''
        Builds the general query for posts.
        '''
        sql = self.session().query(PostMapped)
        if creatorId: sql = sql.filter(PostMapped.Creator == creatorId)
        if authorId: sql = sql.filter(PostMapped.Author == authorId)
        addDeleted = False
        if q:
            sql = buildQuery(sql, q, PostMapped)
            addDeleted = QPostUnpublished.deletedOn in q
        if not addDeleted: sql = sql.filter(PostMapped.DeletedOn == None)
        return sql

    def _typeId(self, key):
        '''
        Provides the post type id that has the provided key.
        '''
        try:
            sql = self.session().query(
                PostTypeMapped.id).filter(PostTypeMapped.Key == key)
            return sql.one()[0]
        except NoResultFound:
            raise InputError(
                Ref(_('Invalid post type %(type)s') % dict(type=key),
                    ref=Post.Type))

    def _buildQueryBySource(self, sourceId):
        sql = self.session().query(PostMapped)
        sql = sql.join(CollaboratorMapped,
                       PostMapped.Author == CollaboratorMapped.Id)
        sql = sql.filter(CollaboratorMapped.Source == sourceId)
        return sql

    def _buildQueryBySourceType(self, sourceTypeKey):
        sql = self.session().query(PostMapped)
        sql = sql.join(CollaboratorMapped,
                       PostMapped.Author == CollaboratorMapped.Id)
        sql = sql.join(SourceMapped,
                       CollaboratorMapped.Source == SourceMapped.Id)
        sql = sql.join(SourceTypeMapped,
                       SourceMapped.typeId == SourceTypeMapped.id)
        sql = sql.filter(SourceTypeMapped.Key == sourceTypeKey)
        return sql

    def _buildQueryWithCId(self, q, sql):
        if q:
            if QWithCId.cId in q and q.cId:
                if AsRange.start in q.cId:
                    sql = sql.filter(PostMapped.Id >= q.cId.start)
                if AsRange.since in q.cId:
                    sql = sql.filter(PostMapped.Id > q.cId.since)
                if AsRange.end in q.cId:
                    sql = sql.filter(PostMapped.Id <= q.cId.end)
                if AsRange.until in q.cId:
                    sql = sql.filter(PostMapped.Id < q.cId.until)
            sql = buildQuery(sql, q, PostMapped)
        return sql

    def _adjustTexts(self, postDb):
        '''
        Corrects the Meta, Content, ContentPlain fields
        '''
        # TODO: implement the proper fix using SQLAlchemy compilation rules
        nohigh = {i: None for i in range(0x10000, 0x110000)}
        if postDb.Meta:
            postDb.Meta = postDb.Meta.translate(nohigh)
            if self.meta_max_size and (len(postDb.Meta) > self.meta_max_size):
                raise InputError(Ref(
                    _('Too long Meta part'), ))  # can not truncate json data
        if postDb.Content:
            postDb.Content = postDb.Content.translate(nohigh)
            if self.content_max_size and (len(postDb.Content) >
                                          self.content_max_size):
                raise InputError(Ref(_('Too long Content part'),
                                     ))  # can not truncate structured data
        if postDb.ContentPlain:
            postDb.ContentPlain = postDb.ContentPlain.translate(nohigh)
            if self.content_plain_max_size:
                postDb.ContentPlain = postDb.ContentPlain[:self.content_plain_max_size]

        return postDb
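
The nohigh table built in _adjustTexts maps every code point above the Basic Multilingual Plane (U+10000 and up) to
None, which str.translate treats as deletion; this is what strips 4-byte characters such as most emoji before the size
checks. A standalone illustration of that behaviour:

# Standalone illustration of the translate table used in _adjustTexts.
nohigh = {i: None for i in range(0x10000, 0x110000)}

text = 'title \U0001F600 body'   # contains an emoji (U+1F600, outside the BMP)
print(text.translate(nohigh))     # -> 'title  body' (the emoji is removed)
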
Example #21
class MetaDataServiceAlchemy(MetaDataServiceBaseAlchemy, IMetaDataReferencer,
                             IMetaDataUploadService):
    '''
    Implementation for @see: IMetaDataService, and also provides services as the @see: IMetaDataReferencer
    '''

    format_file_name = '%(id)s.%(name)s'
    wire.config('format_file_name',
                doc='''
    The format for the files names in the media archive''')
    format_thumbnail = '%(size)s/other.jpg'
    wire.config('format_thumbnail',
                doc='''
    The format for the unknown thumbnails in the media archive''')

    cdmArchive = ICDM
    # The archive CDM.
    thumbnailManager = IThumbnailManager
    wire.entity('thumbnailManager')
    # Provides the thumbnail referencer
    metaDataHandlers = list

    # The handlers list used by the meta data in order to get the references.

    def __init__(self):
        '''
        Construct the meta data service.
        '''
        assert isinstance(
            self.format_file_name,
            str), 'Invalid format file name %s' % self.format_file_name
        assert isinstance(
            self.format_thumbnail,
            str), 'Invalid format thumbnail %s' % self.format_thumbnail
        assert isinstance(self.cdmArchive,
                          ICDM), 'Invalid archive CDM %s' % self.cdmArchive
        assert isinstance(
            self.thumbnailManager, IThumbnailManager
        ), 'Invalid thumbnail manager %s' % self.thumbnailManager
        assert isinstance(
            self.metaDataHandlers,
            list), 'Invalid meta data handlers %s' % self.metaDataHandlers

        MetaDataServiceBaseAlchemy.__init__(self, MetaDataMapped, QMetaData,
                                            self)

    def deploy(self):
        '''
        Deploy the meta data and all handlers.
        '''
        self._thumbnailFormat = thumbnailFormatFor(self.session(),
                                                   self.format_thumbnail)
        self.thumbnailManager.putThumbnail(
            self._thumbnailFormat.id,
            abspath(join(pythonPath(), 'resources', 'other.jpg')))
        self._metaType = metaTypeFor(self.session(), META_TYPE_KEY)

        for handler in self.metaDataHandlers:
            assert isinstance(
                handler,
                IMetaDataHandler), 'Invalid meta data handler %s' % handler
            handler.deploy()

    # ----------------------------------------------------------------

    def populate(self, metaData, scheme, thumbSize=None):
        '''
        @see: IMetaDataReferencer.populate
        '''
        assert isinstance(metaData,
                          MetaDataMapped), 'Invalid meta data %s' % metaData
        metaData.Content = self.cdmArchive.getURI(metaData.content, scheme)
        return self.thumbnailManager.populate(metaData, scheme, thumbSize)

    # ----------------------------------------------------------------

    def insert(self, userId, content):
        '''
        @see: IMetaDataService.insert
        '''
        assert isinstance(content, Content), 'Invalid content %s' % content
        if not content.name:
            raise InputError(_('No name specified for content'))

        metaData = MetaDataMapped()
        metaData.CreatedOn = current_timestamp()
        metaData.Creator = userId
        metaData.Name = content.name

        metaData.typeId = self._metaType.Id
        metaData.thumbnailFormatId = self._thumbnailFormat.id

        try:
            self.session().add(metaData)
            self.session().flush((metaData, ))

            path = self.format_file_name % {
                'id': metaData.Id,
                'name': metaData.Name
            }
            path = ''.join((META_TYPE_KEY, '/',
                            self.generateIdPath(metaData.Id), '/', path))
            contentPath = self.cdmArchive.getURI(path, 'file')

            self.cdmArchive.publishContent(path, content)
            metaData.content = path
            metaData.SizeInBytes = getsize(contentPath)

            for handler in self.metaDataHandlers:
                assert isinstance(
                    handler, IMetaDataHandler), 'Invalid handler %s' % handler
                if handler.processByInfo(metaData, contentPath, content.type):
                    break
            else:
                for handler in self.metaDataHandlers:
                    if handler.process(metaData, contentPath): break

            self.session().merge(metaData)
            self.session().flush((metaData, ))
        except SQLAlchemyError as e:
            handle(e, metaData)

        if metaData.content != path:
            self.cdmArchive.republish(path, metaData.content)

        return metaData.Id

    # ----------------------------------------------------------------

    def generateIdPath(self, id):
        return '{0:03d}'.format((id // 1000) % 1000)
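
The generateIdPath helper above buckets ids into directories named '000' through '999': each directory covers a run of
a thousand consecutive ids and the naming wraps every million. A few sample values:

# Sample values for the directory bucketing used by generateIdPath.
def generate_id_path(id):
    return '{0:03d}'.format((id // 1000) % 1000)

print(generate_id_path(5))        # -> '000'
print(generate_id_path(1234))     # -> '001'
print(generate_id_path(1234567))  # -> '234'
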
Example #22
class ImagePersistanceAlchemy(SessionSupport, IMetaDataHandler):
    '''
    Provides the service that handles the image persistence @see: IImagePersistanceService.
    '''

    format_file_name = '%(id)s.%(file)s'
    wire.config('format_file_name',
                doc='''
    The format for the images file names in the media archive''')
    default_format_thumbnail = '%(size)s/image.jpg'
    wire.config('default_format_thumbnail',
                doc='''
    The format for the default image thumbnails in the media archive''')
    format_thumbnail = '%(size)s/%(id)s.%(name)s.jpg'
    wire.config('format_thumbnail',
                doc='''
    The format for the images thumbnails in the media archive''')
    metadata_extractor_path = join('workspace', 'tools', 'exiv2')
    wire.config('metadata_extractor_path',
                doc='''The path to the metadata extractor file.''')

    image_supported_files = 'gif, png, bmp, jpg'

    thumbnailManager = IThumbnailManager
    wire.entity('thumbnailManager')

    # Provides the thumbnail referencer

    def __init__(self):
        assert isinstance(
            self.format_file_name,
            str), 'Invalid format file name %s' % self.format_file_name
        assert isinstance(
            self.default_format_thumbnail,
            str), 'Invalid format thumbnail %s' % self.default_format_thumbnail
        assert isinstance(
            self.format_thumbnail,
            str), 'Invalid format thumbnail %s' % self.format_thumbnail
        assert isinstance(
            self.image_supported_files,
            str), 'Invalid supported files %s' % self.image_supported_files
        assert isinstance(
            self.thumbnailManager, IThumbnailManager
        ), 'Invalid thumbnail manager %s' % self.thumbnailManager

        self.imageSupportedFiles = set(
            re.split('[\\s]*\\,[\\s]*', self.image_supported_files))

    def deploy(self):
        '''
        @see: IMetaDataHandler.deploy
        '''

        self._defaultThumbnailFormat = thumbnailFormatFor(
            self.session(), self.default_format_thumbnail)
        self.thumbnailManager.putThumbnail(
            self._defaultThumbnailFormat.id,
            abspath(join(pythonPath(), 'resources', 'image.jpg')))

        self._thumbnailFormat = thumbnailFormatFor(self.session(),
                                                   self.format_thumbnail)
        self._metaTypeId = metaTypeFor(self.session(), META_TYPE_KEY).Id

    # --------------------------------------------------------------------

    def processByInfo(self, metaDataMapped, contentPath, contentType):
        '''
        @see: IMetaDataHandler.processByInfo
        '''
        if contentType is not None and contentType.startswith(META_TYPE_KEY):
            return self.process(metaDataMapped, contentPath)

        extension = splitext(metaDataMapped.Name)[1][1:]
        if extension in self.imageSupportedFiles:
            return self.process(metaDataMapped, contentPath)

        return False

    def process(self, metaDataMapped, contentPath):
        '''
        @see: IMetaDataHandler.process
        '''
        assert isinstance(
            metaDataMapped,
            MetaDataMapped), 'Invalid meta data mapped %s' % metaDataMapped

        p = Popen([
            join(self.metadata_extractor_path, 'bin', 'exiv2.exe'), contentPath
        ],
                  stdin=PIPE,
                  stdout=PIPE,
                  stderr=PIPE)
        result = p.wait()
        # 253 is the exiv2 code for error: No Exif data found in the file
        if result != 0 and result != 253: return False

        imageDataEntry = ImageDataEntry()
        imageDataEntry.Id = metaDataMapped.Id

        while True:
            line = p.stdout.readline()
            if not line: break
            line = str(line, "utf-8")

            property = self.extractProperty(line)

            if property is None:
                continue

            if property == 'Image size':
                size = self.extractSize(line)
                imageDataEntry.Width = size[0]
                imageDataEntry.Height = size[1]
            elif property == 'Image timestamp':
                imageDataEntry.CreationDate = self.extractDateTime(line)
            elif property == 'Camera make':
                imageDataEntry.CameraMake = self.extractString(line)
            elif property == 'Camera model':
                imageDataEntry.CameraModel = self.extractString(line)

        path = self.format_file_name % {
            'id': metaDataMapped.Id,
            'file': metaDataMapped.Name
        }
        path = ''.join((META_TYPE_KEY, '/',
                        self.generateIdPath(metaDataMapped.Id), '/', path))

        metaDataMapped.content = path
        metaDataMapped.typeId = self._metaTypeId
        metaDataMapped.thumbnailFormatId = self._thumbnailFormat.id
        metaDataMapped.IsAvailable = True

        self.thumbnailManager.putThumbnail(self._thumbnailFormat.id,
                                           contentPath, metaDataMapped)

        try:
            self.session().add(imageDataEntry)
        except SQLAlchemyError as e:
            metaDataMapped.IsAvailable = False
            handle(e, ImageDataEntry)

        return True

    # ----------------------------------------------------------------

    def extractProperty(self, line):
        return line.partition(':')[0].strip()

    def extractString(self, line):
        return line.partition(':')[2].strip()

    def extractDateTime(self, line):
        # example: '2010:11:08 18:33:13'
        dateTimeFormat = '%Y:%m:%d %H:%M:%S'
        value = line.partition(':')[2].strip()
        if not value: return None
        return datetime.strptime(value, dateTimeFormat)

    def extractSize(self, line):
        # example: '2816 x 2112' -> (2816, 2112); width and height as integers
        value = line.partition(':')[2].strip()
        width, _, height = value.partition('x')
        return (int(width), int(height))

    # ----------------------------------------------------------------

    def generateIdPath(self, id):
        return "{0:03d}".format((id // 1000) % 1000)
Example #23
class ThumbnailManagerAlchemy(SessionSupport, IThumbnailManager):
    '''
    Implementation for @see: IThumbnailManager
    '''
    original_name = 'original'
    wire.config('original_name',
                doc='''
    Provides the size name for the original sized images from which the thumbnails are created'''
                )
    thumbnail_sizes = {
        'tiny': [16, 16],
        'small': [32, 32],
        'medium': [64, 64],
        'large': [128, 128],
        'huge': [256, 256]
    }
    wire.config('thumbnail_sizes',
                doc='''
    This is basically just a simple dictionary{string, tuple(integer, integer)} that has as key a path safe name and as
    a value a tuple with the width/height of the thumbnail, example: {'small': [100, 100]}.
    ''')
    thumbnailProcessor = IThumbnailProcessor
    wire.entity('thumbnailProcessor')
    cdm = ICDM

    # the content delivery manager where to publish thumbnails

    def __init__(self):
        assert isinstance(self.original_name,
                          str), 'Invalid original name %s' % self.original_name
        assert isinstance(
            self.thumbnail_sizes,
            dict), 'Invalid thumbnail sizes %s' % self.thumbnail_sizes
        assert isinstance(self.thumbnailProcessor, IThumbnailProcessor), \
        'Invalid thumbnail processor %s' % self.thumbnailProcessor
        assert isinstance(self.cdm,
                          ICDM), 'Invalid thumbnail CDM %s' % self.cdm

        # We order the thumbnail sizes in ascending order of area (smallest first)
        thumbnailSizes = [(key, sizes)
                          for key, sizes in self.thumbnail_sizes.items()]
        thumbnailSizes.sort(key=lambda pack: pack[1][0] * pack[1][1])
        self.thumbnailSizes = OrderedDict(thumbnailSizes)
        self._cache_thumbnail = {}

    def putThumbnail(self, thumbnailFormatId, imagePath, metaData=None):
        '''
        @see IThumbnailManager.putThumbnail
        '''
        assert isinstance(
            thumbnailFormatId,
            int), 'Invalid thumbnail format identifier %s' % thumbnailFormatId
        assert isinstance(imagePath, str), 'Invalid file path %s' % imagePath

        thumbPath = self.thumbnailPath(thumbnailFormatId, metaData)
        try:
            thumbTimestamp = self.cdm.getTimestamp(thumbPath)
        except PathNotFound:
            thumbTimestamp = None

        if not thumbTimestamp or thumbTimestamp < timestampURI(imagePath):
            imageExt, thumbProcPath = splitext(imagePath)[1], thumbPath
            thumbName, thumbExt = splitext(thumbPath)
            if imageExt != thumbExt: thumbPath = thumbName + imageExt

            self.cdm.publishFromFile(thumbPath, imagePath)

            if thumbPath != thumbProcPath:
                thumbPath, thumbProcPath = self.cdm.getURI(
                    thumbPath, 'file'), self.cdm.getURI(thumbProcPath, 'file')
                self.thumbnailProcessor.processThumbnail(
                    thumbPath, thumbProcPath)

    def populate(self, metaData, scheme, size=None):
        '''
        @see: IMetaDataReferencer.populate
        '''
        assert isinstance(metaData, MetaData), 'Invalid metaData %s' % metaData
        assert not size or isinstance(
            size, str
        ) and size in self.thumbnailSizes, 'Invalid size value %s' % size

        if not metaData.thumbnailFormatId: return metaData

        thumbPath = self.thumbnailPath(metaData.thumbnailFormatId, metaData,
                                       size)
        try:
            self.cdm.getTimestamp(thumbPath)
        except PathNotFound:
            original = self.thumbnailPath(metaData.thumbnailFormatId, metaData)
            original = self.cdm.getURI(original, 'file')

            width, height = self.thumbnailSizes[size]
            self.thumbnailProcessor.processThumbnail(
                original, self.cdm.getURI(thumbPath, 'file'), width, height)

        metaData.Thumbnail = self.cdm.getURI(thumbPath, scheme)
        return metaData

    # ----------------------------------------------------------------

    def thumbnailPath(self, thumbnailFormatId, metaData=None, size=None):
        '''
        Construct the reference based on the provided parameters.
        '''
        format = self._cache_thumbnail.get(thumbnailFormatId)
        if format is None:
            thumbnailFormat = self.session().query(ThumbnailFormat).get(
                thumbnailFormatId)
            assert isinstance(
                thumbnailFormat, ThumbnailFormat
            ), 'Invalid thumbnail format id %s' % thumbnailFormatId
            format = self._cache_thumbnail[
                thumbnailFormat.id] = thumbnailFormat.format

        keys = dict(size=size or self.original_name)
        if metaData is not None:
            assert isinstance(metaData,
                              MetaData), 'Invalid meta data %s' % metaData

            keys.update(id=metaData.Id,
                        file=metaData.Name,
                        name=splitext(metaData.Name)[0])

        return format % keys
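As a side note, thumbnailPath is plain %-style formatting with named keys, so a configured format such as '%(size)s/%(id)s.%(name)s.jpg' yields one path per size name, falling back to the original_name size when none is requested. A minimal sketch under that assumed format (the helper name and sample values are illustrative):

from os.path import splitext

FORMAT_THUMBNAIL = '%(size)s/%(id)s.%(name)s.jpg'  # assumed format, mirrors format_thumbnail above

def thumbnail_path(fmt, entry_id, file_name, size=None, original_name='original'):
    # same keys that thumbnailPath prepares: size, id, file and name (file without extension)
    keys = dict(size=size or original_name, id=entry_id, file=file_name,
                name=splitext(file_name)[0])
    return fmt % keys

print(thumbnail_path(FORMAT_THUMBNAIL, 42, 'photo.png'))                 # original/42.photo.jpg
print(thumbnail_path(FORMAT_THUMBNAIL, 42, 'photo.png', size='medium'))  # medium/42.photo.jpg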
Example #24
class InletServiceAlchemy(EntityServiceAlchemy, IInletService):
    '''
    Implementation for @see: IInletService
    '''
    sms_source_type_key = 'FrontlineSMS'
    wire.config('sms_source_type_key',
                doc='''
    Type of the sources for the SMS inlet feeds''')
    sms_post_type_key = 'normal'
    wire.config('sms_post_type_key',
                doc='''
    Type of the posts created on the SMS that come via inlet feeds''')
    user_type_key = 'sms'
    wire.config('user_type_key',
                doc='''
    The user type that is used for the anonymous users of SMS posts''')

    postService = IPostService
    wire.entity('postService')
    sourceService = ISourceService
    wire.entity('sourceService')
    collaboratorService = ICollaboratorService
    wire.entity('collaboratorService')
    userService = IUserService
    wire.entity('userService')

    def __init__(self):
        '''
        Construct the frontline inlet service.
        '''
        assert isinstance(
            self.postService,
            IPostService), 'Invalid post service %s' % self.postService
        assert isinstance(
            self.sourceService,
            ISourceService), 'Invalid source service %s' % self.sourceService
        assert isinstance(
            self.collaboratorService, ICollaboratorService
        ), 'Invalid collaborator service %s' % self.collaboratorService
        assert isinstance(
            self.userService,
            IUserService), 'Invalid user service %s' % self.userService

    def pushMessage(self,
                    typeKey,
                    phoneNumber=None,
                    messageText=None,
                    timeStamp=None):
        '''
        @see: IInletService.pushMessage
        '''
        # checking the necessary info: phone number and message text
        if (phoneNumber is None) or (phoneNumber == ''):
            raise InputError(
                Ref(_('No value for the mandatory phoneNumber parameter'), ))
        if (messageText is None) or (messageText == ''):
            raise InputError(
                Ref(_('No value for the mandatory messageText parameter'), ))

        # take (or make) the user (for phone number) part of creator and collaborator
        try:
            userId, = self.session().query(PersonMapped.Id).filter(
                PersonMapped.PhoneNumber == phoneNumber).one()
        except NoResultFound:
            user = User()
            user.PhoneNumber = phoneNumber
            user.Name = self._freeSMSUserName()
            user.Password = binascii.b2a_hex(os.urandom(32)).decode()
            user.Type = self.user_type_key
            userId = self.userService.insert(user)

        # make the source (for inlet type) part of collaborator
        try:
            sql = self.session().query(SourceMapped.Id).join(SourceTypeMapped)
            sql = sql.filter(
                SourceTypeMapped.Key == self.sms_source_type_key).filter(
                    SourceMapped.Name == typeKey)
            sourceId, = sql.one()
        except NoResultFound:
            source = Source()
            source.Type = self.sms_source_type_key
            source.Name = typeKey
            source.URI = ''
            source.IsModifiable = True
            sourceId = self.sourceService.insert(source)

        # make the collaborator
        sql = self.session().query(CollaboratorMapped.Id)
        sql = sql.filter(CollaboratorMapped.Source == sourceId)
        sql = sql.filter(CollaboratorMapped.User == userId)
        try:
            collabId, = sql.one()
        except NoResultFound:
            collab = Collaborator()
            collab.Source = sourceId
            collab.User = userId
            collabId = self.collaboratorService.insert(collab)

        # take / make time stamp
        if timeStamp:
            try:
                timeStamp = datetime.strptime(timeStamp,
                                              '%Y-%m-%d %H:%M:%S.%f')
            except ValueError:
                timeStamp = None

        if not timeStamp:
            timeStamp = datetime.now()

        # create post request
        post = Post()
        post.Type = self.sms_post_type_key
        post.Creator = userId
        post.Author = collabId
        post.Content = messageText
        post.CreatedOn = timeStamp

        # insert the post
        postId = self.postService.insert(post)

        return (self.postService.getById(postId), )

    # ------------------------------------------------------------------

    def _freeSMSUserName(self):
        while True:
            userName = '******' + binascii.b2a_hex(os.urandom(8)).decode()
            try:
                self.session().query(UserMapped).filter(
                    UserMapped.Name == userName).one()
            except NoResultFound:
                return userName
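The timestamp handling in pushMessage is a parse-or-default pattern: the value must match '%Y-%m-%d %H:%M:%S.%f', and anything missing or malformed falls back to the current time. A minimal sketch of that fallback in isolation (the helper name is illustrative):

from datetime import datetime

def parse_timestamp(value, fmt='%Y-%m-%d %H:%M:%S.%f'):
    # return the parsed timestamp, or now() when the value is missing or malformed
    if value:
        try:
            return datetime.strptime(value, fmt)
        except ValueError:
            pass
    return datetime.now()

print(parse_timestamp('2012-05-14 09:30:00.250000'))  # parsed as given
print(parse_timestamp('not-a-date'))                  # falls back to the current time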
Example #25
class BlogCollaboratorServiceAlchemy(SessionSupport, IBlogCollaboratorService):
    '''
    Implementation for @see: IBlogCollaboratorService
    '''

    collaboratorSpecification = CollaboratorSpecification
    wire.entity('collaboratorSpecification')
    userActionService = IUserActionService
    wire.entity('userActionService')
    default_user_type_key = 'standard'
    wire.config('default_user_type_key',
                doc='''
    Default user type for users without specified the user type key''')
    internal_source_name = 'internal'
    wire.config('internal_source_name',
                doc='''
    Source for collaborators''')

    def __init__(self):
        '''
        Construct the blog collaborator service.
        '''
        assert isinstance(self.collaboratorSpecification, CollaboratorSpecification), \
        'Invalid collaborator specification %s' % self.collaboratorSpecification
        assert isinstance(self.userActionService, IUserActionService), \
        'Invalid user actions service %s' % self.userActionService
        super().__init__()

        self._collaboratorTypeIds = {}

    def getAllTypes(self):
        '''
        @see: IBlogCollaboratorService.getAllTypes
        '''
        return self.session().query(BlogCollaboratorTypeMapped).all()

    def getActions(self, userId, blogId, path=None, origPath=None):
        '''
        @see: IBlogCollaboratorService.getActions
        '''
        actions = list(self.userActionService.getAll(userId, path))
        paths = {a.Path for a in actions}
        for name, f in self.collaboratorSpecification.type_filter:
            assert isinstance(f, Filter), 'Invalid filter'
            assert isinstance(f.filter, IAclFilter)
            if f.filter.isAllowed(userId, blogId):
                collActions = list(
                    self.collaboratorSpecification.type_actions.get(name))
                collPaths = {a.Path for a in collActions}.difference(paths)
                actions.extend([
                    action for action in collActions
                    if action.Path in collPaths
                ])
                break
        return actions

    def getById(self, blogId, collaboratorId):
        '''
        @see: IBlogCollaboratorService.getById
        '''
        sql = self.session().query(BlogCollaboratorMapped)
        sql = sql.filter(BlogCollaboratorMapped.Blog == blogId)
        sql = sql.filter(BlogCollaboratorMapped.Id == collaboratorId)

        try:
            return sql.one()
        except NoResultFound:
            raise InputError(
                Ref(_('No collaborator'), ref=BlogCollaboratorMapped.Id))

    def getAll(self, blogId, offset=None, limit=None, detailed=True):
        '''
        @see: IBlogCollaboratorService.getAll
        '''
        sql = self.session().query(BlogCollaboratorMapped).filter(
            BlogCollaboratorMapped.Blog == blogId)
        sql = sql.join(UserMapped).join(SourceMapped).order_by(
            BlogCollaboratorMapped.Name)
        sql = sql.filter(UserMapped.Active == True)
        sql = sql.filter(UserMapped.Type == self.default_user_type_key)

        sqlLimit = buildLimits(sql, offset, limit)
        if detailed:
            return IterPart(sqlLimit.all(), sql.count(), offset, limit)
        return sqlLimit.all()

    def getPotential(self,
                     blogId,
                     excludeSources=True,
                     offset=None,
                     limit=None,
                     detailed=True,
                     qu=None,
                     qs=None):
        '''
        @see: IBlogCollaboratorService.getPotential
        '''
        sqlBlog = self.session().query(BlogCollaboratorMapped.Id).filter(
            BlogCollaboratorMapped.Blog == blogId)
        sql = self.session().query(CollaboratorMapped)
        sql = sql.join(UserMapped, CollaboratorMapped.User == UserMapped.Id)
        sql = sql.join(SourceMapped,
                       SourceMapped.Id == CollaboratorMapped.Source)
        sql = sql.filter(not_(CollaboratorMapped.Id.in_(sqlBlog)))
        sql = sql.filter(UserMapped.Active == True)
        sql = sql.filter(UserMapped.Type == self.default_user_type_key)
        sql = sql.filter(SourceMapped.Name == self.internal_source_name)
        sql = sql.order_by(CollaboratorMapped.Name)
        if excludeSources: sql = sql.filter(CollaboratorMapped.User != None)
        if qu: sql = buildQuery(sql, qu, UserMapped)
        if qs: sql = buildQuery(sql, qs, SourceMapped)
        sqlLimit = buildLimits(sql, offset, limit)
        if detailed:
            return IterPart(sqlLimit.distinct(),
                            sql.distinct().count(), offset, limit)
        return sqlLimit.distinct()

    def addCollaboratorAsDefault(self, blogId, collaboratorId):
        '''
        @see: IBlogCollaboratorService.addCollaboratorAsDefault
        '''
        self.addCollaborator(
            blogId, collaboratorId,
            self.collaboratorSpecification.collaborator_types[0])

    def addCollaborator(self, blogId, collaboratorId, typeName):
        '''
        @see: IBlogCollaboratorService.addCollaborator
        '''
        typeId = self.collaboratorTypeIds()[typeName]
        if typeId is None:
            raise InputError(
                Ref(_('Invalid collaborator type'), ref=BlogCollaborator.Type))

        sql = self.session().query(BlogCollaboratorEntry)
        sql = sql.filter(BlogCollaboratorEntry.Blog == blogId)
        sql = sql.filter(
            BlogCollaboratorEntry.blogCollaboratorId == collaboratorId)
        if sql.update({BlogCollaboratorEntry.typeId: typeId}) > 0: return

        sql = self.session().query(BlogCollaboratorMapped.Id)
        sql = sql.join(BlogMapped)
        sql = sql.filter(BlogCollaboratorMapped.User == BlogMapped.Creator)
        sql = sql.filter(BlogMapped.Id == blogId)
        sql = sql.filter(BlogCollaboratorMapped.Id == collaboratorId)
        if sql.count() > 0:
            raise InputError(
                _('The blog creator cannot be assigned as a collaborator'))

        bgc = BlogCollaboratorEntry()
        bgc.Blog = blogId
        bgc.blogCollaboratorId = collaboratorId
        bgc.typeId = typeId
        self.session().add(bgc)
        self.session().flush((bgc, ))

    def removeCollaborator(self, blogId, collaboratorId):
        '''
        @see: IBlogCollaboratorService.removeCollaborator
        '''
        try:
            sql = self.session().query(BlogCollaboratorEntry)
            sql = sql.filter(BlogCollaboratorEntry.Blog == blogId)
            sql = sql.filter(
                BlogCollaboratorEntry.blogCollaboratorId == collaboratorId)
            return sql.delete() > 0
        except OperationalError:
            raise InputError(
                Ref(_('Cannot remove'), model=BlogCollaboratorMapped))

    # ----------------------------------------------------------------

    def collaboratorTypeIds(self):
        '''
        Provides the collaborator types ids dictionary.
        '''
        if not self._collaboratorTypeIds:
            for name in self.collaboratorSpecification.collaborator_types:
                sql = self.session().query(BlogCollaboratorTypeMapped)
                sql = sql.filter(BlogCollaboratorTypeMapped.Name == name)
                try:
                    bt = sql.one()
                except NoResultFound:
                    bt = BlogCollaboratorTypeMapped()
                    bt.Name = name
                    self.session().add(bt)
                    self.session().flush((bt, ))
                self._collaboratorTypeIds[name] = bt.id
        return self._collaboratorTypeIds
Example #26
class BlogSyncServiceAlchemy(EntityServiceAlchemy, IBlogSyncService):
    '''
    Implementation for @see IBlogSyncService
    '''

    blog_provider_type = 'blog provider'
    wire.config('blog_provider_type',
                doc='''
    Key of the source type for blog providers''')
    sms_provider_type = 'sms provider'
    wire.config('sms_provider_type',
                doc='''
    Key of the source type for sms providers''')

    def __init__(self):
        '''
        Construct the blog sync service.
        '''
        EntityServiceAlchemy.__init__(self, BlogSyncMapped, QBlogSync)

    def checkTimeout(self, blogSyncId, timeout):
        '''
        @see IBlogSyncService.checkTimeout
        '''
        crtTime = datetime.datetime.now().replace(microsecond=0)
        referenceTime = crtTime - datetime.timedelta(seconds=timeout)

        sql = self.session().query(BlogSyncMapped)
        sql = sql.filter(BlogSyncMapped.Id == blogSyncId)
        sql = sql.filter(
            or_(BlogSyncMapped.LastActivity == None,
                BlogSyncMapped.LastActivity < referenceTime))
        result = sql.update({BlogSyncMapped.LastActivity: crtTime})
        self.session().commit()

        return result

    def getBySourceType(self,
                        sourceType,
                        offset=None,
                        limit=None,
                        detailed=False,
                        q=None):
        '''
        @see IBlogSyncService.getBySourceType
        '''
        sql = self.session().query(BlogSyncMapped)
        if q:
            assert isinstance(q, QBlogSync), 'Invalid blog sync query %s' % q
            sql = buildQuery(sql, q, BlogSyncMapped)

        sql = sql.join(SourceMapped, SourceMapped.Id == BlogSyncMapped.Source)
        sql = sql.join(BlogSourceDB, SourceMapped.Id == BlogSourceDB.source)

        sql_prov = self.session().query(SourceMapped.URI)
        sql_prov = sql_prov.join(SourceTypeMapped,
                                 SourceTypeMapped.id == SourceMapped.typeId)
        sql_prov = sql_prov.filter(SourceTypeMapped.Key == sourceType)

        sql = sql.filter(SourceMapped.OriginURI.in_(sql_prov))

        sqlLimit = buildLimits(sql, offset, limit)
        if detailed:
            return IterPart(sqlLimit.all(), sql.count(), offset, limit)
        return sqlLimit.all()

    def getByBlog(self,
                  blogId,
                  offset=None,
                  limit=None,
                  detailed=False,
                  q=None):
        '''
        @see IBlogSyncService.getByBlog
        '''
        sql = self.session().query(BlogSyncMapped)
        if q:
            assert isinstance(q, QBlogSync), 'Invalid blog sync query %s' % q
            sql = buildQuery(sql, q, BlogSyncMapped)

        sql = sql.join(SourceMapped, SourceMapped.Id == BlogSyncMapped.Source)
        sql = sql.join(BlogSourceDB, SourceMapped.Id == BlogSourceDB.source)
        sql = sql.filter(BlogSourceDB.blog == blogId)

        sql_prov = self.session().query(SourceMapped.URI)
        sql_prov = sql_prov.join(SourceTypeMapped,
                                 SourceTypeMapped.id == SourceMapped.typeId)
        sql_prov = sql_prov.filter(
            SourceTypeMapped.Key == self.blog_provider_type)

        sql = sql.filter(SourceMapped.OriginURI.in_(sql_prov))

        sqlLimit = buildLimits(sql, offset, limit)
        if detailed:
            return IterPart(sqlLimit.all(), sql.count(), offset, limit)
        return sqlLimit.all()

    def getById(self, id):
        '''
        @see: IEntityGetService.getById
        '''
        blogSync = EntityServiceAlchemy.getById(self, id)
        log.info('read Id=%d, Auto=%s' % (blogSync.Id, blogSync.Auto))
        return blogSync
Example #27
class ContentPublisherService(IContentPublisherService):
    '''
    Implementation for @see: IContentPublisherService
    '''

    mongodb_server = 'localhost'
    wire.config('mongodb_server', doc='''The address of the mongoDb server''')
    mongodb_port = 27017
    wire.config('mongodb_port', doc='''The port of the mongoDb server''')
    mongodb_database = 'mongodb'
    wire.config('mongodb_database', doc='''The name of the mongoDb database''')

    itemService = IItemService
    wire.entity('itemService')
    # item service used to convert article content to NewsML structure
    itemContentService = IItemContentService
    wire.entity('itemContentService')

    # item content service used to convert article content to NewsML structure

    def __init__(self):
        '''
        Construct the content publisher service.
        '''
        assert isinstance(
            self.mongodb_server,
            str), 'Invalid mongoDb server address %s' % self.mongodb_server
        assert isinstance(
            self.mongodb_port,
            int), 'Invalid mongoDb server port %s' % self.mongodb_port
        assert isinstance(
            self.mongodb_database,
            str), 'Invalid mongoDb database name %s' % self.mongodb_database

        mongoengine.connect(self.mongodb_database,
                            host=self.mongodb_server,
                            port=self.mongodb_port)

    def publish(self, guid):
        '''
        Implementation for @see: IContentPublisherService.publish
        '''
        # Test add document
        myItem = self.itemService.getById(guid)
        assert isinstance(myItem, PackageItem)

        item = Item()
        item.guid = myItem.GUId
        item.version = myItem.Version
        item.itemClass = myItem.ItemClass
        item.urgency = myItem.Urgency
        item.headline = myItem.HeadLine
        item.slugline = myItem.SlugLine
        item.byline = myItem.Byline
        item.creditline = myItem.CreditLine
        item.firstCreated = myItem.FirstCreated
        item.versionCreated = myItem.VersionCreated

        q = QItemContent()
        q.item = myItem.GUId
        contents = self.itemContentService.getAll(q=q)
        for c in contents:
            assert isinstance(c, ItemContent)
            content = Content()
            content.contenttype = c.ContentType
            content.content = c.Content
            content.residRef = c.ResidRef
            content.href = c.HRef
            content.size = c.Size
            content.rendition = c.Rendition
            item.contents.append(content)

        self.unpublish(item.guid)
        item.save(safe=True)
        return True

    def unpublish(self, guid):
        '''
        Implementation for @see: IContentPublisherService.unpublish
        '''
        # Test delete document
        Item.objects(guid=guid).delete(safe=True)
        return True
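Conceptually, publish is a replace-by-guid operation: unpublish removes any previously stored item with the same guid before the fresh document is saved, so republishing is idempotent. A minimal sketch of that idea with an in-memory store (the store is illustrative; the service above persists through the mongoengine Item document):

class InMemoryItemStore:
    '''Stand-in store keeping at most one document per guid.'''

    def __init__(self):
        self.items = {}

    def unpublish(self, guid):
        self.items.pop(guid, None)

    def publish(self, guid, document):
        self.unpublish(guid)  # drop any stale version first
        self.items[guid] = document

store = InMemoryItemStore()
store.publish('urn:item:1', {'headline': 'First take'})
store.publish('urn:item:1', {'headline': 'Corrected take'})
print(store.items)  # only the latest document for the guid remains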
Example #28
class URLInfoService(IURLInfoService):
    '''
    @see IURLInfoService
    '''

    # TODO: This is just a hacky way for fixing some broken web sites.
    #       Manual xml processing would be a more proper way here.
    html_fixes = [{'from': '<DOCTYPE html PUBLIC "-//W3C//DTD XHTML', 'to': '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML'}]; wire.config('html_fixes', doc='''
    Web page repairing: list of "from -> to" string pairs''')

    def __init__(self):
        '''
        Construct the URLInfoService service.
        '''
        assert isinstance(self.html_fixes, list), 'Invalid html_fixes config %s' % self.html_fixes
        super().__init__()

    def getURLInfo(self, url=None):
        '''
        @see: IURLInfoService.getURLInfo
        '''
        if not url: raise InputError('Invalid URL %s' % url)
        assert isinstance(url, str), 'Invalid URL %s' % url
        url = unquote(url)

        try:
            with urlopen(url) as conn:
                urlInfo = URLInfo()
                urlInfo.URL = url
                urlInfo.Date = datetime.now()
                contentType = None
                charset = 'utf_8'
                for tag, val in conn.info().items():
                    if tag == 'Content-Type':
                        contentTypeInfo = val.split(';')
                        contentType = contentTypeInfo[0].strip().lower()
                        if 2 == len(contentTypeInfo):
                            charset = contentTypeInfo[1].split('=')[1]
                        break
                if not contentType or contentType != 'text/html':
                    req = Request(url)
                    selector = req.get_selector().strip('/')
                    if selector:
                        parts = selector.split('/')
                        if parts: urlInfo.Title = parts[-1]
                    else:
                        urlInfo.Title = req.get_host()
                    return urlInfo
                elif contentType == 'text/html': urlInfo.ContentType = contentType
                extr = HTMLInfoExtractor(urlInfo)
                try:
                    readData = conn.read()
                    decodedData = ''
                    try:
                        decodedData = readData.decode(charset, 'ignore')
                    except Exception:
                        decodedData = readData.decode('utf_8', 'ignore')
                    for onePair in self.html_fixes:
                        decodedData = re.sub(onePair['from'], onePair['to'], decodedData)
                    extr.feed(decodedData)
                except (AssertionError, HTMLParseError, UnicodeDecodeError): pass
                return extr.urlInfo
        except (URLError, ValueError): raise InputError('Invalid URL %s' % url)
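The header handling in getURLInfo reduces to splitting a Content-Type value such as 'text/html; charset=ISO-8859-1' into a media type and an optional charset, with UTF-8 as the default. A minimal standalone sketch of the same split (the function name is illustrative):

def split_content_type(value, default_charset='utf_8'):
    # media type before ';', charset after '=' when a second part is present
    parts = value.split(';')
    content_type = parts[0].strip().lower()
    charset = default_charset
    if len(parts) == 2:
        charset = parts[1].split('=')[1].strip()
    return content_type, charset

print(split_content_type('text/html; charset=ISO-8859-1'))  # ('text/html', 'ISO-8859-1')
print(split_content_type('image/png'))                      # ('image/png', 'utf_8')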
Example #29
class JSONFileService(IJSONLocaleFileService):
    '''
    Implementation for @see: IJSONLocaleFileService
    '''

    default_charset = 'UTF-8'
    wire.config('default_charset',
                doc='''
    The default character set to use whenever a JSON locale file is uploaded and
    the character set of the content is not specified''')

    poFileManager = IPOFileManager
    wire.entity('poFileManager')
    cdmLocale = ICDM
    wire.entity('cdmLocale')
    pluginService = IPluginService
    wire.entity('pluginService')
    componentService = IComponentService
    wire.entity('componentService')

    def __init__(self):
        assert isinstance(
            self.default_charset,
            str), 'Invalid default charset %s' % self.default_charset
        assert isinstance(
            self.poFileManager,
            IPOFileManager), 'Invalid PO file manager %s' % self.poFileManager
        assert isinstance(self.cdmLocale,
                          ICDM), 'Invalid PO CDM %s' % self.cdmLocale
        assert isinstance(
            self.pluginService,
            IPluginService), 'Invalid plugin service %s' % self.pluginService
        assert isinstance(
            self.componentService, IComponentService
        ), 'Invalid component service %s' % self.componentService

    def getGlobalJSONFile(self, locale, scheme):
        '''
        @see: IPOService.getGlobalPOFile
        '''
        path = self._cdmPath(locale)
        try:
            try:
                cdmFileTimestamp = self.cdmLocale.getTimestamp(path)
            except PathNotFound:
                republish = True
            else:
                mngFileTimestamp = self.poFileManager.getGlobalPOTimestamp(
                    locale)
                republish = False if mngFileTimestamp is None else cdmFileTimestamp < mngFileTimestamp

            if republish:
                jsonString = JSONEncoder(ensure_ascii=False).encode(
                    self.poFileManager.getGlobalAsDict(locale))
                self.cdmLocale.publishContent(
                    path, BytesIO(bytes(jsonString, getdefaultencoding())))
        except InvalidLocaleError:
            raise InputError(
                _('Invalid locale %(locale)s') % dict(locale=locale))
        return self.cdmLocale.getURI(path, scheme)

    def getComponentJSONFile(self, component, locale, scheme):
        '''
        @see: IPOService.getComponentPOFile
        '''
        self.componentService.getById(component)
        path = self._cdmPath(locale, component=component)
        try:
            try:
                cdmFileTimestamp = self.cdmLocale.getTimestamp(path)
            except PathNotFound:
                republish = True
            else:
                mngFileTimestamp = max(
                    self.poFileManager.getGlobalPOTimestamp(locale)
                    or datetime.min,
                    self.poFileManager.getComponentPOTimestamp(
                        component, locale) or datetime.min)
                republish = False if mngFileTimestamp is None else cdmFileTimestamp < mngFileTimestamp

            if republish:
                jsonString = JSONEncoder(ensure_ascii=False).encode(
                    self.poFileManager.getComponentAsDict(component, locale))
                self.cdmLocale.publishContent(
                    path, BytesIO(bytes(jsonString, getdefaultencoding())))
        except InvalidLocaleError:
            raise InputError(
                _('Invalid locale %(locale)s') % dict(locale=locale))
        return self.cdmLocale.getURI(path, scheme)

    def getPluginJSONFile(self, plugin, locale, scheme):
        '''
        @see: IPOService.getPluginPOFile
        '''
        pluginObj = self.pluginService.getById(plugin)
        assert isinstance(pluginObj, Plugin)
        if pluginObj.Component:
            return self.getComponentJSONFile(pluginObj.Component, locale,
                                             scheme)

        path = self._cdmPath(locale, plugin=plugin)
        try:
            try:
                cdmFileTimestamp = self.cdmLocale.getTimestamp(path)
            except PathNotFound:
                republish = True
            else:
                mngFileTimestamp = max(
                    self.poFileManager.getGlobalPOTimestamp(locale)
                    or datetime.min,
                    self.poFileManager.getPluginPOTimestamp(plugin, locale)
                    or datetime.min)
                republish = False if mngFileTimestamp is None else cdmFileTimestamp < mngFileTimestamp

            if republish:
                jsonString = JSONEncoder(ensure_ascii=False).encode(
                    self.poFileManager.getPluginAsDict(plugin, locale))
                self.cdmLocale.publishContent(
                    path, BytesIO(bytes(jsonString, getdefaultencoding())))
        except InvalidLocaleError:
            raise InputError(
                _('Invalid locale %(locale)s') % dict(locale=locale))
        return self.cdmLocale.getURI(path, scheme)

    # ----------------------------------------------------------------

    def _cdmPath(self, locale, component=None, plugin=None):
        '''
        Returns the path to the CDM JSON file corresponding to the given locale and / or
        component / plugin. If no component or plugin was specified it returns the
        name of the global JSON file.

        @param locale: string
            The locale.
        @param component: string
            The component id.
        @param plugin: string
            The plugin id.
        @return: string
            The file path.
        '''
        assert isinstance(locale, str), 'Invalid locale %s' % locale

        path = []
        if component:
            path.append('component')
            path.append(component)
        elif plugin:
            path.append('plugin')
            path.append(plugin)
        else:
            path.append('global')
        path.append(locale)
        return '%s.json' % '-'.join(path)
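The naming scheme produced by _cdmPath is easiest to see with concrete values: segments are joined with '-' and suffixed with '.json', and a component takes precedence over a plugin. A minimal sketch (the sample identifiers are illustrative):

def cdm_path(locale, component=None, plugin=None):
    # same precedence as _cdmPath: component wins over plugin, otherwise the global file
    path = []
    if component:
        path.extend(('component', str(component)))
    elif plugin:
        path.extend(('plugin', str(plugin)))
    else:
        path.append('global')
    path.append(locale)
    return '%s.json' % '-'.join(path)

print(cdm_path('en'))                     # global-en.json
print(cdm_path('en', component='media'))  # component-media-en.json
print(cdm_path('ro', plugin='superdesk')) # plugin-superdesk-ro.json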
Example #30
class SynchronizeRightsHandler(HandlerProcessor):
    '''
    Implementation for a processor that synchronizes the rights in the configuration file with the database.
    '''

    type_name = 'GUI Access'
    wire.config('type_name',
                doc='''
    The right type name to be used in inserting the configured rights. 
    ''')
    role_name = 'Admin'
    wire.config('role_name',
                doc='''
    The root role that will contain all the rights. 
    ''')

    rightService = IRightService
    wire.entity('rightService')
    rightTypeService = IRightTypeService
    wire.entity('rightTypeService')
    roleService = IRoleService
    wire.entity('roleService')

    def __init__(self):
        assert isinstance(self.type_name,
                          str), 'Invalid type name %s' % self.type_name
        assert isinstance(
            self.rightService,
            IRightService), 'Invalid right service %s' % self.rightService
        assert isinstance(
            self.rightTypeService, IRightTypeService
        ), 'Invalid right type service %s' % self.rightTypeService
        super().__init__(Repository=RepositoryRight)

    def process(self, chain, solicit: Solicit, **keyargs):
        '''
        @see: HandlerProcessor.process
        
        Synchronize the rights of the groups in the configuration file with the database.
        '''
        assert isinstance(chain, Chain), 'Invalid chain %s' % chain
        assert isinstance(solicit, Solicit), 'Invalid solicit %s' % solicit
        assert isinstance(
            solicit.repository,
            RepositoryRight), 'Invalid repository %s' % solicit.repository

        try:
            self.rightTypeService.getById(self.type_name)
        except:
            rightType = RightType()
            rightType.Name = self.type_name
            self.rightTypeService.insert(rightType)

        #maps name to id
        rightsDb = {
            e.Name: e.Id
            for e in [
                self.rightService.getById(id)
                for id in self.rightService.getAll(self.type_name)
            ]
        }
        #maps right_name to arguments required for right creation
        rightRepositories = listBFS(solicit.repository,
                                    RepositoryRight.children,
                                    RepositoryRight.rightName)
        #do rights inheritance
        self.doInheritance(rightRepositories)
        rights = {
            r.rightName: (partial(self.createEntity, r), r)
            for r in rightRepositories
        }
        rightIds = syncWithDatabase(self.rightService, rights, rightsDb)

        #add id to right repositories
        for r in rightRepositories:
            r.rightId = rightIds.get(r.rightName)

        #create root role ("Admin") and add all the rights on it
        try:
            self.roleService.getById(self.role_name)
        except:
            role = Role()
            role.Name = self.role_name
            self.roleService.insert(role)
        for rightId in rightIds.values():
            self.roleService.addRight(self.role_name, rightId)

    def doInheritance(self, repositories):
        '''
        Will add actions and accesses from inherited to inheriting rights. 
        @param repositories: list of right repositories 
        '''
        #first we have to group the repositories by rightName
        rights = {}
        for repository in repositories:
            assert isinstance(repository,
                              RepositoryRight), 'Invalid right %s' % repository
            if repository.rightName in rights:
                rights[repository.rightName].append(repository)
            else:
                rights[repository.rightName] = [repository]

        #detect cyclic inheritance
        for rightName in rights:
            result = self.isCyclicInheritance(rightName, rights)
            if result:
                log.warning('Cyclic inheritance detected for rights: %s',
                            result)
                return

        handled = set()
        for rightName in rights:
            self.handleRight(rightName, rights, handled)

    def isCyclicInheritance(self, rightName, rights, visited=None, path=None):
        '''
        Will detect if there is cyclic inheritance for the given rights.
        @param rightName: The right from which to start the search for cyclic inheritance
        @param rights: mapping rightName: [list of repositories]
        @return: False if there is no cyclic inheritance or a list containing the rights in the inheritance cycle 
        '''
        if visited is None:
            visited = set()
            path = []

        if rightName in visited: return path

        parents = [
            parent for right in rights[rightName] if right.rightInherits
            for parent in right.rightInherits
        ]
        if not parents: return False

        visited.add(rightName)
        path.append(rightName)

        for parent in parents:
            if parent not in rights: continue
            if self.isCyclicInheritance(parent, rights, visited, path):
                return path
        return False

    def handleRight(self, rightName, rights, handled):
        '''
        Recursively solves inheritance of actions and accesses for the right.
        @param rightName: The right from which to start the search for cyclic inheritance
        @param rights: mapping rightName: [list of repositories]
        '''
        assert isinstance(handled, set), 'Invalid handled set %s' % handled
        if rightName in handled: return

        parents = [
            parent for right in rights[rightName] if right.rightInherits
            for parent in right.rightInherits
        ]
        if not parents:
            handled.add(rightName)
            return

        #handle inherits
        for parent in parents:
            self.handleRight(parent, rights, handled)

        #now add the actions from parent rights
        actions = set(action.path for right in rights[rightName]
                      if right.actions for action in right.actions)
        actionsInherited = {
            action.path: action
            for parent in parents for right in rights[parent] if right.actions
            for action in right.actions
        }
        accessesInherited = [
            access for parent in parents for right in rights[parent]
            if right.accesses for access in right.accesses
        ]

        #we will add the actions and accesses from the parents to one of the repositories of this right (the first one)
        for action in actionsInherited:
            if action not in actions:
                rights[rightName][0].actions.append(actionsInherited[action])
        #add accesses from the parent to the child
        for access in accessesInherited:
            rights[rightName][0].accesses.append(access)

        #finished handling this right, mark it as handled
        handled.add(rightName)

    def createEntity(self, rightRepository, rightName):
        assert isinstance(
            rightRepository,
            RepositoryRight), 'Invalid repository %s' % rightRepository
        right = Right()
        right.Name = rightName
        right.Type = self.type_name
        right.Description = rightRepository.description
        return right
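To make the inheritance handling easier to follow in isolation, here is a standard depth-first cycle check over a plain name -> parents mapping, simplified relative to isCyclicInheritance above (the sample right names are illustrative):

def find_cycle(inherits):
    # inherits: dict mapping right name -> list of parent right names
    # returns a list of names forming a cycle, or None when the graph is acyclic
    visiting, done = set(), set()

    def visit(name, path):
        if name in done or name not in inherits: return None
        if name in visiting: return path[path.index(name):] + [name]
        visiting.add(name)
        path.append(name)
        for parent in inherits[name]:
            cycle = visit(parent, path)
            if cycle: return cycle
        path.pop()
        visiting.discard(name)
        done.add(name)
        return None

    for name in inherits:
        cycle = visit(name, [])
        if cycle: return cycle
    return None

print(find_cycle({'Editor': ['Admin'], 'Admin': []}))    # None
print(find_cycle({'A': ['B'], 'B': ['C'], 'C': ['A']}))  # ['A', 'B', 'C', 'A']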