Example #1
    def invalidateWorkingResult(self):
        """ invalidate working results"""
        self.setSparql_results("")
        self.invalidateSparqlCacheResults()

        pr = getToolByName(self, 'portal_repository')
        comment = "Invalidated last working result"
        comment = comment.encode('utf')
        try:
            pr.save(obj=self, comment=comment)
        except FileTooLargeToVersionError:
            commands = view.getCommandSet('plone')
            commands.issuePortalMessage(
                """Changes Saved. Versioning for this file
                   has been disabled because it is too large.""",
                msgtype="warn")

        async_service = queryUtility(IAsyncService)
        if async_service is None:
            logger.warn(
                "Can't invalidateWorkingResult. plone.app.async NOT installed!"
            )
            return

        self.scheduled_at = DateTime.DateTime()
        async_queue = async_service.getQueues()['']
        async_service.queueJobInQueue(async_queue, ('sparql', ),
                                      async_updateLastWorkingResults,
                                      self,
                                      scheduled_at=self.scheduled_at,
                                      bookmarks_folder_added=False)
Example #2
    def invalidateWorkingResult(self):
        """ invalidate working results"""
        self.setSparql_results("")
        self.invalidateSparqlCacheResults()

        pr = getToolByName(self, 'portal_repository')
        comment = "Invalidated last working result"
        comment = comment.encode('utf')
        try:
            pr.save(obj=self, comment=comment)
        except FileTooLargeToVersionError:
            commands = view.getCommandSet('plone')
            commands.issuePortalMessage(
                """Changes Saved. Versioning for this file
                   has been disabled because it is too large.""",
                msgtype="warn")

        async_service = queryUtility(IAsyncService)
        if async_service is None:
            logger.warn(
                "Can't invalidateWorkingResult. plone.app.async NOT installed!")
            return

        self.scheduled_at = DateTime.DateTime()
        async_queue = async_service.getQueues()['']
        async_service.queueJobInQueue(
            async_queue, ('sparql',),
            async_updateLastWorkingResults,
            self,
            scheduled_at=self.scheduled_at,
            bookmarks_folder_added=False
        )
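
In both variants above, the method stamps self.scheduled_at just before queueing and hands the same timestamp to the job. A plausible reading of that pattern (the job body below is an assumption for illustration, not the actual eea.sparql implementation) is that the worker only refreshes results if its timestamp still matches the one stored on the object, so stale invalidations are dropped:

def async_updateLastWorkingResults(obj, scheduled_at=None,
                                   bookmarks_folder_added=False):
    # Hypothetical job body: skip jobs whose timestamp no longer matches
    # the one recorded on the object when the job was queued.
    if scheduled_at != getattr(obj, 'scheduled_at', None):
        return
    obj.updateLastWorkingResults()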
Example #3
    def updateLastWorkingResults(self, **arg_values):
        """ update cached last working results of a query
        """
        cached_result = self.getSparqlCacheResults()
        cooked_query = interpolate_query(self.query, arg_values)

        args = (self.endpoint_url, cooked_query)
        try:
            new_result = run_with_timeout(
                max(getattr(self, 'timeout', 10), 10),
                query_and_get_result,
                *args)
        except QueryTimeout:
            new_result = {'exception': "query has run - a timeout has"
                                       " been received"}

        force_save = False

        if new_result.get("result", {}) != {}:
            if new_result != cached_result:
                if len(new_result.get("result", {}).get("rows", {})) > 0:
                    force_save = True
                else:
                    if len(cached_result.get('result', {}).get('rows', {})) \
                            == 0:
                        force_save = True

        pr = getToolByName(self, 'portal_repository')
        comment = "query has run - no result changes"
        if force_save:
            self.setSparqlCacheResults(new_result)
            new_sparql_results = []
            rows = new_result.get('result', {}).get('rows', {})
            if rows:
                for row in rows:
                    for val in row:
                        new_sparql_results.append(unicode(val) + " | ")
                new_sparql_results[-1] = new_sparql_results[-1][0:-3]
            new_sparql_results_str = "".join(new_sparql_results) + "\n"
            self.setSparql_results(new_sparql_results_str)
            comment = "query has run - result changed"
        if self.portal_type in pr.getVersionableContentTypes():
            comment = comment.encode('utf')
            try:
                oldSecurityManager = getSecurityManager()
                newSecurityManager(None, SpecialUsers.system)
                pr.save(obj=self, comment=comment)
                setSecurityManager(oldSecurityManager)
            except FileTooLargeToVersionError:
                commands = view.getCommandSet('plone')
                commands.issuePortalMessage(
                    """Changes Saved. Versioning for this file
                       has been disabled because it is too large.""",
                    msgtype="warn")

        if new_result.get('exception', None):
            cached_result['exception'] = new_result['exception']
            self.setSparqlCacheResults(cached_result)
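
The nested conditionals that set force_save in Example #3 reduce to: save only when the new payload is non-empty, differs from the cached result, and either contains rows or the cached result had no rows of its own. A standalone sketch of that check (the helper name and dict shape are ours, not part of eea.sparql):

def should_save(new_result, cached_result):
    """Return True when a fresh SPARQL result should replace the cache."""
    result = new_result.get("result", {})
    if not result:                        # empty payload: keep the cache
        return False
    if new_result == cached_result:       # nothing changed: keep the cache
        return False
    if result.get("rows"):                # new rows arrived: save them
        return True
    # no new rows: save only if the cached result was empty as well
    return not cached_result.get("result", {}).get("rows")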
Example #4
    def invalidateWorkingResult(self):
        """ invalidate working results"""
        self.cached_result = {}
        self.setSparql_results("")
        pr = getToolByName(self, 'portal_repository')
        comment = "Invalidated last working result"
        comment = comment.encode('utf')
        try:
            pr.save(obj=self, comment=comment)
        except FileTooLargeToVersionError:
            commands = view.getCommandSet('plone')
            commands.issuePortalMessage(
                """Changes Saved. Versioning for this file
                   has been disabled because it is too large.""",
                msgtype="warn")

        async = getUtility(IAsyncService)

        self.scheduled_at = DateTime.DateTime()
        async.queueJob(async_updateLastWorkingResults,
                       self,
                       scheduled_at=self.scheduled_at,
                       bookmarks_folder_added=False)
Example #5
    def updateLastWorkingResults(self, **arg_values):
        """ update cached last working results of a query (json exhibit)
        """

        cached_result = self.getSparqlCacheResults()
        cooked_query = interpolate_query(self.query, arg_values)

        args = (self.endpoint_url, cooked_query)
        try:
            new_result = run_with_timeout(
                max(getattr(self, 'timeout', 10), 10), query_and_get_result,
                *args)
        except QueryTimeout:
            new_result = {
                'exception': "query has ran - an timeout has"
                " been received"
        force_save = False

        if new_result.get("result", {}) != {}:
            if new_result != cached_result:
                if new_result.get("result", {}).get("rows", {}):
                    force_save = True
                else:
                    if not cached_result.get('result', {}).get('rows', {}):
                        force_save = True

        pr = getToolByName(self, 'portal_repository')
        comment = "query has run - no result changes"

        if force_save:
            self.setSparqlCacheResults(new_result)
            self._updateOtherCachedFormats(self.last_scheduled_at,
                                           self.endpoint_url, cooked_query)

            new_sparql_results = []
            rows = new_result.get('result', {}).get('rows', {})
            if rows:
                for row in rows:
                    for val in row:
                        new_sparql_results.append(unicode(val) + " | ")
                new_sparql_results[-1] = new_sparql_results[-1][0:-3]
            new_sparql_results_str = "".join(new_sparql_results) + "\n"
            self.setSparql_results(new_sparql_results_str)
            comment = "query has run - result changed"
        if self.portal_type in pr.getVersionableContentTypes():
            comment = comment.encode('utf')
            try:
                oldSecurityManager = getSecurityManager()
                newSecurityManager(None, SpecialUsers.system)
                pr.save(obj=self, comment=comment)
                setSecurityManager(oldSecurityManager)
            except FileTooLargeToVersionError:
                commands = view.getCommandSet('plone')
                commands.issuePortalMessage(
                    """Changes Saved. Versioning for this file
                       has been disabled because it is too large.""",
                    msgtype="warn")

        if new_result.get('exception', None):
            cached_result['exception'] = new_result['exception']
            self.setSparqlCacheResults(cached_result)
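
Both updateLastWorkingResults variants guard the endpoint call with run_with_timeout and enforce a minimum of 10 seconds via max(getattr(self, 'timeout', 10), 10). For readers unfamiliar with that helper, a thread-based wrapper along these lines would behave the same way from the caller's perspective; this is a sketch under assumptions, and the real eea.sparql helper may be implemented differently:

import threading

class QueryTimeout(Exception):
    """Raised when the wrapped call does not finish in time."""

def run_with_timeout(timeout, func, *args):
    # Run func(*args) in a worker thread and give up after `timeout` seconds.
    outcome = {}

    def target():
        outcome['value'] = func(*args)

    worker = threading.Thread(target=target)
    worker.daemon = True
    worker.start()
    worker.join(timeout)
    if worker.is_alive():
        raise QueryTimeout("query exceeded %s seconds" % timeout)
    # Exceptions raised inside the worker are not propagated here; the
    # real helper presumably reports them in the result dict instead.
    return outcome.get('value')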