def handleQueue(self, action):
    """Form action handler: queue a request for the submitted URL."""
    data, errors = self.extractData()
    if errors:
        return False
    target_url = data.get('url')
    taskqueue.add(target_url)
    tools = getToolByName(self.context, 'plone_utils')
    tools.addPortalMessage("Queued a new request")
def trigger_mopage_refresh(obj, event):
    """Touch mopage modification dates and queue a mopage refresh when an
    event page (or a child of one) is published.

    :param obj: the content object being published.
    :param event: the publishing event (unused; subscriber signature).
    """
    # Materialize as lists: under Python 3, filter()/map() return lazy
    # iterators which are always truthy and not subscriptable, so the
    # original ``if not event_pages`` and ``triggers[0]`` would misbehave.
    event_pages = [page for page in
                   (IEventPage(parent, None) for parent in aq_chain(obj))
                   if page]
    if not event_pages:
        # We are not within an event page.
        # We only trigger when publishing an event page
        # or a child of an event page.
        return

    triggers = [trigger for trigger in
                (IPublisherMopageTrigger(parent, None)
                 for parent in aq_chain(obj))
                if trigger]
    if not triggers or not triggers[0].is_enabled():
        return

    for events in event_pages:
        IMopageModificationDate(events).touch()

    from collective.taskqueue import taskqueue
    trigger_url = triggers[0].build_trigger_url()
    callback_path = '/'.join(
        getSite().getPhysicalPath()
        + ('taskqueue_events_trigger_mopage_refresh',))
    taskqueue.add(callback_path, params={'target': trigger_url})
def testCommitToQueue(self):
    """Queued tasks become visible only after the transaction commits."""
    for expected_after, path in enumerate(("/", "/Plone"), start=1):
        taskqueue.add(path, queue=self.queue)
        # Not yet visible: the task is bound to the open transaction.
        self.assertEqual(len(self.task_queue), expected_after - 1)
        transaction.commit()
        self.assertEqual(len(self.task_queue), expected_after)
def handleQueue(self, action):
    """Queue one send-email task per requested amount for this context."""
    data, errors = self.extractData()
    if errors:
        return False
    amount = data["amount"]
    path = "/".join(self.context.getPhysicalPath())
    view_url = "/{0:s}/send-email-view".format(path)
    for _ in range(amount):
        taskqueue.add(view_url, method="POST")
    getToolByName(self.context, "plone_utils").addPortalMessage(
        "Queued {0:d} new email(s)".format(amount))
def handleQueue(self, action):
    """Form handler: enqueue the requested number of send-email tasks."""
    data, errors = self.extractData()
    if errors:
        return False
    physical_path = '/'.join(self.context.getPhysicalPath())
    for _ in range(data['amount']):
        taskqueue.add('/{0:s}/send-email-view'.format(physical_path),
                      method='POST')
    plone_tools = getToolByName(self.context, 'plone_utils')
    plone_tools.addPortalMessage(
        "Queued {0:d} new email(s)".format(data['amount']))
def testTaskId(self):
    """taskqueue.add returns the task id that the consumer later logs."""
    self.assertEqual(len(self.task_queue), 0)
    queued_ids = [taskqueue.add(path, queue=self.queue)
                  for path in ("/", "/Plone")]
    transaction.commit()
    self.assertEqual(len(self.task_queue), 2)
    handler = InstalledHandler("collective.taskqueue")
    runAsyncTest(self._testConsumeFromQueue)
    logged = [record.getMessage() for record in handler.records]
    # The last two log messages are the ids returned by add().
    self.assertEqual(logged[-2:], queued_ids)
def handleQueue(self, action):
    """Queue a request for the submitted URL, forwarding the CSRF
    authenticator token when the form supplied one."""
    data, errors = self.extractData()
    if errors:
        return False
    token = self.request.form.get("_authenticator")
    if token:
        taskqueue.add(data.get("url"), params={"_authenticator": token})
    else:
        taskqueue.add(data.get("url"))
    getToolByName(self.context, "plone_utils").addPortalMessage(
        "Queued a new request")
def handleQueue(self, action):
    """Queue a request for the submitted URL; pass along the form's
    CSRF authenticator when present."""
    data, errors = self.extractData()
    if errors:
        return False
    token = self.request.form.get('_authenticator')
    # Only attach params when an authenticator token was submitted.
    extra = {'params': {'_authenticator': token}} if token else {}
    taskqueue.add(data.get('url'), **extra)
    utils = getToolByName(self.context, 'plone_utils')
    utils.addPortalMessage("Queued a new request")
def testConsumeFromQueue(self):
    """Committed tasks are consumed and their URLs logged."""
    self.assertEqual(len(self.task_queue), 0)
    for path in ("/", "/Plone"):
        taskqueue.add(path, queue=self.queue)
    transaction.commit()
    self.assertEqual(len(self.task_queue), 2)
    handler = InstalledHandler("collective.taskqueue")
    runAsyncTest(self._testConsumeFromQueue)
    logged = [rec.getMessage() for rec in handler.records]
    self.assertEqual(logged[-2:],
                     ["http://nohost:/", "http://nohost:/Plone"])
def testConsume100FromQueue(self):
    """One hundred queued tasks are all consumed after commit."""
    self.assertEqual(len(self.task_queue), 0)
    expected = []
    for index in range(100):
        path = "/{0:02d}".format(index)
        taskqueue.add(path, queue=self.queue)
        expected.append("http://nohost:" + path)
    transaction.commit()
    self.assertEqual(len(self.task_queue), 100)
    handler = InstalledHandler("collective.taskqueue")
    runAsyncTest(self._testConsumeFromQueue)
    logged = [rec.getMessage() for rec in handler.records]
    # Consumption order is not guaranteed, hence the sort.
    self.assertEqual(sorted(logged[-100:]), expected)
def start(self, context):
    """Queues issue for sendout through collective.taskqueue."""
    path_parts = context.getPhysicalPath() + (VIEW_NAME, )
    return taskqueue.add('/'.join(path_parts), queue=QUEUE_NAME)
def render(self):
    """Convert the stored docx via the C-Rex service and stream the
    resulting zip back as an attachment.

    Also enqueues an xmldirector-test task and logs the conversion on
    the context's persistent logger.
    """
    from collective.taskqueue import taskqueue
    task_id = taskqueue.add(
        '{}/xmldirector-test'.format(plone.api.portal.get().absolute_url(1)))
    check_permission(permissions.ModifyPortalContent, self.context)
    IPersistentLogger(self.context).log('convert')
    handle = self.context.webdav_handle()
    # mkstemp() instead of the deprecated, race-prone mktemp():
    # mktemp only returns a name without creating the file, so another
    # process could claim the path before we open it.
    fd, zip_tmp = tempfile.mkstemp(suffix='.zip')
    os.close(fd)
    # Build the input zip expected by the converter.
    with fs.zipfs.ZipFS(zip_tmp, 'w') as zip_fp:
        with zip_fp.open('word/index.docx', 'wb') as fp:
            with handle.open('src/word/index.docx', 'rb') as fp_in:
                fp.write(fp_in.read())
    zip_out = convert_crex(zip_tmp)
    store_zip(self.context, zip_out, 'current')
    with close_and_delete(open(zip_out, 'rb')) as fp:
        self.request.response.setHeader(
            'content-length', str(os.path.getsize(zip_out)))
        self.request.response.setHeader('content-type', 'application/zip')
        self.request.response.setHeader(
            'content-disposition',
            'attachment; filename={}.zip'.format(self.context.getId()))
        self.request.response.write(fp.read())
def queue_sync(self):
    """Trigger an asynchronous person sync via the 'sync' task queue,
    unless one is already queued; always redirect back to the context."""
    redirect_url = self.context.absolute_url()
    # At most one sync may be pending at a time.
    queue_view_url = "%s/%s" % (
        "/".join(self.context.getPhysicalPath()), "sync_person")
    queue_size = len(getUtility(ITaskQueue, name='sync'))
    print("URL: %s" % (queue_view_url))
    print("Queue size: %s" % (queue_size))
    messages = IStatusMessage(self.request)
    if queue_size < 1:
        sync_id = taskqueue.add(url=queue_view_url, queue="sync")
        print("Run sync with ID: '%s'" % (sync_id))
        messages.add(u"Sync ID '%s' is now triggered." % (sync_id),
                     type=u"info")
    else:
        messages.add(
            u"There is one sync currently running. Try again later.",
            type=u"warning")
    raise Redirect(redirect_url)
def start(self, context):
    """Queues issue for sendout through collective.taskqueue."""
    view_path = '/'.join(context.getPhysicalPath() + (VIEW_NAME, ))
    jobid = taskqueue.add(view_path, queue=QUEUE_NAME)
    return jobid
def review_verified_objects(settings, event):
    """Get the new entries on settings and apply them on verified objects.

    :param settings: the modified registry record (unused; the diff is
        taken from the event).
    :param event: record-modified event carrying ``oldValue``/``newValue``.
    """
    new_entries = get_new_entries(event.oldValue, event.newValue)
    # if there's nothing new, we are done
    if not new_entries:
        return

    new_entries = '\n'.join(new_entries)
    catalog = api.portal.get_tool('portal_catalog')
    brains = catalog(object_provides=IStopWordsVerified.__identifier__)
    if not ASYNC:
        for brain in brains:
            verify_brain(brain, new_entries)
        return

    # split the work to review the verified objects for new stop words in
    # batches handled asynchronously by the @@review-objects view
    amount = 300
    # The view path is loop-invariant; compute it once.
    view_path = '/{0}/@@review-objects'.format(api.portal.get().id)
    for start in range(0, len(brains), amount):
        params = {
            'start': start,
            'size': amount,
            'entries': new_entries,
        }
        # logger.warn() is a deprecated alias for warning().
        logger.warning('Queued request {0} {1}'.format(view_path, params))
        taskqueue.add(view_path, params=params)
def __call__(self, jobinfo):
    """Queues job for zipping and records its pending state."""
    callback = '/'.join(getSite().getPhysicalPath() + (VIEW_NAME, ))
    job_id = taskqueue.add(
        callback,
        method='post',
        queue=QUEUE_NAME,
        payload=json.dumps(jobinfo),
    )
    # Track the queued task per job uid so progress can be inspected.
    zip_state = IZipState(jobinfo['uid'])
    zip_state['task'] = 'pending'
    zip_state['taskqueue_jobid'] = job_id
    zip_state['queued'] = time.time()
    return job_id
def enqueue_deferred_extraction(obj, action, filepath, additional_data,
                                attempt=1, token=None, path=None):
    """Queue an asynchronous publisher extraction task.

    :param obj: object to extract, or ``None`` when re-enqueuing by path.
    :param action: publisher action name, forwarded to the worker view.
    :param filepath: path of the file to process.
    :param additional_data: mapping of extra data, JSON-serialized.
    :param attempt: retry counter, forwarded as an ``:int`` param.
    :param token: version token; generated and annotated when absent.
    :param path: physical path; required together with token when obj is None.
    :raises ValueError: when obj is None but path or token is missing.
    """
    callback_path = '/'.join(api.portal.get().getPhysicalPath()
                             + ('taskqueue_publisher_extract_object',))
    if obj is None and (path is None or token is None):
        raise ValueError('When obj is None, path and token must be provided.')
    elif obj is not None:
        path = '/'.join(obj.getPhysicalPath())
        # Set a token on the object so that we can make sure that we extract
        # this version of the object later in the worker.
        if token is None:
            token = str(uuid.uuid4())
            IAnnotations(obj)[TOKEN_ANNOTATION_KEY] = token

    taskqueue.add(callback_path, params={
        'action': action,
        'filepath': filepath,
        'path': path,
        'attempt:int': attempt,
        'token': token,
        # One dict() copy suffices; the original dict(dict(...)) copied
        # the mapping twice for no effect.
        'additional_data': json.dumps(decode_for_json(dict(additional_data)))})
def enqueue_deferred_extraction(obj, action, filepath, additional_data,
                                attempt=1, token=None, path=None):
    """Queue an asynchronous publisher extraction task.

    :param obj: object to extract, or ``None`` when re-enqueuing by path.
    :param action: publisher action name, forwarded to the worker view.
    :param filepath: path of the file to process.
    :param additional_data: mapping of extra data, JSON-serialized.
    :param attempt: retry counter, forwarded as an ``:int`` param.
    :param token: version token; generated and annotated when absent.
    :param path: physical path; required together with token when obj is None.
    :raises ValueError: when obj is None but path or token is missing.
    """
    callback_path = '/'.join(api.portal.get().getPhysicalPath()
                             + ('taskqueue_publisher_extract_object', ))
    if obj is None and (path is None or token is None):
        raise ValueError('When obj is None, path and token must be provided.')
    elif obj is not None:
        path = '/'.join(obj.getPhysicalPath())
        # Set a token on the object so that we can make sure that we extract
        # this version of the object later in the worker.
        if token is None:
            token = str(uuid.uuid4())
            IAnnotations(obj)[TOKEN_ANNOTATION_KEY] = token

    taskqueue.add(callback_path, params={
        'action': action,
        'filepath': filepath,
        'path': path,
        'attempt:int': attempt,
        'token': token,
        # A single dict() copy is enough; dict(dict(...)) duplicated it.
        'additional_data': json.dumps(decode_for_json(dict(additional_data)))
    })
def _render(self):
    """Spool a new C-Rex conversion unless one is still in flight.

    Returns the spool metadata dict; answers 409 Conflict (with the
    current conversion info) while a conversion is running.
    """
    info = self.get_crex_info()
    status = info.get('status')
    # Guard clause: refuse while a conversion is neither finished
    # nor failed.
    if status and status not in (CREX_STATUS_ERROR, CREX_STATUS_SUCCESS):
        self.request.response.setStatus(409)  # Conflict
        data = info.copy()
        data['msg'] = u'Conversion request could not be spooled'
        return data

    task_id = taskqueue.add(
        url=self.context.absolute_url(1) + '/xmldirector-convert',
        method=self.request.REQUEST_METHOD,
        headers={'accept': 'application/json',
                 'content-type': 'application/json'},
        payload=self.request.BODY,
        params=dict(status=u'async', msg=u'Queued')
    )
    data = {'task_id': task_id,
            'created': datetime.datetime.utcnow().isoformat(),
            'creator': plone.api.user.get_current().getUserName(),
            'status': u'spooled'}
    self.set_crex_info(data)
    return data
def testAddToQueue(self):
    """Adding a task does not publish it before the transaction commits."""
    taskqueue.add("/", queue=self.queue)
    # Still empty: the task stays transaction-bound until commit.
    self.assertEqual(len(self.task_queue), 0)
def _queue_upload(obj, asset_changed=True):
    """Queue the @@soundcloud_uploader view for ``obj`` via the task
    queue.

    NOTE(review): the original docstring ("if descriptions is None, this
    is an add") did not match this signature — presumably copy-pasted;
    confirm against callers.
    """
    upload_url = '/'.join(obj.getPhysicalPath()) + '/@@soundcloud_uploader'
    add(upload_url, params={'asset_changed': asset_changed}, payload=None)