def run(self, name, *_, **kw):
    """Send a task by name via ``app.send_task`` and print the task id.

    ``args``/``kwargs`` may arrive as JSON strings and are decoded
    before sending.  ``expires`` accepts an int/float number of
    seconds or an ISO 8601 datetime string.
    """
    # Positional args.
    args = kw.get('args') or ()
    if isinstance(args, string_t):
        args = json.loads(args)

    # Keyword args.
    kwargs = kw.get('kwargs') or {}
    if isinstance(kwargs, string_t):
        kwargs = json.loads(kwargs)

    # Expires can be int/float (``or None`` maps falsy values to
    # "not set").
    expires = kw.get('expires') or None
    try:
        expires = float(expires)
    except (TypeError, ValueError):
        # or a string describing an ISO 8601 datetime.
        # (fix: the original wrapped this call in a try/except that
        # immediately re-raised -- a no-op; errors propagate unchanged)
        expires = maybe_iso8601(expires)

    res = self.app.send_task(name, args=args, kwargs=kwargs,
                             countdown=kw.get('countdown'),
                             serializer=kw.get('serializer'),
                             queue=kw.get('queue'),
                             exchange=kw.get('exchange'),
                             routing_key=kw.get('routing_key'),
                             eta=maybe_iso8601(kw.get('eta')),
                             expires=expires)
    self.out(res.id)
def _send_task(self, name, args=None, kwargs=None,
               countdown=None, serializer=None, queue=None,
               exchange=None, routing_key=None, eta=None,
               expires=None):
    """Send a task by name and print the resulting task id.

    JSON-encoded ``args``/``kwargs`` strings are decoded; ``expires``
    may be an int/float (seconds) or an ISO 8601 datetime string.
    """
    # arguments
    args = loads(args) if isinstance(args, string_t) else args
    kwargs = loads(kwargs) if isinstance(kwargs, string_t) else kwargs

    # Expires can be int/float.
    try:
        expires = float(expires)
    except (TypeError, ValueError):
        # or a string describing an ISO 8601 datetime.
        # (fix: removed the redundant inner try/except that only
        # re-raised the same exceptions)
        expires = maybe_iso8601(expires)

    # send the task and print the id.
    self.out(self.app.send_task(
        name,
        args=args or (), kwargs=kwargs or {},
        countdown=countdown, serializer=serializer,
        queue=queue, exchange=exchange,
        routing_key=routing_key,
        eta=maybe_iso8601(eta), expires=expires,
    ).id)
def _clean_json(self, field):
    """Validate that form *field* holds parseable JSON.

    Returns the raw (still-encoded) value unchanged; raises
    ``forms.ValidationError`` when it cannot be parsed.
    """
    raw = self.cleaned_data[field]
    try:
        loads(raw)
    except ValueError as exc:
        raise forms.ValidationError(_('Unable to parse JSON: %s') % exc)
    return raw
def restore_by_tag(self, tag, client=None, leftmost=False):
    """Re-queue the unacknowledged message identified by *tag*.

    Fetches the raw payload from the unacked hash and removes the tag
    from the unacked indices in one Redis pipeline round trip, then
    re-publishes the message if it was still present.
    """
    with self.channel.conn_or_acquire(client) as client:
        # _remove_from_indices appends its own commands to the same
        # pipeline; the first result (p) is the HGET payload.
        p, _, _ = self._remove_from_indices(
            tag, client.pipeline().hget(self.unacked_key, tag)).execute()
        if p:
            M, EX, RK = loads(bytes_to_str(p))  # json is unicode
            self.channel._do_restore_message(M, EX, RK, client, leftmost)
def test_is_JSON_serializable(self):
    """A subtask must survive a JSON dumps/loads round trip intact."""
    s = self.MockTask.subtask(
        (2, ), {'cache': True}, {'routing_key': 'CPU-bound'},
    )
    # tuples are not preserved by JSON, but this doesn't matter.
    s.args = list(s.args)
    roundtripped = json.loads(json.dumps(s))
    self.assertEqual(s, self.subtask(roundtripped))
def _get(self, queue, timeout=None):
    """Get the first available message from the queue.

    Before it does so it acquires a lock on the store so only one node
    reads at the same time. This is for read consistency

    Arguments:
        queue (str): The name of the queue.
        timeout (int): Optional seconds to wait for a response.
    """
    with self._queue_lock(queue):
        key = self._key_prefix(queue)
        logger.debug('Fetching key %s with index %s', key, self.index)
        try:
            result = self.client.read(
                key=key, recursive=True,
                index=self.index, timeout=self.timeout)
            if result is None:
                # Empty is not in the caught tuple below, so this
                # propagates to the caller as intended.
                raise Empty()
            # take the newest child node under the queue key
            item = result._children[-1]
            logger.debug('Removing key {0}'.format(item['key']))
            msg_content = loads(item['value'])
            self.client.delete(key=item['key'])
            return msg_content
        except (TypeError, IndexError, etcd.EtcdError) as error:
            # any read/parse failure is reported as an empty queue
            logger.debug('_get failed: {0}:{1}'.format(type(error), error))
            raise Empty()
def _get(self, queue):
    """Pop the next message off *queue*, scanning priorities in order.

    Raises ``Empty`` when no priority sub-list holds a message.
    """
    with self.conn_or_acquire() as client:
        for priority in PRIORITY_STEPS:
            raw = client.rpop(self._q_for_pri(queue, priority))
            if raw:
                return loads(bytes_to_str(raw))
        raise Empty()
def _message_to_python(self, message, queue_name, queue):
    """Convert a raw (async-client) SQS message into a kombu payload.

    The body may be base64-encoded by some producers; fall back to the
    plain body when base64 decoding fails.
    """
    try:
        body = base64.b64decode(message['Body'].encode())
    except TypeError:
        body = message['Body'].encode()
    payload = loads(bytes_to_str(body))
    if queue_name in self._noack_queues:
        # no-ack queue: remove the message from SQS immediately
        queue = self._new_queue(queue_name)
        self.asynsqs.delete_message(queue, message['ReceiptHandle'])
    else:
        try:
            properties = payload['properties']
            delivery_info = payload['properties']['delivery_info']
        except KeyError:
            # json message not sent by kombu?
            delivery_info = {}
            properties = {'delivery_info': delivery_info}
            payload.update({
                'body': bytes_to_str(body),
                'properties': properties,
            })
        # set delivery tag to SQS receipt handle
        delivery_info.update({
            'sqs_message': message, 'sqs_queue': queue,
        })
        properties['delivery_tag'] = message['ReceiptHandle']
    return payload
def _get(self, queue, timeout=None):
    """Get the first available message from the queue.

    Before it does so it acquires a lock on the Key/Value store so
    only one node reads at the same time. This is for read consistency
    """
    with self._queue_lock(queue, raising=Empty):
        key = '{0}/msg/'.format(self._key_prefix(queue))
        logger.debug('Fetching key %s with index %s', key, self.index)
        # blocking read: waits up to self.timeout for changes past the
        # last seen index
        self.index, data = self.client.kv.get(
            key=key, recurse=True,
            index=self.index, wait=self.timeout,
        )
        try:
            if data is None:
                raise Empty()
            logger.debug('Removing key %s with modifyindex %s',
                         data[0]['Key'], data[0]['ModifyIndex'])
            # check-and-set delete: a concurrent consumer cannot take
            # the same message
            self.client.kv.delete(key=data[0]['Key'],
                                  cas=data[0]['ModifyIndex'])
            return loads(data[0]['Value'])
        except TypeError:
            # malformed entry; fall through and report empty
            pass
    raise Empty()
def _get(self, queue):
    """Fetch and delete one message document from *queue*.

    Raises ``Empty`` when the view query returns no rows.
    """
    response = self._query(queue, limit=1)
    if not response:
        raise Empty()
    doc = response.rows[0].value
    self.client.delete(doc)
    return loads(bytes_to_str(doc['payload']))
def test_prepare(self):
    """_prepare with the json serializer yields utf-8 JSON text."""
    payload = {'the quick brown fox': 'jumps over the lazy dog'}
    producer = Producer(self.connection.channel(), self.exchange,
                        serializer='json')
    body, content_type, encoding = producer._prepare(payload, headers={})
    self.assertDictEqual(payload, json.loads(body))
    self.assertEqual(content_type, 'application/json')
    self.assertEqual(encoding, 'utf-8')
def _get(self, queue):
    """Pop one message from the in-memory *queue* or raise ``Empty``."""
    raw = self._get_queue(queue).get()
    if raw is None:
        raise Empty()
    return loads(bytes_to_str(raw))
def test_prepare(self):
    """The json serializer must produce utf-8 encoded JSON."""
    payload = {'the quick brown fox': 'jumps over the lazy dog'}
    producer = Producer(self.connection.channel(), self.exchange,
                        serializer='json')
    body, ctype, encoding = producer._prepare(payload, headers={})
    assert json.loads(body) == payload
    assert ctype == 'application/json'
    assert encoding == 'utf-8'
def run_tasks(self, request, queryset):
    """Admin action: immediately run every selected periodic task.

    Reports the number of tasks dispatched via ``message_user``.
    """
    self.celery_app.loader.import_default_modules()
    tasks = [(self.celery_app.tasks.get(task.task),
              loads(task.args),
              loads(task.kwargs)) for task in queryset]
    # Fix: guard against tasks no longer registered with the app --
    # the original called ``task.delay`` unconditionally and raised
    # AttributeError when ``tasks.get`` returned None.
    task_ids = [task.delay(*args, **kwargs)
                for task, args, kwargs in tasks if task]
    tasks_run = len(task_ids)
    self.message_user(
        request,
        _('{0} task{1} {2} successfully run').format(
            tasks_run,
            pluralize(tasks_run),
            pluralize(tasks_run, _('was,were')),
        ),
    )
def _message_to_python(self, message, queue_name, queue):
    """Decode an SQS *message* body into a kombu payload dict.

    Messages on no-ack queues are deleted from SQS immediately;
    otherwise the SQS handles are attached to delivery_info so the
    message can be acknowledged later.
    """
    payload = loads(bytes_to_str(message.get_body()))
    if queue_name not in self._noack_queues:
        payload['properties']['delivery_info'].update({
            'sqs_message': message,
            'sqs_queue': queue,
        })
    else:
        queue.delete_message(message)
    return payload
def _get(self, queue, timeout=None):
    """Try to retrieve a single message off ``queue``."""
    msg = self.queue_service.receive_queue_message(
        self.entity_name(queue), timeout=timeout, peek_lock=False)
    if msg.body is None:
        raise Empty()
    return loads(bytes_to_str(msg.body))
def _restore(self, message, leftmost=False):
    """Re-queue an unacknowledged message (Redis virtual ack).

    Atomically fetches and removes the payload stored under the
    message's delivery tag, then re-publishes it to its original
    exchange and routing key.
    """
    tag = message.delivery_tag
    with self.conn_or_acquire() as client:
        # HGET + HDEL in one pipeline so no other consumer can restore
        # the same tag twice.
        P, _ = client.pipeline() \
            .hget(self.unacked_key, tag) \
            .hdel(self.unacked_key, tag) \
            .execute()
        if P:
            M, EX, RK = loads(bytes_to_str(P))  # json is unicode
            self._do_restore_message(M, EX, RK, client, leftmost)
def __init__(self, model, app=None):
    """Initialize the model entry."""
    self.app = app or current_app._get_current_object()
    self.name = model.name
    self.task = model.task
    try:
        self.schedule = model.schedule
    except model.DoesNotExist:
        logger.error(
            'Disabling schedule %s that was removed from database',
            self.name,
        )
        self._disable(model)
    try:
        # args/kwargs are stored on the model as JSON text
        self.args = loads(model.args or '[]')
        self.kwargs = loads(model.kwargs or '{}')
    except ValueError as exc:
        logger.exception(
            'Removing schedule %s for argument deseralization error: %r',
            self.name, exc,
        )
        self._disable(model)
    # copy only the routing options actually set on the model
    self.options = {}
    for option in ['queue', 'exchange', 'routing_key',
                   'expires', 'priority']:
        value = getattr(model, option)
        if value is None:
            continue
        self.options[option] = value
    self.total_run_count = model.total_run_count
    self.model = model
    if not model.last_run_at:
        model.last_run_at = self._default_now()
    last_run_at = model.last_run_at
    # make the stored datetime timezone-aware unless explicitly disabled
    if getattr(settings, 'DJANGO_CELERY_BEAT_TZ_AWARE', True):
        last_run_at = make_aware(last_run_at)
    self.last_run_at = last_run_at
def _get(self, queue):
    """Fetch and remove one message document from *queue*.

    Raises ``Empty`` when no document is available.
    """
    rows = self._query(queue, limit=1)
    if not rows:
        raise Empty()
    try:
        doc = rows[0]['value']
    except LookupError:
        raise Empty()
    self.client.delete(doc['_id'])
    return loads(bytes_to_str(doc['payload']))
def _message_to_python(self, message, queue_name, queue):
    """Decode an SQS *message* into a kombu payload dict.

    No-ack queues delete the SQS message right away; otherwise the
    SQS handles are recorded and the receipt handle doubles as the
    delivery tag for later acknowledgement.
    """
    payload = loads(bytes_to_str(message.get_body()))
    if queue_name not in self._noack_queues:
        payload['properties']['delivery_info'].update({
            'sqs_message': message,
            'sqs_queue': queue,
        })
        # set delivery tag to SQS receipt handle
        payload['properties']['delivery_tag'] = message.receipt_handle
    else:
        queue.delete_message(message)
    return payload
def _get(self, queue, timeout=None):
    """Try to retrieve a single message off ``queue``."""
    msg = self.queue_service.receive_queue_message(
        self.entity_name(queue),
        timeout=timeout or self.wait_time_seconds,
        peek_lock=self.peek_lock)
    if msg.body is None:
        raise Empty()
    return loads(bytes_to_str(msg.body))
def _approve_authorize_request(cls, response):
    """Submit the OAuth consent form and return the embedded settings.

    *response* must be the HTML consent page; the JSON blob inside the
    ``js-hypothesis-settings`` script tag of the post-submit page is
    parsed and returned.
    """
    # Check this is the kind of response we expect
    assert response.headers["Content-Type"] == Any.string.matching("^text/html")
    assert response.text == Any.string.containing(
        "requesting access to your Hypothesis account"
    )
    result = response.form.submit()
    js_settings = result.html.find("script", class_="js-hypothesis-settings")
    return json.loads(js_settings.string)
def run_tasks(self, request, queryset):
    """Admin action: immediately dispatch every selected periodic task.

    Tasks that are no longer registered with the app are skipped; the
    count of dispatched tasks is reported via ``message_user``.
    """
    self.celery_app.loader.import_default_modules()
    resolved = [
        (self.celery_app.tasks.get(entry.task),
         loads(entry.args),
         loads(entry.kwargs))
        for entry in queryset
    ]
    task_ids = [
        task.delay(*args, **kwargs)
        for task, args, kwargs in resolved
        if task and task.delay
    ]
    tasks_run = len(task_ids)
    self.message_user(
        request,
        _('{0} task{1} {2} successfully run').format(
            tasks_run,
            pluralize(tasks_run),
            pluralize(tasks_run, _('was,were')),
        ),
    )
def __init__(self, model, app=None):
    """Initialize the model entry."""
    self.app = app or current_app._get_current_object()
    self.name = model.name
    self.task = model.task
    try:
        self.schedule = model.schedule
    except model.DoesNotExist:
        logger.error(
            'Disabling schedule %s that was removed from database',
            self.name,
        )
        self._disable(model)
    try:
        # args/kwargs are stored on the model as JSON text
        self.args = loads(model.args or '[]')
        self.kwargs = loads(model.kwargs or '{}')
    except ValueError as exc:
        logger.exception(
            'Removing schedule %s for argument deseralization error: %r',
            self.name, exc,
        )
        self._disable(model)
    # copy only the routing options actually set on the model
    self.options = {}
    for option in [
        'queue', 'exchange', 'routing_key', 'expires', 'priority'
    ]:
        value = getattr(model, option)
        if value is None:
            continue
        self.options[option] = value
    # headers are stored as JSON text too
    self.options['headers'] = loads(model.headers or '{}')
    self.total_run_count = model.total_run_count
    self.model = model
    if not model.last_run_at:
        model.last_run_at = self._default_now()
    self.last_run_at = model.last_run_at
def callback(self, message):
    """Pub/Sub callback: persist one event, then ack/nack the message.

    On storage failure the message is nack'ed for redelivery; on
    success it is ack'ed and the stored-events counter incremented.
    """
    self.log_debug("store event")
    event_dict = json.loads(message.data)
    try:
        self.event_store.store(event_dict)
    except Exception:
        # message fix: original log text read "Could add event to store"
        logger.exception("Could not add event to store %s",
                         self.event_store.name)
        message.nack()
    else:
        message.ack()
        self.inc_counter("stored_events", event_dict['_zentral']['type'])
def callback(self, message):
    """Pub/Sub callback: persist one event, then ack/nack the message.

    On storage failure the message is nack'ed for redelivery; on
    success it is ack'ed and (when Prometheus is set up) the
    stored-events counter incremented.
    """
    self.log_debug("store event")
    event_dict = json.loads(message.data)
    try:
        self.event_store.store(event_dict)
    except Exception:
        # message fix: original log text read "Could add event to store"
        logger.exception("Could not add event to store %s",
                         self.event_store.name)
        message.nack()
    else:
        message.ack()
        if self.prometheus_setup_done:
            self.stored_events_counter.labels(
                event_dict['_zentral']['type']).inc()
def test_prepare_compression(self):
    """zlib compression is recorded in the headers and reversible."""
    import zlib
    payload = {'the quick brown fox': 'jumps over the lazy dog'}
    producer = Producer(self.connection.channel(), self.exchange,
                        serializer='json')
    headers = {}
    body, ctype, encoding = producer._prepare(
        payload, compression='zlib', headers=headers)
    assert ctype == 'application/json'
    assert encoding == 'utf-8'
    assert headers['compression'] == 'application/x-gzip'
    assert json.loads(zlib.decompress(body).decode('utf-8')) == payload
def _restore(self, message, leftmost=False):
    """Re-queue an unacked message (Redis).

    Defers to the base-class implementation when ack emulation is
    disabled; otherwise atomically fetches and removes the payload
    stored under the delivery tag and re-publishes it.
    """
    if not self.ack_emulation:
        return super(Channel, self)._restore(message)
    tag = message.delivery_tag
    with self.conn_or_acquire() as client:
        with client.pipeline() as pipe:
            # HGET + HDEL in one pipeline so the tag is restored at
            # most once even with concurrent consumers
            P, _ = pipe.hget(self.unacked_key, tag) \
                .hdel(self.unacked_key, tag) \
                .execute()
        if P:
            M, EX, RK = loads(bytes_to_str(P))  # json is unicode
            self._do_restore_message(M, EX, RK, client, leftmost)
def _send_task(self, name, args=None, kwargs=None,
               countdown=None, serializer=None, queue=None,
               exchange=None, routing_key=None, eta=None,
               expires=None, **_):
    """Send a task by name and print the resulting task id.

    JSON-encoded ``args``/``kwargs`` strings are decoded; ``expires``
    may be an int/float (seconds) or an ISO 8601 datetime string.
    Extra keyword arguments are accepted and ignored.
    """
    # arguments
    args = loads(args) if isinstance(args, string_t) else args
    kwargs = loads(kwargs) if isinstance(kwargs, string_t) else kwargs

    # Expires can be int/float.
    try:
        expires = float(expires)
    except (TypeError, ValueError):
        # or a string describing an ISO 8601 datetime.
        # (fix: removed the redundant inner try/except that only
        # re-raised the same exceptions)
        expires = maybe_iso8601(expires)

    # send the task and print the id.
    self.out(
        self.app.send_task(
            name,
            args=args or (), kwargs=kwargs or {},
            countdown=countdown, serializer=serializer,
            queue=queue, exchange=exchange,
            routing_key=routing_key,
            eta=maybe_iso8601(eta), expires=expires,
        ).id)
def _get(self, queue): """Try to retrieve a single message off ``queue``.""" q = self._new_queue(queue) rs = q.pop(1) if rs["items"]: m = rs["items"][0] payload = loads(bytes_to_str(m["body"])) if queue in self._noack_queues: q.message(m["id"]).delete() else: payload["properties"]["delivery_info"].update({"slmq_message_id": m["id"], "slmq_queue_name": q.name}) return payload raise Empty()
def _get_one_delivery_tag(self, n='test_uniq_tag'):
    """Publish one message over Redis and return its delivery tag.

    Declares exchange/queue/binding named *n*, publishes a test
    message, then pops the raw payload straight off the Redis list to
    inspect the resulting delivery tag.
    """
    with self.create_connection() as conn1:
        chan = conn1.default_channel
        chan.exchange_declare(n)
        chan.queue_declare(n)
        chan.queue_bind(n, n, n)
        msg = chan.prepare_message('quick brown fox')
        chan.basic_publish(msg, n, n)
        # read the raw payload directly from the backing Redis list
        q, payload = chan.client.brpop([n])
        self.assertEqual(q, n)
        self.assertTrue(payload)
        pymsg = chan.message_to_python(loads(payload))
        return pymsg.delivery_tag
def fernet_loads(encoded_message):
    """Decrypt *encoded_message* when Fernet is configured, then JSON-decode.

    Messages that fail decryption are treated as plain (unencrypted)
    JSON; statsd counters record which path was taken.
    """
    statsd.incr('basket.news.celery.fernet_loads')
    if not FERNET:
        statsd.incr('basket.news.celery.fernet_loads.unencrypted')
        return json.loads(encoded_message)
    try:
        encoded_message = FERNET.decrypt(force_bytes(encoded_message))
    except InvalidToken:
        statsd.incr('basket.news.celery.fernet_loads.unencrypted')
    else:
        statsd.incr('basket.news.celery.fernet_loads.encrypted')
    return json.loads(encoded_message)
def _parse_job(self, job):
    """Decode a beanstalkd *job* into ``(payload, tube_name)``.

    Jobs that fail to decode or stat are buried for later inspection;
    successfully parsed jobs are deleted from the tube.  Raises
    ``Empty`` when *job* is falsy.
    """
    item, dest = None, None
    if job:
        try:
            item = loads(bytes_to_str(job.body))
            dest = job.stats()['tube']
        except Exception:
            job.bury()
        else:
            job.delete()
    else:
        raise Empty()
    return item, dest
def __init__(self, model, app=None):
    """Initialize the model entry."""
    self.app = app or current_app._get_current_object()
    self.name = model.name
    self.task = model.task
    try:
        self.schedule = model.schedule
    except model.DoesNotExist:
        logger.error(
            'Disabling schedule %s that was removed from database',
            self.name,
        )
        self._disable(model)
    try:
        # args/kwargs are stored on the model as JSON text
        self.args = loads(model.args or '[]')
        self.kwargs = loads(model.kwargs or '{}')
    except ValueError as exc:
        logger.exception(
            'Removing schedule %s for argument deseralization error: %r',
            self.name, exc,
        )
        self._disable(model)
    self.options = {
        'queue': model.queue,
        'exchange': model.exchange,
        'routing_key': model.routing_key,
        'expires': model.expires,
    }
    self.total_run_count = model.total_run_count
    self.model = model
    if not model.last_run_at:
        model.last_run_at = self._default_now()
    last_run_at = model.last_run_at
    # convert to an aware datetime when the project runs tz-aware
    if settings.DJANGO_CELERY_BEAT_TZ_AWARE:
        last_run_at = make_aware(last_run_at)
    self.last_run_at = last_run_at
def __init__(self, model, app=None):
    """Initialize the entry from a database model instance."""
    self.app = app or current_app._get_current_object()
    self.name = model.name
    self.task = model.task
    try:
        self.schedule = model.schedule
    except model.DoesNotExist:
        logger.error(
            'Disabling schedule %s that was removed from database',
            self.name,
        )
        self._disable(model)
    try:
        # args/kwargs are stored on the model as JSON text
        self.args = loads(model.args or '[]')
        self.kwargs = loads(model.kwargs or '{}')
    except ValueError as exc:
        logger.exception(
            'Removing schedule %s for argument deseralization error: %r',
            self.name, exc,
        )
        self._disable(model)
    self.options = {
        'queue': model.queue,
        'exchange': model.exchange,
        'routing_key': model.routing_key,
        'expires': model.expires,
    }
    self.total_run_count = model.total_run_count
    self.model = model
    if not model.last_run_at:
        model.last_run_at = self._default_now()
    # the scheduler works in naive time: strip tzinfo, but assert the
    # conversion didn't shift the wall-clock hour
    orig = self.last_run_at = model.last_run_at
    if not is_naive(self.last_run_at):
        self.last_run_at = self.last_run_at.replace(tzinfo=None)
    assert orig.hour == self.last_run_at.hour  # timezone sanity
def __init__(self, model, app=None):
    """Initialize the model entry."""
    self.app = app or current_app._get_current_object()
    # entry name embeds the PK to stay unique across renames
    self.name = "{}_{}".format(model.name, model.pk)
    self.task = model.task
    try:
        # Nautobot scheduled jobs pass args/kwargs as constructed objects,
        # but Celery built-in jobs such as celery.backend_cleanup pass
        # them as JSON to be parsed
        self.args = model.args if isinstance(
            model.args, (tuple, list)) else loads(model.args or "[]")
        self.kwargs = model.kwargs if isinstance(
            model.kwargs, dict) else loads(model.kwargs or "{}")
    except (TypeError, ValueError) as exc:
        logger.exception(
            "Removing schedule %s for argument deserialization error: %s",
            self.name, exc)
        self._disable(model)
    try:
        self.schedule = model.schedule
    except model.DoesNotExist:
        logger.error(
            "Disabling schedule %s that was removed from database",
            self.name,
        )
        self._disable(model)
    self.options = {}
    if model.queue:
        self.options["queue"] = model.queue
    self.options["headers"] = {}
    self.total_run_count = model.total_run_count
    self.model = model
    if not model.last_run_at:
        model.last_run_at = self._default_now()
    self.last_run_at = model.last_run_at
def run_tasks(self, request, queryset):
    """Admin action: run each selected task, honouring its queue.

    Aborts with an error message if any selected task is no longer
    registered with the Celery app.
    """
    self.celery_app.loader.import_default_modules()
    tasks = [(
        self.celery_app.tasks.get(task.task),
        loads(task.args),
        loads(task.kwargs),
        task.queue,
    ) for task in queryset]

    if any(t[0] is None for t in tasks):
        # locate the first unregistered task so we can name it
        for i, t in enumerate(tasks):
            if t[0] is None:
                break

        # variable "i" will be set because list "tasks" is not empty
        not_found_task_name = queryset[i].task

        self.message_user(
            request,
            _('task "{0}" not found'.format(not_found_task_name)),
            level=messages.ERROR,
        )
        return

    # dispatch to the task's configured queue when one is set
    task_ids = [
        task.apply_async(args=args, kwargs=kwargs, queue=queue)
        if queue and len(queue)
        else task.apply_async(args=args, kwargs=kwargs)
        for task, args, kwargs, queue in tasks
    ]
    tasks_run = len(task_ids)
    self.message_user(
        request,
        _("{0} task{1} {2} successfully run").format(
            tasks_run,
            pluralize(tasks_run),
            pluralize(tasks_run, _("was,were")),
        ),
    )
def _get(self, queue): """Try to retrieve a single message off ``queue``.""" q = self._new_queue(queue) rs = q.pop(1) if rs['items']: m = rs['items'][0] payload = loads(bytes_to_str(m['body'])) if queue in self._noack_queues: q.message(m['id']).delete() else: payload['properties']['delivery_info'].update({ 'slmq_message_id': m['id'], 'slmq_queue_name': q.name}) return payload raise Empty()
def _get(self, queue, timeout=None):
    """Try to retrieve a single message off ``queue``."""
    q = self._ensure_queue(queue)
    page = q.receive_messages(messages_per_page=1, timeout=timeout)
    try:
        message = next(page)
    except StopIteration:
        raise Empty()
    body = loads(message.content)
    q.delete_message(message=message)
    return body
def test_datetime(self):
    """Round-trip datetime/date/time values through dumps/loads.

    NOTE(review): this expects 'datetime' and 'tz' to come back as
    real datetime objects while 'time' comes back as an ISO string --
    presumably the serializer under test registers datetime decoding
    hooks; confirm against its documentation.
    """
    now = datetime.utcnow()
    now_utc = now.replace(tzinfo=pytz.utc)
    serialized = loads(dumps({
        'datetime': now, 'tz': now_utc,
        'date': now.date(), 'time': now.time()},
    ))
    assert serialized == {
        'datetime': now,
        'tz': now_utc,
        'time': now.time().isoformat(),
        # dates are restored as midnight datetimes
        'date': datetime(now.year, now.month, now.day, 0, 0, 0, 0),
    }
def toutiao_news_api(url):
    """Fetch the Toutiao news feed at *url* and print each article.

    Prints title, tag, source and full link for every non-ad entry
    that has a title.  Entries missing keys are skipped safely.
    """
    toutiao_data = requests.get(url).text
    data = json.loads(toutiao_data)
    items = data['data']
    link_head = 'http://toutiao.com'
    for n in items:
        print(n)
        # Fix: use .get() -- entries can lack keys (the original
        # already guarded 'title' but then indexed n['tag'] directly,
        # which raised KeyError for untagged entries).
        tag = n.get('tag')
        if 'title' in n and tag != 'ad':
            print(n['title'])
            print(tag)
            print(n.get('source'))
            source_url = n.get('source_url')
            if source_url:
                print(link_head + source_url)
def get_table(self, exchange):
    """Return the routing table stored for *exchange*.

    Reads ``<exchange>.exchange`` from ``self.control_folder`` and
    returns its JSON content as a list of tuples.  Returns ``[]``
    when the file does not exist; raises ``ChannelError`` on other
    OS-level failures.
    """
    filename = '{}.exchange'.format(exchange)
    filename = os.path.join(self.control_folder, filename)
    try:
        # Fix: the original used try/finally with f.close(), which
        # raised NameError when open() itself failed (f unbound) and
        # leaked the handle on success paths before Python GC ran.
        with open(filename, 'r') as f:
            exchange_table = loads(bytes_to_str(f.read()))
    except FileNotFoundError:
        return []
    except OSError:
        # Fix: include the actual filename instead of '(unknown)'
        raise ChannelError(f'Cannot open {filename!r}')
    return [tuple(q) for q in exchange_table]
def test_publish(self):
    """publish() must JSON-encode the body and route via the exchange."""
    channel = self.connection.channel()
    producer = Producer(channel, self.exchange, serializer='json')
    body = {'the quick brown fox': 'jumps over the lazy dog'}
    m, exc, rkey = producer.publish(body, routing_key='process')
    self.assertIn('prepare_message', channel)
    self.assertIn('basic_publish', channel)
    self.assertDictEqual(body, json.loads(m['body']))
    self.assertDictContainsSubset(
        {'content_type': 'application/json',
         'content_encoding': 'utf-8',
         'priority': 0}, m)
    self.assertDictContainsSubset({'delivery_mode': 2}, m['properties'])
    self.assertEqual(exc, producer.exchange.name)
    self.assertEqual(rkey, 'process')
def _get(self, queue, timeout=None):
    """Try to retrieve a single message off ``queue``."""
    q = self._ensure_queue(queue)
    batch = self.queue_service.get_messages(
        q, num_messages=1, timeout=timeout)
    if not batch:
        raise Empty()
    message = batch[0]
    body = loads(self.queue_service.decode_function(message.content))
    self.queue_service.delete_message(q, message.id, message.pop_receipt)
    return body
def test_publish(self):
    """publish() must JSON-encode the body and route via the exchange."""
    channel = self.connection.channel()
    producer = Producer(channel, self.exchange, serializer='json')
    body = {'the quick brown fox': 'jumps over the lazy dog'}
    m, exc, rkey = producer.publish(body, routing_key='process')
    assert 'prepare_message' in channel
    assert 'basic_publish' in channel
    assert json.loads(m['body']) == body
    assert m['content_type'] == 'application/json'
    assert m['content_encoding'] == 'utf-8'
    assert m['priority'] == 0
    assert m['properties']['delivery_mode'] == 2
    assert exc == producer.exchange.name
    assert rkey == 'process'
def run_task(request, task_id):
    """
    get:
    Runs a task with the given task id
    """
    app.loader.import_default_modules()
    tasks = PeriodicTask.objects.filter(id=task_id)
    celery_task = [(app.tasks.get(task.task), loads(task.kwargs))
                   for task in tasks]
    # Fix: the original tested ``task is None`` over the queryset rows,
    # which are model instances and never None -- the lookup that can
    # actually fail is ``app.tasks.get(...)`` above.
    for model, (registered, _kwargs) in zip(tasks, celery_task):
        if registered is None:
            not_found_name = model.name
            return JsonResponse(
                {"message": f"No valid task for {not_found_name}"})
    task_ids = [task.apply_async(kwargs=kwargs)
                for task, kwargs in celery_task]
    return JsonResponse({"message": "success"})
def callback(self, message):
    """Pub/Sub callback: enrich one event and republish the results.

    On any failure the message is nack'ed after a 1s pause so it will
    be redelivered; on success it is ack'ed and counters updated.
    """
    event_dict = json.loads(message.data)
    try:
        for event in self.enrich_event(event_dict):
            new_message = json.dumps(
                event.serialize(machine_metadata=False)).encode("utf-8")
            self.publisher_client.publish(
                self.enriched_events_topic, new_message)
            if self.prometheus_setup_done:
                self.produced_events_counter.labels(
                    event.event_type).inc()
    except Exception as exception:
        logger.exception("Requeuing message with 1s delay: %s", exception)
        # brief pause so a persistent failure doesn't spin hot
        time.sleep(1)
        message.nack()
    else:
        message.ack()
        if self.prometheus_setup_done:
            self.enriched_events_counter.labels(
                event_dict['_zentral']['type']).inc()
def test_datetime(self):
    """datetime/date/time values serialize to their ISO 8601 forms."""
    now = datetime.utcnow()
    now_utc = now.replace(tzinfo=pytz.utc)
    stripped = datetime(*now.timetuple()[:3])
    roundtrip = loads(dumps({
        'datetime': now,
        'tz': now_utc,
        'date': now.date(),
        'time': now.time()},
    ))
    expected = {
        'datetime': now.isoformat(),
        'tz': '{0}Z'.format(now_utc.isoformat().split('+', 1)[0]),
        'time': now.time().isoformat(),
        'date': stripped.isoformat(),
    }
    assert roundtrip == expected
def test_on_decode_error_callback(self):
    """A decode failure must invoke the on_decode_error callback."""
    channel = self.connection.channel()
    queue = Queue('qname1', self.exchange, 'rkey')
    seen = []

    def on_decode_error(msg, exc):
        seen.append((msg.body, exc))

    consumer = Consumer(channel, [queue],
                        on_decode_error=on_decode_error)
    consumer.channel.throw_decode_error = True
    consumer._receive_callback({'foo': 'bar'})
    assert seen
    body, error = seen[0]
    assert json.loads(body) == {'foo': 'bar'}
    assert isinstance(error, ValueError)
def test_on_decode_error_callback(self):
    """A decode failure must invoke the on_decode_error callback."""
    channel = self.connection.channel()
    queue = Queue('qname1', self.exchange, 'rkey')
    seen = []

    def on_decode_error(msg, exc):
        seen.append((msg.body, exc))

    consumer = Consumer(channel, [queue],
                        on_decode_error=on_decode_error)
    consumer.channel.throw_decode_error = True
    consumer._receive_callback({'foo': 'bar'})
    self.assertTrue(seen)
    body, error = seen[0]
    self.assertEqual(json.loads(body), {'foo': 'bar'})
    self.assertIsInstance(error, ValueError)