def enqueue(self, obj, *args, **kwargs): """Enqueue a function call or :doc:`job` instance. :param func: Function or :doc:`job <job>`. Must be serializable and importable by :doc:`worker <worker>` processes. :type func: callable | :doc:`kq.Job <job>` :param args: Positional arguments for the function. Ignored if **func** is a :doc:`job <job>` object. :param kwargs: Keyword arguments for the function. Ignored if **func** is a :doc:`job <job>` object. :return: Enqueued job. :rtype: :doc:`kq.Job <job>` """ timestamp = int(time.time() * 1000) if isinstance(obj, Job): job_id = uuid.uuid4().hex if obj.id is None else obj.id func = obj.func args = tuple() if obj.args is None else obj.args kwargs = {} if obj.kwargs is None else obj.kwargs timeout = self._timeout if obj.timeout is None else obj.timeout key = self._key if obj.key is None else obj.key partition = self._part if obj.partition is None else obj.partition assert is_str(job_id), 'Job.id must be a str' assert callable(func), 'Job.func must be a callable' assert is_iter(args), 'Job.args must be a list or tuple' assert is_dict(kwargs), 'Job.kwargs must be a dict' assert is_number(timeout), 'Job.timeout must be an int or float' assert is_none_or_bytes(key), 'Job.key must be a bytes' assert is_none_or_int(partition), 'Job.partition must be an int' else: assert callable(obj), 'first argument must be a callable' job_id = uuid.uuid4().hex func = obj args = args kwargs = kwargs timeout = self._timeout key = self._key partition = self._part job = Job(id=job_id, timestamp=timestamp, topic=self._topic, func=func, args=args, kwargs=kwargs, timeout=timeout, key=key, partition=partition) self._logger.info('Enqueueing {} ...'.format(job)) self._producer.send(self._topic, value=self._serializer(job), key=self._serializer(key) if key else None, partition=partition, timestamp_ms=timestamp) return job
def enqueue_with_key(self, key, obj, *args, **kwargs):
    """Place the function call (or the job) in the Kafka topic with the
    given key.

    For example:

    .. code-block:: python

        import requests

        from kq import Queue

        q = Queue()
        url = 'https://www.google.com'

        # You can queue the function call with its arguments
        job = q.enqueue_with_key('my_key', requests.get, url)

        # Or you can queue a kq.job.Job instance directly
        q.enqueue_with_key('my_key', job)

    :param key: The key for the Kafka message. Jobs with the same key are
        guaranteed to be placed in the same Kafka partition and processed
        sequentially. If a job object is enqueued, its key is overwritten.
    :type key: str
    :param obj: Function or the job object to enqueue. If a function is
        given, the function *must* be pickle-able.
    :type obj: callable | kq.job.Job
    :param args: Arguments for the function. Ignored if a KQ job object
        is given for the first argument instead.
    :type args: list
    :param kwargs: Keyword arguments for the function. Ignored if a KQ
        job instance is given as the first argument instead.
    :type kwargs: dict
    :return: The job that was enqueued.
    :rtype: kq.job.Job
    """
    if isinstance(obj, Job):
        func = obj.func
        args = obj.args
        kwargs = obj.kwargs
    else:
        func = obj

    if not callable(func):
        raise ValueError('{} is not a callable'.format(func))

    job = Job(
        id=str(uuid.uuid4()),
        timestamp=int(time.time()),
        topic=self._topic,
        func=func,
        args=args,
        kwargs=kwargs,
        timeout=self._timeout,
        key=key
    )
    self._producer.send(self._topic, dill.dumps(job), key=key)
    self._logger.info('Enqueued: {}'.format(job))
    return job
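
# A short sketch of the ordering guarantee documented above: jobs
# enqueued under the same key land in the same partition and are
# therefore processed sequentially. Queue setup is assumed to match
# the docstring example.
import requests

from kq import Queue

q = Queue()
for url in ['https://example.com/a', 'https://example.com/b']:
    # Same key -> same partition -> processed in enqueue order.
    q.enqueue_with_key('crawler', requests.get, url)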
def test_job():
    job = Job(1, 2, 3, 4, 5, 6, 7)
    assert job.id == 1
    assert job.timestamp == 2
    assert job.topic == 3
    assert job.func == 4
    assert job.args == 5
    assert job.kwargs == 6
    assert job.timeout == 7
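
# A hedged companion test: jobs cross Kafka as dill payloads, so a
# serialization round-trip should preserve the fields asserted above.
# This assumes Job is the plain namedtuple exercised by test_job and
# that the callable is importable (dill pickles such functions by
# reference, so equality holds after the round-trip).
import dill


def test_job_serialization_roundtrip():
    job = Job(
        id='test_id',
        timestamp=1000,
        topic='foo',
        func=sorted,  # any importable callable
        args=(1, 2),
        kwargs={'a': 3},
        timeout=10,
    )
    assert dill.loads(dill.dumps(job)) == job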
def enqueue(self, obj, *args, **kwargs): """Place the function call (or the job) in the Kafka topic. For example: .. code-block:: python import requests from kq import Queue q = Queue() # You can queue the function call with its arguments job = q.enqueue(requests.get, 'https://www.google.com') # Or you can queue a kq.job.Job instance directly q.enqueue(job) :param obj: Function or the job object to enqueue. If a function is given, the function *must* be pickle-able. :type obj: callable | kq.job.Job :param args: Arguments for the function. Ignored if a KQ job object is given for the first argument instead. :type args: list :param kwargs: Keyword arguments for the function. Ignored if a KQ job instance is given as the first argument instead. :type kwargs: dict :return: The job that was enqueued :rtype: kq.job.Job """ if isinstance(obj, Job): func = obj.func args = obj.args kwargs = obj.kwargs key = obj.key else: func = obj key = None if not callable(func): raise ValueError('{} is not a callable'.format(func)) job = Job(id=str(uuid.uuid4()), timestamp=int(time.time()), topic=self._topic, func=func, args=args, kwargs=kwargs, timeout=self._timeout, key=key) self._producer.send(self._topic, dill.dumps(job), key=key) self._logger.info('Enqueued: {}'.format(job)) return job
def enqueue(
    self, obj: Union[Callable[..., Any], Job], *args: Any, **kwargs: Any
) -> Job:
    """Enqueue a function call or :doc:`job` instance.

    :param obj: Function or :doc:`job <job>`. Must be serializable and
        importable by :doc:`worker <worker>` processes.
    :type obj: callable | :doc:`kq.Job <job>`
    :param args: Positional arguments for the function. Ignored if **obj**
        is a :doc:`job <job>` object.
    :param kwargs: Keyword arguments for the function. Ignored if **obj**
        is a :doc:`job <job>` object.
    :return: Enqueued job.
    :rtype: :doc:`kq.Job <job>`
    """
    timestamp = int(time.time() * 1000)

    if isinstance(obj, Job):
        if obj.id is None:
            job_id = uuid.uuid4().hex
        else:
            assert is_str(obj.id), "Job.id must be a str"
            job_id = obj.id

        if obj.args is None:
            args = tuple()
        else:
            assert is_seq(obj.args), "Job.args must be a list or tuple"
            args = tuple(obj.args)

        assert callable(obj.func), "Job.func must be a callable"
        func = obj.func
        kwargs = {} if obj.kwargs is None else obj.kwargs
        timeout = self._timeout if obj.timeout is None else obj.timeout
        key = self._key if obj.key is None else obj.key
        part = self._partition if obj.partition is None else obj.partition

        assert is_dict(kwargs), "Job.kwargs must be a dict"
        assert is_number(timeout), "Job.timeout must be an int or float"
        assert is_none_or_bytes(key), "Job.key must be a bytes"
        assert is_none_or_int(part), "Job.partition must be an int"
    else:
        assert callable(obj), "first argument must be a callable"
        job_id = uuid.uuid4().hex
        func = obj
        timeout = self._timeout
        key = self._key
        part = self._partition

    job = Job(
        id=job_id,
        timestamp=timestamp,
        topic=self._topic,
        func=func,
        args=args,
        kwargs=kwargs,
        timeout=timeout,
        key=key,
        partition=part,
    )
    self._logger.info(f"Enqueueing {job} ...")
    self._producer.send(
        self._topic,
        value=self._serializer(job),
        key=self._serializer(key) if key else None,
        partition=part,
        timestamp_ms=timestamp,
    )
    self._producer.flush()
    return job
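
# A hedged sketch of the per-job override behavior implemented above:
# Job fields left as None fall back to the queue defaults, while
# explicitly set fields win. Setup names follow the KQ 2.x API and are
# assumptions, not part of the method itself.
from kafka import KafkaProducer

from kq import Job, Queue


def add(a, b):
    return a + b


producer = KafkaProducer(bootstrap_servers='127.0.0.1:9092')
queue = Queue(topic='my_topic', producer=producer)

# Pin this job to partition 0 with a bytes key (the asserts above
# require bytes) and a 5 second timeout; id and timestamp are filled
# in automatically.
job = queue.enqueue(Job(func=add, args=[1, 2], timeout=5,
                        key=b'my_key', partition=0))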
def enqueue(self, obj, *args, **kwargs): """Serialize the function call and place it in the Kafka topic. For example: .. code-block:: python import requests from kq import Queue q = Queue() # You can queue the function call with its arguments job = q.enqueue(requests.get, 'https://www.google.com') # Or you can queue a kq.job.Job instance directly q.enqueue(job) :param obj: Function or the job object to enqueue. If a function is given, the function *must* be pickle-able. :type obj: callable | kq.job.Job :param args: Arguments for the function. Ignored if a KQ job object is given for the first argument instead. :type args: list :param kwargs: Keyword arguments for the function. Ignored if a KQ job instance is given as the first argument instead. :type kwargs: dict :param key: Queue the job with a key. Jobs queued with a specific key are processed in order they were queued. Setting it to None (default) disables this behaviour. :type key: str | unicode :return: The job that was enqueued :rtype: kq.job.Job """ key = None if isinstance(obj, Job): func = obj.func args = obj.args kwargs = obj.kwargs key = obj.key else: func = obj if not callable(func): raise ValueError( '{} is not a callable'.format(func) ) job = Job( id=str(uuid.uuid4()), timestamp=int(time.time()), topic=self._topic, func=func, args=args, kwargs=kwargs, timeout=self._timeout, key=key ) future = self._producer.send(self._topic, dill.dumps(job), key=key) try: future.get(timeout=self._timeout or 5) except KafkaError as e: self._logger.error('Queuing failed: {}', str(e)) return None self._logger.info('Enqueued: {}'.format(job)) return job