Example #1
def backup_and_extract_content(sender, instance, **kwargs):
    """Faz backup e extrai conteúdo de um arquivo após sua criação."""
    if bool(os.getenv("ENABLE_SIGNAL_FOR_FILE_TASKS", False)):
        from .tasks import backup_file, content_from_file

        if instance.s3_url is None or instance.content is None:
            pipeline([
                backup_file.message(instance.pk),
                content_from_file.message_with_options(pipe_ignore=True,
                                                       args=(instance.pk, )),
            ]).run()
Example #2
def save_file(url, content_type, object_id, checksum=None):
    file_, created = File.objects.get_or_create(
        url=url,
        content_type=content_type,
        object_id=object_id,
        checksum=checksum,
    )
    if file_.s3_url is None or file_.content is None:
        pipeline([
            backup_file.message(file_.pk),
            content_from_file.message_with_options(pipe_ignore=True,
                                                   args=(file_.pk, )),
        ]).run()
Example #3
def find_something_in_something_and_make_some_improvements():
    pipe = dramatiq.pipeline([
        prepare_big_random_list.message(),
        find_smallest_in_big_array.message(),
        make_smallest_a_bit_bigger.message()
    ]).run()

    return pipe.get_result(block=True, timeout=200000)
Example #4
    def handle(self, *args, **options):
        if options.get("date"):
            # convert to datetime to verify the expected YYYY-MM-DD format
            target_date = datetime.strptime(options.get("date"), "%Y-%m-%d").date()
        else:
            # default to yesterday
            target_date = date.today() - timedelta(days=1)
        pipeline(
            [
                get_city_council_updates.message(target_date.strftime("%Y-%m-%d")),
                distribute_city_council_objects_to_sync.message(),
            ]
        ).run()

        self.stdout.write(
            f"Syncronização com a Câmara iniciada (data alvo: {target_date})."
        )
Example #5
def scrape_contracts(limit=None, date_of_grant_start=None, date_of_grant_end=None):
    offset = 0
    total_records = 0
    default_limit = 1000

    while offset <= total_records:
        real_limit = limit or default_limit
        contracts = get_contracts(
            offset,
            real_limit,
            date_of_grant_start=date_of_grant_start,
            date_of_grant_end=date_of_grant_end,
        )

        if not total_records:
            total_records = limit if limit else contracts["recordsFiltered"]

        for contract in contracts["data"]:
            dramatiq.pipeline(
                [expand_contract.message(contract), update_contract.message()]
            ).run()

        offset += real_limit
Example #6
def test_pipelines_flatten_child_pipelines(stub_broker):
    # Given an actor that adds two numbers together
    @dramatiq.actor
    def add(x, y):
        return x + y

    # When I pipe a message intended for that actor and another pipeline together
    pipe = pipeline([add.message(1, 2), add.message(3) | add.message(4), add.message(5)])

    # Then the inner pipeline should be flattened into the outer pipeline
    assert len(pipe) == 4
    assert pipe.messages[0].args == (1, 2)
    assert pipe.messages[1].args == (3,)
    assert pipe.messages[2].args == (4,)
    assert pipe.messages[3].args == (5,)
Example #7
def run_scheduler():
    scheduler = BlockingScheduler()
    scheduler.add_job(
        pipeline([
            dispatch_taking_screenshots.message(),
            write_games_screenshots_json_report.message(),
        ]).run,
        IntervalTrigger(seconds=settings.TASK_INTERVAL_SCREENSHOT),
        name="dispatch_taking_screenshots",
    )
    scheduler.add_job(
        delete_expired_screenshots_cache.send,
        IntervalTrigger(seconds=settings.TASK_INTERVAL_DELETE_CACHE),
        name="delete_expired_screenshots_cache",
    )
    try:
        logger.info("Start scheduler")
        scheduler.start()
    except KeyboardInterrupt:
        logger.info("Shutdown scheduler")
        scheduler.shutdown()
Example #8
#!/usr/bin/env python
# vim: set fileencoding=utf-8

import time
import dramatiq

from dramatiq.brokers.redis import RedisBroker

from test_worker_10 import test_worker_A, test_worker_B, test_worker_C, setup_broker_and_backend

setup_broker_and_backend()

for parameter in "XYZ":
    p = dramatiq.pipeline([
        test_worker_A.message(parameter),
        test_worker_B.message(),
        test_worker_C.message(),
        test_worker_A.message(),
        test_worker_B.message(),
        test_worker_C.message()
    ]).run()
    print(p.get_result(block=True, timeout=5000))
Example #9
#!/usr/bin/env python
# vim: set fileencoding=utf-8

import time
import dramatiq

from dramatiq.brokers.redis import RedisBroker

from test_worker_9 import test_worker_A, test_worker_B, test_worker_C, setup_broker_and_backend

setup_broker_and_backend()

p = dramatiq.pipeline([
    test_worker_A.message("!"),
    test_worker_B.message(),
    test_worker_C.message()
]).run()

print(p.get_result(block=True, timeout=5000))
Example #10
    def run(self, *, delay=None, dispatch_window=None):
        """Run the actors in this group over a dispatch window.

        Parameters:
          delay(int): The minimum amount of time, in milliseconds,
            each message in the group should be delayed by.
        """

        if self.completion_callbacks:
            from dramatiq.middleware.group_callbacks import GROUP_CALLBACK_BARRIER_TTL, GroupCallbacks

            rate_limiter_backend = None
            for middleware in self.broker.middleware:
                if isinstance(middleware, GroupCallbacks):
                    rate_limiter_backend = middleware.rate_limiter_backend
                    break
            else:
                raise RuntimeError(
                    "GroupCallbacks middleware not found! Did you forget "
                    "to set it up? It is required if you want to use "
                    "group callbacks.")

            # Generate a new completion uuid on every run so that if a
            # group is re-run, the barriers are all separate.
            # Re-using a barrier's name is an unsafe operation.
            completion_uuid = str(uuid4())
            completion_barrier = Barrier(rate_limiter_backend,
                                         completion_uuid,
                                         ttl=GROUP_CALLBACK_BARRIER_TTL)
            completion_barrier.create(len(self.children))

            children = []
            for child in self.children:
                if isinstance(child, group):
                    raise NotImplementedError

                elif isinstance(child, pipeline):
                    pipeline_children = child.messages[:]
                    pipeline_children[-1] = pipeline_children[-1].copy(
                        options={
                            "group_completion_uuid": completion_uuid,
                            "group_completion_callbacks": self.completion_callbacks,
                        })

                    children.append(
                        pipeline(pipeline_children, broker=child.broker))

                else:
                    children.append(
                        child.copy(
                            options={
                                "group_completion_uuid": completion_uuid,
                                "group_completion_callbacks": self.completion_callbacks,
                            }))
        else:
            children = self.children

        for child in children:
            if isinstance(child, (group, pipeline)):
                child.run(delay=delay)
            else:
                delay = random.randint(
                    0, dispatch_window) if dispatch_window else delay
                self.broker.enqueue(child, delay=delay)

        return self
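
For reference, a minimal usage sketch of the customized run() above. It assumes the patched group class from Example #10 is the one in scope (imported here from a hypothetical myapp.tasks module), plus a made-up notify actor and an already-configured broker; none of these names come from the listing itself.

import dramatiq

from myapp.tasks import group  # hypothetical module exposing the patched group class

@dramatiq.actor
def notify(user_id):
    print(f"notifying user {user_id}")

# Enqueue 100 messages, each delayed by a random 0-60s offset instead of all
# at once; dispatch_window is the keyword added by the customized run().
g = group(notify.message(user_id) for user_id in range(100))
g.run(dispatch_window=60_000)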