def create_new_job(payload, python_main=MAIN, drop_collections=True):
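    """Connect to MongoDB and save a new test Job, optionally dropping the Job and Worker collections first."""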
    configure_loguru_stdout("DEBUG")
    connect_to_mongodb(host=config.MONGODB_HOST, db=config.MONGODB_DB)
    logger.info("Connected to mongodb")
    time.sleep(2)

    if drop_collections:
        logger.info("Dropping Job and Worker collections...")
        Job.drop_collection()
        Worker.drop_collection()

    logger.debug("Creating new job...")
    payload = dict(payload)
    payload.update({
        "message_broker": config.TRANSPORT,
    })

    job = Job(
        type="test",
        payload=payload,
        runnable={
            "image": "akadaner/test-worker",
            "python_main": python_main,
        },
        running_timeout=60,
    )
    job.save()
Example #2
def test_sample():
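    """Read the sample ricotta boiling plan and print the generated schedule."""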
    from utils_ak.loguru import configure_loguru_stdout

    configure_loguru_stdout("INFO")
    boiling_plan_df = read_boiling_plan(
        config.abs_path("app/data/inputs/ricotta/sample_boiling_plan.xlsx"))
    print(make_schedule(boiling_plan_df))
Example #3
def run():
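    """Start a monitor_out listener, then launch the monitor and a worker in separate processes."""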
    configure_loguru_stdout("DEBUG")
    run_listener_async("monitor_out", message_broker=config.TRANSPORT)
    time.sleep(1)

    multiprocessing.Process(target=run_monitor).start()
    time.sleep(3)
    multiprocessing.Process(target=run_worker).start()
Example #4
def _test_microservice_worker(worker_cls, payload, run_listener=True):
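    """Run a worker class with the given payload, optionally starting a monitor_in listener first."""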
    configure_loguru_stdout("DEBUG")
    if run_listener:
        run_listener_async("monitor_in",
                           message_broker=payload["message_broker"])
    time.sleep(2)
    run_worker(worker_cls, {"worker_id": "worker_id", "payload": payload})
    logger.info("Finished")
Example #5
def test():
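    """Smoke-test the @log_function decorator on a trivial function."""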
    configure_loguru_stdout("DEBUG")

    @log_function()
    def f(*args, **kwargs):
        logger.info("Inside the function")
        return 1

    f("a", b=1)
Example #6
def run_worker():
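    """Run a SampleWorker in batch mode over the configured transport."""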
    configure_loguru_stdout("DEBUG")
    logger.info("Running SampleWorker instance...")
    worker = SampleWorker(
        "WorkerId",
        {
            "type": "batch",
            "message_broker": config.TRANSPORT
        },
    )
    worker.run()
Example #7
def test_kubernetes_controller():
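    """Start the example deployment on a KubernetesController, fetch its logs, then stop it."""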
    import anyconfig
    import time
    from utils_ak.loguru import logger, configure_loguru_stdout

    configure_loguru_stdout("DEBUG")

    deployment = anyconfig.load("../../example/deployment.yml")
    ctrl = KubernetesController()
    ctrl.start(deployment)
    time.sleep(10)
    ctrl.log(deployment["id"])
    ctrl.stop(deployment["id"])
Example #8
def run_job_orchestrator(payload=None):
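    """Run a JobOrchestrator with a ProcessController; optionally create a job from the given payload."""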
    configure_loguru_stdout("DEBUG")
    connect_to_mongodb(host=config.MONGODB_HOST, db=config.MONGODB_DB)
    logger.info("Connected to mongodb")
    controller = ProcessController()

    run_listener_async("job_orchestrator", message_broker=config.TRANSPORT)
    logger.info("Running job orchestrator...")
    job_orchestrator = JobOrchestrator(controller, config.TRANSPORT)
    multiprocessing.Process(target=run_monitor).start()
    if payload:
        multiprocessing.Process(target=create_new_job,
                                args=(payload, )).start()
    job_orchestrator.run()
Example #9
def test_dummy_push():
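    """Push two blocks into a root ParallelepipedBlock, validating disjointness along the parent axis."""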
    from utils_ak.loguru import configure_loguru_stdout

    configure_loguru_stdout()

    def brute_validator(parent, block):
        for c in parent.children[:-1]:
            validate_disjoint_by_axis(c, block, axis=parent.props["axis"])

    logger.debug("Dummy push test")
    root = ParallelepipedBlock("root", n_dims=1, x=[2], axis=0)
    a = ParallelepipedBlock("a", n_dims=1, size=[4], axis=0)
    b = ParallelepipedBlock("b", n_dims=1, size=[3], axis=0)
    dummy_push(root, a, brute_validator)
    dummy_push(root, b, brute_validator, start_from=0)
    logger.debug("Root", root=root)
Example #10
def _test_controller(controller_cls):
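    """Start the hello_world example deployment on the given controller, fetch its logs, and stop it."""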
    configure_loguru_stdout("DEBUG")

    deployment_fn = os.path.join(
        os.path.dirname(os.path.dirname(__file__)),
        "examples/hello_world/deployment.yml",
    )
    deployment = read_deployment(deployment_fn)
    ctrl = controller_cls()
    logger.info("Starting")
    ctrl.start(deployment)
    time.sleep(3)
    logger.info("Logs")
    ctrl.log(deployment["id"])
    logger.info("Stopping")
    ctrl.stop(deployment["id"])
Example #11
def test1():
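    """Read a boiling plan, keep only the salt line, and build parallel dynamic boilings per group."""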
    configure_loguru_stdout("DEBUG")

    boiling_plan_df = read_boiling_plan(
        r"C:\Users\Mi\Desktop\master\code\git\2020.10-umalat\umalat\app\data\inputs\2021-02-19 План по варкам.xlsx"
    )
    boiling_plan_df = boiling_plan_df[boiling_plan_df["boiling"].apply(
        lambda b: b.line.name == LineName.SALT)]

    for _, grp in boiling_plan_df.groupby("group_id"):
        grp["packing_speed"] = grp["sku"].apply(lambda sku: sku.packing_speed)
        print(grp[["sku_name", "kg"]])
        boilings = make_boilings_parallel_dynamic(grp)
        for boiling in boilings:
            print(boiling)
            mp = boiling["melting_and_packing"]
            mp.props.update(x=(0, 0))
Example #12
def run_pong():
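    """Run a Pong microservice over ZMQ ping/pong endpoints."""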
    configure_loguru_stdout()
    Pong(message_broker=(
        "zmq",
        {
            "endpoints": {
                "ping": {
                    "type": "sub",
                    "endpoint": endpoint("localhost", 6554)
                },
                "pong": {
                    "type": "sub",
                    "endpoint": endpoint("localhost", 6555)
                },
            }
        },
    )).run()
Example #13
def _test_microservice_worker_deployment(
    payload,
    python_main,
    controller,
    run_listener=True,
):
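    """Create a worker deployment, optionally start a monitor_in listener, then run the deployment briefly and stop it."""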
    configure_loguru_stdout("DEBUG")

    deployment = create_deployment("worker",
                                   "<deployment_id>",
                                   payload,
                                   python_main=python_main)

    if run_listener:
        run_listener_async("monitor_in",
                           message_broker=payload["message_broker"])

    controller.stop(deployment["id"])
    controller.start(deployment)
    time.sleep(5)
    controller.stop(deployment["id"])
Example #14
def run_ping():
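    """Run a Ping microservice over Kafka with TRACE logging."""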
    configure_loguru_stdout("TRACE")
    ping = Ping(message_broker=("kafka", {}))

    ping.run()
Example #15
from utils_ak.zmq import endpoint
from utils_ak.simple_microservice import SimpleMicroservice, run_listener_async
from utils_ak.loguru import configure_loguru_stdout


class Publisher(SimpleMicroservice):
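    """Microservice that periodically publishes an empty message to the "collection" topic."""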
    def __init__(self, *args, **kwargs):
        super().__init__("Publisher", *args, **kwargs)
        self.add_timer(self.timer_function, 2)

    def timer_function(self):
        self.publish("collection", "")


if __name__ == "__main__":
    configure_loguru_stdout()
    run_listener_async(
        "collection",
        message_broker=(
            "zmq",
            {
                "endpoints": {
                    "collection": {
                        "type": "sub",
                        "endpoint": endpoint("localhost", 6554),
                    }
                }
            },
        ),
    )
    # NOTE: the original snippet is truncated here; the completion below is an
    # assumed mirror of the listener config above, with a "pub" endpoint type.
    Publisher(message_broker=(
        "zmq",
        {
            "endpoints": {
                "collection": {
                    "type": "pub",
                    "endpoint": endpoint("localhost", 6554),
                }
            }
        },
    )).run()
Example #16
def run_listener(collection, topic="", *args, **kwargs):
    """ Run listener process for current collection. """
    configure_loguru_stdout("DEBUG")
    Listener(collection, topic, *args, **kwargs).run()
Example #17
def run_monitor():
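    """Run a Monitor microservice over the configured transport."""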
    configure_loguru_stdout("DEBUG")
    logger.info("Running monitor...")
    monitor = Monitor(config.TRANSPORT)
    monitor.microservice.run()
Example #18
def run_pong():
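    """Run a Pong microservice over Kafka with TRACE logging."""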
    configure_loguru_stdout("TRACE")

    Pong(message_broker=("kafka", {})).run()