Example #1
def test_cron_with_message_larger_then_max():
    cj = CronItem(command=''.join(
        random.choice(string.ascii_uppercase + string.digits)
        for _ in range(6000)))
    packets = list(UdpSerializer.dump(cj))
    assert len(packets) > 1
    assert cj == UdpSerializer.load(packets)
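The command here is 6000 characters, larger than the serializer's maximum packet size, so UdpSerializer.dump has to split the message across several UDP packets and UdpSerializer.load has to reassemble them. The serializer's internals are not shown in these examples; the snippet below is only a minimal sketch of that chunk-and-reassemble idea, with MAX_PAYLOAD, chunk and reassemble invented for illustration.

MAX_PAYLOAD = 500  # hypothetical per-datagram payload budget, not the project's real limit

def chunk(data, size=MAX_PAYLOAD):
    # split a serialized message into datagram-sized parts
    return [data[i:i + size] for i in range(0, len(data), size)]

def reassemble(parts):
    # rebuild the original byte string from its parts
    return b''.join(parts)

payload = b'x' * 6000
parts = chunk(payload)
assert len(parts) > 1                # mirrors the assertion in the test above
assert reassemble(parts) == payload  # the round trip is lossless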
Example #2
def test_manual_run_is_executed_exactly_once():
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)

    command = "echo 'hello world'"
    cron_job = CronItem(command=command)
    cron_job.assigned_to = get_ip()

    storage = Storage()

    tab = CronTab(tab="""* * * * * command""")
    processor = Processor(12345, storage, cron=tab)

    for packet in UdpSerializer.dump(cron_job):
        processor.queue.put_nowait(packet)

    for packet in UdpSerializer.dump(Run(cron_job)):
        processor.queue.put_nowait(packet)

    loop.run_until_complete(processor.process())

    assert 1 == len(storage.cluster_jobs)

    assert command == storage.cluster_jobs[0].command

    assert 1 == len(storage.cluster_jobs[0].log)
    log_line = storage.cluster_jobs[0].log[0]
    assert 'exit code: 0' in log_line
    assert 'hello world' in log_line

    assert processor.queue.empty()

    loop.close()
Example #3
def test_add_same_job_twice_adds_cron_once():
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)

    command = "echo 'hello world'"
    cron_job = CronItem(command=command)
    cron_job.assigned_to = get_ip()

    storage = Storage()

    tab = CronTab(tab="""* * * * * command""")
    processor = Processor(12345, storage, cron=tab)

    for packet in UdpSerializer.dump(cron_job):
        processor.queue.put_nowait(packet)

    for packet in UdpSerializer.dump(cron_job):
        processor.queue.put_nowait(packet)

    loop.run_until_complete(processor.process())

    assert 1 == len(storage.cluster_jobs)

    assert command == storage.cluster_jobs[0].command

    assert next(tab.find_command(command), None) is not None
    assert 1 == len(list(tab.find_command(command)))

    loop.close()
Example #4
def timed_broadcast():
    """
    periodically broadcast system status and known jobs
    """
    while running:
        broadcast(
            args.udp_communication_port,
            UdpSerializer.dump(Status(get_ip(), get_load()), hash_key))
        for job in storage.cluster_jobs:
            if job.assigned_to == get_ip():
                job.pid = check_process(job.command)
            for packet in UdpSerializer.dump(job, hash_key):
                client(args.udp_communication_port, packet)
        time.sleep(args.broadcast_interval)
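timed_broadcast never returns on its own; it loops on the module-level running flag and closes over args, storage and hash_key from the surrounding scope. A minimal sketch of how it might be started from the entry point, assuming those names exist (the daemon-thread setup here is an assumption, not code from the project):

import threading

broadcast_thread = threading.Thread(target=timed_broadcast, daemon=True)
broadcast_thread.start()
# ... on shutdown, let the loop exit and wait for the thread:
# running = False
# broadcast_thread.join()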
Example #5
def test_store_retrieve_sorts_correctly():
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)

    storage = Storage()
    processor = Processor(12345,
                          storage,
                          cron=CronTab(tab="""* * * * * command"""))

    ip = '127.0.0.1'

    messages = []
    for i in range(10):
        messages.append(Status(ip, 10))

    for message in messages:
        packets = UdpSerializer.dump(message)
        for packet in packets:
            processor.queue.put_nowait(packet)

    while not processor.queue.empty():
        loop.run_until_complete(asyncio.gather(processor.process()))

    assert messages[-1].time == storage.node_state(ip).time

    loop.close()
Example #6
def test_store_cron_job_message_to_disk():
    tmp_dir = mkdtemp()
    ser = path.join(tmp_dir, 'cluster_jobs.json')

    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)

    storage = Storage(path_prefix=tmp_dir)

    processor = Processor(12345,
                          storage,
                          cron=CronTab(tab="""* * * * * command"""))

    message = CronItem(command="echo 'hello world'")
    message.append_log("test log message")

    for packet in UdpSerializer.dump(message):
        processor.queue.put_nowait(packet)

    loop.run_until_complete(asyncio.gather(processor.process()))

    loop.run_until_complete(asyncio.gather(storage.save()))

    assert processor.queue.qsize() == 0
    assert len(storage.cluster_jobs) == 1
    assert message == storage.cluster_jobs[0]
    assert exists(ser)

    loop.close()

    shutil.rmtree(tmp_dir)
Example #7
def timed_schedule():
    """
    periodically check if cluster needs re-balancing
    """
    while running:
        time.sleep(23)
        if not scheduler.check_cluster_state():
            logger.info("re-balancing cluster")
            jobs = storage.cluster_jobs.copy()
            for packet in UdpSerializer.dump(
                    ReBalance(timestamp=datetime.now()), hash_key):
                client(args.udp_communication_port, packet)
            time.sleep(5)
            for job in jobs:
                for packet in UdpSerializer.dump(job, hash_key):
                    client(args.udp_communication_port, packet)
Example #8
    async def import_data(self, request):
        data = await request.post()

        if 'payload' not in data:
            return web.Response(status=500, text='no payload found')

        self.logger.debug("received import request {0}".format(
            data['payload']))

        try:
            imports = json.loads(data['payload'])
            for line in imports:
                if 'pattern' in line and 'command' in line and 'enabled' in line:
                    cron_item = CronItem(command=line['command'])
                    cron_item.set_all(line['pattern'])
                    cron_item.enable(line['enabled'])
                    self.logger.debug(
                        "received new job from import {0}, broadcasting it.".
                        format(cron_item))
                    broadcast(self.udp_port,
                              UdpSerializer.dump(cron_item, self.hash_key))
                else:
                    self.logger.error(
                        "import element invalid: {0}".format(line))
            return web.HTTPOk()
        except ValueError as e:
            self.logger.error(e)
            return web.HTTPClientError(text='invalid json received')
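import_data expects the payload form field to hold a JSON array whose entries each carry pattern, command and enabled keys; entries missing any of those keys are logged as invalid. A hedged example of a payload that would pass the checks above (the values and the commented-out client call are illustrative, and the route name is an assumption):

import json

payload = json.dumps([
    {"pattern": "*/5 * * * *", "command": "echo 'hello world'", "enabled": True},
    {"pattern": "0 3 * * *", "command": "/usr/local/bin/backup.sh", "enabled": False},
])
# POSTed as the 'payload' form field, e.g. with requests:
# requests.post("http://<host>:<port>/import", data={"payload": payload})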
Example #9
    async def add_job(self, request):
        data = await request.post()

        self.logger.debug("received add request {0}".format(data))

        if 'command' not in data or \
                'minute' not in data or \
                'hour' not in data or \
                'dom' not in data or \
                'month' not in data or \
                'dow' not in data:
            return web.Response(
                status=500, text='not all mandatory fields submitted via form')

        cron_item = self.generate_cron_item(data)

        if 'disabled' in data:
            cron_item.enable(False)

        if cron_item in self.storage.cluster_jobs:
            raise web.HTTPConflict(text='job already exists')

        self.logger.debug("broadcasting add result")

        broadcast(self.udp_port, UdpSerializer.dump(cron_item, self.hash_key))

        raise web.HTTPCreated()
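add_job rejects the request unless command, minute, hour, dom, month and dow are all present in the form data, and an optional disabled field creates the job in a disabled state. A sketch of a form a client might submit (field values and the commented-out request are illustrative; the route name is an assumption):

form = {
    "command": "echo 'hello world'",
    "minute": "*/5",
    "hour": "*",
    "dom": "*",
    "month": "*",
    "dow": "*",
    # add "disabled": "on" to create the job in a disabled state
}
# requests.post("http://<host>:<port>/add_job", data=form)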
Example #10
    async def toggle_job(self, request):
        data = await request.post()

        self.logger.debug("received toggle request {0}".format(data))

        if 'command' not in data or \
                'minute' not in data or \
                'hour' not in data or \
                'dom' not in data or \
                'month' not in data or \
                'dow' not in data:
            return web.Response(status=500,
                                text='not all mandatory fields submitted')

        cron_item = self.generate_cron_item(data)

        if cron_item not in self.storage.cluster_jobs:
            raise web.HTTPConflict(text='job not found on cluster')

        self.logger.debug("broadcasting run result")

        broadcast(self.udp_port,
                  UdpSerializer.dump(Toggle(cron_item), self.hash_key))

        raise web.HTTPAccepted()
Example #11
    async def re_balance(self, request):
        self.logger.debug("rebalance request received")

        self.scheduler.re_balance()

        jobs = self.storage.cluster_jobs.copy()

        broadcast(
            self.udp_port,
            UdpSerializer.dump(ReBalance(timestamp=datetime.now()),
                               self.hash_key))

        time.sleep(5)
        for job in jobs:
            broadcast(self.udp_port, UdpSerializer.dump(job, self.hash_key))

        raise web.HTTPAccepted()
Example #12
    async def run(self, run, uuid):
        self.logger.debug("got full run in buffer {0}".format(run.job))
        # only the node that owns the job executes it
        job = next(
            iter([j for j in self.storage.cluster_jobs if j == run.job]), None)
        if job and job.assigned_to == get_ip():
            self.logger.info("am owner for job {0}".format(job))
            run.timestamp = datetime.now()
            process = subprocess.Popen(run.job.command,
                                       stdout=subprocess.PIPE,
                                       stderr=subprocess.PIPE,
                                       shell=True)
            self.logger.info(
                "{0} has been defined, going to execute".format(job.command))
            std_out, std_err = process.communicate()
            exit_code = process.wait()
            if std_err:
                self.logger.warning("error during execution of {0}: {1}".format(
                    run.job.command, std_err))
            self.logger.info("output of {0} with code {1}: {2}".format(
                job.command, exit_code, std_out))
            # record the result in the job log and broadcast the updated job
            job.append_log(
                "{0:%b %d %H:%M:%S} localhost CRON[{1}] exit code: {2}, out: {3}, err: {4}"
                .format(datetime.now(), process.pid, exit_code, std_out, std_err))
            broadcast(self.udp_port, UdpSerializer.dump(job, self.hash_key))
            self.clean_buffer(uuid)
Example #13
    async def process(self):
        """
        processor for our queue
        """
        data = await self.queue.get()
        logging.debug("got {0} on processor queue".format(data))
        packet = Packet.decode(data)
        if packet:
            self._buffer.append(packet)
            # group buffered packets by message uuid and try to reassemble them
            packet_groups = group(self._buffer)
            for uuid in packet_groups.keys():
                self.logger.debug("identifying packet group for {0}".format(uuid))
                obj = UdpSerializer.load(packet_groups[uuid], self.hash_key)
                if obj:
                    self.logger.debug("got object {0} from {1}".format(obj, uuid))
                    # dispatch on the type of the reassembled message
                    if isinstance(obj, Status):
                        self.update_status(obj)
                        self.clean_buffer(uuid)
                    elif isinstance(obj, ReBalance):
                        self.logger.info("re-balance received")
                        self.storage.cluster_jobs.clear()
                        self.cron.remove_all()
                        self.cron.write()
                        self._buffer.clear()
                    elif isinstance(obj, CronItem):
                        if obj.remove:
                            self.remove_job(obj)
                        else:
                            self.add_job(obj)
                        self.clean_buffer(uuid)
                    elif isinstance(obj, Run):
                        await self.run(obj, uuid)
                    elif isinstance(obj, Kill):
                        self.kill(obj)
                        self.clean_buffer(uuid)
                    elif isinstance(obj, Toggle):
                        self.toggle_job(obj)
                        self.clean_buffer(uuid)
        self.storage.prune()
        self.queue.task_done()
        # keep draining until the queue is empty
        if not self.queue.empty():
            await self.process()
Example #14
def test_store_status_message():
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)

    storage = Storage()
    processor = Processor(12345,
                          storage,
                          cron=CronTab(tab="""* * * * * command"""))

    ip = '127.0.0.1'

    message = Status(ip, 10)

    packets = UdpSerializer.dump(message)
    for packet in packets:
        processor.queue.put_nowait(packet)

    loop.run_until_complete(processor.process())

    assert processor.queue.qsize() == 0
    assert message == storage.node_state(ip)

    loop.close()
Example #15
def test_cron_job_message_dumps_loads():
    cj = CronItem(command="echo 'hello world'")
    packets = list(UdpSerializer.dump(cj))
    assert cj == UdpSerializer.load(packets)
Example #16
def test_status_message_dumps_loads():
    sm = Status('127.0.0.1', 0)
    packets = list(UdpSerializer.dump(sm))
    assert len(packets) == 1
    assert sm == UdpSerializer.load(packets)