def test_socket_open_close(self):
    """Both socket_open and socket_close should publish their data_type."""
    for suffix in ('open', 'close'):
        event = 'socket_%s' % suffix
        getattr(self.relay, event)()
        received = json.loads(self._pull.recv())
        self.assertEqual(received['data_type'], event)
def _handle_recv_back(self, msg):
    """Handle a message coming back from an agent.

    Status-command results ('_STATUS', 'STOP', 'QUIT') update the
    controller state and may publish a 'run-finished' event; anything
    else is forwarded untouched to the front stream.
    """
    # let's remove the agent id and track the time it took
    agent_id = msg[0]
    msg = msg[1:]
    # grabbing the data to update the agents statuses if needed
    data = json.loads(msg[-1])
    if 'error' in data:
        result = data['error']
    else:
        result = data['result']
    if result.get('command') in ('_STATUS', 'STOP', 'QUIT'):
        # list() keeps this working on Python 3, where values() is a view
        statuses = list(result['status'].values())
        run_id = self.ctrl.update_status(agent_id, statuses)
        if run_id is not None:
            # if the tests are finished, publish this on the pubsub.
            self._publisher.send(json.dumps({'data_type': 'run-finished',
                                             'run_id': run_id}))
        return
    # other things are pass-through
    try:
        self._frontstream.send_multipart(msg)
    except Exception as e:  # was Py2-only "except Exception, e" syntax
        logger.error('Could not send to front')
        logger.error(msg)
        # we don't want to die on error. we just log it
        exc_type, exc_value, exc_traceback = sys.exc_info()
        exc = traceback.format_tb(exc_traceback)
        exc.insert(0, str(e))
        logger.error('\n'.join(exc))
def test_socket_open_close(self):
    """Both socket_open and socket_close should publish their data_type."""
    for suffix in ("open", "close"):
        event = "socket_%s" % suffix
        handler = getattr(self.relay, event)
        handler()
        received = json.loads(self._pull.recv())
        self.assertEqual(received["data_type"], event)
def _handle_recv_back(self, msg):
    """Handle a message coming back from an agent.

    Status-command results ('_STATUS', 'STOP', 'QUIT') update the
    controller state and may publish a 'run-finished' event; anything
    else is forwarded untouched to the front stream.
    """
    # let's remove the agent id and track the time it took
    agent_id = msg[0]
    msg = msg[1:]
    # grabbing the data to update the agents statuses if needed
    data = json.loads(msg[-1])
    if 'error' in data:
        result = data['error']
    else:
        result = data['result']
    if result.get('command') in ('_STATUS', 'STOP', 'QUIT'):
        # list() keeps this working on Python 3, where values() is a view
        statuses = list(result['status'].values())
        run_id = self.ctrl.update_status(agent_id, statuses)
        if run_id is not None:
            # if the tests are finished, publish this on the pubsub.
            self._publisher.send(json.dumps({'data_type': 'run-finished',
                                             'run_id': run_id}))
        return
    # other things are pass-through
    try:
        self._frontstream.send_multipart(msg)
    except Exception as e:  # was Py2-only "except Exception, e" syntax
        logger.error('Could not send to front')
        logger.error(msg)
        # we don't want to die on error. we just log it
        exc_type, exc_value, exc_traceback = sys.exc_info()
        exc = traceback.format_tb(exc_traceback)
        exc.insert(0, str(e))
        logger.error('\n'.join(exc))
def test_incr_counter(self):
    """incr_counter should publish the test name, status and agent id."""
    self.relay.incr_counter("test", (1, 1, 1, 1), "metric")
    expected = {"test": "test", "loads_status": [1, 1, 1, 1],
                "agent_id": None}
    received = json.loads(self._pull.recv())
    self.assertDictContainsSubset(expected, received)
def _reader():
    """Yield (record, raw_line) pairs, decompressing when requested.

    Uses the enclosing scope's *decompress* flag and *filename*.
    """
    if not decompress:
        # plain file: one JSON document per line
        with open(filename, 'rb') as source:
            for raw in source:
                yield json.loads(raw), raw
    else:
        # zlib-framed file: delegate parsing to read_zfile
        for record, raw in read_zfile(filename):
            yield record, raw
def _handle_recv(self, msg): # publishing all the data received from agents self._publisher.send(msg[0]) # saving the data locally data = json.loads(msg[0]) agent_id = str(data.get('agent_id')) self.ctrl.save_data(agent_id, data)
def get_data(self, run_id, data_type=None, groupby=False, start=None,
             size=None):
    """Yield the data entries stored for *run_id*.

    - data_type: when given, only yield entries whose 'data_type' matches.
    - start/size: slice of the raw data list (size is clamped).
    - groupby: yield aggregated entries carrying a 'count' field instead;
      start/size are not supported in that mode.
    """
    key = 'data:%s' % run_id
    total = self._redis.llen(key)  # renamed: don't shadow the builtin len()
    if total == 0:
        # bare return: raising StopIteration inside a generator is a
        # RuntimeError since PEP 479 (Python 3.7+)
        return
    if not groupby:
        if start is None:
            start = 0
        end = total if size is None else min(start + size, total)
        for index in range(start, end):
            data = json.loads(self._redis.lindex(key, index))
            if data_type is None or data_type == data.get('data_type'):
                yield data
    else:
        # XXX not sure how to batch this yet
        if start is not None or size is not None:
            raise NotImplementedError()
        bcounters = 'bcounters:%s' % run_id
        for hash_ in self._redis.smembers(bcounters):
            data = json.loads(
                self._redis.get('bvalue:%s:%s' % (run_id, hash_)))
            if data_type is not None and data_type != data.get('data_type'):
                continue
            counter = self._redis.get('bcount:%s:%s' % (run_id, hash_))
            data['count'] = int(counter)
            yield data
def test_incr_counter(self):
    """incr_counter should publish the test name, status and agent id."""
    self.relay.incr_counter('test', (1, 1, 1, 1), 'metric')
    expected = {'test': 'test',
                'loads_status': [1, 1, 1, 1],
                'agent_id': None}
    self.assertDictContainsSubset(expected, json.loads(self._pull.recv()))
def _process_result(self, msg): data = json.loads(msg[0]) data_type = data.pop("data_type") # run_id is only used when in distributed mode, which isn't the # case here, so we get rid of it. data.pop("run_id") if hasattr(self.test_result, data_type): method = getattr(self.test_result, data_type) method(**data)
def _process_result(self, msg): data = json.loads(msg[0]) data_type = data.pop('data_type') # run_id is only used when in distributed mode, which isn't the # case here, so we get rid of it. data.pop('run_id') if hasattr(self.test_result, data_type): method = getattr(self.test_result, data_type) method(**data)
def _handle_recv(self, msg): # publishing all the data received from agents self._publisher.send(msg[0]) data = json.loads(msg[0]) agent_id = str(data.get("agent_id")) hostname = data.get("hostname", "?") # telling the controller that the agent is alive self.ctrl.register_agent({"pid": agent_id, "hostname": hostname}) # saving the data locally self.ctrl.save_data(agent_id, data)
def get_data(self, run_id, data_type=None, groupby=False, start=None,
             size=None):
    """Yield the data entries stored for *run_id*.

    - data_type: when given, only yield entries whose 'data_type' matches.
    - start/size: slice of the raw data list (size is clamped).
    - groupby: yield aggregated entries carrying a 'count' field instead;
      start/size are not supported in that mode.
    """
    key = 'data:%s' % run_id
    total = self._redis.llen(key)  # renamed: don't shadow the builtin len()
    if total == 0:
        # bare return: raising StopIteration inside a generator is a
        # RuntimeError since PEP 479 (Python 3.7+)
        return
    if not groupby:
        if start is None:
            start = 0
        end = total if size is None else min(start + size, total)
        for index in range(start, end):
            data = json.loads(self._redis.lindex(key, index))
            if data_type is None or data_type == data.get('data_type'):
                yield data
    else:
        # XXX not sure how to batch this yet
        if start is not None or size is not None:
            raise NotImplementedError()
        bcounters = 'bcounters:%s' % run_id
        for hash_ in self._redis.smembers(bcounters):
            data = json.loads(self._redis.get('bvalue:%s:%s' % (run_id,
                                                                hash_)))
            if data_type is not None and data_type != data.get('data_type'):
                continue
            counter = self._redis.get('bcount:%s:%s' % (run_id, hash_))
            data['count'] = int(counter)
            yield data
def _handle_recv(self, msg): # publishing all the data received from agents self._publisher.send(msg[0]) data = json.loads(msg[0]) agent_id = str(data.get('agent_id')) hostname = data.get('hostname', '?') # telling the controller that the agent is alive self.ctrl.register_agent({'pid': agent_id, 'hostname': hostname}) # saving the data locally self.ctrl.save_data(agent_id, data)
def _handle_recv_back(self, msg):
    """Handle a result coming back from an agent.

    Broker-initiated command results ('_STATUS', 'STOP', 'QUIT') update
    the controller; client-initiated results are relayed back to the
    originating client on the front stream.
    """
    # let's remove the agent id and track the time it took
    agent_id = msg[0]
    # a 7-frame message carries the originating client id in frame 4
    if len(msg) == 7:
        client_id = msg[4]
    else:
        client_id = None
    # grabbing the data to update the agents statuses if needed
    try:
        data = json.loads(msg[-1])
    except ValueError:
        logger.error("Could not load the received message")
        logger.error(str(msg))
        return
    if 'error' in data:
        result = data['error']
    else:
        result = data['result']
    command = result.get('command')
    # results from commands sent by the broker
    if command in ('_STATUS', 'STOP', 'QUIT'):
        run_id = self.ctrl.update_status(agent_id, result)
        if run_id is not None:
            # if the tests are finished, publish this on the pubsub.
            self._publisher.send(json.dumps({'data_type': 'run-finished',
                                             'run_id': run_id}))
        return
    # other things are pass-through (asked by a client)
    if client_id is None:
        return
    try:
        self._frontstream.send_multipart([client_id, '', msg[-1]])
    except Exception as e:  # was Py2-only "except Exception, e" syntax
        logger.error('Could not send to front')
        logger.error(msg)
        # we don't want to die on error. we just log it
        exc_type, exc_value, exc_traceback = sys.exc_info()
        exc = traceback.format_tb(exc_traceback)
        exc.insert(0, str(e))
        logger.error('\n'.join(exc))
def test_add_error(self):
    """addError should relay the test name, status and exc_info triple."""
    exc = get_tb()
    _, _, tb = exc
    expected_tb = StringIO()
    traceback.print_tb(tb, file=expected_tb)
    expected_tb.seek(0)
    status = str(mock.sentinel.loads_status)
    self.relay.addError(mock.sentinel.test, exc, status)
    received = json.loads(self._pull.recv())
    self.assertEqual(received["loads_status"], status)
    self.assertEqual(received["test"], str(mock.sentinel.test))
    wanted = ["<type 'exceptions.Exception'>", "Error message",
              expected_tb.read()]
    self.assertEqual(received["exc_info"], wanted)
def _handle_recv_back(self, msg):
    """Handle a result coming back from an agent.

    Broker-initiated command results ("_STATUS", "STOP", "QUIT") update
    the controller; client-initiated results are relayed back to the
    originating client on the front stream.
    """
    # let's remove the agent id and track the time it took
    agent_id = msg[0]
    # a 7-frame message carries the originating client id in frame 4
    if len(msg) == 7:
        client_id = msg[4]
    else:
        client_id = None
    # grabbing the data to update the agents statuses if needed
    try:
        data = json.loads(msg[-1])
    except ValueError:
        logger.error("Could not load the received message")
        logger.error(str(msg))
        return
    if "error" in data:
        result = data["error"]
    else:
        result = data["result"]
    command = result.get("command")
    # results from commands sent by the broker
    if command in ("_STATUS", "STOP", "QUIT"):
        run_id = self.ctrl.update_status(agent_id, result)
        if run_id is not None:
            # if the tests are finished, publish this on the pubsub.
            self._publisher.send(json.dumps({"data_type": "run-finished",
                                             "run_id": run_id}))
        return
    # other things are pass-through (asked by a client)
    if client_id is None:
        return
    try:
        self._frontstream.send_multipart([client_id, "", msg[-1]])
    except Exception as e:  # was Py2-only "except Exception, e" syntax
        logger.error("Could not send to front")
        logger.error(msg)
        # we don't want to die on error. we just log it
        exc_type, exc_value, exc_traceback = sys.exc_info()
        exc = traceback.format_tb(exc_traceback)
        exc.insert(0, str(e))
        logger.error("\n".join(exc))
def get_errors(self, run_id, start=None, size=None):
    """Yield the stored error entries for *run_id*.

    start/size select a slice of the underlying list; size is clamped
    to the number of stored entries.
    """
    key = 'errors:%s' % run_id
    total = self._redis.llen(key)  # renamed: don't shadow the builtin len()
    if total == 0:
        # bare return: raising StopIteration inside a generator is a
        # RuntimeError since PEP 479 (Python 3.7+)
        return
    if start is None:
        start = 0
    end = total if size is None else min(start + size, total)
    for index in range(start, end):
        yield json.loads(self._redis.lindex(key, index))
def test_run_command(self):
    """run_command should reach the agents and echo a _STATUS message."""
    msg = ['somedata', '', 'target']
    data = {'agents': 1, 'args': {}, 'agent_id': '1'}
    self.ctrl.run_command('RUN', msg, data)
    self.ctrl.run_command('AGENT_STATUS', msg, data)
    # list() keeps this working on Python 3, where values() is a view
    runs = list(self.broker.msgs.values())[0][-1]
    self.assertEqual(runs['result']['agents'], ['agent1'])
    wanted = {"command": "_STATUS", "args": {}, "agents": 1,
              "agent_id": "1"}
    # sorted(...) replaces the Py2-only items()/sort() sequence
    wanted = sorted(wanted.items())
    # NOTE(review): assertTrue(len(x), 1) treats the 1 as a failure
    # message; assertEqual was probably intended -- kept to preserve
    # behavior.
    self.assertTrue(len(self.broker._backstream.msgs), 1)
    self.assertTrue(len(self.broker._backstream.msgs[0]), 1)
    got = json.loads(self.broker._backstream.msgs[0][-1])
    self.assertEqual(wanted, sorted(got.items()))
def test_run_command(self):
    """run_command should reach the agents and echo a STATUS message."""
    msg = ['somedata', '', 'target']
    data = {'agents': 1, 'args': {}, 'agent_id': '1'}
    self.ctrl.run_command('RUN', msg, data)
    self.ctrl.run_command('AGENT_STATUS', msg, data)
    # list() keeps this working on Python 3, where values() is a view
    runs = list(self.broker.msgs.values())[0][-1]
    self.assertEqual(runs['result']['agents'], ['agent1'])
    wanted = {"command": "STATUS", "args": {}, "agents": 1,
              "agent_id": "1"}
    # sorted(...) replaces the Py2-only items()/sort() sequence
    wanted = sorted(wanted.items())
    # NOTE(review): assertTrue(len(x), 1) treats the 1 as a failure
    # message; assertEqual was probably intended -- kept to preserve
    # behavior.
    self.assertTrue(len(self.broker._backstream.msgs), 1)
    self.assertTrue(len(self.broker._backstream.msgs[0]), 1)
    got = json.loads(self.broker._backstream.msgs[0][3])
    self.assertEqual(wanted, sorted(got.items()))
def test_add_error(self):
    """addError should relay the test name, status and exc_info triple."""
    exc = get_tb()
    _, _, tb = exc
    expected_tb = StringIO()
    traceback.print_tb(tb, file=expected_tb)
    expected_tb.seek(0)
    status = str(mock.sentinel.loads_status)
    self.relay.addError(mock.sentinel.test, exc, status)
    received = json.loads(self._pull.recv())
    self.assertEqual(received['loads_status'], status)
    self.assertEqual(received['test'], str(mock.sentinel.test))
    wanted = ["<type 'exceptions.Exception'>", 'Error message',
              expected_tb.read()]
    self.assertEqual(received['exc_info'], wanted)
def read_zfile(filename):
    """Yield (record, compressed_line) pairs from a zlib-framed file.

    The file is a sequence of zlib-compressed JSON records delimited by
    the ZLIB_START / ZLIB_END markers; partial records at the end of a
    read chunk are carried over to the next iteration.

    Raises ValueError when a framed chunk fails to decompress.
    """
    remaining = ''
    with open(filename, 'rb') as f:
        while True:
            data = remaining + f.read(1024)
            if not data:
                # bare return: raising StopIteration inside a generator
                # is a RuntimeError since PEP 479 (Python 3.7+)
                return
            size = len(data)
            pos = 0
            while pos < size:
                # grabbing a record
                rstart = data.find(ZLIB_START, pos)
                rend = data.find(ZLIB_END, rstart + 1)
                if rend == -1 or rstart == rend:
                    # not a full record
                    break
                line = data[rstart:rend]
                if not line:
                    break
                try:
                    line = zlib.decompress(line)
                except zlib.error:
                    raise ValueError(line)
                record = json.loads(line)
                yield record, line
                pos = rend + len(ZLIB_END)
            # keep any trailing partial record for the next read
            if pos < size:
                remaining = data[pos:]
            else:
                remaining = ''
def _execute(self, job, timeout=None):
    """Send *job* to the master and wait for its JSON-decoded reply.

    Raises TimeoutError when no reply arrives within *timeout* ms.
    """
    if not isinstance(job, Message):
        job = Message(**job)
    if timeout is None:
        timeout = self.timeout_max_overflow
    with self.lock:
        send(self.master, job.serialize())
        while True:
            try:
                socks = dict(self.poller.poll(timeout))
            except zmq.ZMQError as e:
                # retry only when the poll was interrupted by a signal
                if e.errno != errno.EINTR:
                    raise
            else:
                break
        if socks.get(self.master) == zmq.POLLIN:
            return json.loads(recv(self.master))
        raise TimeoutError(timeout)
def _handle_recv_front(self, msg, tentative=0):
    """front => back

    All commands starting with CTRL_ are sent to the controller.
    """
    target = msg[0]
    try:
        data = json.loads(msg[-1])
    except ValueError:
        exc = "Invalid JSON received."
        logger.exception(exc)
        self.send_json(target, {"error": exc})
        return
    cmd = data["command"]
    # a command handled by the controller
    if cmd.startswith("CTRL_"):
        cmd = cmd[len("CTRL_"):]
        logger.debug("calling %s" % cmd)
        try:
            res = self.ctrl.run_command(cmd, msg, data)
        except Exception as e:  # was Py2-only "except Exception, e" syntax
            logger.debug("Failed")
            exc_type, exc_value, exc_traceback = sys.exc_info()
            exc = traceback.format_tb(exc_traceback)
            exc.insert(0, str(e))
            self.send_json(target, {"error": exc})
        else:
            # sending back a synchronous result if needed.
            if res is not None:
                logger.debug("sync success %s" % str(res))
                self.send_json(target, res)
            else:
                logger.debug("async success")
def _handle_recv_front(self, msg, tentative=0):
    """front => back

    All commands starting with CTRL_ are sent to the controller.
    """
    target = msg[0]
    try:
        data = json.loads(msg[-1])
    except ValueError:
        exc = 'Invalid JSON received.'
        logger.exception(exc)
        self.send_json(target, {'error': exc})
        return
    cmd = data['command']
    # a command handled by the controller
    if cmd.startswith('CTRL_'):
        cmd = cmd[len('CTRL_'):]
        logger.debug('calling %s' % cmd)
        try:
            res = self.ctrl.run_command(cmd, msg, data)
        except Exception as e:  # was Py2-only "except Exception, e" syntax
            logger.debug('Failed')
            exc_type, exc_value, exc_traceback = sys.exc_info()
            exc = traceback.format_tb(exc_traceback)
            exc.insert(0, str(e))
            self.send_json(target, {'error': exc})
        else:
            # sending back a synchronous result if needed.
            if res is not None:
                logger.debug('sync success %s' % str(res))
                self.send_json(target, res)
            else:
                logger.debug('async success')
def _execute(self, job, timeout=None):
    """Send *job* to the master and wait for its JSON-decoded reply.

    Raises TimeoutError when no reply arrives within *timeout* ms.
    """
    if not isinstance(job, Message):
        job = Message(**job)
    if timeout is None:
        timeout = self.timeout_max_overflow
    with self.lock:
        send(self.master, job.serialize())
        while True:
            try:
                socks = dict(self.poller.poll(timeout))
            except zmq.ZMQError as e:
                # retry only when the poll was interrupted by a signal
                if e.errno != errno.EINTR:
                    raise
            else:
                break
        if socks.get(self.master) != zmq.POLLIN:
            raise TimeoutError(timeout)
        reply = recv(self.master)
        return json.loads(reply)
def get_metadata(self, run_id):
    """Return the stored metadata dict for *run_id*, or {} when absent."""
    raw = self._redis.get('metadata:%s' % run_id)
    return {} if raw is None else json.loads(raw)
def test_add_success(self):
    """addSuccess should relay the test name and loads_status."""
    status = str(mock.sentinel.loads_status)
    self.relay.addSuccess(mock.sentinel.test, status)
    received = json.loads(self._pull.recv())
    self.assertEqual(received['loads_status'], status)
    self.assertEqual(received['test'], str(mock.sentinel.test))
def test_stop_test(self):
    """stopTest should relay the test name and loads_status."""
    status = str(mock.sentinel.loads_status)
    self.relay.stopTest(mock.sentinel.test, status)
    received = json.loads(self._pull.recv())
    self.assertEqual(received['loads_status'], status)
    self.assertEqual(received['test'], str(mock.sentinel.test))
def test_stop_testrun(self):
    """stopTestRun should publish a stopTestRun event."""
    self.relay.stopTestRun()
    received = json.loads(self._pull.recv())
    self.assertEqual(received['data_type'], 'stopTestRun')
def test_add_hit(self):
    """add_hit should relay its keyword arguments verbatim."""
    hit = {"foo": "bar", "baz": "foobar"}
    self.relay.add_hit(**hit)
    received = json.loads(self._pull.recv())
    self.assertDictContainsSubset(hit, received)
def test_stop_testrun(self):
    """stopTestRun should publish a stopTestRun event."""
    self.relay.stopTestRun()
    received = json.loads(self._pull.recv())
    self.assertEqual(received["data_type"], "stopTestRun")
def load_from_string(cls, data):
    """Build an instance from a JSON-encoded string of keyword args."""
    kwargs = json.loads(data)
    return cls(**kwargs)
def _handle_reg(self, msg): if msg[0] == 'REGISTER': self.ctrl.register_agent(json.loads(msg[1])) elif msg[0] == 'UNREGISTER': self.ctrl.unregister_agent(msg[1], 'asked via UNREGISTER')
def test_add_success(self):
    """addSuccess should relay the test name and loads_status."""
    status = str(mock.sentinel.loads_status)
    self.relay.addSuccess(mock.sentinel.test, status)
    received = json.loads(self._pull.recv())
    self.assertEqual(received["loads_status"], status)
    self.assertEqual(received["test"], str(mock.sentinel.test))
def test_add_hit(self):
    """add_hit should relay its keyword arguments verbatim."""
    hit = {'foo': 'bar', 'baz': 'foobar'}
    self.relay.add_hit(**hit)
    received = json.loads(self._pull.recv())
    self.assertDictContainsSubset(hit, received)
def test_socket_message_received(self):
    """socket_message should publish the received payload size."""
    self.relay.socket_message(123)
    received = json.loads(self._pull.recv())
    self.assertEqual(received['size'], 123)
def test_brokerdb(self):
    """End-to-end check of the DB backend: add, count, slice, filter, group."""
    # an unknown run id yields no data, and the backend answers pings
    self.assertEqual(list(self.db.get_data('swwqqsw')), [])
    self.assertTrue(self.db.ping())

    def add_data():
        # each ONE_RUN line is stored twice: once under run '1' and
        # once under run '2'
        for line in ONE_RUN:
            data = dict(line)
            data['run_id'] = '1'
            self.db.add(data)
            data['run_id'] = '2'
            self.db.add(data)

    # schedule the writes twice, then let the loop run for half a second
    self.loop.add_callback(add_data)
    self.loop.add_callback(add_data)
    self.loop.add_timeout(time.time() + .5, self.loop.stop)
    self.loop.start()
    # let's check if we got the data in the file
    data = [
        json.loads(self._redis.lindex('data:1', i))
        for i in range(self._redis.llen('data:1'))
    ]
    data.sort()
    data2 = [
        json.loads(self._redis.lindex('data:2', i))
        for i in range(self._redis.llen('data:2'))
    ]
    data2.sort()
    self.assertEqual(len(data), 14)
    self.assertEqual(len(data2), 14)
    counts = self.db.get_counts('1')
    # every event type was added twice (add_data ran twice per run)
    for type_ in ('addSuccess', 'stopTestRun', 'stopTest', 'startTest',
                  'startTestRun', 'add_hit'):
        self.assertEqual(dict(counts)[type_], 2)
    # we got 12 lines, let's try batching
    batch = list(self.db.get_data('1', size=2))
    self.assertEqual(len(batch), 2)
    batch = list(self.db.get_data('1', start=2))
    self.assertEqual(len(batch), 12)
    batch = list(self.db.get_data('1', start=2, size=5))
    self.assertEqual(len(batch), 5)
    data3 = list(self.db.get_data('1'))
    data3.sort()
    self.assertEqual(data3, data)
    # filtered
    data3 = list(self.db.get_data('1', data_type='add_hit'))
    self.assertEqual(len(data3), 2)
    # group by
    res = list(self.db.get_data('1', groupby=True))
    self.assertEqual(len(res), 7)
    self.assertEqual(res[0]['count'], 2)
    res = list(self.db.get_data('1', data_type='add_hit', groupby=True))
    self.assertEqual(res[0]['count'], 2)
    self.assertTrue('1' in self.db.get_runs())
    self.assertTrue('2' in self.db.get_runs())
    # len(data) < asked size: the slice is clamped to what is stored
    batch = list(self.db.get_data('1', start=2, size=5000))
    self.assertEqual(len(batch), 12)
def _handle_reg(self, msg): if msg[0] == "REGISTER": self.ctrl.register_agent(json.loads(msg[1])) elif msg[0] == "UNREGISTER": self.ctrl.unregister_agent(msg[1], "asked via UNREGISTER")
def test_brokerdb(self):
    """End-to-end check of the DB backend: add, count, slice, filter, group."""
    # an unknown run id yields no data, and the backend answers pings
    self.assertEqual(list(self.db.get_data('swwqqsw')), [])
    self.assertTrue(self.db.ping())

    def add_data():
        # each ONE_RUN line is stored twice: once under run '1' and
        # once under run '2'
        for line in ONE_RUN:
            data = dict(line)
            data['run_id'] = '1'
            self.db.add(data)
            data['run_id'] = '2'
            self.db.add(data)

    # schedule the writes twice, then let the loop run for half a second
    self.loop.add_callback(add_data)
    self.loop.add_callback(add_data)
    self.loop.add_timeout(time.time() + .5, self.loop.stop)
    self.loop.start()
    # let's check if we got the data in the file
    data = [json.loads(self._redis.lindex('data:1', i))
            for i in range(self._redis.llen('data:1'))]
    data.sort()
    data2 = [json.loads(self._redis.lindex('data:2', i))
             for i in range(self._redis.llen('data:2'))]
    data2.sort()
    self.assertEqual(len(data), 14)
    self.assertEqual(len(data2), 14)
    counts = self.db.get_counts('1')
    # every event type was added twice (add_data ran twice per run)
    for type_ in ('addSuccess', 'stopTestRun', 'stopTest', 'startTest',
                  'startTestRun', 'add_hit'):
        self.assertEqual(dict(counts)[type_], 2)
    # we got 12 lines, let's try batching
    batch = list(self.db.get_data('1', size=2))
    self.assertEqual(len(batch), 2)
    batch = list(self.db.get_data('1', start=2))
    self.assertEqual(len(batch), 12)
    batch = list(self.db.get_data('1', start=2, size=5))
    self.assertEqual(len(batch), 5)
    data3 = list(self.db.get_data('1'))
    data3.sort()
    self.assertEqual(data3, data)
    # filtered
    data3 = list(self.db.get_data('1', data_type='add_hit'))
    self.assertEqual(len(data3), 2)
    # group by
    res = list(self.db.get_data('1', groupby=True))
    self.assertEqual(len(res), 7)
    self.assertEqual(res[0]['count'], 2)
    res = list(self.db.get_data('1', data_type='add_hit', groupby=True))
    self.assertEqual(res[0]['count'], 2)
    self.assertTrue('1' in self.db.get_runs())
    self.assertTrue('2' in self.db.get_runs())
    # len(data) < asked size: the slice is clamped to what is stored
    batch = list(self.db.get_data('1', start=2, size=5000))
    self.assertEqual(len(batch), 12)
def test_socket_message_received(self):
    """socket_message should publish the received payload size."""
    self.relay.socket_message(123)
    received = json.loads(self._pull.recv())
    self.assertEqual(received["size"], 123)
def test_stop_test(self):
    """stopTest should relay the test name and loads_status."""
    status = str(mock.sentinel.loads_status)
    self.relay.stopTest(mock.sentinel.test, status)
    received = json.loads(self._pull.recv())
    self.assertEqual(received["loads_status"], status)
    self.assertEqual(received["test"], str(mock.sentinel.test))