def test_check_system_load(self):
    """A job is refused with SystemOverloadError when a load threshold cannot be met."""
    # Impossible memory requirement: 100 GB must be free.
    desc = {
        'func': 'test_job_normal.run',
        'ident': self.get_random_ident(),
        'check_load': {
            'mem_low_threshold': 100 * 1024 ** 3,
            'cpu_low_threshold': 0,
        },
        'jobs_dir': 'pykit/wsjobd/test/test_jobs',
    }
    self.ws.send(utfjson.dump(desc))
    reply = utfjson.load(self.ws.recv())
    self.assertEqual('SystemOverloadError', reply['err'])

    # Impossible cpu requirement: more than 100% idle.
    desc = {
        'func': 'test_job_normal.run',
        'ident': self.get_random_ident(),
        'check_load': {
            'cpu_low_threshold': 100.1,
            'mem_low_threshold': 0,
        },
        'jobs_dir': 'pykit/wsjobd/test/test_jobs',
    }
    client = self._create_client()
    client.send(utfjson.dump(desc))
    reply = utfjson.load(client.recv())
    client.close()
    self.assertEqual('SystemOverloadError', reply['err'])
def test_decode(self):
    """Values dumped by utfjson survive an etcd set/get round trip byte-for-byte."""
    cases = (
        ('key1', utfjson.dump(u'我', encoding=None), '"\\u6211"'),
        # when save '"\xb6\xd4"' with etcd but the etcd cannot
        # convert them, so the default '\ufffd\ufffd' was saved.
        # when get it from etcd, '\ufffd\ufffd' was converted into
        # '"\xef\xbf\xbd\xef\xbf\xbd"'.
        ('key2', utfjson.dump(u'对', encoding='gbk'), '"\xef\xbf\xbd\xef\xbf\xbd"'),
        ('key3', utfjson.dump(u'我', encoding='utf-8'), '"\xe6\x88\x91"'),
        ('key4', utfjson.dump(u'我'), '"\xe6\x88\x91"'),
        ('key5', utfjson.dump('我'), '"\xe6\x88\x91"'),
        ('key6', utfjson.dump({"我": "我"}), '{"\xe6\x88\x91": "\xe6\x88\x91"}'),
        ('key7', utfjson.dump({"我": u"我"}), '{"\xe6\x88\x91": "\xe6\x88\x91"}'),
        ('key8', utfjson.dump({u"我": "我"}), '{"\xe6\x88\x91": "\xe6\x88\x91"}'),
        ('key9', utfjson.dump({u"我": u"我"}), '{"\xe6\x88\x91": "\xe6\x88\x91"}'),
        ('key10', utfjson.dump((u"我", )), '["\xe6\x88\x91"]'),
    )

    cli = etcd.Client(host=HOSTS)
    for key, val, expected in cases:
        cli.set(key, val)
        self.assertEqual(expected, cli.get(key).value)
def test_check_system_load(self):
    """Both memory and cpu thresholds can each cause a SystemOverloadError refusal."""
    # mem_low_threshold of 100 GB cannot be satisfied.
    overload_mem = {
        'func': 'test_job_normal.run',
        'ident': self.get_random_ident(),
        'check_load': {
            'mem_low_threshold': 100 * 1024 ** 3,
            'cpu_low_threshold': 0,
        },
        'jobs_dir': 'pykit/wsjobd/test/test_jobs',
    }
    self.ws.send(utfjson.dump(overload_mem))
    self.assertEqual('SystemOverloadError',
                     utfjson.load(self.ws.recv())['err'])

    # cpu_low_threshold above 100 percent cannot be satisfied.
    overload_cpu = {
        'func': 'test_job_normal.run',
        'ident': self.get_random_ident(),
        'check_load': {
            'cpu_low_threshold': 100.1,
            'mem_low_threshold': 0,
        },
        'jobs_dir': 'pykit/wsjobd/test/test_jobs',
    }
    second = self._create_client()
    second.send(utfjson.dump(overload_cpu))
    answer = utfjson.load(second.recv())
    second.close()
    self.assertEqual('SystemOverloadError', answer['err'])
def test_client_close(self):
    """A running job survives its client disconnecting; a reconnect with the
    same ident reattaches to it and receives the original job's result."""
    ident = self.get_random_ident()
    # NOTE(review): sibling tests use 'sleep_time'; 'time_sleep' here may be a
    # typo -- confirm against test_job_echo before changing.
    first = {
        'func': 'test_job_echo.run',
        'ident': ident,
        'echo': 'foo',
        'time_sleep': 10,
        'jobs_dir': 'pykit/wsjobd/test/test_jobs',
    }
    self.ws.send(utfjson.dump(first))
    self.ws.close()

    self.ws = self._create_client()
    second = {
        'func': 'test_job_echo.run',
        'ident': ident,
        'echo': 'bar',
        'time_sleep': 10,
        'jobs_dir': 'pykit/wsjobd/test/test_jobs',
    }
    self.ws.send(utfjson.dump(second))
    reply = utfjson.load(self.ws.recv())
    # 'foo' proves the first job kept running and was reused.
    self.assertEqual('foo', reply['result'])
def test_same_ident_same_job(self):
    """While a job with some ident is alive, resubmitting that ident attaches
    to the existing job instead of creating a new one."""
    ident = self.get_random_ident()
    first = {
        'func': 'test_job_echo.run',
        'ident': ident,
        'echo': 'foo',
        'sleep_time': 10,
        'jobs_dir': 'pykit/wsjobd/test/test_jobs',
    }
    self.ws.send(utfjson.dump(first))
    reply = utfjson.load(self.ws.recv())
    self.assertEqual('foo', reply['result'])

    second = {
        'func': 'test_job_echo.run',
        'ident': ident,
        'echo': 'bar',
        'jobs_dir': 'pykit/wsjobd/test/test_jobs',
    }
    client = self._create_client()
    client.send(utfjson.dump(second))
    reply = utfjson.load(client.recv())
    client.close()
    # not bar, because the ident is same as the first job,
    # if job exists, it will not create a new one
    self.assertEqual('foo', reply['result'])
    self.assertEqual('foo', reply['echo'])
def test_same_ident_different_job(self):
    """After the first job exits, its ident is free and a new job is created."""
    ident = self.get_random_ident()
    short_job = {
        'func': 'test_job_echo.run',
        'ident': ident,
        'echo': 'foo',
        'sleep_time': 0.1,
        'jobs_dir': 'pykit/wsjobd/test/test_jobs',
    }
    self.ws.send(utfjson.dump(short_job))
    reply = self._wait_for_result(self.ws)
    self.assertEqual('foo', reply['result'])

    # give the first job time to terminate
    time.sleep(0.2)

    new_job = {
        'func': 'test_job_echo.run',
        'ident': ident,
        'echo': 'bar',
        'jobs_dir': 'pykit/wsjobd/test/test_jobs',
    }
    client = self._create_client()
    client.send(utfjson.dump(new_job))
    reply = self._wait_for_result(client)
    client.close()
    # old job with the same ident has exit, it will create a new one
    self.assertEqual('bar', reply['result'])
    self.assertEqual('bar', reply['echo'])
def test_same_ident_same_job(self):
    """Resubmitting an ident of a still-running job reuses that job."""
    ident = self.get_random_ident()
    long_running = {
        'func': 'test_job_echo.run',
        'ident': ident,
        'echo': 'foo',
        'sleep_time': 10,
        'jobs_dir': 'pykit/wsjobd/test/test_jobs',
    }
    self.ws.send(utfjson.dump(long_running))
    answer = utfjson.load(self.ws.recv())
    self.assertEqual('foo', answer['result'])

    duplicate = {
        'func': 'test_job_echo.run',
        'ident': ident,
        'echo': 'bar',
        'jobs_dir': 'pykit/wsjobd/test/test_jobs',
    }
    other = self._create_client()
    other.send(utfjson.dump(duplicate))
    answer = utfjson.load(other.recv())
    other.close()
    # not bar, because the ident is same as the first job,
    # if job exists, it will not create a new one
    self.assertEqual('foo', answer['result'])
    self.assertEqual('foo', answer['echo'])
def test_same_ident_different_job(self):
    """Once the previous holder of an ident has exited, the ident is reusable."""
    ident = self.get_random_ident()
    quick = {
        'func': 'test_job_echo.run',
        'ident': ident,
        'echo': 'foo',
        'sleep_time': 0.1,
        'jobs_dir': 'pykit/wsjobd/test/test_jobs',
    }
    self.ws.send(utfjson.dump(quick))
    answer = utfjson.load(self.ws.recv())
    self.assertEqual('foo', answer['result'])

    # wait past the first job's 0.1s sleep so it has exited
    time.sleep(0.2)

    replacement = {
        'func': 'test_job_echo.run',
        'ident': ident,
        'echo': 'bar',
        'jobs_dir': 'pykit/wsjobd/test/test_jobs',
    }
    other = self._create_client()
    other.send(utfjson.dump(replacement))
    answer = utfjson.load(other.recv())
    other.close()
    # old job with the same ident has exit, it will create a new one
    self.assertEqual('bar', answer['result'])
    self.assertEqual('bar', answer['echo'])
def test_client_close(self):
    """Closing the client socket does not kill the job; reconnecting with the
    same ident yields the first job's result."""
    ident = self.get_random_ident()
    # NOTE(review): 'time_sleep' differs from the 'sleep_time' key used by
    # other echo tests -- verify which key test_job_echo actually reads.
    original = {
        'func': 'test_job_echo.run',
        'ident': ident,
        'echo': 'foo',
        'time_sleep': 10,
        'jobs_dir': 'pykit/wsjobd/test/test_jobs',
    }
    self.ws.send(utfjson.dump(original))
    self.ws.close()

    self.ws = self._create_client()
    resubmit = {
        'func': 'test_job_echo.run',
        'ident': ident,
        'echo': 'bar',
        'time_sleep': 10,
        'jobs_dir': 'pykit/wsjobd/test/test_jobs',
    }
    self.ws.send(utfjson.dump(resubmit))
    answer = utfjson.load(self.ws.recv())
    self.assertEqual('foo', answer['result'])
def commit(self, force=False):
    """Commit this transaction to zookeeper atomically, then release all locks.

    With nothing modified and force=False the tx is recorded as PURGED
    instead of COMMITTED.  Raises TXTimeout if the tx deadline has passed.
    """
    # Only when commit, it is necessary to ensure connection still active:
    # Thus tx_alive_lock is not lost, then no other process would take
    # charge of this tx.
    if self.time_left() < 0:
        raise TXTimeout('{tx} timeout when committing'.format(tx=self))

    self._assert_connected()

    # All writes below go into one kazoo transaction: they apply atomically.
    kazootx = self.zke.transaction()

    jour = {}  # journal of changed key -> new value

    cnf = self.zke._zkconf

    for k, rec in self.modifications.items():
        curr = self.got_keys[k]
        # unchanged value: nothing to write for this key
        if rec.v == curr.v:
            continue

        jour[k] = rec.v

        # append the new value to the record history, trimmed to the
        # configured maximum number of retained versions
        record_vals = curr.values + [[self.txid, rec.v]]
        record_vals = record_vals[-self.zkstorage.max_value_history:]

        if curr.version == -1:
            # node did not exist when read: create it
            kazootx.create(cnf.record(rec.k), utfjson.dump(record_vals))
        else:
            # guard with the version read earlier to detect concurrent writes
            kazootx.set_data(cnf.record(rec.k), utfjson.dump(record_vals), version=curr.version)

    # drop the tx state node, if it was ever written
    state, ver = self._get_state(self.txid)
    if ver > -1:
        kazootx.delete(cnf.tx_state(self.txid), version=ver)

    # release every key lock held by this tx (version=0: we created them)
    for key in self.got_keys:
        kazootx.delete(cnf.lock(key), version=0)

    if len(jour) > 0 or force:
        kazootx.create(cnf.journal(self.txid), utfjson.dump(jour))
        kazootx.commit()
        status = COMMITTED
    else:
        # Nothing to commit, make it an aborted tx.
        kazootx.commit()
        status = PURGED

    self.zkstorage.add_to_txidset(status, self.txid)
    logger.info('{tx} updated txidset: {status}'.format(tx=self, status=status))

    self.tx_status = status
    self.modifications = {}

    self._close()
def test_json(self):
    """BlockDesc serializes to the expected json and loads back equal."""
    original = BlockDesc({
        'block_id': BlockID('d0',
                            'g000640000000123',
                            '0000',
                            DriveID('idc000c62d8736c7280002'),
                            1),
        'size': 1000,
        'range': ['0a', '0b'],
        'ts_range': ["1235", "456"],
        'ref_num': 1,
        'is_del': 0,
        'mtime': 1,
    })

    dumped = utfjson.dump(original)
    expected = (
        '{"block_id": "d0g0006400000001230000idc000c62d8736c72800020000000001", "is_del": 0, "ref_num": 1, "range": ["0a", "0b"], "mtime": 1, "ts_range": ["1235", "456"], "size": 1000}'
    )
    self.assertEqual(expected, dumped)

    self.assertEqual(original, BlockDesc(utfjson.load(dumped)))
def schedule(self):
    """Run the scheduler loop forever: clean stale status, then once a minute
    dispatch due jobs, dump and log status.  Never returns.
    """
    # Startup cleanup: drop status of jobs no longer configured and mark
    # threads that were lost across a restart.
    # NOTE(review): deleting from self.status while iterating .items() is
    # only safe on Python 2, where items() returns a list -- confirm the
    # target interpreter before porting.
    for job_name, job_status in self.status.items():
        if job_name not in self.jobs:
            del (self.status[job_name])
            continue

        if len(job_status['active_threads']) > 0:
            msg = 'threads aborted by restart: %s' % (
                job_status['active_threads'])
            self.status[job_name]['message'] = msg
            self.status[job_name]['active_threads'] = {}

    while True:
        curr_time = get_time_info(time.time())

        # _schedule mutates shared status; serialize with worker threads.
        with self.lock:
            self._schedule(curr_time)

        # best-effort persistence of status; a failure must not stop the loop
        if self.dump_status is not None:
            try:
                self.dump_status(self.status)
            except Exception as e:
                logger.exception('failed to dump job status: %s' % repr(e))

        for job_name, job_status in self.status.items():
            logger.info('status of job %s, %s'
                        % (job_name, utfjson.dump(job_status)))

        end_time = time.time()
        logger.info('scheduled at: %s, time used: %f'
                    % (repr(curr_time), end_time - curr_time['ts']))

        # sleep until just past the next minute boundary
        to_sleep = 60 - (end_time % 60) + 1
        time.sleep(to_sleep)
def test_watch_acquire(self):
    """acquire_loop yields (holder, version) while blocked and stops
    (StopIteration) once the lock is acquired.
    """
    a = zkutil.ZKLock('foo', on_lost=lambda: True)
    b = zkutil.ZKLock('foo', on_lost=lambda: True)

    # no one locked
    n = 0
    for holder, ver in a.acquire_loop():
        n += 1
    # the loop yielded nothing: 'a' got the lock immediately
    self.assertEqual(0, n, 'acquired directly')

    # watch node change
    it = b.acquire_loop()

    # first yield reports the current holder ('a') at znode version 0
    holder, ver = it.next()
    self.assertEqual((a.identifier, 0), (holder, ver))

    # mutate the lock node; 'b' should observe the change on next yield
    a.identifier['val'] = 'xx'
    value = utfjson.dump(a.identifier)
    self.zk.set(a.lock_path, value)
    holder, ver = it.next()
    self.assertEqual(('xx', 1), (holder['val'], ver), 'watched node change')

    # releasing 'a' lets 'b' acquire: the generator must finish
    a.release()
    try:
        holder, ver = it.next()
        self.fail('should not have next yield')
    except StopIteration:
        pass

    self.assertTrue(b.is_locked())
def test_json_dump(self):
    """ids serialize to their string form, standalone and inside containers."""
    cases = (
        (None, 'null'),
        (self.block_group_id, id_str(self.block_group_id)),
        (self.block_group_id, id_str(self.block_group_id)),
        (self.block_id, id_str(str(self.block_id))),
        (self.block_index, id_str(self.block_index)),
        (self.drive_id, id_str(self.drive_id)),
        ([self.block_group_id, self.drive_id],
         "[{0}, {1}]".format(id_str(self.block_group_id), id_str(self.drive_id))),
        ((self.block_group_id, self.drive_id),
         "[{0}, {1}]".format(id_str(self.block_group_id), id_str(self.drive_id))),
        ({'xxx': self.block_id},
         '{{"xxx": {0}}}'.format(id_str(self.block_id))),
        ({10: self.block_id},
         '{{"10": {0}}}'.format(id_str(self.block_id))),
        ({self.block_id: 'abc'},
         '{{{0}: "abc"}}'.format(id_str(self.block_id))),
        ({self.block_group_id: self.block_id},
         '{{{0}: {1}}}'.format(id_str(self.block_group_id), id_str(self.block_id))),
        ({self.block_group_id: (self.block_id, self.drive_id)},
         '{{{0}: [{1}, {2}]}}'.format(id_str(self.block_group_id),
                                      id_str(self.block_id),
                                      id_str(self.drive_id))),
    )

    for inp, want in cases:
        self.assertEqual(want, utfjson.dump(inp))
def _req(self, action, args):
    """POST `args` as a json body to the subject api endpoint for `action`
    and return the parsed, field-converted result."""
    body = utfjson.dump(args)
    request = {
        'verb': 'POST',
        'uri': '/api/%s/%s/%s' % (self.api_version, self.subject, action),
        'args': {},
        'headers': {
            'Host': '',
            'Content-Length': len(body),
            'User-Agent': self.user_agent,
        },
        'body': body,
    }

    resp_body, resp_headers = self._do_request(request)

    result = self.parse_response_body(resp_body)
    self.convert_field(result)
    # shard info rides on response headers
    self._load_shard(resp_headers)

    return result
def test_invalid_check_load_args(self):
    """Non-numeric check_load fields are rejected with InvalidMessageError."""
    base = {
        'func': 'test_job_normal.run',
        'jobs_dir': 'pykit/wsjobd/test/test_jobs',
    }
    cases = (
        {'check_load': {'mem_low_threshold': 'foo', 'cpu_low_threshold': 0}},
        {'check_load': {'cpu_low_threshold': None, 'mem_low_threshold': 0}},
        {'check_load': {'max_client_number': {}, 'cpu_low_threshold': 0, 'mem_low_threshold': 0}},
    )

    for case in cases:
        case.update(base)
        case['ident'] = self.get_random_ident()

        client = self._create_client()
        client.send(utfjson.dump(case))
        reply = utfjson.load(client.recv())
        client.close()

        self.assertEqual('InvalidMessageError', reply['err'])
def test_json(self):
    """A Region with nested BlockDescs round-trips through json."""
    original = Region({
        'range': ['a', 'z'],
        'levels': [[
            ['a', 'b', BlockDesc()],
            ['b', 'c', BlockDesc(
                size=2,
                block_id=BlockID(
                    'd1g0006300000001230101idc000c62d8736c72800020000000001'
                ))],
        ]],
    })

    dumped = utfjson.dump(original)
    expected = (
        '{"range": ["a", "z"], "levels": [[["a", "b", '
        '{"is_del": 0, "range": null, "block_id": null, "size": 0}], '
        '["b", "c", {"is_del": 0, "range": null, '
        '"block_id": "d1g0006300000001230101idc000c62d8736c72800020000000001", "size": 2}]]], "idc": ""}'
    )
    self.assertEqual(expected, dumped)

    self.assertEqual(original, Region(utfjson.load(dumped)))
def api(self, *args, **argkv):
    """Marshal positional and keyword arguments into a redis-proxy http call.

    Positional args fill the leading method slots; remaining slots are
    filled from keyword args named by self.opts (defaulting to None).
    The trailing slot is always 'retry'; the slots before it (per
    self.opts[:-1]) become query-string params; for PUT the last
    remaining slot becomes the json body; the rest form the url path.
    """
    mtd_args = []
    for idx in range(self.args_count):
        if idx < len(args):
            mtd_args.append(args[idx])
        else:
            # fill missing slots from keyword args, matched by position
            # from the tail of self.opts
            left_cnt = self.args_count - len(mtd_args)
            opt_name = self.opts[-left_cnt]
            mtd_args.append(argkv.get(opt_name, None))

    # the last slot is always the retry count
    retry = mtd_args.pop()

    qs = {}
    # retry in opts, but it is not in qs
    qs_keys = list(self.opts[:-1])
    # pop from both tails so qs keys pair with the trailing mtd_args
    while len(qs_keys) > 0:
        qs[qs_keys.pop()] = mtd_args.pop()

    body = None
    if self.http_mtd == 'PUT':
        # PUT carries the value as a json body
        body = utfjson.dump(mtd_args.pop())

    # whatever remains of mtd_args forms the url path after the op name
    path = [self.redis_op] + mtd_args
    return self.cli._api(self.http_mtd, path, body, qs, retry)
def _make_key_lock(self, txid, key):
    """Build a persistent (non-ephemeral) per-key lock whose identifier is
    the json-encoded txid, so other txs can see who holds the key."""
    return zkutil.ZKLock(key,
                         zkclient=self.zke,
                         zkconf=self.zke._zkconf,
                         ephemeral=False,
                         identifier=utfjson.dump(txid))
def _make_key_lock(self, txid, key):
    """Create the zookeeper lock for `key`, marked with this tx's id.

    The lock is not ephemeral: it must survive a connection loss so no
    other tx can grab the key while this tx is being recovered.
    """
    identifier = utfjson.dump(txid)
    keylock = zkutil.ZKLock(key,
                            zkclient=self.zke,
                            zkconf=self.zke._zkconf,
                            ephemeral=False,
                            identifier=identifier)
    return keylock
def test_invalid_jobdesc(self):
    """Every malformed job description gets an error reply."""
    cases = (
        ('foo', 'not json'),
        (utfjson.dump('foo'), 'not dict'),
        (utfjson.dump({}), 'no func'),
        (utfjson.dump({'func': 'foo'}), 'no ident'),
        (utfjson.dump({'ident': 'bar'}), 'no func'),
        (utfjson.dump({'ident': 'bar', 'func': {}}), 'invalid func'),
        (utfjson.dump({'ident': 44, 'func': 'foo'}), 'invalid ident'),
        (utfjson.dump({'ident': 'foo', 'func': 'foo', 'jobs_dir': {}}), 'invalid jobs_dir'),
    )

    for msg, why in cases:
        client = self._create_client()
        client.send(msg)
        reply = utfjson.load(client.recv())
        self.assertIn('err', reply, why)
        client.close()
def _send_err_and_close(self, err):
    """Best-effort: report `err` (class name and args) over the websocket.

    Never raises; a send failure is only logged.
    NOTE(review): despite the name, this does not close the socket --
    presumably the caller does; confirm.
    """
    try:
        self.ws.send(utfjson.dump({
            'err': err.__class__.__name__,
            'val': err.args,
        }))
    except Exception as e:
        logger.error(('error on channel %s while sending back error ' +
                      'message, %s') % (repr(self), repr(e)))
def _send_err_and_close(self, err):
    """Try to ship the exception back to the client as a json message;
    swallow and log any failure so the caller's cleanup continues."""
    err_msg = {
        'err': err.__class__.__name__,
        'val': err.args,
    }
    try:
        self.ws.send(utfjson.dump(err_msg))
    except Exception as e:
        logger.error(('error on channel %s while sending back error ' +
                      'message, %s') % (repr(self), repr(e)))
def test_module_not_exists(self):
    """A func pointing at a missing module produces an error reply."""
    desc = {
        'func': 'foo.bar',
        'ident': self.get_random_ident(),
        'jobs_dir': 'pykit/wsjobd/test/test_jobs',
    }
    self.ws.send(utfjson.dump(desc))
    reply = utfjson.load(self.ws.recv())
    self.assertIn('err', reply)
def set_lock_val(self, val, version=-1):
    """Store `val` into the identifier of a lock held by this client.

    Raises ZKUtilError if the lock is not actually held.
    Returns the new znode version after the write.
    """
    locked, holder, ver = self.try_acquire()
    if not locked:
        raise ZKUtilError("set non-locked: {k}".format(k=self.lock_name))

    self.identifier['val'] = val
    st = self.zkclient.set(self.lock_path,
                           utfjson.dump(self.identifier),
                           version=version)
    return st.version
def test_normal_job(self):
    """A well-formed job runs and returns its result."""
    desc = {
        'func': 'test_job_normal.run',
        'ident': self.get_random_ident(),
        'jobs_dir': 'pykit/wsjobd/test/test_jobs',
    }
    self.ws.send(utfjson.dump(desc))
    reply = utfjson.load(self.ws.recv())
    self.assertEqual('foo', reply['result'], 'test get result')
def test_module_not_exists(self):
    """Requesting a function from a nonexistent module is answered with an error."""
    self.ws.send(utfjson.dump({
        'func': 'foo.bar',
        'ident': self.get_random_ident(),
        'jobs_dir': 'pykit/wsjobd/test/test_jobs',
    }))
    answer = utfjson.load(self.ws.recv())
    self.assertIn('err', answer)
def test_normal_job(self):
    """The normal test job completes and reports result 'foo'."""
    desc = {
        'func': 'test_job_normal.run',
        'ident': self.get_random_ident(),
        'jobs_dir': 'pykit/wsjobd/test/test_jobs',
    }
    self.ws.send(utfjson.dump(desc))
    answer = utfjson.load(self.ws.recv())
    dd(answer)
    self.assertEqual('foo', answer['result'], 'test get result')
def test_func_not_exists(self):
    """Naming a missing function in an existing module yields LoadingError."""
    desc = {
        'func': 'test_job_echo.func_not_exists',
        'ident': self.get_random_ident(),
        'jobs_dir': 'pykit/wsjobd/test/test_jobs',
    }
    self.ws.send(utfjson.dump(desc))
    reply = utfjson.load(self.ws.recv())
    self.assertEqual('LoadingError', reply['err'])
def test_json(self):
    """BlockGroup serializes to the expected json and loads back equal."""
    group = BlockGroup(block_group_id='g000640000000123',
                       idcs=['a', 'b', 'c'],
                       config=_ec_config)

    dumped = utfjson.dump(group)
    expected = ('{"config": {"in_idc": [4, 2], "ec_policy": "lrc", "cross_idc": [2, 1], '
                '"data_replica": 3}, "blocks": {}, "idcs": ["a", "b", "c"], '
                '"block_group_id": "g000640000000123"}')
    self.assertEqual(expected, dumped)

    self.assertEqual(group, BlockGroup(utfjson.load(dumped)))
def test_invalid_cpu_sample_interval(self):
    """A non-numeric cpu_sample_interval is rejected as InvalidMessageError."""
    desc = {
        'func': 'test_job_normal.run',
        'ident': self.get_random_ident(),
        'cpu_sample_interval': 'foo',
        'jobs_dir': 'pykit/wsjobd/test/test_jobs',
    }
    self.ws.send(utfjson.dump(desc))
    reply = utfjson.load(self.ws.recv())
    self.assertEqual('InvalidMessageError', reply['err'])
def test_invalid_cpu_sample_interval(self):
    """cpu_sample_interval must be a number; a string gets InvalidMessageError."""
    self.ws.send(utfjson.dump({
        'func': 'test_job_normal.run',
        'ident': self.get_random_ident(),
        'cpu_sample_interval': 'foo',
        'jobs_dir': 'pykit/wsjobd/test/test_jobs',
    }))
    answer = utfjson.load(self.ws.recv())
    self.assertEqual('InvalidMessageError', answer['err'])
def test_func_not_exists(self):
    """A missing attribute in an importable module is reported as LoadingError."""
    self.ws.send(utfjson.dump({
        'func': 'test_job_echo.func_not_exists',
        'ident': self.get_random_ident(),
        'jobs_dir': 'pykit/wsjobd/test/test_jobs',
    }))
    answer = utfjson.load(self.ws.recv())
    self.assertEqual('LoadingError', answer['err'])
def test_json(self):
    """Region with bare block-id strings in levels round-trips through json."""
    region = Region({
        'range': ['a', 'z'],
        'levels': [
            [['a', 'b', tbid1], ['b', 'c', tbid2]],
        ],
    })

    dumped = utfjson.dump(region)
    expected = ('{"range": ["a", "z"], "levels": '
                '[[["a", "b", "' + tbid1 + '"], ["b", "c", "' + tbid2 + '"]]], "idc": ""}')

    self.assertEqual(utfjson.load(expected), region)
    self.assertEqual(region, Region(utfjson.load(dumped)))
def test_report_system_load(self):
    """With report_system_load set, the reply carries a system_load section."""
    desc = {
        'func': 'test_job_normal.run',
        'ident': self.get_random_ident(),
        'report_system_load': True,
        'jobs_dir': 'pykit/wsjobd/test/test_jobs',
    }
    self.ws.send(utfjson.dump(desc))
    reply = utfjson.load(self.ws.recv())

    load = reply['system_load']
    self.assertIn('mem_available', load)
    self.assertIn('cpu_idle_percent', load)
    self.assertIn('client_number', load)
def test_cb(self):
    """CachedReader invokes the callback with the new value on every change."""
    latest = ['foo']

    def cb(path, old, new):
        latest[0] = new

    zkutil.CachedReader(self.zk, 'foo', callback=cb)

    for _ in range(100):
        self.val['a'] += 1
        self.zk.set('foo', utfjson.dump(self.val))

    # allow the watch callbacks to drain
    time.sleep(1)
    self.assertEqual(self.val, latest[0])
def test_update(self):
    """CachedReader tracks every update written to the backing znode."""
    reader = zkutil.CachedReader(self.zk, 'foo')
    self.assertDictEqual(self.val, reader)

    for case in (
            {'a': 2},
            {'a': 'a_v', 'b': 'b_v'},
            {'a': 3, 'b': {'c': 4}, 'd': {'e': {'e': 'val'}}},
    ):
        self.zk.set('foo', utfjson.dump(case))
        # give the watch a moment to fire
        time.sleep(0.5)
        self.assertDictEqual(case, reader)
def test_cb(self):
    """Repeated znode writes all funnel through the CachedReader callback."""
    seen = ['foo']

    def on_change(path, old, new):
        seen[0] = new

    zkutil.CachedReader(self.zk, 'foo', callback=on_change)

    for _ in range(100):
        self.val['a'] += 1
        self.zk.set('foo', utfjson.dump(self.val))

    time.sleep(1)
    self.assertEqual(self.val, seen[0])
def test_report_system_load(self):
    """system_load in the reply exposes memory, cpu and client-count fields."""
    self.ws.send(utfjson.dump({
        'func': 'test_job_normal.run',
        'ident': self.get_random_ident(),
        'report_system_load': True,
        'jobs_dir': 'pykit/wsjobd/test/test_jobs',
    }))
    answer = utfjson.load(self.ws.recv())

    for field in ('mem_available', 'cpu_idle_percent', 'client_number'):
        self.assertIn(field, answer['system_load'])
def write_last_stat(self, f, offset):
    """Persist the current read position of `f` (inode + offset) as json."""
    ino = os.fstat(f.fileno())[stat.ST_INO]
    fsutil.write_file(self.stat_path(),
                      utfjson.dump({"inode": ino, "offset": offset}),
                      fsync=False)
    logger.info('position written fn=%s inode=%d offset=%d' % (
        self.fn, ino, offset))
def write_last_stat(self, f, offset):
    """Record where reading of `f` stopped, keyed by inode so a rotated
    file is detected on the next start."""
    file_stat = os.fstat(f.fileno())
    ino = file_stat[stat.ST_INO]
    position = {
        "inode": ino,
        "offset": offset,
    }
    fsutil.write_file(self.stat_path(), utfjson.dump(position), fsync=False)
    logger.info('position written fn=%s inode=%d offset=%d' % (self.fn, ino, offset))
def test_max_client_number(self):
    """The second client over max_client_number=1 gets SystemOverloadError."""
    first = {
        'func': 'test_job_loop_10.run',
        'ident': self.get_random_ident(),
        'check_load': {
            'max_client_number': 1,
            'cpu_low_threshold': 0,
            'mem_low_threshold': 0,
        },
        'jobs_dir': 'pykit/wsjobd/test/test_jobs',
    }
    self.ws.send(utfjson.dump(first))
    reply = utfjson.load(self.ws.recv())
    self.assertNotIn('err', reply)

    second = {
        'func': 'test_job_loop_10.run',
        'ident': self.get_random_ident(),
        'check_load': {
            'max_client_number': 1,
            'cpu_low_threshold': 0,
            'mem_low_threshold': 0,
        },
        'jobs_dir': 'pykit/wsjobd/test/test_jobs',
    }
    client = self._create_client()
    client.send(utfjson.dump(second))
    reply = utfjson.load(client.recv())
    self.assertEqual('SystemOverloadError', reply['err'])
    client.close()
def test_max_client_number(self):
    """Once max_client_number is reached, further clients are refused."""
    def make_desc():
        return {
            'func': 'test_job_loop_10.run',
            'ident': self.get_random_ident(),
            'check_load': {
                'max_client_number': 1,
                'cpu_low_threshold': 0,
                'mem_low_threshold': 0,
            },
            'jobs_dir': 'pykit/wsjobd/test/test_jobs',
        }

    # first client fits under the limit
    self.ws.send(utfjson.dump(make_desc()))
    answer = utfjson.load(self.ws.recv())
    self.assertNotIn('err', answer)

    # second client pushes past max_client_number=1
    extra = self._create_client()
    extra.send(utfjson.dump(make_desc()))
    answer = utfjson.load(extra.recv())
    self.assertEqual('SystemOverloadError', answer['err'])
    extra.close()
def test_json(self):
    """An empty BlockGroup with config and idcs round-trips through json."""
    group = BlockGroup(block_group_id='g000640000000123',
                       idcs=['a', 'b', 'c'],
                       config=_ec_config)

    dumped = utfjson.dump(group)
    expected = (
        '{"config": {"in_idc": [4, 2], "ec_policy": "lrc", "cross_idc": [2, 1], '
        '"data_replica": 3}, "blocks": {}, "idcs": ["a", "b", "c"], '
        '"block_group_id": "g000640000000123"}')
    self.assertEqual(expected, dumped)

    restored = BlockGroup(utfjson.load(dumped))
    self.assertEqual(group, restored)
def test_progress_key(self):
    """The value stored under progress['key'] is what gets reported."""
    desc = {
        'func': 'test_job_progress_key.run',
        'ident': self.get_random_ident(),
        'progress': {
            'key': 'foo',
        },
        'report_system_load': True,
        'jobs_dir': 'pykit/wsjobd/test/test_jobs',
    }
    self.ws.send(utfjson.dump(desc))
    reply = utfjson.load(self.ws.recv())
    self.assertEqual('80%', reply)
def do_GET(self):
    """Record the request path/query on the test class and reply with a
    fixed json body using the configured http status."""
    payload = utfjson.dump({
        'foo': 1,
        'bar': 2,
    })

    parsed = urlparse.urlparse(self.path)
    TestRedisProxyClient.request['req-path'] = parsed.path
    TestRedisProxyClient.request['req-qs'] = parsed.query

    self.send_response(TestRedisProxyClient.response['http-status'])
    self.send_header('Content-Length', len(payload))
    self.end_headers()
    self.wfile.write(payload)
def test_json(self):
    """A minimal BlockDesc serializes to the expected json and loads back."""
    original = BlockDesc({
        'block_id': BlockID('d0',
                            'g000640000000123',
                            '0000',
                            DriveID('c62d8736c7280002'),
                            1),
        'size': 1000,
        'range': ['0a', '0b'],
        'is_del': 0,
    })

    dumped = utfjson.dump(original)
    expected = ('{"is_del": 0, "range": ["0a", "0b"], "block_id": '
                '"d0g0006400000001230000c62d8736c72800020000000001", "size": 1000}')
    self.assertEqual(expected, dumped)

    self.assertEqual(original, BlockDesc(utfjson.load(dumped)))
def test_json(self):
    """Region containing BlockDescs dumps to stable json and loads back equal."""
    original = Region({
        'range': ['a', 'z'],
        'levels': [[
            ['a', 'b', BlockDesc()],
            ['b', 'c',
             BlockDesc(size=2,
                       block_id=BlockID('d1g0006300000001230101idc000c62d8736c72800020000000001'))],
        ]],
    })

    dumped = utfjson.dump(original)
    expected = ('{"range": ["a", "z"], "levels": [[["a", "b", '
                '{"is_del": 0, "range": null, "block_id": null, "size": 0}], '
                '["b", "c", {"is_del": 0, "range": null, '
                '"block_id": "d1g0006300000001230101idc000c62d8736c72800020000000001", "size": 2}]]], "idc": ""}')
    self.assertEqual(expected, dumped)

    self.assertEqual(original, Region(utfjson.load(dumped)))
def test_get_txidset(self):
    """A txidset stored in redis as json is loaded back intact."""
    cases = (
        [[1, 2]],
        [[7, 20]],
        [[1, 3], [7, 8]],
        [[1, 2], [10, 15]],
    )

    for ranges in cases:
        self.redis_cli.delete(self.txid_path)
        self.redis_cli.set(self.txid_path,
                           utfjson.dump({COMMITTED: ranges}))

        txidset = self.storage.txidset.get()
        self.assertEqual(ranges, txidset[COMMITTED])
def _init_hierarchy(hierarchy, parent_path):
    """Recursively create the zk nodes described by `hierarchy` under
    `parent_path`; '__val__' and '__acl__' entries configure each node,
    everything else is a child subtree."""
    if len(hierarchy) == 0:
        return

    for node, attr_children in hierarchy.items():
        val = utfjson.dump(attr_children.get('__val__', {}))
        acl = attr_children.get('__acl__')

        path = _init_node(zkcli, parent_path, node, val, acl, users)

        # recurse into everything that is not node configuration
        children = {k: v
                    for k, v in attr_children.items()
                    if k not in ('__val__', '__acl__')}
        _init_hierarchy(children, path)
def test_progress_key(self):
    """The job's progress report is exactly the value under progress['key']."""
    self.ws.send(utfjson.dump({
        'func': 'test_job_progress_key.run',
        'ident': self.get_random_ident(),
        'progress': {
            'key': 'foo',
        },
        'report_system_load': True,
        'jobs_dir': 'pykit/wsjobd/test/test_jobs',
    }))
    raw = self.ws.recv()
    dd(raw)
    self.assertEqual('80%', utfjson.load(raw))
def setUp(self):
    """Start a single-node zookeeper container and seed znode 'foo'."""
    utdocker.create_network()
    utdocker.start_container(
        zk_test_name,
        zk_test_tag,
        env={
            "ZOO_MY_ID": 1,
            "ZOO_SERVERS": "server.1=0.0.0.0:2888:3888",
        },
        port_bindings={2181: 21811},
    )

    self.zk = KazooClient(hosts='127.0.0.1:21811')
    self.zk.start()

    self.val = {'a': 1, 'b': 2}
    self.zk.create('foo', utfjson.dump(self.val))
def test_invalid_jobdesc(self):
    """Each kind of malformed job description is answered with an error."""
    bad_messages = (
        ('foo', 'not json'),
        (utfjson.dump('foo'), 'not dict'),
        (utfjson.dump({}), 'no func'),
        (utfjson.dump({'func': 'foo'}), 'no ident'),
        (utfjson.dump({'ident': 'bar'}), 'no func'),
        (utfjson.dump({'ident': 'bar', 'func': {}}), 'invalid func'),
        (utfjson.dump({'ident': 44, 'func': 'foo'}), 'invalid ident'),
        (utfjson.dump({'ident': 'foo', 'func': 'foo', 'jobs_dir': {}}), 'invalid jobs_dir'),
    )

    for raw, reason in bad_messages:
        conn = self._create_client()
        conn.send(raw)
        answer = utfjson.load(conn.recv())
        self.assertIn('err', answer, reason)
        conn.close()
def test_worker_exception(self):
    """Progress keeps flowing for a while after the worker dies, then the
    server closes the connection and recv raises."""
    desc = {
        'func': 'test_job_worker_exception.run',
        'ident': self.get_random_ident(),
        'jobs_dir': 'pykit/wsjobd/test/test_jobs',
        'progress': {
            'interval': 0.1,
        },
    }
    self.ws.send(utfjson.dump(desc))

    # first reports are plain progress frames: no 'err' substring
    for _ in range(10):
        self.assertNotIn('err', self.ws.recv())

    # eventually the server closes the socket, so recv() must raise
    with self.assertRaises(Exception):
        for _ in range(3):
            self.ws.recv()
def progress_sender(job, channel, interval=5, stat=None):
    """Periodically push the job's progress (stat(job.data)) to the client
    over `channel`, every `interval` seconds or sooner when the job signals
    job.progress_available.

    After the worker thread dies, up to 10 more reports are sent before the
    websocket is closed.  Any send error closes the channel.
    """
    stat = stat or (lambda data: data)
    data = job.data

    # number of extra reports still to send after the worker thread dies
    i = 10
    try:
        while True:
            # if thread died due to some reason, still send 10 stats
            if not job.thread.is_alive():
                logger.info('job %s died: %s' % (job.ident, repr(job.err)))
                if i == 0:
                    channel.ws.close()
                    break
                i -= 1

            # compute the report once and reuse it for logging and sending
            # (the original called stat(data) twice, once per use)
            to_send = stat(data)
            # fixed log-message typo: 'jod' -> 'job'
            logger.info('job %s on channel %s send progress: %s' % (
                job.ident, repr(channel), repr(to_send)))

            if channel.report_system_load and isinstance(to_send, dict):
                to_send['system_load'] = channel.get_system_load()

            channel.ws.send(utfjson.dump(to_send))

            # wake early if the job published fresh progress
            if job.progress_available.wait(interval):
                job.progress_available.clear()

    except WebSocketError as e:
        if channel.ws.closed:
            logger.info('the client has closed the connection')
        else:
            logger.exception(('got websocket error when sending progress on' +
                              ' channel %s: %s') % (repr(channel), repr(e)))
    except Exception as e:
        logger.exception('got exception when sending progress on channel %s: %s' % (
            repr(channel), repr(e)))
        channel.ws.close()
def _run(context, slot_number):
    """Account every cgroup subsystem for one slot and publish the json
    result to redis with the configured expiry."""
    arch_conf = context['arch_conf']['value']

    result = {}
    for subsystem_name, subsystem_arch_conf in arch_conf.iteritems():
        subsystem_model = model.subsystem[subsystem_name]
        cgroup_path = os.path.join(context['cgroup_dir'], subsystem_name)

        result[subsystem_name] = {}
        account_one_cgroup(slot_number, subsystem_model, cgroup_path,
                           subsystem_arch_conf, result[subsystem_name])

    key_name = '%s/account/%d' % (context['redis_prefix'], slot_number)
    redis_client = context['redis_client']
    redis_client.set(key_name, utfjson.dump(result))
    redis_client.expire(key_name, context['redis_expire_time'])
def test_report_interval(self):
    """Consecutive progress reports are spaced by the requested interval."""
    desc = {
        'func': 'test_job_loop_10.run',
        'ident': self.get_random_ident(),
        'progress': {
            'interval': 0.5,
        },
        'jobs_dir': 'pykit/wsjobd/test/test_jobs',
    }
    self.ws.send(utfjson.dump(desc))

    self.ws.recv()
    t_first = time.time()
    self.ws.recv()
    t_second = time.time()

    measured = t_second - t_first
    # tolerate 0.1 second of difference
    self.assertLess(measured - desc['progress']['interval'], 0.1)
def set(self, key, value):
    """json-encode `value` and store it under the namespaced `key`."""
    self.cli.set(self._get_path(key), utfjson.dump(value))
def hset(self, hashname, hashkey, value):
    """json-encode `value` and store it at `hashkey` of the namespaced hash."""
    self.cli.hset(self._get_path(hashname), hashkey, utfjson.dump(value))
def _request(self, url, method, params, timeout, bodyinjson):
    """Issue one http request to etcd, following redirects when allowed.

    GET/DELETE put params in the query string; PUT/POST send them as a
    json or form-encoded body depending on `bodyinjson`.  Loops until a
    non-redirect response is returned.  (Python 2: urllib.urlencode and
    str.encode('base64').)
    """
    while True:
        host, port, path = self._parse_url(url)
        if host is None or port is None or path is None:
            raise EtcdException('url is invalid, {url}'.format(url=url))

        qs = {}
        headers = {}
        body = ''

        if method in (self._MGET, self._MDELETE):
            qs.update(params or {})
            # use once, coz params is in location's query string
            params = None
            headers['Content-Length'] = 0

        elif method in (self._MPUT, self._MPOST):
            if bodyinjson:
                if params is not None:
                    body = utfjson.dump(params)
                headers.update({'Content-Type': 'application/json',
                                'Content-Length': len(body)})
            else:
                body = urllib.urlencode(params or {})
                headers.update(
                    {'Content-Type': 'application/x-www-form-urlencoded',
                     'Content-Length': len(body)}
                )
        else:
            raise EtcdRequestError('HTTP method {method} not supported'
                                   ''.format(method=method))

        # append accumulated query-string params to the path
        if len(qs) > 0:
            if '?' in path:
                path = path + '&' + urllib.urlencode(qs)
            else:
                path = path + '?' + urllib.urlencode(qs)

        if self.basic_auth_account is not None:
            auth = {
                'Authorization': 'Basic {ant}'.format(
                    ant=self.basic_auth_account.encode('base64').strip()),
            }
            headers.update(auth)

        logger.debug('connect -> {mtd} {url}{path} {timeout}'.format(
            mtd=method, url=self._base_uri, path=path, timeout=timeout))

        h = http.Client(host, port, timeout)
        h.send_request(path, method, headers)
        h.send_body(body)
        h.read_response()

        resp = Response.from_http(h)

        if not self.allow_redirect:
            return resp

        if resp.status not in Response.REDIRECT_STATUSES:
            return resp

        # redirect: retry the loop against the Location url
        url = resp.get_redirect_location()
        if url is None:
            raise EtcdResponseError('location not found in {header}'
                                    ''.format(header=resp.headers))

        logger.debug('redirect -> ' + url)