def testAddNode(self):
    """Verify that a node added at runtime participates in the queue.

    Spins up an extra RPC server / MessageQueue pair on a random port,
    registers the new node with the three existing queues, then
    round-trips 100 integers through mq0 to confirm the enlarged
    cluster still stores and returns every item.
    """
    # list() so the equality check against sorted(...) holds on both
    # Python 2 and 3 (on Python 3, range() is a lazy object, never
    # equal to a list, which made this assertion always fail there).
    data = list(range(100))
    new_port = random.randint(10000, 30000)
    new_node = 'localhost:%s' % new_port
    new_rpc_server = ColaRPCServer(('localhost', new_port))
    thd = threading.Thread(target=new_rpc_server.serve_forever)
    thd.setDaemon(True)
    thd.start()
    new_dir = tempfile.mkdtemp()
    ns = list(self.nodes)
    ns.append(new_node)
    new_mq = MessageQueue(new_dir, new_rpc_server, new_node, ns)
    try:
        self.mq0.add_node(new_node)
        self.mq1.add_node(new_node)
        self.mq2.add_node(new_node)
        self.mq0.put(data)
        self.assertEqual(data, sorted(self.mq0.get(size=100)))
    finally:
        # Tear the extra node down even if assertions fail; the temp
        # dir is removed last so shutdown can still flush into it.
        try:
            new_rpc_server.shutdown()
            new_mq.shutdown()
        finally:
            shutil.rmtree(new_dir)
def setUp(self):
    """Build a fake wiki-crawler Job, its RPC server and a JobLoader."""
    patterns = UrlPatterns(
        Url(r'^http://zh.wikipedia.org/wiki/[^(:|/)]+$', 'wiki_item',
            FakeWikiParser))
    conf = Config(StringIO(user_conf))
    self.dir = tempfile.mkdtemp()
    self.job = Job(
        'fake wiki crawler', patterns, MechanizeOpener,
        ['http://zh.wikipedia.org/wiki/%E6%97%A0%E6%95%8C%E8%88%B0%E9%98%9F'],
        user_conf=conf)
    port = self.job.context.job.port
    local_node = 'localhost:%s' % port
    self.rpc_server = ColaRPCServer(('localhost', port))
    self.loader = JobLoader(self.job)
    self.loader.init_mq(self.rpc_server, [local_node], local_node, self.dir)
    server_thread = threading.Thread(target=self.rpc_server.serve_forever)
    server_thread.setDaemon(True)
    server_thread.start()
class Test(unittest.TestCase):
    """End-to-end check that MasterWatcher can start a zipped job."""

    def setUp(self):
        # Workspace layout: <tmp>/zip holds the packaged job,
        # <tmp>/job is where the watcher unpacks and runs it.
        self.dir = tempfile.mkdtemp()
        self.zip_dir = os.path.join(self.dir, "zip")
        if not os.path.exists(self.zip_dir):
            os.mkdir(self.zip_dir)
        self.job_dir = os.path.join(self.dir, "job")
        if not os.path.exists(self.job_dir):
            os.mkdir(self.job_dir)
        source = os.path.join(root_dir(), "contrib", "wiki")
        target = os.path.join(self.zip_dir, "wiki.zip")
        # Package the sample wiki job, leaving compiled files out.
        self.zip_file = ZipHandler.compress(target, source,
                                            type_filters=("pyc", ))
        self.rpc_server = ColaRPCServer(("localhost", main_conf.master.port))
        self.master_watcher = MasterWatcher(self.rpc_server, self.zip_dir,
                                            self.job_dir)
        serving = threading.Thread(target=self.rpc_server.serve_forever)
        serving.setDaemon(True)
        serving.start()

    def tearDown(self):
        self.rpc_server.shutdown()
        shutil.rmtree(self.dir)

    def testMasterWatcher(self):
        self.master_watcher.start_job(self.zip_file)
class Test(unittest.TestCase):
    """Smoke test: MasterWatcher should accept and launch a zipped job."""

    def setUp(self):
        self.dir = tempfile.mkdtemp()
        # Ensure both working sub-directories exist under the temp root.
        self.zip_dir = os.path.join(self.dir, 'zip')
        if not os.path.exists(self.zip_dir):
            os.mkdir(self.zip_dir)
        self.job_dir = os.path.join(self.dir, 'job')
        if not os.path.exists(self.job_dir):
            os.mkdir(self.job_dir)
        # Compress the bundled wiki example, skipping *.pyc files.
        self.zip_file = ZipHandler.compress(
            os.path.join(self.zip_dir, 'wiki.zip'),
            os.path.join(root_dir(), 'contrib', 'wiki'),
            type_filters=('pyc', ))
        self.rpc_server = ColaRPCServer(('localhost', main_conf.master.port))
        self.master_watcher = MasterWatcher(self.rpc_server, self.zip_dir,
                                            self.job_dir)
        worker = threading.Thread(target=self.rpc_server.serve_forever)
        worker.setDaemon(True)
        worker.start()

    def tearDown(self):
        self.rpc_server.shutdown()
        shutil.rmtree(self.dir)

    def testMasterWatcher(self):
        self.master_watcher.start_job(self.zip_file)
def setUp(self):
    """Start a daemonized RPC server on a random port and make a temp dir."""
    self.port = random.randint(10000, 30000)
    self.addr = 'localhost:%s' % self.port
    self.rpc_server = ColaRPCServer(('localhost', self.port))
    worker = threading.Thread(target=self.rpc_server.serve_forever)
    worker.setDaemon(True)
    worker.start()
    self.dir_ = tempfile.mkdtemp()
class Test(unittest.TestCase):
    """Drives a JobLoader over a fake wiki job and checks its output."""

    def setUp(self):
        patterns = UrlPatterns(
            Url(r'^http://zh.wikipedia.org/wiki/[^(:|/)]+$', 'wiki_item',
                FakeWikiParser))
        conf = Config(StringIO(user_conf))
        self.dir = tempfile.mkdtemp()
        self.job = Job(
            'fake wiki crawler', patterns, MechanizeOpener,
            ['http://zh.wikipedia.org/wiki/%E6%97%A0%E6%95%8C%E8%88%B0%E9%98%9F'],
            user_conf=conf)
        port = self.job.context.job.port
        local_node = 'localhost:%s' % port
        self.rpc_server = ColaRPCServer(('localhost', port))
        self.loader = JobLoader(self.job)
        self.loader.init_mq(self.rpc_server, [local_node], local_node,
                            self.dir)
        runner = threading.Thread(target=self.rpc_server.serve_forever)
        runner.setDaemon(True)
        runner.start()

    def tearDown(self):
        try:
            self.loader.finish()
            self.rpc_server.shutdown()
        finally:
            shutil.rmtree(self.dir)

    def testJobLoader(self):
        self.assertEqual(len(self.job.starts), 1)
        self.loader.mq.put(self.job.starts)
        self.assertEqual(self.loader.mq.get(), self.job.starts[0])
        # Requeue the start URL, run the job, then expect 10 stored items.
        self.loader.mq.put(self.job.starts)
        self.loader.run()
        # `f` and `sep` are presumably module-level fixtures capturing the
        # parser's output stream — confirm against the surrounding file.
        self.assertEqual(len(f.getvalue().strip(sep).split(sep)), 10)
def start_rpc_server():
    """Start the client-side RPC server once, in a daemon thread.

    Returns the serving thread; repeated calls return the existing
    thread without starting a second server.
    """
    global rpc_server
    global rpc_server_thread
    # Already running: reuse the live server/thread pair.
    if rpc_server is not None and rpc_server_thread is not None:
        return rpc_server_thread
    rpc_server = ColaRPCServer((get_ip(), main_conf.client.port))
    rpc_server.register_function(stop)
    rpc_server_thread = threading.Thread(target=rpc_server.serve_forever)
    rpc_server_thread.setDaemon(True)
    rpc_server_thread.start()
    return rpc_server_thread
def create_rpc_server(job, context=None):
    """Spin up a ColaRPCServer for *job*, served from a daemon thread.

    Uses *context* when given, otherwise the job's own context, to pick
    the port. Returns the running server instance.
    """
    ctx = context or job.context
    server = ColaRPCServer((get_ip(), ctx.job.port))
    serving = threading.Thread(target=server.serve_forever)
    serving.setDaemon(True)
    serving.start()
    return server
class Test(unittest.TestCase):
    """JobLoader integration test against a fake wiki crawl job."""

    def setUp(self):
        wiki_url = Url(r'^http://zh.wikipedia.org/wiki/[^(:|/)]+$',
                       'wiki_item', FakeWikiParser)
        url_patterns = UrlPatterns(wiki_url)
        self.dir = tempfile.mkdtemp()
        starts = [
            'http://zh.wikipedia.org/wiki/%E6%97%A0%E6%95%8C%E8%88%B0%E9%98%9F',
        ]
        self.job = Job('fake wiki crawler', url_patterns, MechanizeOpener,
                       starts, user_conf=Config(StringIO(user_conf)))
        local_node = 'localhost:%s' % self.job.context.job.port
        self.rpc_server = ColaRPCServer(
            ('localhost', self.job.context.job.port))
        self.loader = JobLoader(self.job)
        self.loader.init_mq(self.rpc_server, [local_node], local_node,
                            self.dir)
        serving = threading.Thread(target=self.rpc_server.serve_forever)
        serving.setDaemon(True)
        serving.start()

    def tearDown(self):
        try:
            self.loader.finish()
            self.rpc_server.shutdown()
        finally:
            shutil.rmtree(self.dir)

    def testJobLoader(self):
        self.assertEqual(len(self.job.starts), 1)
        self.loader.mq.put(self.job.starts)
        self.assertEqual(self.loader.mq.get(), self.job.starts[0])
        # Put the start URL back so run() has work to do.
        self.loader.mq.put(self.job.starts)
        self.loader.run()
        # `f`/`sep` appear to be module-level capture fixtures — the run
        # is expected to have produced exactly 10 separated records.
        self.assertEqual(len(f.getvalue().strip(sep).split(sep)), 10)
def setUp(self):
    """Launch three MessageQueue nodes, each with its own RPC server."""
    ports = tuple(random.randint(10000, 30000) for _ in range(3))
    self.nodes = ['localhost:%s' % p for p in ports]
    self.dirs = [tempfile.mkdtemp() for _ in ports]
    self.size = len(ports)
    for idx, port in enumerate(ports):
        server = ColaRPCServer(('localhost', port))
        setattr(self, 'rpc_server%s' % idx, server)
        queue = MessageQueue(self.dirs[idx], server, self.nodes[idx],
                             self.nodes[:])
        setattr(self, 'mq%s' % idx, queue)
        runner = threading.Thread(target=server.serve_forever)
        runner.setDaemon(True)
        runner.start()
    self.client = MessageQueueClient(self.nodes)
def setUp(self):
    """Start three fixed-port MQ nodes, each with two store directories."""
    ports = (11111, 11211, 11311)
    self.nodes = ['localhost:%s' % p for p in ports]
    # Two temp dirs per node — presumably primary and backup stores for
    # init_store(); confirm against MessageQueue's implementation.
    self.dirs = [tempfile.mkdtemp() for _ in range(2 * len(ports))]
    self.size = len(ports)
    for idx, port in enumerate(ports):
        server = ColaRPCServer(('localhost', port))
        setattr(self, 'rpc_server%s' % idx, server)
        mq = MessageQueue(self.nodes[:], self.nodes[idx], server)
        setattr(self, 'mq%s' % idx, mq)
        mq.init_store(self.dirs[2 * idx], self.dirs[2 * idx + 1])
        runner = threading.Thread(target=server.serve_forever)
        runner.setDaemon(True)
        runner.start()
    self.client = MessageQueueClient(self.nodes)
def setUp(self):
    """Wire up a fake wiki Job with its RPC server and JobLoader."""
    url_patterns = UrlPatterns(
        Url(r'^http://zh.wikipedia.org/wiki/[^(:|/)]+$',
            'wiki_item',
            FakeWikiParser))
    self.dir = tempfile.mkdtemp()
    self.job = Job(
        'fake wiki crawler', url_patterns, MechanizeOpener,
        ['http://zh.wikipedia.org/wiki/%E6%97%A0%E6%95%8C%E8%88%B0%E9%98%9F'],
        user_conf=Config(StringIO(user_conf)))
    job_port = self.job.context.job.port
    local_node = 'localhost:%s' % job_port
    self.rpc_server = ColaRPCServer(('localhost', job_port))
    self.loader = JobLoader(self.job)
    self.loader.init_mq(self.rpc_server, [local_node], local_node, self.dir)
    serving = threading.Thread(target=self.rpc_server.serve_forever)
    serving.setDaemon(True)
    serving.start()
class Test(unittest.TestCase):
    """Round-trips an XML-RPC call, then verifies shutdown closes the port."""

    def client_call(self):
        # NOTE(review): assumes the server thread is already accepting
        # connections — there is no explicit readiness handshake.
        proxy = xmlrpclib.ServerProxy('http://localhost:11103')
        value = random.randint(0, 100)
        result = proxy.test_plus_one(value)
        self.assertEqual(result, value + 1)

    def start_server(self):
        # Blocks the calling thread until shutdown() is invoked.
        self.server = ColaRPCServer(('localhost', 11103))
        self.server.register_function(test_plus_one)
        self.server.serve_forever()

    def setUp(self):
        self.server_run = threading.Thread(target=self.start_server)

    def testRPC(self):
        self.server_run.start()
        self.client_call()
        self.server.shutdown()
        del self.server
        # Once shut down, connecting again must fail at the socket level.
        with self.assertRaises(socket.error):
            self.client_call()
def create_rpc_server():
    """Create the master-side RPC server, serving from a daemon thread."""
    server = ColaRPCServer((get_ip(), main_conf.master.port))
    serving = threading.Thread(target=server.serve_forever)
    serving.setDaemon(True)
    serving.start()
    return server
class Test(unittest.TestCase):
    """Exercises BudgetApplyServer, CounterServer and SpeedControlServer
    through both a direct (in-process) client and an RPC ('host:port')
    client, checking that the two stay consistent."""

    def setUp(self):
        # Random port so parallel test runs don't collide.
        self.port = random.randint(10000, 30000)
        self.addr = 'localhost:%s' % self.port
        self.rpc_server = ColaRPCServer(('localhost', self.port))
        thd = threading.Thread(target=self.rpc_server.serve_forever)
        thd.setDaemon(True)
        thd.start()
        self.dir_ = tempfile.mkdtemp()

    def tearDown(self):
        try:
            self.rpc_server.shutdown()
        finally:
            shutil.rmtree(self.dir_)

    def testBudgetApply(self):
        """Budget accounting must agree no matter which client applies."""
        self.serv = BudgetApplyServer(self.dir_, Settings(), rpc_server=self.rpc_server)
        # cli1 talks to the server object directly; cli2 goes over RPC.
        self.cli1 = BudgetApplyClient(self.serv)
        self.cli2 = BudgetApplyClient('localhost:%s'%self.port)
        try:
            self.serv.set_budgets(90)
            # First apply gets the full 50; only 40 of the 90 remain.
            self.assertEqual(self.cli1.apply(50), 50)
            self.assertEqual(self.cli2.apply(50), 40)
            self.cli1.finish(50)
            self.assertEqual(50, self.serv.finished)
            self.cli2.finish(50)
            self.assertEqual(90, self.serv.finished)
            # NOTE(review): error() appears to return budget to the pool
            # only when applied exceeds finished — confirm against
            # BudgetApplyServer before relying on this.
            self.cli1.error(10)
            self.assertEqual(90, self.serv.applied)
            self.serv.finished = 0
            self.cli2.error(10)
            self.assertEqual(80, self.serv.applied)
        finally:
            self.serv.shutdown()

    def testCounter(self):
        """Increment/accumulate counters locally, then sync to the server."""
        self.serv = CounterServer(self.dir_, Settings(), rpc_server=self.rpc_server)
        self.cli1 = CounterClient(self.serv)
        self.cli2 = CounterClient('localhost:%s'%self.port)
        try:
            self.cli1.global_inc('pages', 10)
            self.cli2.global_inc('pages', 2)
            # Before sync() the values are visible only client-side.
            self.assertEqual(self.cli1.get_global_inc('pages'), 10)
            self.assertEqual(self.cli2.get_global_inc('pages'), 2)
            self.assertEqual(self.serv.inc_counter.get('global', 'pages', 0), 0)
            self.cli1.sync()
            self.cli2.sync()
            # sync() flushes to the server and clears the client buffers.
            self.assertEqual(self.cli1.get_global_inc('pages'), None)
            self.assertEqual(self.cli2.get_global_inc('pages'), None)
            self.assertEqual(self.serv.inc_counter.get('global', 'pages'), 12)
            self.cli1.local_inc(self.addr, 0, 'pages', 100)
            self.assertEqual(self.cli1.get_local_inc(self.addr, 0, 'pages'), 100)
            self.cli1.sync()
            # Local (per-instance) counters are keyed as 'addr#instance'.
            self.assertEqual(self.serv.inc_counter.get('%s#%s'%(self.addr, 0), 'pages'), 100)
            # acc counters collect values into a list instead of summing.
            self.cli1.global_acc('normal', 100)
            self.cli2.global_acc('normal', 'test')
            self.assertEqual(self.cli1.get_global_acc('normal'), [100,])
            self.assertEqual(self.cli2.get_global_acc('normal'), ['test',])
            self.cli1.sync()
            self.cli2.sync()
            self.assertEqual(self.cli1.get_global_acc('normal'), None)
            self.assertEqual(self.cli2.get_global_acc('normal'), None)
            self.assertEqual(self.serv.acc_counter.get('global', 'normal'), [100, 'test'])
            self.cli2.local_acc(self.addr, 1, 'normal', 100)
            self.assertEqual(self.cli2.get_local_acc(self.addr, 1, 'normal'), [100, ])
            self.cli2.sync()
            self.assertEqual(self.serv.acc_counter.get('%s#%s'%(self.addr, 1), 'normal'), [100, ])
        finally:
            self.serv.shutdown()

    def testSpeedControl(self):
        """require(n) should grant pages plus a wait span under limits."""
        settings = Settings()
        counter_server = CounterServer(self.dir_, settings)
        self.serv = SpeedControlServer(self.dir_, settings,
                                       rpc_server=self.rpc_server,
                                       counter_server=counter_server)
        self.cli = SpeedControlClient(self.serv, 'localhost', 0)
        # No limit configured yet: everything granted with zero wait.
        self.assertEqual(self.cli.require(10), (10, 0))
        self.serv.set_speed(100)
        # Float spans are compared loosely (within 0.1s).
        almost = lambda left, right: left[0] == right[0] \
            and abs(left[1] - right[1]) < 0.1
        self.assertTrue(almost(self.cli.require(100), (100, 0.5)))
        # Budget for this period exhausted: 0 granted, same wait span.
        self.assertTrue(almost(self.cli.require(1), (0, 0.5)))
        self.serv.set_instance_speed(50)
        self.serv.calc_spans()
        self.serv.reset()
        self.assertTrue(almost(self.cli.require(50), (50, 1.1)))
        self.assertTrue(almost(self.cli.require(1), (0, 1.1)))
        # Adaptive mode derives the limit from banned/normal statistics
        # accumulated in the counter server.
        self.serv.set_adaptive(True)
        addr = 'localhost#0'
        self.serv.counter_server.acc(addr, 'banned_start', 1100)
        self.serv.counter_server.acc(addr, 'banned_end', 1500)
        self.serv.counter_server.acc(addr, 'normal_start', 1000)
        self.serv.counter_server.acc(addr, 'normal_end', 1100)
        self.serv.counter_server.acc(addr, 'normal_pages', 200)
        self.serv.counter_server.inc(addr, 'pages', 500)
        self.serv.counter_server.inc(addr, 'secs', 1000)
        self.serv.calc_spans()
        self.serv.reset()
        self.assertTrue(almost(self.cli.require(24), (24, 0.5)))
        self.assertTrue(almost(self.cli.require(1), (0, 0.5)))
def start_server(self):
    """Serve an RPC server exposing test_plus_one on localhost:11103.

    Blocks the calling thread until the server is shut down.
    """
    rpc = ColaRPCServer(('localhost', 11103))
    self.server = rpc
    rpc.register_function(test_plus_one)
    rpc.serve_forever()
def init_rpc_server(self):
    """Create this node's RPC server and serve it from a daemon thread."""
    server = ColaRPCServer((self.host, self.port))
    serving_thread = threading.Thread(target=server.serve_forever)
    serving_thread.setDaemon(True)
    serving_thread.start()
    self.rpc_server = server