def __init__(self, task_id, engine=EngineType.REQUESTS, io_loop=None,
             task_url=TASK_URL, wrap=False, tenant=None):
    self.task_id = task_id
    self.task_url = task_url
    self._seq = 0
    self._partial_log_url = self._get_partial_url('log')
    self._partial_result_url = self._get_partial_url('result')
    self.wrap = wrap
    if wrap and tenant:
        self._partial_log_url = update_query_params(
            self._partial_log_url, {'tenant': tenant})
        self._partial_result_url = update_query_params(
            self._partial_result_url, {'tenant': tenant})
    if engine == EngineType.REQUESTS:
        self.log = self._log_by_requests
        self.result = self._result_by_requests
    elif engine == EngineType.TORNADO:
        io_loop = io_loop if io_loop else IOLoop.current()
        self._http_client = AsyncHTTPClient(io_loop=io_loop)
        self._queue = Queue()
        self.log = self._log_by_tornado
        self.result = self._result_by_tornado
    else:
        raise TaskLoggerError(
            '', reason='engine only supports {}'.format(EngineType.types_str()))
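# Hedged usage sketch, not taken from the source: the class name TaskLogger and
# the concrete EngineType members are assumptions inferred from TaskLoggerError
# and the engine branches in the constructor above.
#
#   sync_logger = TaskLogger('task-42', engine=EngineType.REQUESTS,
#                            wrap=True, tenant='acme')
#   sync_logger.log(...)       # bound to _log_by_requests
#   sync_logger.result(...)    # bound to _result_by_requests
#
#   async_logger = TaskLogger('task-43', engine=EngineType.TORNADO,
#                             io_loop=IOLoop.current())
#   async_logger.log(...)      # bound to _log_by_tornado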
def test_update_query_params():
    url = update_query_params('http://127.0.0.1:18858/module/entity',
                              {'a': 15, 'b': True, 'c': 'test'})
    assert 'a=15' in url
    assert 'b=True' in url
    assert 'c=test' in url
def __init__(self):
    self._cursor = self.get_cursor()
    url = urlparse.urljoin(config['upstream'], 'dispatcher/message')
    self._base_url = utils.update_query_params(url, {
        'tenant': config['tenant'],
        'id': config['id'],
    })
    self._retry_times = 0
    self._retry_interval = config.get('message_fetch_retry_interval', 5)
    self._http_client = httpclient.AsyncHTTPClient(io_loop=get_io_loop())
def eventloop(self):
    while True:
        try:
            params = {'cursor': self._cursor}
            url = utils.update_query_params(self._base_url, params)
            timeout = (config['message_fetch_timeout'] +
                       MessageFetchScheduler.TIMEOUT_DELAY)
            response = yield self._http_client.fetch(
                url,
                method='GET',
                headers={'Accept': 'application/json'},
                connect_timeout=timeout,
                request_timeout=timeout,
                validate_cert=False,
                raise_error=True,
            )
            # A successful fetch resets the retry counter and advances the
            # persisted cursor before the messages are dispatched.
            self._retry_times = 0
            json_data = escape.json_decode(response.body)
            self._cursor = json_data['cursor']
            self.set_cursor(self._cursor)
            messages = json_data['messages']
            logger.info('Received %s messages', len(messages))
            for message in messages:
                msg = Message.create(message)
                reactor.feed(msg)
        except Exception as exc:
            logger.error('Error while fetching message: %s', exc, exc_info=True)
            if hasattr(exc, 'response') and exc.response:
                logger.error(exc.response.body)
            self._retry_times += 1
            logger.info('Current message cursor: %s', self._cursor)
            # First failure retries immediately; subsequent failures back off
            # by a multiple of the retry interval that cycles every 12 retries.
            if self._retry_times == 1:
                wait_unit = 0
            else:
                wait_unit = self._retry_times % 12 + 1
            wait_time = wait_unit * self._retry_interval
            logger.info('Wait %s seconds to re-fetch', wait_time)
            yield gen.sleep(wait_time)
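# A minimal, self-contained sketch (not part of the source) of the back-off
# schedule computed in the except-branch above, assuming the default
# message_fetch_retry_interval of 5 seconds. The first failure retries
# immediately; later failures wait (retry_times % 12 + 1) * interval seconds,
# so the delay cycles instead of growing without bound.
def example_backoff_schedule(retry_interval=5, failures=13):
    waits = []
    for retry_times in range(1, failures + 1):
        wait_unit = 0 if retry_times == 1 else retry_times % 12 + 1
        waits.append(wait_unit * retry_interval)
    return waits

# example_backoff_schedule() ->
#   [0, 15, 20, 25, 30, 35, 40, 45, 50, 55, 60, 5, 10]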
def _get_result_url(self, seq, exit_code=0):
    url = update_query_params(self._partial_result_url, {
        'seq': seq,
        'exit_code': exit_code,
    })
    return url
def _get_log_url(self, seq):
    url = update_query_params(self._partial_log_url, {'seq': seq})
    return url
def _get_partial_url(self, partial_name):
    url = urljoin(self.task_url, partial_name)
    url = update_query_params(url, {'task_id': self.task_id})
    return url
def __init__(self):
    url = urlparse.urljoin(config['upstream'], 'dispatcher/message')
    self._url = utils.update_query_params(url, {'tenant': config['tenant']})
    self._http_client = AsyncHTTPClient(io_loop=get_io_loop())
def test_update_query_params_twice():
    url = update_query_params('http://127.0.0.1:18858/module/', {'cursor': 5})
    assert url == 'http://127.0.0.1:18858/module/?cursor=5'
    new_url = update_query_params(url, {'cursor': 8})
    assert new_url == 'http://127.0.0.1:18858/module/?cursor=8'
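# The implementation of update_query_params is not shown in this section; the
# function below is only a minimal sketch (assuming Python 3 and no
# multi-valued query keys) that matches the behaviour asserted by the tests
# above: existing keys such as 'cursor' are replaced rather than duplicated.
from urllib.parse import parse_qsl, urlencode, urlsplit, urlunsplit

def update_query_params_sketch(url, params):
    parts = urlsplit(url)
    query = dict(parse_qsl(parts.query))
    query.update({key: str(value) for key, value in params.items()})
    return urlunsplit(
        (parts.scheme, parts.netloc, parts.path, urlencode(query), parts.fragment))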