def get_spool(self, task):
    """Query hacheck for the state of a task, and parse the result into a dictionary."""
    response = requests.get(
        self.spool_url(task), headers={"User-Agent": get_user_agent()}
    )
    if response.status_code == 200:
        return {"state": "up"}

    regex = "".join(
        [
            "^",
            r"Service (?P<service>.+)",
            r" in (?P<state>.+) state",
            r"(?: since (?P<since>[0-9.]+))?",
            r"(?: until (?P<until>[0-9.]+))?",
            r"(?:: (?P<reason>.*))?",
            "$",
        ]
    )

    match = re.match(regex, response.text)
    groupdict = match.groupdict()
    info = {}
    info["service"] = groupdict["service"]
    info["state"] = groupdict["state"]
    if "since" in groupdict:
        info["since"] = float(groupdict["since"] or 0)
    if "until" in groupdict:
        info["until"] = float(groupdict["until"] or 0)
    if "reason" in groupdict:
        info["reason"] = groupdict["reason"]
    return info
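# A minimal illustration (values are made up, not from the source) of what the
# regex above extracts. Note that re.match(...).groupdict() always contains
# every named group (unmatched ones map to None), so the `if "since" in
# groupdict` checks above are always true, and the `or 0` fallback is what
# actually covers the unmatched case.
import re

_example = "Service myservice in down state since 1500000000.0: Drained by Paasta"
_regex = (
    r"^Service (?P<service>.+) in (?P<state>.+) state"
    r"(?: since (?P<since>[0-9.]+))?"
    r"(?: until (?P<until>[0-9.]+))?"
    r"(?:: (?P<reason>.*))?$"
)
# -> {'service': 'myservice', 'state': 'down', 'since': '1500000000.0',
#     'until': None, 'reason': 'Drained by Paasta'}
print(re.match(_regex, _example).groupdict())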
def post_spool(self, task, status):
    resp = requests.post(
        self.spool_url(task),
        data={
            "status": status,
            "expiration": time.time() + self.expiration,
            "reason": "Drained by Paasta",
        },
        headers={"User-Agent": get_user_agent()},
    )
    resp.raise_for_status()
def fetch(self, url, **kwargs):
    headers = {'User-Agent': get_user_agent()}
    try:
        return requests.get(
            urljoin(self.host, url),
            timeout=self.config["response_timeout"],
            headers=headers,
            **kwargs,
        )
    except requests.exceptions.ConnectionError:
        raise exceptions.SlaveDoesNotExist(
            "Unable to connect to the slave at {}".format(self.host),
        )
def get_marathon_client(url, user, passwd):
    """Get a new marathon client connection in the form of a MarathonClient object.

    :param url: The url to connect to marathon at
    :param user: The username to connect with
    :param passwd: The password to connect with
    :returns: A new marathon.MarathonClient object
    """
    log.info("Connecting to Marathon server at: %s", url)
    session = requests.Session()
    session.headers.update({'User-Agent': get_user_agent()})
    return MarathonClient(url, user, passwd, timeout=30, session=session)
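# Hypothetical usage (URL and credentials are placeholders, not from the
# source); list_apps() is part of the marathon client library's API. Passing a
# pre-built Session is what lets the User-Agent header ride along on every
# request the client makes.
client = get_marathon_client("http://marathon.example.com:8080", "user", "pass")
apps = client.list_apps()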
def post_spool(self, task, status):
    spool_url = self.spool_url(task)
    if spool_url is not None:
        resp = requests.post(
            spool_url,
            data={
                'status': status,
                'expiration': time.time() + self.expiration,
                'reason': 'Drained by Paasta',
            },
            headers={'User-Agent': get_user_agent()},
        )
        resp.raise_for_status()
def _request(self, method, url, data):
    headers = {"User-Agent": get_user_agent()}
    kwargs = {"url": urljoin(self.master_url, url), "headers": headers}
    if method == "GET":
        kwargs["params"] = data
        response = requests.get(**kwargs)
    elif method == "POST":
        kwargs["data"] = data
        response = requests.post(**kwargs)
    else:
        raise ValueError(f"Unrecognized method: {method}")
    return self._get_response_or_error(response)
async def issue_request(self, url_spec: UrlSpec, task: DrainTask) -> None:
    """Issue a request to the URL specified by url_spec regarding the task given."""
    format_params = self.get_format_params(task)
    url = self.format_url(url_spec['url_format'], format_params)
    method = url_spec.get('method', 'GET').upper()

    async with aiohttp.ClientSession() as session:
        response = await session.request(
            method=method,
            url=url,
            headers={'User-Agent': get_user_agent()},
            timeout=15,
        )
        self.check_response_code(response.status, url_spec['success_codes'])
def retrieve_envoy_clusters(
    envoy_host: str, envoy_admin_port: int, envoy_admin_endpoint_format: str
) -> Dict[str, Any]:
    envoy_uri = envoy_admin_endpoint_format.format(
        host=envoy_host, port=envoy_admin_port, endpoint="clusters?format=json"
    )

    # timeout after 1 second and retry 3 times
    envoy_admin_request = requests.Session()
    envoy_admin_request.headers.update({"User-Agent": get_user_agent()})
    envoy_admin_request.mount("http://", requests.adapters.HTTPAdapter(max_retries=3))
    envoy_admin_request.mount("https://", requests.adapters.HTTPAdapter(max_retries=3))
    envoy_admin_response = envoy_admin_request.get(envoy_uri, timeout=1)
    return envoy_admin_response.json()
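# A hedged consumption sketch: Envoy's admin /clusters?format=json endpoint
# returns its Clusters message, whose top-level key is "cluster_statuses"
# (each entry carrying "name" and a "host_statuses" list). The port and
# endpoint format below are placeholders; verify the exact JSON shape against
# your Envoy version.
clusters_info = retrieve_envoy_clusters(
    envoy_host="localhost",
    envoy_admin_port=9901,
    envoy_admin_endpoint_format="http://{host}:{port}/{endpoint}",
)
for cluster in clusters_info.get("cluster_statuses", []):
    print(cluster["name"], len(cluster.get("host_statuses", [])))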
def _request(self, url, method=requests.get, **kwargs):
    headers = {'User-Agent': get_user_agent()}
    try:
        return method(
            urljoin(self.host, url),
            timeout=self.config["response_timeout"],
            headers=headers,
            **kwargs,
        )
    except requests.exceptions.ConnectionError:
        raise exceptions.MasterNotAvailableException(MISSING_MASTER.format(self.host))
    except requests.exceptions.TooManyRedirects:
        raise exceptions.MasterTemporarilyNotAvailableException(
            "Unable to connect to master at %s, likely due to an ongoing leader election" % self.host,
        )
def submit_performance_check_job(service, commit, image):
    performance_check_config = load_performance_check_config()
    payload = {
        'service': service,
        'commit': commit,
        'submitter': get_username(),
        'image': image,
    }
    r = requests.post(
        url=performance_check_config['endpoint'],
        data=payload,
        headers={'User-Agent': get_user_agent()},
    )
    print("Posted a submission to the PaaSTA performance-check service:")
    print(r.text)
async def fetch(self, url, **kwargs) -> aiohttp.ClientResponse:
    headers = {"User-Agent": get_user_agent()}
    async with aiohttp.ClientSession(
        conn_timeout=self.config["response_timeout"],
        read_timeout=self.config["response_timeout"],
    ) as session:
        try:
            async with session.get(
                urljoin(self.host, url), headers=headers, **kwargs
            ) as response:
                await response.text()
                return response
        except aiohttp.ClientConnectionError:
            raise exceptions.SlaveDoesNotExist(
                f"Unable to connect to the slave at {self.host}"
            )
def issue_request(self, url_spec, task):
    """Issue a request to the URL specified by url_spec regarding the task given."""
    format_params = self.get_format_params(task)
    url = self.format_url(url_spec["url_format"], format_params)
    method = url_spec.get("method", "GET").upper()
    requests_func = {
        "GET": requests.get,
        "POST": requests.post,
        "PUT": requests.put,
        "PATCH": requests.patch,
        "DELETE": requests.delete,
        "OPTIONS": requests.options,
        "HEAD": requests.head,
    }[method]
    resp = requests_func(url, headers={"User-Agent": get_user_agent()})
    self.check_response_code(resp.status_code, url_spec["success_codes"])
def get_local_slave_state():
    """Fetches mesos slave state and returns it as a dict."""
    hostname = socket.getfqdn()
    stats_uri = 'http://%s:%s/state' % (hostname, MESOS_SLAVE_PORT)
    try:
        headers = {'User-Agent': get_user_agent()}
        response = requests.get(stats_uri, timeout=10, headers=headers)
        if response.status_code == 404:
            fallback_stats_uri = 'http://%s:%s/state.json' % (hostname, MESOS_SLAVE_PORT)
            response = requests.get(fallback_stats_uri, timeout=10, headers=headers)
    except requests.ConnectionError as e:
        raise MesosSlaveConnectionError(
            'Could not connect to the mesos slave to see which services are running\n'
            'on %s. Is the mesos-slave running?\n'
            'Error was: %s\n' % (e.request.url, str(e))
        )
    response.raise_for_status()
    return json.loads(response.text)
async def get_spool(self, task: DrainTask) -> SpoolInfo:
    """Query hacheck for the state of a task, and parse the result into a dictionary."""
    spool_url = self.spool_url(task)
    if spool_url is None:
        return None

    # TODO: aiohttp says not to create a session per request. Fix this.
    async with aiohttp.ClientSession(
        conn_timeout=HACHECK_CONN_TIMEOUT,
        read_timeout=HACHECK_READ_TIMEOUT,
    ) as session:
        response = await session.get(
            spool_url,
            headers={'User-Agent': get_user_agent()},
        )
        if response.status == 200:
            return {
                'state': 'up',
            }

        regex = ''.join([
            "^",
            r"Service (?P<service>.+)",
            r" in (?P<state>.+) state",
            r"(?: since (?P<since>[0-9.]+))?",
            r"(?: until (?P<until>[0-9.]+))?",
            r"(?:: (?P<reason>.*))?",
            "$",
        ])

        response_text = await response.text()
        match = re.match(regex, response_text)
        groupdict = match.groupdict()
        info: SpoolInfo = {}
        info['service'] = groupdict['service']
        info['state'] = groupdict['state']
        if 'since' in groupdict:
            info['since'] = float(groupdict['since'] or 0)
        if 'until' in groupdict:
            info['until'] = float(groupdict['until'] or 0)
        if 'reason' in groupdict:
            info['reason'] = groupdict['reason']
        return info
async def post_spool(self, task: DrainTask, status: str) -> None:
    spool_url = self.spool_url(task)
    if spool_url is not None:
        data: Dict[str, str] = {'status': status}
        if status == 'down':
            data.update({
                'expiration': str(time.time() + self.expiration),
                'reason': 'Drained by Paasta',
            })
        async with aiohttp.ClientSession(
            conn_timeout=HACHECK_CONN_TIMEOUT,
            read_timeout=HACHECK_READ_TIMEOUT,
        ) as session:
            async with session.post(
                spool_url,
                data=data,
                headers={'User-Agent': get_user_agent()},
            ) as resp:
                resp.raise_for_status()
async def _request(
    self,
    url: str,
    method: str = 'GET',
    cached: bool = False,
    **kwargs,
) -> aiohttp.ClientResponse:
    headers = {'User-Agent': get_user_agent()}
    if cached and self.config.get("use_mesos_cache", False):
        # TODO: fall back to original host if this fails?
        host = self.cache_host
    else:
        host = self.host
    try:
        async with aiohttp.ClientSession(
            conn_timeout=self.config["response_timeout"],
            read_timeout=self.config["response_timeout"],
        ) as session:
            async with session.request(
                method=method,
                url=urljoin(host, url),
                headers=headers,
                **kwargs,
            ) as resp:
                # If nobody awaits resp.text() or resp.json() before we exit the
                # session context manager, the http connection gets closed before
                # we read the response; later calls to resp.text/json would then
                # fail.
                await resp.text()
                return resp
    except aiohttp.client_exceptions.ClientConnectionError:
        raise exceptions.MasterNotAvailableException(
            MISSING_MASTER.format(host),
        )
    except aiohttp.client_exceptions.TooManyRedirects:
        raise exceptions.MasterTemporarilyNotAvailableException(
            (
                "Unable to connect to master at %s, likely due to "
                "an ongoing leader election"
            ) % host,
        )
async def issue_request(self, url_spec: UrlSpec, task: DrainTask) -> None:
    """Issue a request to the URL specified by url_spec regarding the task given."""
    format_params = self.get_format_params(task)
    urls = [
        self.format_url(url_spec["url_format"], param) for param in format_params
    ]
    method = url_spec.get("method", "GET").upper()

    async with aiohttp.ClientSession() as session:
        reqs = [
            session.request(
                method=method,
                url=url,
                headers={"User-Agent": get_user_agent()},
                timeout=15,
            )
            for url in urls
        ]
        res = await asyncio.gather(*reqs)
        for response in res:
            self.check_response_code(response.status, url_spec["success_codes"])
def get_local_slave_state(hostname=None):
    """Fetches mesos slave state and returns it as a dict.

    :param hostname: The host from which to fetch slave state. If not specified,
        defaults to the local machine.
    """
    if hostname is None:
        hostname = socket.getfqdn()
    stats_uri = f'http://{hostname}:{MESOS_SLAVE_PORT}/state'
    try:
        headers = {'User-Agent': get_user_agent()}
        response = requests.get(stats_uri, timeout=10, headers=headers)
        if response.status_code == 404:
            fallback_stats_uri = f'http://{hostname}:{MESOS_SLAVE_PORT}/state.json'
            response = requests.get(fallback_stats_uri, timeout=10, headers=headers)
    except requests.ConnectionError as e:
        raise MesosSlaveConnectionError(
            'Could not connect to the mesos slave to see which services are running\n'
            'on %s. Is the mesos-slave running?\n'
            'Error was: %s\n' % (e.request.url, str(e)),
        )
    response.raise_for_status()
    return json.loads(response.text)
def retrieve_haproxy_csv(synapse_host, synapse_port, synapse_haproxy_url_format):
    """Retrieves the haproxy csv from the haproxy web interface

    :param synapse_host: A host that this check should contact for replication information.
    :param synapse_port: A port that this check should contact for replication information.
    :param synapse_haproxy_url_format: The format of the synapse haproxy URL.
    :returns reader: a csv.DictReader object
    """
    synapse_uri = synapse_haproxy_url_format.format(host=synapse_host, port=synapse_port)

    # timeout after 1 second and retry 3 times
    haproxy_request = requests.Session()
    haproxy_request.headers.update({'User-Agent': get_user_agent()})
    haproxy_request.mount('http://', requests.adapters.HTTPAdapter(max_retries=3))
    haproxy_request.mount('https://', requests.adapters.HTTPAdapter(max_retries=3))
    haproxy_response = haproxy_request.get(synapse_uri, timeout=1)
    haproxy_data = haproxy_response.text
    reader = csv.DictReader(haproxy_data.splitlines())
    return reader
def _request(self, url, method=requests.get, cached=False, **kwargs):
    headers = {'User-Agent': get_user_agent()}
    if cached and self.config.get("use_mesos_cache", False):
        # TODO: fall back to original host if this fails?
        host = self.cache_host
    else:
        host = self.host
    try:
        return method(
            urljoin(host, url),
            timeout=self.config["response_timeout"],
            headers=headers,
            **kwargs,
        )
    except requests.exceptions.ConnectionError:
        raise exceptions.MasterNotAvailableException(
            MISSING_MASTER.format(host),
        )
    except requests.exceptions.TooManyRedirects:
        raise exceptions.MasterTemporarilyNotAvailableException(
            ("Unable to connect to master at %s, likely due to "
             "an ongoing leader election") % host,
        )
def retrieve_haproxy_csv(
    synapse_host: str, synapse_port: int, synapse_haproxy_url_format: str, scope: str
) -> Iterable[Dict[str, str]]:
    """Retrieves the haproxy csv from the haproxy web interface

    :param synapse_host: A host that this check should contact for replication information.
    :param synapse_port: An integer port that this check should contact for replication information.
    :param synapse_haproxy_url_format: The format of the synapse haproxy URL.
    :param scope: The value substituted for the {scope} placeholder in the URL format.
    :returns reader: a csv.DictReader object
    """
    synapse_uri = synapse_haproxy_url_format.format(
        host=synapse_host, port=synapse_port, scope=scope
    )

    # timeout after 1 second and retry 3 times
    haproxy_request = requests.Session()
    haproxy_request.headers.update({"User-Agent": get_user_agent()})
    haproxy_request.mount("http://", requests.adapters.HTTPAdapter(max_retries=3))
    haproxy_request.mount("https://", requests.adapters.HTTPAdapter(max_retries=3))
    haproxy_response = haproxy_request.get(synapse_uri, timeout=1)
    haproxy_data = haproxy_response.text
    reader = csv.DictReader(haproxy_data.splitlines())
    return reader
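# A hedged usage sketch (host, port, and URL format are placeholders, not from
# the source): HAProxy's CSV stats export titles its first column "# pxname",
# and csv.DictReader keeps that literal header as the key, so rows can be
# filtered like this:
reader = retrieve_haproxy_csv(
    synapse_host="localhost",
    synapse_port=3212,
    synapse_haproxy_url_format="http://{host}:{port}/;csv;norefresh",
    scope="",
)
for row in reader:
    if row["svname"] not in ("FRONTEND", "BACKEND") and row["status"] == "UP":
        print(row["# pxname"], row["svname"])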
def get_spool(self, task):
    """Query hacheck for the state of a task, and parse the result into a dictionary."""
    spool_url = self.spool_url(task)
    if spool_url is None:
        return None

    response = requests.get(
        spool_url,
        headers={'User-Agent': get_user_agent()},
        timeout=HACHECK_TIMEOUT,
    )
    if response.status_code == 200:
        return {
            'state': 'up',
        }

    regex = ''.join([
        "^",
        r"Service (?P<service>.+)",
        r" in (?P<state>.+) state",
        r"(?: since (?P<since>[0-9.]+))?",
        r"(?: until (?P<until>[0-9.]+))?",
        r"(?:: (?P<reason>.*))?",
        "$",
    ])

    match = re.match(regex, response.text)
    groupdict = match.groupdict()
    info = {}
    info['service'] = groupdict['service']
    info['state'] = groupdict['state']
    if 'since' in groupdict:
        info['since'] = float(groupdict['since'] or 0)
    if 'until' in groupdict:
        info['until'] = float(groupdict['until'] or 0)
    if 'reason' in groupdict:
        info['reason'] = groupdict['reason']
    return info
async def get_spool(spool_url: str) -> SpoolInfo:
    """Query hacheck for the state of a task, and parse the result into a dictionary."""
    if spool_url is None:
        return None

    # TODO: aiohttp says not to create a session per request. Fix this.
    async with aiohttp.ClientSession(timeout=HACHECK_TIMEOUT) as session:
        async with session.get(
            spool_url, headers={"User-Agent": get_user_agent()}
        ) as response:
            if response.status == 200:
                return {"state": "up"}

            regex = "".join([
                "^",
                r"Service (?P<service>.+)",
                r" in (?P<state>.+) state",
                r"(?: since (?P<since>[0-9.]+))?",
                r"(?: until (?P<until>[0-9.]+))?",
                r"(?:: (?P<reason>.*))?",
                "$",
            ])

            response_text = await response.text()
            match = re.match(regex, response_text)
            groupdict = match.groupdict()
            info: SpoolInfo = {}
            info["service"] = groupdict["service"]
            info["state"] = groupdict["state"]
            if "since" in groupdict:
                info["since"] = float(groupdict["since"] or 0)
            if "until" in groupdict:
                info["until"] = float(groupdict["until"] or 0)
            if "reason" in groupdict:
                info["reason"] = groupdict["reason"]
            return info
async def get_json_body_from_service(host, port, endpoint, session):
    async with session.get(
        f"http://{host}:{port}/{endpoint}", headers={"User-Agent": get_user_agent()}
    ) as response:
        return await response.json()
def get_json_body_from_service(host, port, endpoint):
    return requests.get(
        'http://%s:%s/%s' % (host, port, endpoint),
        headers={'User-Agent': get_user_agent()},
    ).json()
def get_json_body_from_service(host, port, endpoint, timeout=2):
    return requests.get(
        f'http://{host}:{port}/{endpoint}',
        headers={'User-Agent': get_user_agent()},
        timeout=timeout,
    ).json()
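# A hedged caller sketch (fetch_all and the host list are hypothetical, not
# from the source): the session-taking async variant of
# get_json_body_from_service above lets one ClientSession be shared across
# many calls, which is the usage aiohttp recommends and what the "don't create
# a session per request" TODOs elsewhere in this code point at.
import asyncio

import aiohttp


async def fetch_all(hosts, port, endpoint):
    async with aiohttp.ClientSession() as session:
        return await asyncio.gather(*(
            get_json_body_from_service(host, port, endpoint, session)
            for host in hosts
        ))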