def main():
    """Export every dashboard in a Grafana folder to JSON files on disk.

    Reads Grafana URL, credentials, folder id and output directory from
    the CLI arguments produced by create_parser().
    """
    args = create_parser().parse_args()
    grafana_url = args.grafana_url
    login = args.login
    password = args.password
    folder_id = args.folder_id
    save_path = args.save_path

    credentials = auth.HTTPBasicAuth(login, password)
    search_url = grafana_url + "search?folderIds={}".format(folder_id)
    search_resp = requests.get(url=search_url, auth=credentials)
    if search_resp.ok:
        for dashboard in search_resp.json():
            title = dashboard.get('title')
            uid = dashboard.get('uid')
            detail_url = grafana_url + 'dashboards/uid/{}'.format(uid)
            detail_resp = requests.get(url=detail_url, auth=credentials)
            if detail_resp.ok:
                dashboard_data = detail_resp.json()
                print(title, ' save')
                # One file per dashboard, named after its title.
                with open(os.path.join(save_path, title + '.json'),
                          'w', encoding='utf-8') as f:
                    json.dump(dashboard_data.get('dashboard'), f)
def __init__(self, BaseURL=None, username=None, password=None):
    """
    Channel finder client object. It provides a connection object to
    perform the following operations:

    - find: find all channels satisfying given searching criteria
    - set: add channel into service
    - update: update channel information
    - delete: delete channel from service

    :param BaseURL: the url of the channel finder service
    :param username: user name authorized by channel finder service
    :param password: password for the authorized user
    """
    try:
        self.__baseURL = self.__getDefaultConfig('BaseURL', BaseURL)
        self.__userName = self.__getDefaultConfig('username', username)
        self.__password = self.__getDefaultConfig('password', password)
        if self.__userName and self.__password:
            self.__auth = auth.HTTPBasicAuth(self.__userName, self.__password)
        else:
            # No credentials configured -> anonymous access.
            self.__auth = None
        self.__session = requests.Session()
        self.__session.mount(self.__baseURL, HTTPAdapter())
    except Exception as exc:
        # BUG FIX: the original bare ``except:`` discarded the cause and
        # then read ``self.__baseURL`` in the message — an attribute that
        # is unset if the very first line failed, masking the real error
        # with an AttributeError. Use the argument and chain the cause.
        raise RuntimeError('Failed to create client to ' + str(BaseURL)) from exc
def test_request_generation_and_basic_auth(self, req_get_mock):
    """The RPC call must issue exactly one GET with HTTP basic auth."""
    self.rpc.call('method', {'param': 'value'})
    expected_auth = auth.HTTPBasicAuth("me", "team")
    req_get_mock.assert_called_once_with(url='http://test',
                                         auth=expected_auth,
                                         json=mock.ANY)
def create_bot(ticker: str, name: str, client_id: str, token: str,
               is_crypto: bool) -> bool:
    '''
    Create a new bot instance

    Returns a container instance of the bot
    '''
    payload = {
        "ticker": ticker,
        "name": name,
        "crypto": is_crypto,
        "frequency": 90,
        "discord_bot_token": token
    }
    basic = auth.HTTPBasicAuth(getenv('AUTH_USER'), getenv('AUTH_PASS'))
    resp = post(getenv("URL") + '/ticker', auth=basic, data=dumps(payload))
    if resp.status_code != 200:
        # Record the failure both in the logger and the project log sink.
        logging.error(str(resp.status_code) + " " + resp.text)
        log(resp.text)
        return False
    change_bot_username(token, name)
    return True
def __init__(self, jenkins_url, username, password):
    """Remember the Jenkins endpoint and prepare basic-auth credentials
    plus a reusable HTTP session."""
    self.jenkins_url = jenkins_url
    self.jenkins_job_url = "{}/job".format(self.jenkins_url)
    self.username = username
    self.password = password
    self.auth = rauth.HTTPBasicAuth(self.username, self.password)
    self.session = rsession()
def update(self):
    """Pull Proofpoint blocked-message events for the last period and
    analyze each unique threat together with its associated events."""
    # Build fresh, basic-auth API clients for this run.
    self.api = ProofPointAPI(auth=auth.HTTPBasicAuth(
        self.config['api_user'], self.config['api_password']))
    self.siem_api = SIEMAPI(self.api)
    self.campaign_api = CampaignAPI(self.api)
    self.forensic_api = ForensicsAPI(self.api)
    # pull events for the period we didn't run
    time_period = self.siem_api.make_time_param(
        frequency=self.default_values['frequency'])
    messages = self.siem_api.get_messages_blocked(time_period)
    log.info("Downloaded {nb} message threats for the last period".format(
        nb=len(messages['messagesBlocked'])))
    # But efficient processing calls for getting all Threatid for all event.
    # then grouping event metadata for each threatid
    # then creating entries for that threatid, and these metadata.
    # That is the opposite of creating threatid for each event.
    # parse all messages to identify all unique threats
    threats = self._get_all_threats(messages['messagesBlocked'])
    for threat in threats:
        # group multiple email events associated to one threat
        events = self._get_messages_for_threat(messages['messagesBlocked'],
                                               threat)
        self.analyze({'threat': threat, 'events': events})
    return
def handle_401(self, r, **kwargs):
    """Resends a request with auth headers, if needed.

    Inspects the WWW-Authenticate challenge of a 401 response: for
    basic auth it retries the request once with HTTPBasicAuth; for
    digest auth it delegates to requests' HTTPDigestAuth, whose own
    401 handler performs the resend.
    """
    www_authenticate = r.headers.get('www-authenticate', '').lower()

    if 'basic' in www_authenticate:
        if self.pos is not None:
            # Rewind a seekable request body so it can be sent again.
            r.request.body.seek(self.pos)

        # Consume content and release the original connection
        # to allow our new request to reuse the same one.
        r.content
        r.raw.release_conn()
        prep = r.request.copy()
        if not hasattr(prep, '_cookies'):
            prep._cookies = cookies.RequestsCookieJar()
        # Carry over any cookies the 401 response set before retrying.
        cookies.extract_cookies_to_jar(prep._cookies, r.request, r.raw)
        prep.prepare_cookies(prep._cookies)

        self.auth = auth.HTTPBasicAuth(self.username, self.password)
        prep = self.auth(prep)
        _r = r.connection.send(prep, **kwargs)
        # Keep the original 401 in history so callers see the exchange.
        _r.history.append(r)
        _r.request = prep
        return _r

    if 'digest' in www_authenticate:
        self.auth = auth.HTTPDigestAuth(self.username, self.password)
        # Digest auth would resend the request by itself. We can take a
        # shortcut here.
        return self.auth.handle_401(r, **kwargs)
def status(self):
    """Fetch the server-status JSON endpoint and derive a metrics dict.

    Returns a dict with ``bytes_per_sec``, ``requests_per_sec``,
    ``busy_servers`` (floored percentage of busy workers) and
    ``uptime``. Raises ConnectionError on transport or HTTP failure.
    """
    try:
        # BUG FIX: the original used ``or`` between the two negated
        # endswith() checks, which is always true (a URL cannot end with
        # both suffixes), so "server-status?json" was appended even when
        # the URL already pointed at the status page. ``and`` implements
        # the intended "ends with neither" test.
        lowered = self.url.lower()
        if (not lowered.endswith('/server-status')
                and not lowered.endswith('/server-status?json')):
            self.url = parse.urljoin(self.url, "server-status?json")
        if self.auth_type == "direct":
            res = requests.get(self.url, timeout=self.timeout)
        else:
            res = requests.get(self.url, auth=auth.HTTPBasicAuth(
                self.username, self.password), timeout=self.timeout)
        res.raise_for_status()
        data = res.json()  # parse once instead of re-parsing per field
        values = dict()
        values['bytes_per_sec'] = data['TrafficAverage5s']
        values['requests_per_sec'] = data['RequestAverage5s']
        values['busy_servers'] = data['BusyServers'] / (
            data['BusyServers'] + data['IdleServers']) * 100 // 1
        values['uptime'] = data['Uptime']
        return values
    except (requests.exceptions.ConnectionError,
            requests.exceptions.MissingSchema,
            requests.exceptions.HTTPError) as err:
        raise ConnectionError(err)
def __init__(self, user: str, password: str,
             hostname='tristano.neuro.polymtl.ca', port=80):
    """Store the target host/port and build HTTP basic-auth credentials.

    :param user: account name for the remote HTTP service
    :param password: matching password
    :param hostname: server to contact (defaults to the lab host)
    :param port: TCP port of the HTTP service
    """
    self.hostname = hostname
    self.port = port
    # Reused by later requests against the service.
    self.http_auth = auth.HTTPBasicAuth(username=user, password=password)
def test_get_podm_status_Online(self, mock_get):
    """pod_status maps an HTTP 200 response to PODM_STATUS_ONLINE and
    issues exactly one basic-auth GET."""
    fake_resp = fakes.mock_request_get({}, http_client.OK)
    mock_get.return_value = fake_resp
    self.assertEqual(redfish.pod_status('url', 'username', 'password'),
                     constants.PODM_STATUS_ONLINE)
    # BUG FIX: 'asset_called_once_with' was a typo — on a Mock it just
    # auto-creates a child attribute and verifies nothing.
    mock_get.assert_called_once_with('url', auth=auth.HTTPBasicAuth(
        'username', 'password'))
def test_get_podm_status_Offline_by_wrong_auth(self, mock_get):
    """pod_status maps an HTTP 401 response to PODM_STATUS_OFFLINE and
    issues exactly one basic-auth GET."""
    fake_resp = fakes.mock_request_get({}, 401)
    mock_get.return_value = fake_resp
    self.assertEqual(redfish.pod_status('url', 'username', 'password'),
                     constants.PODM_STATUS_OFFLINE)
    # BUG FIX: 'asset_called_once_with' was a typo — on a Mock it just
    # auto-creates a child attribute and verifies nothing.
    mock_get.assert_called_once_with('url', auth=auth.HTTPBasicAuth(
        'username', 'password'))
def fetch(self, path):
    """GET *path* from the local server and return the raw body.

    Uses the fixed 'logs'/'logs' basic-auth pair when the server is
    running in secure mode.
    """
    url = 'http://localhost:%d/%s' % (self.port, path)
    if self._secure:
        response = requests.get(url, auth=auth.HTTPBasicAuth('logs', 'logs'))
    else:
        response = requests.get(url)
    return response.content
def __init__(self, wsurl, user=None, pwd=None, idlength=6):
    """Build a zeep SOAP client for *wsurl*.

    When a user or password is supplied, the underlying transport uses
    a requests Session configured for HTTP basic auth.
    """
    self.idlength = idlength
    if user or pwd:
        authed_session = Session()
        authed_session.auth = auth.HTTPBasicAuth(user, pwd)
        self.sclient = zeep.Client(
            wsdl=wsurl, transport=zeep.Transport(session=authed_session))
    else:
        self.sclient = zeep.Client(wsdl=wsurl)
def __init__(self, manager, auth_username, auth_password, version):
    """Open an HTTPS client against the manager's REST API.

    :param manager: cluster manager node exposing remote() access
    :param auth_username: REST API user
    :param auth_password: REST API password
    :param version: API version segment embedded in the base URL
    """
    #TODO(alazarev) make port configurable (bug #1262895)
    port = '9443'
    # HTTP session is obtained through the manager node, with retries.
    self.session = manager.remote().get_http_client(port, max_retries=10)
    self.base_url = ('https://%s:%s/restapi/intelcloud/api/%s'
                     % (manager.management_ip, port, version))
    LOG.debug("Connecting to manager with URL of %s", self.base_url)
    self.auth = auth.HTTPBasicAuth(auth_username, auth_password)
def __init__(self, apiKeyID, apiSecret, apiAddress='https://api.dome9.com', apiVersion='v2'):
    """Configure a Dome9 API client.

    :param apiKeyID: API key id issued by Dome9
    :param apiSecret: matching API secret
    :param apiAddress: service base address
    :param apiVersion: API version path segment
    :raises Exception: when key id or secret is missing/empty
    """
    # BUG FIX: validation originally ran *after* the credentials had
    # already been used to build the auth object; fail fast instead.
    if not apiKeyID or not apiSecret:
        raise Exception('Cannot create api client instance without keyID and secret!')
    self.apiKeyID = apiKeyID
    self.apiSecret = apiSecret
    self.apiAddress = apiAddress
    self.apiVersion = '/{}/'.format(apiVersion)
    self.baseAddress = self.apiAddress + self.apiVersion
    self.clientAuth = auth.HTTPBasicAuth(self.apiKeyID, self.apiSecret)
    self.restHeaders = {'Accept': 'application/json',
                        'Content-Type': 'application/json'}
def get_access_token_from_api(self):
    """POST an OAuth2 client-credentials grant and return the decoded
    JSON token payload.

    Raises a generic Exception on any non-200 status.
    """
    token_url = self.host.rstrip("/") + "/" + str(self.version) + "/token"
    resp = requests.post(token_url,
                         auth=auth.HTTPBasicAuth(self.key, self.secret),
                         data={"grant_type": "client_credentials"})
    if resp.status_code != 200:
        # FIXME error handling
        raise Exception("Error fetching token: " + str(resp.status_code))
    return resp.json()
def get_auth(self):
    """Return the configured requests auth object, or None.

    Basic auth takes precedence when both username and password are
    set; otherwise token auth is used when a token is configured.
    """
    have_basic = bool(self._username and self._password)
    if have_basic:
        return auth.HTTPBasicAuth(self._username, self._password)
    return HTTPTokenAuth(self._token) if self._token else None
def pod_status(pod_url, username, password):
    """Probe a pod-manager URL with basic auth.

    HTTP 200 maps to PODM_STATUS_ONLINE; any other status — or any
    transport error — maps to PODM_STATUS_OFFLINE.
    """
    credentials = auth.HTTPBasicAuth(username, password)
    try:
        resp = requests.get(pod_url, auth=credentials)
    except requests.RequestException:
        return constants.PODM_STATUS_OFFLINE
    if resp.status_code == http_client.OK:
        return constants.PODM_STATUS_ONLINE
    return constants.PODM_STATUS_OFFLINE
def get(url="http://localhost:20001", api_prefix="/kiali/api",
        username=None, password=None):
    """Build a KialiClient for *url*; credentials are optional and, when
    both are given, are sent as HTTP basic auth."""
    credentials = None
    if username and password:
        credentials = auth.HTTPBasicAuth(username, password)
    connector = client.APIConnector(url, api_prefix=api_prefix,
                                    auth=credentials)
    return KialiClient(connector)
def build_markdown(self, file_path: str) -> str:
    """Render a source file to HTML via the GitHub markdown API and
    return the response body decoded as UTF-8."""
    source = os.path.join(self.source_dir, file_path)
    with open(source) as handle:
        text = handle.read()
    response = requests.post(
        "https://api.github.com/markdown",
        json={"text": text},
        headers={"Accept": "application/vnd.github.v3+json"},
        auth=auth.HTTPBasicAuth("mmEissen", self.gh_token),
    )
    response.raise_for_status()
    return str(response.content, "utf-8")
def register_collection(image_collection):
    """Queue an image collection for ingest with the DLCS and return the
    resulting Batch."""
    credentials = auth.HTTPBasicAuth(settings.DLCS_API_KEY,
                                     settings.DLCS_API_SECRET)
    queue_url = settings.DLCS_ENTRY + 'customers/' + str(
        settings.DLCS_CUSTOMER_ID) + '/queue'
    # Named 'payload' rather than 'json' to avoid shadowing the module.
    payload = image_collection.to_json_dict()
    response = post(queue_url, json=payload, auth=credentials)
    return Batch(response.json())
def auth(self):
    """Obtain a Reddit OAuth2 access token via the password grant.

    Stores the raw response on self.auth_response and, on success, the
    token on self.access_token. Returns True on success, else False.
    """
    client_auth = rauth.HTTPBasicAuth(self.data['id'], self.data['secret'])
    grant = {"grant_type": "password",
             "username": self.data['username'],
             "password": self.password}
    headers = {"User-Agent": "Ortho Fill Bot by ortho-fill-bot"}
    self.auth_response = rpost("https://www.reddit.com/api/v1/access_token",
                               auth=client_auth, data=grant, headers=headers)
    body = jloads(self.auth_response.content)
    if 'access_token' not in body:
        return False
    self.access_token = body['access_token']
    return True
async def _send_http_request(self, url, payload, method='post', **kwargs):
    # type: (Text, Optional[Text], Text, dict) -> Response
    """
    Sends the actual HTTP request.

    Split into its own method so that it can be mocked during unit
    tests.
    """
    # Caller-supplied timeout wins; otherwise the configured one,
    # falling back to the library default.
    kwargs.setdefault(
        'timeout',
        self.timeout if self.timeout else get_default_timeout(),
    )

    if self.authentication:
        # NOTE(review): 'authentication' is unpacked into HTTPBasicAuth,
        # so it is presumably a (user, password) pair — confirm upstream.
        kwargs.setdefault('auth', auth.HTTPBasicAuth(*self.authentication))

    # Log the outgoing request before transmission.
    self._log(
        level=DEBUG,
        message='Sending {method} to {url}: {payload!r}'.format(
            method=method,
            payload=payload,
            url=url,
        ),
        context={
            'request_method': method,
            'request_kwargs': kwargs,
            'request_payload': payload,
            'request_url': url,
        },
    )

    response = await requests_async.request(method=method, url=url,
                                            data=payload, **kwargs)

    # Log the response, including headers and raw content.
    self._log(
        level=DEBUG,
        message='Receiving {method} from {url}: {response!r}'.format(
            method=method,
            response=response.content,
            url=url,
        ),
        context={
            'request_method': method,
            'request_kwargs': kwargs,
            'request_payload': payload,
            'request_url': url,
            'response_headers': response.headers,
            'response_content': response.content,
        },
    )

    return response
def init_app(self, app):
    """Read tracking-API settings from the app config.

    When the API is enabled, also precompute the export URL and the
    HTTP basic-auth object used for every export call.
    """
    self.enabled = _get_required_value(app, C.TRACKING_API_ENABLED)
    if not self.enabled:
        return
    self.url_base = _get_required_value(app, C.TRACKING_API_URL_BASE)
    self.export_path = _get_required_value(app, C.TRACKING_API_EXPORT_PATH)
    self.auth_user = _get_required_value(app, C.TRACKING_API_AUTH_USER)
    self.auth_pass = _get_required_value(app, C.TRACKING_API_AUTH_PASS)
    self.timeout = _get_required_value(app, C.TRACKING_API_TIMEOUT)
    self._export_url = '/'.join([self.url_base, self.export_path])
    self._auth = auth.HTTPBasicAuth(self.auth_user, self.auth_pass)
def recurse(subreddit, hot_list=None, after=None):
    """Recursively collect all hot-post titles for *subreddit*.

    Returns the list of titles, or None for an invalid subreddit.

    BUG FIXES vs. the original:
    - ``hot_list=[]`` was a mutable default argument shared between
      top-level calls, so titles accumulated across invocations.
    - ``status_code is not 200`` compared identity, not equality
      (unreliable for ints; a SyntaxWarning on modern Python).
    """
    if hot_list is None:
        hot_list = []
    # Generates token & bearer for requests to oauth.reddit.com endpoints
    # https://github.com/reddit-archive/reddit/wiki/OAuth2-Quick-Start-Example
    # SECURITY: credentials are hard-coded; they belong in environment
    # variables or a secrets store, not in source.
    client_id = "i_m3aNLQE0vskA"
    secret = "UykV3Qz2TGSWSWaMmSeOWgR5JT4"
    my_username = "******"
    my_pwd = "Holberton98"
    client_auth = ra.HTTPBasicAuth(client_id, secret)
    post_data = {
        "grant_type": "password",
        "username": my_username,
        "password": my_pwd
    }
    headers = {"User-Agent": "ChangeMeClient/0.1 by {}".format(my_username)}
    response = r.post("https://www.reddit.com/api/v1/access_token",
                      auth=client_auth, data=post_data, headers=headers)
    token = response.json().get("access_token")
    bearer = response.json().get("token_type")

    # Queries oauth.reddit.com endpoints using token & bearer
    # Sets limit on number of items returned in params variable
    sub_url = "https://oauth.reddit.com/r/{}/hot".format(subreddit)
    headers = {
        "Authorization": "{} {}".format(bearer, token),
        "User-Agent": "ChangeMeClient/0.1 by {}".format(my_username)
    }
    params = {'limit': 100, 'after': after}
    response = r.get(sub_url, headers=headers, params=params)

    # handles error response; invalid subreddit
    if response.status_code != 200:
        return None

    # peels the onion of nested dicts and lists
    data = response.json().get('data')
    for subdict in data.get('children'):
        hot_list.append(subdict.get('data').get('title'))
    if data.get('after') is not None:
        return recurse(subreddit, hot_list, data.get('after'))
    return hot_list
def set_auth(self, username: Union[None, str] = None,
             password: Union[None, str] = None) -> None:
    """Set up auth type. Now supports only basic authentication.

    :param username: user's name
    :type username: str
    :param password: user's password
    :type password: str
    """
    # Stored as a requests auth object and sent with later requests.
    self.auth = auth.HTTPBasicAuth(username, password)
def post_data():
    """Push new API log lines (those after the last stored record) into
    Elasticsearch, one document per line."""
    api_log_file = (get_latest_file())
    print(api_log_file)
    with open(api_log_file, 'r') as f:
        lines = f.readlines()
    # Find where to resume: the timestamp of the most recent ES record.
    lastTimestamp = None
    try:
        # GET the last index of api logs
        lastDBRecord = get(
            'http://elasticsearch.member.id/payment_logs/api/_search?from=0&size=1',
            auth=auth.HTTPBasicAuth('admin', 'password'))
        lastDBRecord = json.loads(lastDBRecord.text)
        dbIndex = int(lastDBRecord['hits']
                      ['total'])  # index of last elasticsearch record
        # Get the last timestamp of the last elasticsearch record
        lastData = get('http://elasticsearch.member.id/payment_logs/api/' +
                       str(dbIndex),
                       auth=auth.HTTPBasicAuth('admin', 'password'))
        lastData = json.loads(lastData.text)
        lastTimestamp = lastData['_source']['timestamp']
    except Exception as e:
        dbIndex = 1
        print(e)
    # BUG FIX: the original referenced lastTimestamp unconditionally,
    # raising NameError whenever the lookup above failed (e.g. empty
    # index). Start from the beginning of the file in that case.
    if lastTimestamp is None:
        dataIndex = 0
    else:
        dataIndex = binary_search(lines, lastTimestamp, 0, len(lines) - 1) + 1
    print(dbIndex, dataIndex, lastTimestamp)
    # Insert all new data into elasticsearch
    for line in lines[dataIndex:]:
        data = json.loads(line.strip('\n'))
        URL = 'http://elasticsearch.member.id/payment_logs/api/' + str(
            dbIndex)
        response = post(URL, auth=auth.HTTPBasicAuth('admin', 'password'),
                        json=data)
        print(response.text)
        dbIndex += 1
def _is_tenant_created(self, tenant_id):
    """Check whether a policy exists for *tenant_id*.

    Returns True on HTTP 200, False on 404; any other status raises
    via raise_for_status().
    """
    url = self._convert2ascii(self._policy_url % {'tenant_id': tenant_id})
    resp = requests.request(
        'get', url=url, headers=self._headers,
        auth=auth.HTTPBasicAuth(self._username, self._password))
    if resp.status_code == 200:
        return True
    if resp.status_code == 404:
        return False
    resp.raise_for_status()
def __init__(self, instance, port="8080", **kwargs):
    """Prepare an HTTP client for the management REST API of *instance*.

    :param instance: node object exposing management_ip and remote()
    :param port: REST API port (string, defaults to "8080")
    :param kwargs: may carry ``username``/``password``; both default
        to "admin"
    """
    kwargs.setdefault("username", "admin")
    kwargs.setdefault("password", "admin")
    self._port = port
    self._base_url = "http://{host}:{port}/api/v1".format(
        host=instance.management_ip, port=port)
    self._instance = instance
    self._http_client = instance.remote().get_http_client(port)
    # Header required by the API's CSRF protection.
    self._headers = {"X-Requested-By": "sahara"}
    self._auth = auth.HTTPBasicAuth(kwargs["username"], kwargs["password"])
    # Defaults applied to every request made through this client.
    self._default_client_args = {"verify": False, "auth": self._auth,
                                 "headers": self._headers}
def pollRunningScan(scanId, url, userId, passwordToken, pollInterval,
                    sevThreshold):
    """Poll /api/1.0/scans/detail/{id} until the scan completes.

    Returns 0 on success, 1 on scan failure or non-complete end state,
    2 when the severity threshold is exceeded; exits the process on an
    API error response.

    BUG FIXES vs. the original:
    - ``status_code is not 200`` and ``IsCompleted is True`` used
      identity comparison, which is unreliable for ints and for any
      truthy non-bool value; use ``!=`` and plain truthiness.
    - The tail recursion is replaced by a loop so long-running scans
      cannot exhaust the stack.
    """
    # /api/1.0/scans/detail/{id}
    getScanEndpoint = url + "scans/detail/%s" % scanId
    getScanHeaders = {"Accept": "application/json"}
    getScanAuth = auth.HTTPBasicAuth(userId, passwordToken)
    while True:
        getRunningScanResponse = get(url=getScanEndpoint,
                                     headers=getScanHeaders,
                                     auth=getScanAuth)
        if getRunningScanResponse.status_code != 200:
            sys.stderr.write(
                "[ERROR] Retrieval of scan by id returned a status code of %s indicating: %s\n"
                % (getRunningScanResponse.status_code,
                   getRunningScanResponse.text))
            sys.exit(1)
        # Payload fields: WebsiteName, InitiatedAt, WebsiteUrl, Id,
        # FailureReason, FailureReasonDescription, FailureReasonString,
        # IsCompleted, Percentage, State.
        getRunningScanPayload = json.loads(getRunningScanResponse.text)
        if getRunningScanPayload["IsCompleted"]:
            if getRunningScanPayload['FailureReason']:
                sys.stderr.write(
                    '[ERROR] Scan finished with errors: %s\n%s\n' %
                    (getRunningScanPayload['FailureReasonDescription'],
                     getRunningScanPayload['FailureReasonString']))
                return 1
            if getRunningScanPayload['State'] != 'Complete':
                sys.stderr.write(
                    '[ERROR] Scan finished with an non-completion state of %s.\n'
                    % getRunningScanPayload['State'])
                return 1
            if handleErrorThresholds(getRunningScanPayload, sevThreshold):
                return 2
            successUrl = url.replace('/api/1.0', '') + 'scans/report/' + scanId
            sys.stdout.write(
                '[SUCCESS] Scan finished successfully. View report at %s\n'
                % successUrl)
            return 0
        #Netsparker will sometimes give erroneous percentages that backtrack from previous polls!
        sys.stdout.write('[API] Scan %s is %s%% complete..\r'
                         % (scanId, getRunningScanPayload['Percentage']))
        sys.stdout.flush()
        time.sleep(pollInterval)