def test_refresh_invalid_response(monkeypatch, tmpdir):
    tokens.configure(dir=str(tmpdir), url='https://example.org')
    tokens.manage('mytok', ['myscope'])
    tokens.start()  # this does not do anything..

    response = MagicMock()
    response.json.return_value = {'foo': 'bar'}
    post = MagicMock()
    post.return_value = response
    monkeypatch.setattr('requests.post', post)
    monkeypatch.setattr('tokens.read_credentials', lambda path: (VALID_USER_JSON, VALID_CLIENT_JSON))

    with pytest.raises(tokens.InvalidTokenResponse) as exc_info:
        tokens.get('mytok')
    assert str(exc_info.value) == """Invalid token response: Expected a JSON object with keys "expires_in" and "access_token": 'expires_in'"""

    # verify that we use a proper HTTP timeout..
    post.assert_called_with('https://example.org',
                            data={'username': '******', 'scope': 'myscope',
                                  'password': '******', 'grant_type': 'password'},
                            headers={'User-Agent': 'python-tokens/{}'.format(tokens.__version__)},
                            timeout=(1.25, 2.25), auth=('cid', 'sec'))

    response.json.return_value = {'access_token': '', 'expires_in': 100}
    with pytest.raises(tokens.InvalidTokenResponse) as exc_info:
        tokens.get('mytok')
    assert str(exc_info.value) == 'Invalid token response: Empty "access_token" value'

def test_get_refresh_failure_ignore_expiration_no_access_token(monkeypatch, tmpdir):
    tokens.configure(dir=str(tmpdir), url='https://example.org')
    with open(os.path.join(str(tmpdir), 'user.json'), 'w') as fd:
        json.dump({'application_username': '******', 'application_password': '******'}, fd)
    with open(os.path.join(str(tmpdir), 'client.json'), 'w') as fd:
        json.dump({'client_id': 'cid', 'client_secret': 'sec'}, fd)

    exc = Exception('FAIL')
    response = MagicMock()
    response.raise_for_status.side_effect = exc
    monkeypatch.setattr('requests.post', lambda url, **kwargs: response)

    # we never got any access token
    tokens.TOKENS = {
        'mytok': {
            'ignore_expiration': True,
            'scopes': ['myscope'],
            # expired a long time ago..
            'expires_at': 0
        }
    }

    with pytest.raises(Exception) as exc_info:
        tokens.get('mytok')
    assert exc_info.value == exc

def test_get_refresh_failure(monkeypatch, tmpdir):
    tokens.configure(dir=str(tmpdir), url='https://example.org')
    with open(os.path.join(str(tmpdir), 'user.json'), 'w') as fd:
        json.dump({'application_username': '******', 'application_password': '******'}, fd)
    with open(os.path.join(str(tmpdir), 'client.json'), 'w') as fd:
        json.dump({'client_id': 'cid', 'client_secret': 'sec'}, fd)

    exc = Exception('FAIL')
    response = MagicMock()
    response.raise_for_status.side_effect = exc
    monkeypatch.setattr('requests.post', lambda url, **kwargs: response)

    logger = MagicMock()
    monkeypatch.setattr('tokens.logger', logger)

    tokens.TOKENS = {'mytok': {'access_token': 'oldtok',
                               'scopes': ['myscope'],
                               # token is still valid for 10 minutes
                               'expires_at': time.time() + (10 * 60)}}

    tok = tokens.get('mytok')
    assert tok == 'oldtok'
    logger.warn.assert_called_with('Failed to refresh access token "%s" (but it is still valid): %s', 'mytok', exc)

    tokens.TOKENS = {'mytok': {'scopes': ['myscope'], 'expires_at': 0}}
    with pytest.raises(Exception) as exc_info:
        tok = tokens.get('mytok')
    assert exc_info.value == exc

def test_get():
    tokens.TOKENS = {
        'test': {
            'access_token': 'mytok123',
            'expires_at': time.time() + 3600
        }
    }
    tokens.get('test')

def test_read_from_file_fail(monkeypatch, tmpdir):
    tokens.configure(dir=str(tmpdir), from_file_only=True)
    tokens.manage('mytok')
    with pytest.raises(tokens.InvalidCredentialsError) as exc_info:
        tokens.get('mytok')
    assert str(exc_info.value) == 'Invalid OAuth credentials: Failed to read token "mytok" from {}.'.format(str(tmpdir))

def to_tree(sequence: list):
    root = Tree(tokens.get(sequence[0]))
    for code in sequence[1:]:
        node = Tree(tokens.get(code))
        saturated = not root.append(node)
        if saturated:
            break
    return root

def test_refresh_invalid_credentials(monkeypatch, tmpdir):
    tokens.configure(dir=str(tmpdir), url='https://example.org')
    tokens.manage('mytok', ['myscope'])
    tokens.start()  # this does not do anything..
    with open(os.path.join(str(tmpdir), 'user.json'), 'w') as fd:
        # missing password
        json.dump({'application_username': '******'}, fd)
    with open(os.path.join(str(tmpdir), 'client.json'), 'w') as fd:
        json.dump({'client_id': 'cid', 'client_secret': 'sec'}, fd)

    with pytest.raises(tokens.InvalidCredentialsError) as exc_info:
        tokens.get('mytok')
    assert str(exc_info.value) == "Invalid OAuth credentials: Missing key: 'application_password'"

def evaluate(self, filepath):
    policy_paths = []  # (path)
    policies = []  # (average policy)
    stepsize = 10

    iter = os.scandir(filepath)
    for entry in iter:
        if entry.name.endswith('.pt'):
            policy_paths.append(filepath + '/' + entry.name)

    names = [tokens.get(id).name for id in tokens.possibilities()]

    for i, path in enumerate(policy_paths):
        if i % stepsize == 0:
            policy = self._policy_average(path)
            with open('{}.txt'.format(path[:-2]), 'w', encoding="utf-8") as file:
                string = ''
                for name, value in zip(names, policy):
                    string += '{}: {}\n'.format(name, value)
                file.write(string)
            policies.append(policy)

    x = numpy.array(tokens.possibilities())  # token ids for all tokens
    y = numpy.array([p for p in policies])

    # save single plots
    for i, policy in enumerate(y):
        figure = self._plot(x, policy, [''])
        epoch = i * stepsize
        figure.savefig('{}/{}.png'.format(filepath, epoch))

    # save multiplots every tenth policy
    legend = [str(epoch * stepsize) for epoch in range(len(policies))]
    figure = self._plot(x, y, legend)
    figure.savefig('{}/all_distributions.png'.format(filepath))

def get_nakadi_partitions(cg: str, et: str):
    token = tokens.get('pemetaan')
    settings = get_matching_mangan_settings(cg, et)
    headers = {'Authorization': 'Bearer ' + token}
    return_dict = {}
    for setting in settings[cg]:
        all_nakadi_event_types = get_all_event_types(setting['nakadi_endpoint'])
        event_type_group_dict = {}
        if all_nakadi_event_types:
            for nakadi_event_type in all_nakadi_event_types:
                if re.search(et, nakadi_event_type['name']):
                    event_type_group_dict[nakadi_event_type['name']] = settings
        for event_type_key, event_type_value in event_type_group_dict.items():
            r = requests.get(setting['nakadi_endpoint'] + '/event-types/' + event_type_key + '/partitions',
                             headers=headers)
            partitions = json.loads(r.text)
            for partition in partitions:
                if partition['newest_available_offset'] == 'BEGIN':
                    partition['newest_available_offset'] = '0'
                return_dict[event_type_key + partition['partition']] = {
                    'oldest_available_offset': int(partition['oldest_available_offset']),
                    'newest_available_offset': int(partition['newest_available_offset'])}
    return return_dict

def update_accounts():
    r = requests.get(os.environ.get('HTTP_TEAM_SERVICE_URL') + '/api/accounts/aws',
                     headers={'Authorization': 'Bearer {}'.format(tokens.get('tok'))})
    r.raise_for_status()
    for a in r.json():
        redis.set('accounts:' + a['id'], json.dumps(get_account_info(a)))
    ACCOUNTS.update({a['id']: get_account_info(a) for a in r.json()})

def __request(self, url, params=None, body=None):
    """Return json response"""
    if self.oauth2:
        self._headers.update({'Authorization': 'Bearer {}'.format(tokens.get('uid'))})

    try:
        if body is None:
            response = requests.get(url, params=params, timeout=self.timeout, headers=self._headers)

            if not response.ok:
                raise Exception('Elasticsearch query failed: {}'.format(url))

            return response.json()
        else:
            response = requests.post(url, params=params, json=body, timeout=self.timeout, headers=self._headers)

            if not response.ok:
                raise Exception(
                    'Elasticsearch query failed: {} with response: {}'.format(url, json.dumps(response.text)))

            return response.json()
    except requests.Timeout:
        raise HttpError('timeout', self.url), None, sys.exc_info()[2]
    except requests.ConnectionError:
        raise HttpError('connection failed', self.url), None, sys.exc_info()[2]
    except Exception:
        raise

def __request(self, raise_error=True, post_data=None):
    if self.__r is not None:
        return self.__r

    if self.max_retries:
        s = requests.Session()
        s.mount('', HTTPAdapter(max_retries=self.max_retries))
    else:
        s = requests

    base_url = self.url
    basic_auth = None

    url_parsed = urlparse.urlsplit(base_url)
    if url_parsed and url_parsed.username and url_parsed.password:
        base_url = base_url.replace("{0}:{1}@".format(urllib.quote(url_parsed.username),
                                                      urllib.quote(url_parsed.password)), "")
        base_url = base_url.replace("{0}:{1}@".format(url_parsed.username, url_parsed.password), "")
        basic_auth = requests.auth.HTTPBasicAuth(url_parsed.username, url_parsed.password)
    self.clean_url = base_url

    if self.oauth2:
        self.headers.update({'Authorization': 'Bearer {}'.format(tokens.get('uid'))})

    try:
        if post_data is None:
            self.__r = s.get(base_url, params=self.params, timeout=self.timeout, verify=self.verify,
                             headers=self.headers, auth=basic_auth)
        else:
            self.__r = s.post(base_url, params=self.params, timeout=self.timeout, verify=self.verify,
                              headers=self.headers, auth=basic_auth, data=json.dumps(post_data))
    except requests.Timeout, e:
        raise HttpError('timeout', self.clean_url), None, sys.exc_info()[2]

def test_read_from_file(monkeypatch, tmpdir):
    tokens.configure(dir=str(tmpdir))
    with open(os.path.join(str(tmpdir), 'mytok-token-secret'), 'w') as fd:
        fd.write('my-access-token\n')
    tokens.manage('mytok')
    tok = tokens.get('mytok')
    assert tok == 'my-access-token'

def test_get_refresh_failure_ignore_expiration(monkeypatch, tmpdir):
    tokens.configure(dir=str(tmpdir), url='https://example.org')
    with open(os.path.join(str(tmpdir), 'user.json'), 'w') as fd:
        json.dump({'application_username': '******', 'application_password': '******'}, fd)
    with open(os.path.join(str(tmpdir), 'client.json'), 'w') as fd:
        json.dump({'client_id': 'cid', 'client_secret': 'sec'}, fd)

    exc = Exception('FAIL')
    response = MagicMock()
    response.raise_for_status.side_effect = exc
    monkeypatch.setattr('requests.post', lambda url, **kwargs: response)

    logger = MagicMock()
    monkeypatch.setattr('tokens.logger', logger)

    tokens.TOKENS = {
        'mytok': {
            'access_token': 'expired-token',
            'ignore_expiration': True,
            'scopes': ['myscope'],
            # expired a long time ago..
            'expires_at': 0
        }
    }

    tok = tokens.get('mytok')
    assert tok == 'expired-token'
    logger.warn.assert_called_with('Failed to refresh access token "%s" (ignoring expiration): %s', 'mytok', exc)

def __init__(self, url=None, username=None, password=None, es_url=None, index_prefix=''):
    if not url:
        raise RuntimeError('AppDynamics plugin improperly configured. URL is required!')

    self.url = url
    self.es_url = es_url
    self.index_prefix = index_prefix
    self.__oauth2 = False

    self.__session = requests.Session()

    if not username or not password:
        self.__oauth2 = True
        self.__session.headers.update({'Authorization': 'Bearer {}'.format(tokens.get('uid'))})
    else:
        self.__session.auth = (username, password)

    self.__session.headers.update({'User-Agent': get_user_agent()})
    self.__session.params = {'output': 'json'}
    self.__session.timeout = 3

def update_accounts():
    r = requests.get(
        os.environ.get('HTTP_TEAM_SERVICE_URL') + '/api/accounts/aws',
        headers={'Authorization': 'Bearer {}'.format(tokens.get('tok'))})
    for a in r.json():
        redis.set('accounts:' + a['id'], json.dumps(get_account_info(a)))
    ACCOUNTS.update({a['id']: get_account_info(a) for a in r.json()})

def get_groups(uid):
    token = tokens.get('uid')
    response = requests.get(
        GROUPS_URL.format(uid=uid),
        headers={'Authorization': 'Bearer {}'.format(token)})
    response.raise_for_status()
    groups = [g for g in response.json() if not g.get('disabled')]
    return groups

def __call__(self, r):
    token = ''
    if self.on_taupage:
        token = tokens.get('costreport')
    else:
        token = self.token
    r.headers['Authorization'] = 'Bearer ' + token
    return r

def get_roles(username):
    token = tokens.get("uid")
    response = requests.get(
        GROUPS_URL.format(uid=username),
        headers={"Authorization": "Bearer {}".format(token)},
    )
    response.raise_for_status()
    return list(parse_groups(response.json()))

def get_metrics():
    print('grab metrics')
    a_token = tokens.get('mytoken')
    response = requests.get('http://localhost:8081/metrics',
                            headers={'Authorization': 'Bearer {}'.format(a_token)})
    if response.status_code != requests.codes.ok:
        raise Exception('Got error response from metrics endpoint')
    return response.json()

def main():
    logging.basicConfig(level=logging.INFO, format='%(levelname)s %(name)s: %(message)s')
    tokens.manage('zmon-entity-adapter', ['uid'])
    access_token = tokens.get('zmon-entity-adapter')
    entities = get_entities(('kio_application', 'team', 'infrastructure_account', 'aws_billing', 'kubernetes_cluster'),
                            access_token)
    sync_apps(entities, os.getenv('KIO_URL'), access_token)
    sync_teams(entities, os.getenv('TEAM_SERVICE_URL'), access_token)
    sync_clusters(entities, os.getenv('CLUSTER_REGISTRY_URL'), access_token)
    logging.info('Update finished. Pushed entities: {}'.format(ENTITY_STATS))

def notify(cls, alert, *args, **kwargs):
    current_span = extract_span_from_kwargs(**kwargs)

    repeat = kwargs.get('repeat', 0)
    oauth2 = kwargs.get('oauth2', True)

    headers = {'Content-type': 'application/json'}
    timeout = 5

    alert_def = alert['alert_def']
    current_span.set_tag('alert_id', alert_def['id'])

    entity = alert.get('entity')
    is_changed = alert.get('alert_changed', False)
    is_alert = alert.get('is_alert', False)

    current_span.set_tag('entity', entity['id'])
    current_span.set_tag('alert_changed', bool(is_changed))
    current_span.set_tag('is_alert', is_alert)

    url = cls._config.get('notifications.service.url', None)
    if not url:
        current_span.set_tag('notification_invalid', True)
        current_span.log_kv({'reason': 'No notification service url set!'})
        logger.error('No notification service url set')
        return repeat

    url = url + '/api/v1/twilio'

    if oauth2:
        headers.update({'Authorization': 'Bearer {}'.format(tokens.get('uid'))})
    else:
        key = kwargs.get('key', cls._config.get('notifications.service.key'))
        headers.update({'Authorization': 'Bearer {}'.format(key)})

    headers['User-Agent'] = get_user_agent()

    data = {
        'message': kwargs.get('message', cls._get_subject(alert)),
        'escalation_team': kwargs.get('team', alert['alert_def'].get('team', '')),
        'numbers': kwargs.get('numbers', []),
        'voice': kwargs.get('voice', 'woman'),
        'alert_id': alert['alert_def']['id'],
        'entity_id': alert['entity']['id'],
        'event_type': 'ALERT_ENDED' if alert and not alert.get('is_alert') else 'ALERT_START',
        'alert_changed': alert.get('alert_changed', False),
    }

    try:
        logger.info('Sending HTTP POST request to {}'.format(url))
        r = requests.post(url, data=json.dumps(data, cls=JsonDataEncoder), headers=headers, timeout=timeout)
        r.raise_for_status()
    except Exception:
        logger.exception('Twilio Request failed!')

    return repeat

def notify(cls, alert, *args, **kwargs):
    repeat = kwargs.get('repeat', 0)
    oauth2 = kwargs.get('oauth2', True)

    headers = {'Content-type': 'application/json'}
    timeout = 5

    url = cls._config.get('notifications.service.url', None)
    if not url:
        logger.error('No notification service url set')
        return repeat

    url = url + '/api/v1/twilio'

    if oauth2:
        headers.update({'Authorization': 'Bearer {}'.format(tokens.get('uid'))})
    else:
        key = kwargs.get('key', cls._config.get('notifications.service.key'))
        headers.update({'Authorization': 'Bearer {}'.format(key)})

    headers['User-Agent'] = get_user_agent()

    data = {
        'message': kwargs.get('message', cls._get_subject(alert)),
        'escalation_team': kwargs.get('team', alert['alert_def'].get('team', '')),
        'numbers': kwargs.get('numbers', []),
        'voice': kwargs.get('voice', 'woman'),
        'alert_id': alert['alert_def']['id'],
        'entity_id': alert['entity']['id'],
        'event_type': 'ALERT_ENDED' if alert and not alert.get('is_alert') else 'ALERT_START',
        'alert_changed': alert.get('alert_changed', False),
    }

    try:
        logger.info('Sending HTTP POST request to {}'.format(url))
        r = requests.post(url, data=json.dumps(data, cls=JsonDataEncoder), headers=headers, timeout=timeout)
        r.raise_for_status()
    except:
        logger.exception('Twilio Request failed!')

    return repeat

def get_teams_for_user(user_name):
    if not TEAM_SERVICE_URL:
        return json.loads(os.getenv("TEAMS", "[]"))
    r = requests.get(
        TEAM_SERVICE_URL.format(user_name),
        headers={'Authorization': 'Bearer ' + tokens.get('read-only')})
    teams = r.json()
    teams = list(map(lambda x: x['id_name'], teams))
    return teams

def __init__(self, url, oauth2=False):
    if not url:
        raise ConfigurationError('KairosDB wrapper improperly configured. URL is missing!')

    self.url = url

    self.__session = requests.Session()
    if oauth2:
        self.__session.headers.update({'Authorization': 'Bearer {}'.format(tokens.get('uid'))})

def get_token(url: str, scopes: str, credentials_dir: str) -> dict:
    """
    Get access token info.
    """
    tokens.configure(url=url, dir=credentials_dir)
    tokens.manage('lizzy', [scopes])
    tokens.start()
    return tokens.get('lizzy')

def get_clients(zmon_url, verify=True) -> Zmon:
    """Return a Zmon client instance."""

    # Get token if set as ENV variable. This is useful in development.
    zmon_token = os.getenv('ZMON_AGENT_TOKEN')

    if not zmon_token:
        zmon_token = tokens.get('uid')

    return Zmon(zmon_url, token=zmon_token, verify=verify)

def __request(self, raise_error=True, post_data=None):
    if self.__r is None:
        if self.max_retries:
            s = requests.Session()
            s.mount('', HTTPAdapter(max_retries=self.max_retries))
        else:
            s = requests

        base_url = self.url
        basic_auth = None

        url_parsed = urlparse.urlsplit(base_url)
        if url_parsed and url_parsed.username and url_parsed.password:
            base_url = base_url.replace(
                "{0}:{1}@".format(urllib.quote(url_parsed.username), urllib.quote(url_parsed.password)), "")
            base_url = base_url.replace(
                "{0}:{1}@".format(url_parsed.username, url_parsed.password), "")
            basic_auth = (url_parsed.username, url_parsed.password)
        self.clean_url = base_url

        if self.oauth2:
            self._headers.update({'Authorization': 'Bearer {}'.format(tokens.get(self.oauth2_token_name))})

        self._headers.update({'User-Agent': get_user_agent()})

        try:
            if post_data is None:
                # GET or HEAD
                get_method = getattr(s, self.__method)
                self.__r = get_method(base_url, params=self.params, timeout=self.timeout, verify=self.verify,
                                      headers=self._headers, auth=basic_auth,
                                      allow_redirects=self.allow_redirects)
            else:
                self.__r = s.post(base_url, params=self.params, timeout=self.timeout, verify=self.verify,
                                  headers=self._headers, auth=basic_auth, data=json.dumps(post_data))
        except requests.Timeout, e:
            raise HttpError('timeout', self.clean_url), None, sys.exc_info()[2]
        except requests.ConnectionError, e:
            raise HttpError('connection failed', self.clean_url), None, sys.exc_info()[2]

def main():
    logging.basicConfig(level=logging.INFO, format='%(levelname)s %(name)s: %(message)s')
    tokens.manage('zmon-entity-adapter', ['uid'])
    access_token = tokens.get('zmon-entity-adapter')
    entities = get_entities(
        ('kio_application', 'team', 'infrastructure_account', 'aws_billing', 'kubernetes_cluster'),
        access_token)
    sync_apps(entities, os.getenv('KIO_URL'), access_token)
    sync_teams(entities, os.getenv('TEAM_SERVICE_URL'), access_token)
    sync_clusters(entities, os.getenv('CLUSTER_REGISTRY_URL'), access_token)
    logging.info('Update finished. Pushed entities: {}'.format(ENTITY_STATS))

def forward_event(self, callback_url, event, topic):
    logging.debug('Forwarding to %s', callback_url)
    headers = {'Content-Type': 'application/json'}
    if self.use_oauth2_for_push:
        headers['Authorization'] = 'Bearer {}'.format(tokens.get('event-store'))
    response = requests.post(callback_url, data=json.dumps(event), headers=headers)
    self.metrics['events_out'].inc({'topic': topic, 'uuid': '0', 'url': callback_url,
                                    'status_code': int(response.status_code)})
    print(response.status_code, response.text)
    return int(response.status_code)

def get_service_token(name: str, scopes: list):
    '''Get service token (tokens lib) if possible, otherwise return None'''
    tokens.manage(name, scopes)
    try:
        access_token = tokens.get(name)
    except tokens.ConfigurationError:
        # will be thrown if configuration is missing (e.g. OAUTH2_ACCESS_TOKEN_URL)
        access_token = None
    except tokens.InvalidCredentialsError:
        # will be thrown if $CREDENTIALS_DIR/*.json cannot be read
        access_token = None

    return access_token

def __init__(self, service_url, infrastructure_account, verify=True, oauth2=False):
    if not service_url:
        raise ConfigurationError('EntitiesWrapper improperly configured. URL is missing!')

    self.infrastructure_account = infrastructure_account
    self.__service_url = urlparse.urljoin(service_url, 'api/v1/')

    self.__session = requests.Session()

    self.__session.headers.update({'User-Agent': get_user_agent()})
    self.__session.verify = verify

    if oauth2:
        self.__session.headers.update({'Authorization': 'Bearer {}'.format(tokens.get('uid'))})

def plot_action_infos(folder, action_infos, step_difference, without_count=False):
    path = '{}/action_infos_wo_count_{}'.format(folder, without_count)
    os.makedirs(path)

    for i, action_info in enumerate(action_infos):
        if i % step_difference == 0:
            figure, axis = matplotlib.pyplot.subplots()

            x_pos = numpy.arange(0, len(action_info.keys()), 1)
            width = 0.9

            actions = action_info.keys()
            heights_counts = [action_info[a][0] for a in actions]
            heights_probs = [action_info[a][1] for a in actions]
            heights_reward = [action_info[a][2] for a in actions]

            heights_probs = [math.exp(p) for p in heights_probs]
            heights_counts = normalize(heights_counts)
            # heights_probs = normalize(heights_probs)
            # heights_reward = normalize(heights_reward)

            if not without_count:
                axis.bar(x_pos + width / 3, heights_counts, width / 3, label='Count')
            axis.bar(x_pos - width / 3, heights_reward, width / 3, label='Reward')
            axis.bar(x_pos, heights_probs, width / 3, label='Probability')

            ts = [tokens.get(i).name for i in actions]
            matplotlib.pyplot.xticks(x_pos, ts)
            axis.tick_params(axis='x', labelsize=8)

            matplotlib.pyplot.title('Actions In Step {}'.format(i))
            matplotlib.pyplot.legend(loc='best')
            matplotlib.pyplot.xticks(rotation='vertical')

            figure.set_size_inches(18, 8)
            figure.savefig('{}/step_{}.png'.format(path, i), bbox_inches="tight")
            matplotlib.pyplot.close(figure)

def get_sampling_rate_config(config, current_span):
    """
    Get sampling rate config from a ZMON entity or config vars.

    Entity:
    {
        "id": "zmon-sampling-rate",
        "type": "zmon_config",
        "default_sampling": 100,
        "critical_checks": [13, 14, 19],
        "worker_sampling": {
            "account-1": 50,
            "account-2": 60,
            "account-3": 0
        }
    }
    """
    default_sampling = int(config.get('zmon.sampling.rate', 100))
    critical_checks = config.get('zmon.critical.checks')
    if type(critical_checks) is not list:
        critical_checks = critical_checks.replace(' ', '').split(',')

    sampling_config = {
        'default_sampling': default_sampling,
        'critical_checks': critical_checks
    }

    # We try to get sampling rate entity!
    zmon_url = config.get('zmon.url')
    if not zmon_url:
        current_span.set_tag('sampling_entity_used', False)
    else:
        current_span.set_tag('sampling_entity_used', True)
        try:
            url = '{}/api/v1/entities/{}'.format(zmon_url, SAMPLING_RATE_ENTITY_ID)
            headers = {'Authorization': 'Bearer {}'.format(tokens.get('uid'))}

            resp = requests.get(url, headers=headers, timeout=2)
            resp.raise_for_status()

            entity = resp.json()
            sampling_config.update(entity)
        except Exception:
            current_span.set_tag('sampling_entity_used', False)
            current_span.log_kv({'exception': format_exc()})

    return sampling_config

def test_refresh(monkeypatch, tmpdir):
    tokens.configure(dir=str(tmpdir), url='')
    tokens.manage('mytok', ['myscope'])

    with pytest.raises(tokens.ConfigurationError):
        tokens.refresh('mytok')

    tokens.configure(dir=str(tmpdir), url='https://example.org')
    with open(os.path.join(str(tmpdir), 'user.json'), 'w') as fd:
        json.dump({'application_username': '******', 'application_password': '******'}, fd)
    with open(os.path.join(str(tmpdir), 'client.json'), 'w') as fd:
        json.dump({'client_id': 'cid', 'client_secret': 'sec'}, fd)

    response = MagicMock()
    response.json.return_value = {'expires_in': 123123, 'access_token': '777'}
    monkeypatch.setattr('requests.post', lambda url, **kwargs: response)
    tok = tokens.get('mytok')
    assert tok == '777'

def __init__(self, service_url, infrastructure_account, verify=True, oauth2=False):
    if not service_url:
        raise ConfigurationError('EntitiesWrapper improperly configured. URL is missing!')

    self.infrastructure_account = infrastructure_account
    self.__service_url = urlparse.urljoin(service_url, 'api/v1/')

    self.__session = requests.Session()

    self.__session.headers.update({'User-Agent': get_user_agent()})
    self.__session.verify = verify

    if oauth2:
        self.__session.headers.update({'Authorization': 'Bearer {}'.format(tokens.get('uid'))})

def forward_event(self, callback_url, event, topic):
    logging.debug('Forwarding to %s', callback_url)
    headers = {'Content-Type': 'application/json'}
    if self.use_oauth2_for_push:
        headers['Authorization'] = 'Bearer {}'.format(tokens.get('event-store'))
    response = requests.post(callback_url, data=json.dumps(event), headers=headers)
    self.metrics['events_out'].inc({
        'topic': topic,
        'uuid': '0',
        'url': callback_url,
        'status_code': int(response.status_code)
    })
    print(response.status_code, response.text)
    return int(response.status_code)

def __init__(self, url=None, check_id='', entities=None, oauth2=False):
    if not url:
        raise ConfigurationError('History wrapper improperly configured. URL is required.')

    self.url = os.path.join(url, DATAPOINTS_ENDPOINT)

    self.check_id = check_id

    if not entities:
        self.entities = []
    elif type(entities) == list:
        self.entities = entities
    else:
        self.entities = [entities]

    self.__session = requests.Session()
    self.__session.headers.update({'Content-Type': 'application/json'})

    if oauth2:
        self.__session.headers.update({'Authorization': 'Bearer {}'.format(tokens.get('uid'))})

def test_get_refresh_failure_ignore_expiration_no_access_token(monkeypatch, tmpdir):
    tokens.configure(dir=str(tmpdir), url='https://example.org')
    with open(os.path.join(str(tmpdir), 'user.json'), 'w') as fd:
        json.dump({'application_username': '******', 'application_password': '******'}, fd)
    with open(os.path.join(str(tmpdir), 'client.json'), 'w') as fd:
        json.dump({'client_id': 'cid', 'client_secret': 'sec'}, fd)

    exc = Exception('FAIL')
    response = MagicMock()
    response.raise_for_status.side_effect = exc
    monkeypatch.setattr('requests.post', lambda url, **kwargs: response)

    # we never got any access token
    tokens.TOKENS = {'mytok': {'ignore_expiration': True,
                               'scopes': ['myscope'],
                               # expired a long time ago..
                               'expires_at': 0}}

    with pytest.raises(Exception) as exc_info:
        tok = tokens.get('mytok')
    assert exc_info.value == exc

def __init__(self, url=None, username=None, password=None, es_url=None, index_prefix=''):
    if not url:
        raise RuntimeError('AppDynamics plugin improperly configured. URL is required!')

    self.url = url
    self.es_url = es_url
    self.index_prefix = index_prefix
    self.__oauth2 = False

    self.__session = requests.Session()

    if not username or not password:
        self.__oauth2 = True
        self.__session.headers.update({'Authorization': 'Bearer {}'.format(tokens.get('uid'))})
    else:
        self.__session.auth = (username, password)

    self.__session.headers.update({'User-Agent': get_user_agent()})
    self.__session.params = {'output': 'json'}
    self.__session.timeout = 3

def get_token(name: str, scopes: list):
    '''Get an OAuth token, either from Token Service or directly from OAuth provider (using the Python tokens library)'''

    # first try if a token exists already
    token = get_existing_token(name)

    if token:
        return token['access_token']

    tokens.manage(name, scopes)
    try:
        access_token = tokens.get(name)
    except tokens.ConfigurationError:
        access_token = None
    except tokens.InvalidCredentialsError:
        access_token = None

    if access_token:
        return access_token

    config = get_config()

    user = config.get('user') or os.getenv('ZIGN_USER') or os.getenv('USER')

    if not user:
        raise ConfigurationError('Missing OAuth username. ' +
                                 'Either set "user" in configuration file or ZIGN_USER environment variable.')

    if not config.get('url'):
        raise ConfigurationError('Missing OAuth access token service URL. ' +
                                 'Please set "url" in configuration file.')

    password = os.getenv('ZIGN_PASSWORD') or keyring.get_password(KEYRING_KEY, user)

    token = get_new_token(config.get('realm'), scopes, user, password,
                          url=config.get('url'), insecure=config.get('insecure'))
    if token:
        store_token(name, token)

    return token['access_token']

def __request(self, url, params=None, body=None):
    """Return json response"""
    if self.oauth2:
        self._headers.update({'Authorization': 'Bearer {}'.format(tokens.get('uid'))})

    try:
        if body is None:
            response = requests.get(url, params=params, timeout=self.timeout, headers=self._headers)

            if not response.ok:
                raise Exception('Elasticsearch query failed: {}'.format(url))

            return response.json()
        else:
            response = requests.post(url, params=params, json=body, timeout=self.timeout, headers=self._headers)

            if not response.ok:
                raise Exception(
                    'Elasticsearch query failed: {} with response: {}'.format(url, json.dumps(response.text)))

            return response.json()
    except requests.Timeout:
        raise HttpError('timeout', self.url), None, sys.exc_info()[2]
    except requests.ConnectionError:
        raise HttpError('connection failed', self.url), None, sys.exc_info()[2]
    except Exception:
        raise

def test_get_refresh_failure_ignore_expiration(monkeypatch, tmpdir):
    tokens.configure(dir=str(tmpdir), url='https://example.org')
    with open(os.path.join(str(tmpdir), 'user.json'), 'w') as fd:
        json.dump({'application_username': '******', 'application_password': '******'}, fd)
    with open(os.path.join(str(tmpdir), 'client.json'), 'w') as fd:
        json.dump({'client_id': 'cid', 'client_secret': 'sec'}, fd)

    exc = Exception('FAIL')
    response = MagicMock()
    response.raise_for_status.side_effect = exc
    monkeypatch.setattr('requests.post', lambda url, **kwargs: response)

    logger = MagicMock()
    monkeypatch.setattr('tokens.logger', logger)

    tokens.TOKENS = {'mytok': {'access_token': 'expired-token',
                               'ignore_expiration': True,
                               'scopes': ['myscope'],
                               # expired a long time ago..
                               'expires_at': 0}}

    tok = tokens.get('mytok')
    assert tok == 'expired-token'
    logger.warn.assert_called_with('Failed to refresh access token "%s" (ignoring expiration): %s', 'mytok', exc)

def __request(self, raise_error=True, post_data=None):
    if self.__r is None:
        if self.max_retries:
            s = requests.Session()
            s.mount('', HTTPAdapter(max_retries=self.max_retries))
        else:
            s = requests

        base_url = self.url
        basic_auth = None

        url_parsed = urlparse.urlsplit(base_url)
        if url_parsed and url_parsed.username and url_parsed.password:
            base_url = base_url.replace(
                "{0}:{1}@".format(urllib.quote(url_parsed.username), urllib.quote(url_parsed.password)), "")
            base_url = base_url.replace("{0}:{1}@".format(url_parsed.username, url_parsed.password), "")
            basic_auth = (url_parsed.username, url_parsed.password)
        self.clean_url = base_url

        if self.oauth2:
            self._headers.update({'Authorization': 'Bearer {}'.format(tokens.get(self.oauth2_token_name))})

        self._headers.update({'User-Agent': get_user_agent()})

        try:
            if post_data is None:
                # GET or HEAD
                get_method = getattr(s, self.__method)
                self.__r = get_method(base_url, params=self.params, timeout=self.timeout, verify=self.verify,
                                      headers=self._headers, auth=basic_auth,
                                      allow_redirects=self.allow_redirects)
            else:
                self.__r = s.post(base_url, params=self.params, timeout=self.timeout, verify=self.verify,
                                  headers=self._headers, auth=basic_auth, data=json.dumps(post_data))
        except requests.Timeout, e:
            raise HttpError('timeout', self.clean_url), None, sys.exc_info()[2]
        except requests.ConnectionError, e:
            raise HttpError('connection failed', self.clean_url), None, sys.exc_info()[2]

def __call__(self, request):
    token = tokens.get(self.token_name)
    request.headers["Authorization"] = f"Bearer {token}"
    return request

def main():
    argp = argparse.ArgumentParser(description='ZMON AWS Agent')
    argp.add_argument('-e', '--entity-service', dest='entityservice')
    argp.add_argument('-r', '--region', dest='region', default=None)
    argp.add_argument('-j', '--json', dest='json', action='store_true')
    argp.add_argument('-t', '--tracer', dest='tracer', default=os.environ.get('OPENTRACING_TRACER', 'noop'))
    argp.add_argument('--no-oauth2', dest='disable_oauth2', action='store_true', default=False)
    argp.add_argument('--postgresql-user', dest='postgresql_user', default=os.environ.get('AGENT_POSTGRESQL_USER'))
    argp.add_argument('--postgresql-pass', dest='postgresql_pass', default=os.environ.get('AGENT_POSTGRESQL_PASS'))
    args = argp.parse_args()

    if not args.disable_oauth2:
        tokens.configure()
        tokens.manage('uid', ['uid'])
        tokens.start()

    init_opentracing_tracer(args.tracer)
    root_span = opentracing.tracer.start_span(operation_name='aws_entity_discovery')

    with root_span:
        logging.basicConfig(level=logging.INFO)

        # 0. Fetch extra data for entities
        entity_extras = {}
        for ex in os.getenv('EXTRA_ENTITY_FIELDS', '').split(','):
            if '=' not in ex:
                continue
            k, v = ex.split('=', 1)
            if k and v:
                entity_extras[k] = v

        # 1. Determine region
        if not args.region:
            logger.info('Trying to figure out region..')
            try:
                response = requests.get('http://169.254.169.254/latest/meta-data/placement/availability-zone',
                                        timeout=2)
            except Exception:
                root_span.set_tag('error', True)
                root_span.log_kv({'exception': traceback.format_exc()})
                logger.exception('Region was not specified as a parameter and ' +
                                 'can not be fetched from instance meta-data!')
                raise
            region = response.text[:-1]
        else:
            region = args.region

        root_span.set_tag('region', region)

        logger.info('Using region: {}'.format(region))

        logger.info('Entity service URL: %s', args.entityservice)

        logger.info('Reading DNS data for hosted zones')
        aws.populate_dns_data()

        aws_account_id = aws.get_account_id(region)
        infrastructure_account = 'aws:{}'.format(aws_account_id) if aws_account_id else None

        if not infrastructure_account:
            logger.error('AWS agent: Cannot determine infrastructure account ID. Terminating!')
            return

        root_span.set_tag('account', infrastructure_account)

        # 2. ZMON entities
        if not args.disable_oauth2:
            token = os.getenv('ZMON_TOKEN', None) or tokens.get('uid')
        zmon_client = Zmon(args.entityservice, token=token, user_agent=get_user_agent())

        query = {'infrastructure_account': infrastructure_account, 'region': region, 'created_by': 'agent'}
        entities = zmon_client.get_entities(query)

        # 3. Get running apps
        apps = aws.get_running_apps(region, entities)

        elbs = []
        scaling_groups = []
        elastigroups = []
        certificates = []
        rds = []
        elasticaches = []
        dynamodbs = []
        sqs = []
        postgresql_clusters = []
        aws_limits = []

        new_entities = []
        to_be_removed = []

        if len(apps) > 0:
            elbs = aws.get_running_elbs(region, infrastructure_account)
            scaling_groups = aws.get_auto_scaling_groups(region, infrastructure_account)
            elastigroups = elastigroup.get_elastigroup_entities(region, infrastructure_account)
            rds = aws.get_rds_instances(region, infrastructure_account, entities)
            elasticaches = aws.get_elasticache_nodes(region, infrastructure_account)
            dynamodbs = aws.get_dynamodb_tables(region, infrastructure_account)
            certificates = aws.get_certificates(region, infrastructure_account)
            aws_limits = aws.get_limits(region, infrastructure_account, apps, elbs, entities)
            sqs = aws.get_sqs_queues(region, infrastructure_account, entities)
            postgresql_clusters = postgresql.get_postgresql_clusters(region, infrastructure_account,
                                                                     scaling_groups, apps)

        account_alias = aws.get_account_alias(region)
        ia_entity = {
            'type': 'local',
            'infrastructure_account': infrastructure_account,
            'account_alias': account_alias,
            'region': region,
            'id': 'aws-ac[{}:{}]'.format(infrastructure_account, region),
            'created_by': 'agent',
        }

        account_alias_prefix = os.getenv('ACCOUNT_ALIAS_PREFIX', None)
        owner = account_alias
        if account_alias_prefix:
            owner = owner.replace(account_alias_prefix, '', 1)
        root_span.set_tag('team', owner)

        application_entities = aws.get_apps_from_entities(apps, infrastructure_account, region)

        if args.postgresql_user and args.postgresql_pass:
            postgresql_databases = postgresql.get_databases_from_clusters(postgresql_clusters,
                                                                          infrastructure_account,
                                                                          region,
                                                                          args.postgresql_user,
                                                                          args.postgresql_pass)
        else:
            # Pretend the list of DBs is empty, but also make sure we don't remove
            # any pre-existing database entities because we don't know about them.
            postgresql_databases = []
            entities = [e for e in entities if e.get('type') != 'postgresql_database']

        current_entities = (
            elbs + scaling_groups + elastigroups + apps + application_entities +
            rds + postgresql_databases + postgresql_clusters + elasticaches + dynamodbs +
            certificates + sqs)
        current_entities.append(aws_limits)
        current_entities.append(ia_entity)

        for entity in current_entities:
            entity.update(entity_extras)

        # 4. Removing missing entities
        existing_ids = get_existing_ids(entities)
        current_entities_ids = {e['id'] for e in current_entities}

        to_be_removed, delete_error_count = remove_missing_entities(
            existing_ids, current_entities_ids, zmon_client, json=args.json)

        root_span.log_kv({'total_entitites': str(len(current_entities))})
        root_span.log_kv({'removed_entities': str(len(to_be_removed))})

        logger.info('Found {} removed entities from {} entities ({} failed)'.format(
            len(to_be_removed), len(current_entities), delete_error_count))

        # 5. Get new/updated entities
        new_entities, add_error_count = add_new_entities(current_entities, entities, zmon_client, json=args.json)

        root_span.log_kv({'new_entities': str(len(new_entities))})
        logger.info('Found {} new entities from {} entities ({} failed)'.format(
            len(new_entities), len(current_entities), add_error_count))

        # 6. Always add Local entity
        if not args.json:
            ia_entity['errors'] = {'delete_count': delete_error_count, 'add_count': add_error_count}
            update_local_entity(zmon_client, ia_entity)

        types = {e['type']: len([t for t in new_entities if t['type'] == e['type']]) for e in new_entities}

        for t, v in types.items():
            logger.info('Found {} new entities of type: {}'.format(v, t))

        # Check if it is a dry run!
        if args.json:
            d = {
                'applications': application_entities,
                'apps': apps,
                'elastigroups': elastigroups,
                'dynamodb': dynamodbs,
                'elbs': elbs,
                'elc': elasticaches,
                'rds': rds,
                'certificates': certificates,
                'aws_limits': aws_limits,
                'sqs_queues': sqs,
                'new_entities': new_entities,
                'to_be_removed': to_be_removed,
                'posgresql_clusters': postgresql_clusters
            }

            print(json.dumps(d, indent=4))

def main():
    argp = argparse.ArgumentParser(description='ZMon AWS Agent')
    argp.add_argument('-e', '--entity-service', dest='entityservice')
    argp.add_argument('-r', '--region', dest='region', default=None)
    argp.add_argument('-j', '--json', dest='json', action='store_true')
    argp.add_argument('--no-oauth2', dest='disable_oauth2', action='store_true', default=False)
    args = argp.parse_args()

    if not args.disable_oauth2:
        tokens.configure()
        tokens.manage('uid', ['uid'])
        tokens.start()

    logging.basicConfig(level=logging.INFO)

    if not args.region:
        logging.info("Trying to figure out region...")
        try:
            response = requests.get('http://169.254.169.254/latest/meta-data/placement/availability-zone',
                                    timeout=2)
        except:
            logging.error("Region was not specified as a parameter and can not be fetched from instance meta-data!")
            raise
        region = response.text[:-1]
    else:
        region = args.region

    logging.info("Using region: {}".format(region))

    logging.info("Entity service url: %s", args.entityservice)

    apps = get_running_apps(region)
    if len(apps) > 0:
        infrastructure_account = apps[0]['infrastructure_account']
        elbs = get_running_elbs(region, infrastructure_account)
        scaling_groups = get_auto_scaling_groups(region, infrastructure_account)
        rds = get_rds_instances(region, infrastructure_account)
        elasticaches = get_elasticache_nodes(region, infrastructure_account)
        dynamodbs = get_dynamodb_tables(region, infrastructure_account)
    else:
        # no apps found: keep all collections empty and skip entity updates
        infrastructure_account = None
        elbs = []
        scaling_groups = []
        rds = []
        elasticaches = []
        dynamodbs = []

    if args.json:
        d = {'apps': apps, 'elbs': elbs, 'rds': rds, 'elc': elasticaches, 'dynamodb': dynamodbs}
        print(json.dumps(d))
    else:
        if infrastructure_account is not None:
            account_alias = get_account_alias(region)
            ia_entity = {"type": "local",
                         "infrastructure_account": infrastructure_account,
                         "account_alias": account_alias,
                         "region": region,
                         "id": "aws-ac[{}:{}]".format(infrastructure_account, region),
                         "created_by": "agent"}

            application_entities = get_apps_from_entities(apps, infrastructure_account, region)

            current_entities = []

            for e in elbs:
                current_entities.append(e["id"])
            for e in scaling_groups:
                current_entities.append(e["id"])
            for a in apps:
                current_entities.append(a["id"])
            for a in application_entities:
                current_entities.append(a["id"])
            for a in rds:
                current_entities.append(a["id"])
            for a in elasticaches:
                current_entities.append(a["id"])
            for a in dynamodbs:
                current_entities.append(a["id"])

            current_entities.append(ia_entity["id"])

            headers = {'Content-Type': 'application/json'}

            if not args.disable_oauth2:
                token = os.getenv('ZMON_AGENT_TOKEN', tokens.get('uid'))
                logging.info("Adding oauth2 token to requests {}...{}".format(token[:1], token[-1:]))
                headers.update({'Authorization': 'Bearer {}'.format(token)})

            # removing all entities
            query = {'infrastructure_account': infrastructure_account, 'region': region, 'created_by': 'agent'}
            r = requests.get(args.entityservice, params={'query': json.dumps(query)}, headers=headers, timeout=10)
            entities = r.json()

            existing_entities = {}

            to_remove = []
            for e in entities:
                existing_entities[e['id']] = e
                if not e["id"] in current_entities:
                    to_remove.append(e["id"])

            if os.getenv('zmon_user'):
                auth = (os.getenv('zmon_user'), os.getenv('zmon_password', ''))
            else:
                auth = None

            for e in to_remove:
                logging.info("removing instance: {}".format(e))
                r = requests.delete(args.entityservice + "{}/".format(e), auth=auth, headers=headers, timeout=3)
                logging.info("...%s", r.status_code)

            def put_entity(entity_type, entity):
                logging.info("Adding {} entity: {}".format(entity_type, entity['id']))
                r = requests.put(args.entityservice, auth=auth,
                                 data=json.dumps(entity, default=json_serial),
                                 headers=headers, timeout=3)
                logging.info("...%s", r.status_code)

            put_entity('LOCAL', ia_entity)

            for instance in apps:
                put_entity('instance', instance)

            for asg in scaling_groups:
                put_entity('Auto Scaling group', asg)

            for elb in elbs:
                put_entity('elastic load balancer', elb)

            for db in rds:
                put_entity('RDS instance', db)

            # merge here or we lose it on next pull
            for app in application_entities:
                if app['id'] in existing_entities:
                    ex = existing_entities[app['id']]
                    if 'scalyr_ts_id' in ex:
                        app['scalyr_ts_id'] = ex['scalyr_ts_id']

            for app in application_entities:
                put_entity('application', app)

            for elasticache in elasticaches:
                put_entity('elasticache', elasticache)

            for dynamodb in dynamodbs:
                put_entity('dynamodb', dynamodb)

def notify(cls, alert, url=None, body=None, params=None, headers=None, timeout=5, oauth2=False, include_alert=True,
           repeat=0, **kwargs):
    current_span = extract_span_from_kwargs(**kwargs)

    urls = cls._config.get('notifications.http.whitelist.urls', [])
    allow_any = cls._config.get('notifications.http.allow.all', False)
    default_url = cls._config.get('notifications.http.default.url', None)

    alert_def = alert['alert_def']
    current_span.set_tag('alert_id', alert_def['id'])

    entity = alert.get('entity', {})

    is_changed = alert.get('alert_changed', False)
    is_alert = alert.get('is_alert', False)

    current_span.set_tag('entity', entity.get('id'))
    current_span.set_tag('alert_changed', bool(is_changed))
    current_span.set_tag('is_alert', is_alert)

    if isinstance(urls, basestring):
        urls = urls.replace(' ', '').split(',')

    if not url and not default_url:
        current_span.set_tag('notification_invalid', True)
        current_span.log_kv({'reason': 'Missing URL!'})
        raise NotificationError('URL is required!')

    if not url:
        url = default_url
    elif not allow_any and url not in urls:
        current_span.set_tag('notification_invalid', True)
        current_span.log_kv({'reason': 'URL is not in whitelist'})
        raise NotificationError('URL "{}" is not allowed. Please check worker white list URLs.'.format(url))

    if not is_absolute_http_url(url):
        current_span.set_tag('notification_invalid', True)
        current_span.log_kv({'reason': 'Absolute URL required!'})
        raise NotificationError('Absolute URL is required!')

    # HTTP headers.
    if not headers:
        headers = {}

    default_headers = cls._config.get('notifications.http.headers', {})
    default_headers.update(headers)

    if oauth2:
        headers.update({'Authorization': 'Bearer {}'.format(tokens.get('uid'))})

    headers['User-Agent'] = get_user_agent()

    if include_alert:
        data = {
            'alert': alert,
            'body': body,
        }
    else:
        data = body

    try:
        logger.info('Sending HTTP POST request to {}'.format(url))
        r = requests.post(url, data=json.dumps(data, cls=JsonDataEncoder), params=params, headers=headers,
                          timeout=timeout)
        r.raise_for_status()
    except Exception:
        current_span.set_tag('error', True)
        logger.exception('Request failed!')

    return repeat

def _get_token():
    try:
        return tokens.get(app_name)
    except Exception as e:
        logging.error('Failed to get token for {}'.format(app_name), exc_info=e)
        return ''

def __call__(self, request):
    token = tokens.get(self.token_name)
    request.headers['Authorization'] = 'Bearer {}'.format(token)
    return request

def get_token():
    """Main Method to get token."""
    return tokens.get('pemetaan')

#!/usr/bin/env python3

import pierone.api
import tokens
import sys

if len(sys.argv) < 2:
    print("Usage: prepare-docker <pierone url>")
    sys.exit(1)

pierone_url = sys.argv[1]

print("Preparing configuration for Docker repository {} ...".format(pierone_url))

tokens.manage('application', ['uid', 'application.read', 'application.write'])
token = tokens.get('application')
pierone.api.docker_login_with_token(pierone_url, token)