def destination(token, proxy, proxy_host, proxy_user, proxy_password, host_id, access_key, url):
    """
    Data destination config.
    Anodot API token - You can copy it from Settings > API tokens > Data Collection in your Anodot account
    Proxy for connecting to Anodot
    """
    # take all data from the command arguments if token is provided, otherwise ask for input
    if token:
        result = agent.destination.manager.create(
            token, url, access_key, proxy_host, proxy_user, proxy_password, host_id
        )
        if result.is_err():
            raise click.ClickException(result.value)
    else:
        # Interactive path: start from the stored destination when one exists.
        if agent.destination.repository.exists():
            destination_ = agent.destination.repository.get()
        else:
            destination_ = HttpDestination()
        _prompt_proxy(destination_)
        _prompt_url(destination_)
        _prompt_token(destination_)
        _prompt_access_key(destination_)
        agent.destination.repository.save(destination_)
        # todo code duplicate, try to avoid it
        # Replace any previously issued auth token with a freshly requested one.
        if destination_.auth_token:
            agent.destination.repository.delete_auth_token(destination_.auth_token)
        auth_token = agent.destination.AuthenticationToken(
            destination_.id, AnodotApiClient(destination_).get_new_token()
        )
        agent.destination.repository.save_auth_token(auth_token)
    click.secho('Connection to Anodot established')
    click.secho('Destination configured', fg='green')
def get_alerts():
    """Return Anodot alert groups matching the status/order/sort query args as JSON."""
    form = AlertsByStatusForm(request.args)
    if not form.validate():
        return jsonify({
            'status': 'Abnormal request',
            'errors': form.errors,
        }), 400
    query = {
        'status': form.status.data,
        'order': form.order.data,
        'sort': form.sort.data,
    }
    # apply time constraints only to CLOSE status
    if form.status.data != OPEN:
        query['startTime'] = form.startTime.data
    try:
        client = AnodotApiClient(destination.repository.get())
        alert_groups = client.get_alerts(query)
    except destination.repository.DestinationNotExists as e:
        return jsonify({
            'status': 'Not connected',
            'errors': {'destination': [str(e)]},
        }), 400
    except requests.exceptions.HTTPError as e:
        return _error_response(e)
    # Reshape the API payload in place before serializing it.
    _move_metric_dimensions(alert_groups)
    _move_metric_scores(alert_groups)
    _transform(alert_groups)
    return jsonify(alert_groups)
def get_alert_status():
    """Return a single-alert status for the alert name (and optional host) in the query args."""
    form = AlertStatusForm(request.args)
    if not form.validate():
        return jsonify({
            'status': 'Abnormal request',
            'errors': form.errors,
        }), 400
    try:
        client = AnodotApiClient(destination.repository.get())
        alert_groups = client.get_alerts({'startTime': form.startTime.data})
    except destination.repository.DestinationNotExists as e:
        return jsonify({
            'status': 'Not connected',
            'errors': {'destination': [str(e)]},
        }), 400
    except requests.exceptions.HTTPError as e:
        return _error_response(e)
    # Filters mutate alert_groups in place.
    _filter_groups_by_name(alert_groups, form.alertName.data)
    if form.host.data:
        _filter_group_alerts_by_host(alert_groups, form.host.data)
    groups = alert_groups['alertGroups']
    # NOTE(review): only the first group's alerts are inspected here — presumably
    # the name filter leaves at most one group; confirm with _filter_groups_by_name.
    if not groups or not groups[0]['alerts']:
        return jsonify({'status': 'No alert'})
    return jsonify({'status': _extract_alert_status(groups)})
def main():
    """Send a watermark for every pipeline that is due one; return the number of errors."""
    try:
        pipelines = pipeline.repository.get_all()
    except Exception:
        _update_errors_count(0)
        raise
    num_of_errors = 0
    api_client = AnodotApiClient(destination.repository.get())
    for pipeline_ in pipelines:
        watermark_manager = pipeline.watermark.PeriodicWatermarkManager(pipeline_)
        if not watermark_manager.should_send_watermark():
            continue
        try:
            with pipeline.repository.SessionManager(pipeline_):
                bucket_start = watermark_manager.get_latest_bucket_start()
                pipeline.manager.update_pipeline_watermark(pipeline_, bucket_start)
                watermark = anodot.Watermark(
                    pipeline_.get_schema_id(), datetime.fromtimestamp(bucket_start)
                )
                api_client.send_watermark(watermark.to_dict())
                logger.debug(
                    f'Sent watermark for `{pipeline_.name}`, value: {pipeline_.watermark.timestamp}'
                )
                monitoring.set_watermark_delta(
                    pipeline_.name, time.time() - pipeline_.watermark.timestamp
                )
                monitoring.set_watermark_sent(pipeline_.name)
        except Exception:
            num_of_errors = _update_errors_count(num_of_errors)
            logger.error(f'Error sending pipeline watermark {pipeline_.name}')
            logger.error(traceback.format_exc())
    return num_of_errors
def create(
    token: str,
    url: str,
    access_key: str = None,
    proxy_host: str = None,
    proxy_username: str = None,
    proxy_password: str = None,
    host_id: str = None,
) -> Result[HttpDestination, str]:
    """Build and persist a destination; on success also store a fresh auth token."""
    result = _build(
        HttpDestination(), token, url, access_key,
        proxy_host, proxy_username, proxy_password, host_id,
    )
    if result.is_err():
        return result
    # todo duplicate code, try to avoid it
    new_token = AnodotApiClient(result.value).get_new_token()
    destination.repository.save_auth_token(
        destination.AuthenticationToken(result.value.id, new_token)
    )
    return result
def main():
    """Sync pipeline metadata (including error notifications and deletions) to BC;
    return the number of errors encountered."""
    num_of_errors = 0
    try:
        api_client = AnodotApiClient(destination.repository.get())
        pipelines = pipeline.repository.get_all()
    except Exception:
        _update_errors_count(0)
        raise
    for pipeline_ in pipelines:
        try:
            payload = pipeline.manager.transform_for_bc(pipeline_)
            notification = _get_notification_for_pipeline(pipeline_)
            if notification:
                payload['notification'] = asdict(notification)
            api_client.send_pipeline_data_to_bc(payload)
            if notification:
                logger.info(f'Error notification sent for pipeline {pipeline_.name}')
                # set 'notification_sent' flag to True
                _update_notification_sent(pipeline_, notification)
        except requests.HTTPError as e:
            # 404 from BC is not counted as a failure
            if e.response.status_code != 404:
                num_of_errors = _update_errors_count(num_of_errors)
                logger.error(traceback.format_exc())
        except Exception:
            num_of_errors = _update_errors_count(num_of_errors)
            logger.error(f'Error sending pipeline {pipeline_.name}')
            logger.error(traceback.format_exc())
    # Propagate local deletions; the stored id is only forgotten once BC accepts the delete.
    for row in pipeline.repository.get_deleted_pipeline_ids():
        deleted_id = row[0]
        try:
            api_client.delete_pipeline_from_bc(deleted_id)
            pipeline.repository.remove_deleted_pipeline_id(deleted_id)
        except requests.HTTPError as e:
            if e.response.status_code != 404:
                num_of_errors = _update_errors_count(num_of_errors)
                logger.error(traceback.format_exc())
        except Exception:
            num_of_errors = _update_errors_count(num_of_errors)
            logger.error(traceback.format_exc())
    return num_of_errors
def delete(schema_id: str):
    """Delete the schema with the given id via the Anodot API."""
    client = AnodotApiClient(destination.repository.get())
    client.delete_schema(schema_id)
def search(pipeline_id: str) -> Optional[str]:
    """Return the id of the schema whose name equals pipeline_id, or None if absent."""
    schemas = AnodotApiClient(destination.repository.get()).get_schemas()
    return next(
        (
            schema_['streamSchemaWrapper']['schema']['id']
            for schema_ in schemas
            if schema_['streamSchemaWrapper']['schema']['name'] == pipeline_id
        ),
        None,
    )
def update(schema: dict) -> dict:
    """Replace an existing schema and return the recreated schema dict."""
    # deletes schema and recreates with the same id field
    delete(schema['id'])
    client = AnodotApiClient(destination.repository.get())
    return client.update_schema(schema)['schema']
def create(schema: dict) -> dict:
    """Create a schema via the Anodot API and return the stored schema dict."""
    # created schema contains additional id field
    response = AnodotApiClient(destination.repository.get()).create_schema(schema)
    return response['schema']