def is_valid_resource_url(resource_url: str, proxy_obj: proxy.Proxy = None) -> bool:
    response = requests.post(resource_url, json=[], proxies=proxy.get_config(proxy_obj), timeout=5)
    # A 401 means the endpoint is reachable but rejected the token, so it is treated as
    # "invalid" rather than raised; any other error status propagates via raise_for_status().
    if response.status_code != 401:
        response.raise_for_status()
    return response.status_code != 401

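# The helpers in this section all pass the result of proxy.get_config(...) straight into
# requests. The real implementation is not shown here; the sketch below is an assumption
# only, using hypothetical Proxy fields (uri, username, password), and returns the
# scheme-to-proxy-URL mapping that requests expects for its `proxies` argument
# (an empty dict disables proxying).
def get_config_sketch(proxy_obj) -> dict:
    if not proxy_obj:
        return {}
    uri = proxy_obj.uri
    if getattr(proxy_obj, 'username', None):
        # Embed basic-auth credentials into the proxy URL, e.g. http://user:pass@host:port
        scheme, rest = uri.split('://', 1)
        uri = f'{scheme}://{proxy_obj.username}:{proxy_obj.password}@{rest}'
    return {'http': uri, 'https': uri}
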
def __init__(self, destination_url: str, access_token: str, proxy_: destination.Proxy):
    self.access_token = access_token
    self.proxies = proxy.get_config(proxy_)
    self.url_builder = AnodotUrlBuilder(destination_url)
    self.params = {
        'token': access_token,
        'protocol': destination.HttpDestination.PROTOCOL_20
    }

def _get_script_params(self) -> list[dict]:
    return [{
        'key': 'ANODOT_URL',
        'value': self.pipeline.destination.url
    }, {
        'key': 'ACCESS_TOKEN',
        'value': self.pipeline.destination.access_key
    }, {
        'key': 'PROXIES',
        'value': proxy.get_config(self.pipeline.destination.proxy)
    }]

def is_valid_destination_url(url: str, proxy_obj: proxy.Proxy = None) -> bool:
    status_url = urllib.parse.urljoin(url, HttpDestination.STATUS_URL)
    try:
        response = requests.get(status_url, proxies=proxy.get_config(proxy_obj), timeout=5)
        response.raise_for_status()
    except (ConnectionError, requests.HTTPError, requests.exceptions.ConnectionError, requests.exceptions.ProxyError) as e:
        raise ValidationException(str(e))
    return True

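# Hypothetical caller (not from the source) showing how the two validators compose:
# is_valid_destination_url raises ValidationException on connectivity problems, while
# is_valid_resource_url returns False when the token is rejected with a 401.
def check_destination(url: str, resource_url: str, proxy_obj: proxy.Proxy = None) -> list:
    errors = []
    try:
        is_valid_destination_url(url, proxy_obj)
    except ValidationException as e:
        errors.append(f'Destination URL is unreachable: {e}')
    try:
        if not is_valid_resource_url(resource_url, proxy_obj):
            errors.append('Access token was rejected by the resource URL (401)')
    except requests.exceptions.RequestException as e:
        errors.append(f'Resource URL check failed: {e}')
    return errors
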
def __init__(self, destination_: HttpDestination):
    self.url = destination_.url
    self.access_key = destination_.access_key
    self.api_token = destination_.token
    self.proxies = proxy.get_config(destination_.proxy)
    self.url_builder = AnodotUrlBuilder(destination_.url)
    self.session = requests.Session()
    self.auth_token: Optional[AuthenticationToken] = destination_.auth_token
    if self.auth_token:
        self.session.headers.update({
            'Authorization': 'Bearer ' + self.auth_token.authentication_token
        })

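# Hypothetical request helper for the same client (not from the source): the session built
# in __init__ already carries the Bearer header, so individual calls only need the proxies
# and a URL. AnodotUrlBuilder.build(...) is assumed here to join the base URL with path parts.
def _get(self, *path_parts, **params) -> dict:
    response = self.session.get(
        self.url_builder.build(*path_parts),
        params=params,
        proxies=self.proxies,
        timeout=30,
    )
    response.raise_for_status()
    return response.json()
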
def _get_script_params(self) -> list[dict]:
    return [
        {
            'key': 'PROXIES',
            'value': proxy.get_config(self.pipeline.destination.proxy)
        },
        {
            'key': 'WATERMARK_URL',
            'value': urllib.parse.urljoin(
                self.pipeline.destination.url,
                f'/api/v1/metrics/watermark'
                f'?token={self.pipeline.destination.token}&protocol={self.pipeline.destination.PROTOCOL_30}',
            )
        },
        {
            'key': 'WATERMARK_DELTA_MONITORING_ENDPOINT',
            'value': urllib.parse.urljoin(
                self.pipeline.streamsets.agent_external_url,
                f'/monitoring/watermark_delta/{self.pipeline.name}')
        },
        {
            'key': 'WATERMARK_SENT_MONITORING_ENDPOINT',
            'value': urllib.parse.urljoin(
                self.pipeline.streamsets.agent_external_url,
                f'/monitoring/watermark_sent/{self.pipeline.name}')
        },
        {
            'key': 'WATERMARK_IN_LOCAL_TIMEZONE',
            'value': str(self.pipeline.watermark_in_local_timezone),
        },
        {
            'key': 'TIMEZONE',
            'value': str(self.pipeline.timezone),
        },
        {
            'key': 'WATERMARK_LOGS',
            'value': str(self.pipeline.watermark_logs_enabled)
        },
    ]

def run(topic, file_type, brokers: list):
    try:
        destination_ = destination.repository.get()
        api_client = anodot.ApiClient(
            destination_.access_key,
            proxies=proxy.get_config(destination_.proxy),
            base_url=destination_.url,
        )
        messages_received = read_data(topic, file_type, brokers)
        if messages_received == 0:
            raise NoDataException(f'Read 0 messages in topic {topic}')
        logger_.info(f'{messages_received} messages were read')
        with open(get_file_path(file_type), 'rb') as f_in:
            result = api_client.send_topology_data(file_type, gzip.compress(f_in.read()))
        logger_.info('File sent: ' + str(result))
    except Exception:
        monitoring.increase_scheduled_script_error_counter('kafka-to-topology')
        logger_.exception(traceback.format_exc())

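# Hypothetical entry point for the scheduled script (argument names are assumptions, not
# taken from the source); it simply forwards command-line arguments to run().
if __name__ == '__main__':
    import argparse

    parser = argparse.ArgumentParser(description='Read Kafka topology data and send it to Anodot')
    parser.add_argument('topic')
    parser.add_argument('file_type')
    parser.add_argument('--brokers', nargs='+', default=['localhost:9092'])
    args = parser.parse_args()
    run(args.topic, args.file_type, args.brokers)
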
def _get_script_params(self) -> list[dict]:
    return [
        {
            'key': 'REQUEST_RETRIES',
            'value': constants.PIPELINE_REQUEST_RETRIES
        },
        {
            'key': 'RETRY_SLEEP_TIME_SECONDS',
            'value': constants.PIPELINE_RETRY_SLEEP_TIME_SECONDS
        },
        {
            'key': 'ANODOT_URL',
            'value': self.pipeline.destination.url
        },
        {
            'key': 'ACCESS_TOKEN',
            'value': self.pipeline.destination.access_key
        },
        {
            'key': 'PROXIES',
            'value': proxy.get_config(self.pipeline.destination.proxy)
        },
    ]

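# The script-parameter builders above all emit the same shape: a list of
# {'key': ..., 'value': ...} pairs. A small illustrative helper (not from the source)
# that flattens them into a plain mapping, e.g. for logging or for handing to a script
# stage as environment-style variables:
def params_to_dict(params: list[dict]) -> dict:
    return {p['key']: str(p['value']) for p in params}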