Example No. 1
def get_aware_devices(token, customer_id):

    token = json.loads(token)['token']

    print(f'token: {token}')
    conn = get_connection()
    h = HttpHook(http_conn_id=conn.conn_id, method='GET')
    endpoint = f'/api/customer/{customer_id}/devices?limit=1000'
    headers = {
        "Content-Type": "application/json",
        "X-Authorization": "Bearer " + token
    }
    r = h.run(endpoint=endpoint, headers=headers)

    response = r.json()
    devices = {}

    for device in response['data']:
        d_name = device['name']  #USACE00504
        d_label = device['label']
        d_type = device['type']
        d_entity_type = device['id']['entityType']  #normally 'DEVICE'
        d_id = device['id']['id']  # UUID of device
        # print(f' entityType: {d_entity_type} - id: {d_id} - name: {d_name} - label: {d_label} - type: {d_type}')

        d = {
            "name": d_name,
            "type": d_type,
            "label": d_label,
            "entityType": d_entity_type
        }
        if d_name != "USACEQueue":
            devices[d_id] = d

    return json.dumps(devices)
Example No. 2
    def poke(self, context: 'Context') -> bool:
        from airflow.utils.operator_helpers import determine_kwargs

        hook = HttpHook(
            method=self.method,
            http_conn_id=self.http_conn_id,
            tcp_keep_alive=self.tcp_keep_alive,
            tcp_keep_alive_idle=self.tcp_keep_alive_idle,
            tcp_keep_alive_count=self.tcp_keep_alive_count,
            tcp_keep_alive_interval=self.tcp_keep_alive_interval,
        )

        self.log.info('Poking: %s', self.endpoint)
        try:
            response = hook.run(
                self.endpoint,
                data=self.request_params,
                headers=self.headers,
                extra_options=self.extra_options,
            )
            if self.response_check:
                kwargs = determine_kwargs(self.response_check, [response],
                                          context)
                return self.response_check(response, **kwargs)
        except AirflowException as exc:
            if str(exc).startswith("404"):
                return False

            raise exc

        return True
Example No. 3
def extract(
    batch_id, method="GET", http_conn_id="default_api", mongo_conn_id="default_mongo"
):

    http = HttpHook(method, http_conn_id=http_conn_id)

    mongo_conn = MongoHook(mongo_conn_id)
    ids_to_update_coll = mongo_conn.get_collection("ids_to_update", "courts")
    results_to_transform_coll = mongo_conn.get_collection(
        "results_to_transform", "courts"
    )

    # Note/TODO: because we add endpoints back that we couldn't handle, we may
    # get stuck in an infinite loop. Another solution is exiting whenever an
    # exception occurs, but this isn't ideal either
    while ids_to_update_coll.find_one({"batch_id": str(batch_id)}) is not None:

        # find a job to work on
        result = ids_to_update_coll.find_one_and_delete({"batch_id": str(batch_id)})
        api_id = result["api_id"]
        try:

            # transform to get a valid link
            # TODO: this needs to be generalized to any website
            endpoint = f"opinions/{api_id}"

            # pull data in
            response = http.run(endpoint)

            result_data = response.json()

            if response.status_code == 200:

                # store our result into mongo
                results_to_transform_coll.insert_one(
                    {"batch_id": str(batch_id), "data": result_data}
                )

            else:
                # TODO: throw a more specific exception
                raise AirflowException(
                    f"Received {response.status_code} code from {endpoint}."
                )

        except json.JSONDecodeError as j_error:
            print("Failed to decode response with {j_error}:\n{response.body}")
            mongo_conn.insert_one(
                "ids_to_update",
                {"api_id": str(api_id), "batch_id": str(batch_id)},
                mongo_db="courts",
            )
        except Exception as error:
            # something went wrong. Log it and return this endpoint to mongoDB so we can try again
            print(f"An exception occured while processing batch {batch_id}:\n{error}")
            mongo_conn.insert_one(
                "ids_to_update",
                {"api_id": str(api_id), "batch_id": str(batch_id)},
                mongo_db="courts",
            )
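
# A hypothetical sketch (not from the original repo) of wiring extract() into a DAG
# with a PythonOperator; the task id, batch id and connection ids are illustrative.
from airflow.operators.python import PythonOperator

extract_batch_0 = PythonOperator(
    task_id="extract_batch_0",
    python_callable=extract,
    op_kwargs={
        "batch_id": 0,                      # illustrative batch id
        "http_conn_id": "default_api",
        "mongo_conn_id": "default_mongo",
    },
)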
Example No. 4
def fetch_log_page(hook: HttpHook, endpoint, line_from, line_to):
    prepd_endpoint = endpoint + f"?from={line_from}&size={line_to}"
    response = hook.run(prepd_endpoint)
    try:
        return json.loads(response.content)
    except JSONDecodeError as ex:
        log_response_error("$", response)
        raise AirflowBadRequest(ex)
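
# A minimal sketch (illustrative only) of paging through a log with fetch_log_page.
# The connection id, the "batches/42/log" endpoint and the "log" response key are
# assumptions, not part of the original snippet.
hook = HttpHook(method="GET", http_conn_id="livy_default")  # assumed connection id
line_from, page_size = 0, 100
while True:
    page = fetch_log_page(hook, "batches/42/log", line_from, page_size)
    lines = page.get("log", [])          # assumed response key
    if not lines:
        break
    for line in lines:
        print(line)
    line_from += len(lines)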
Example No. 5
def flashfloodinfo_authenticate():
    conn = get_connection()
    h = HttpHook(http_conn_id=conn.conn_id, method='POST')
    r = h.run(endpoint='/api/auth/login',
              data=json.dumps({
                  'username': conn.login,
                  'password': conn.password
              }),
              headers={"Content-Type": "application/json"})

    return json.dumps(r.json())
Example No. 6
    def execute(self, context):
        http = HttpHook(self.method, http_conn_id=self.http_conn_id)

        self.log.info("Calling HTTP method")

        response = http.run(self.endpoint, self.data, self.headers,
                            self.extra_options)
        if self.log_response:
            self.log.info(response.text)
        if self.response_check:
            if not self.response_check(response):
                raise AirflowException("Response check returned False.")
        return response.text
Example No. 7
def flashflood_get_customer(token):

    token = json.loads(token)['token']

    conn = get_connection()
    h = HttpHook(http_conn_id=conn.conn_id, method='GET')
    endpoint = '/api/auth/user'
    headers = {
        "Content-Type": "application/json",
        "X-Authorization": "Bearer " + token
    }
    r = h.run(endpoint=endpoint, headers=headers)

    return r.json()['customerId']['id']
Example No. 8
    def write_to_midas(instrument, aware_data):
        """ Write timeseries data from FlashFlood API to MIDAS API (single instrument)
        
        Arguments: 
            instrument {dictionary} - instrument dictionary
            aware_data {string} - aware_data object as string
        """

        # Convert string to dict
        aware_response = json.loads(aware_data)

        # Return from function if aware data not present
        if len(aware_response) == 0:
            return

        logging.info(f'instrument: {instrument}')
        logging.debug(f'aware_data: {aware_data}')       

        payload = []

        for aware_param, midas_ts_id in instrument['aware_parameters'].items():
            if midas_ts_id is not None:
                tsv_obj = {}
                tsv_obj['timeseries_id'] = midas_ts_id
                tsv_list = []

                print(f"AWARE values for {aware_param}:")
                # Get the list that corresponds to the AWARE param
                aware_tsv_list = aware_response[aware_param]
                for tsv in aware_tsv_list:
                    tsv_list.append({"time": aware.epoch_ms_to_human(tsv['ts']), "value": float(tsv['value'])})
                
                tsv_obj['items'] = tsv_list
                payload.append(tsv_obj)       
        

        # pp = pprint.PrettyPrinter(depth=6)
        # pp.pprint(json.dumps(midas_payload))
        print(f'payload: {json.dumps(payload)}')
        
              
        conn = midas.get_connection()
        h = HttpHook(http_conn_id=conn.conn_id, method='POST')    
        endpoint = f"/projects/{instrument['project_id']}/timeseries_measurements?key_id={conn.login}&key={conn.password}"
        headers = {"Content-Type": "application/json"}
        r = h.run(endpoint=endpoint, json=payload, headers=headers)           

        return
Example No. 9
def get_device_ts_data(token, device_id, startTs, endTs, keys, limit):

    conn = get_connection()
    h = HttpHook(http_conn_id=conn.conn_id, method='GET')
    headers = {
        "Content-Type": "application/json",
        "X-Authorization": "Bearer " + token
    }
    endpoint = (
        f"/api/plugins/telemetry/DEVICE/{device_id}"
        f"/values/timeseries?limit={limit}&agg=NONE&startTs={startTs}&endTs={endTs}&keys={keys}"
    )
    # print(f'calling ts data endpoint: {endpoint} with token {token}')
    r = h.run(endpoint=endpoint, headers=headers)

    return r
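
# A hypothetical sketch of chaining the FlashFlood helpers above (Examples No. 1, 5,
# 7 and 9).  The key list and the six-hour window are illustrative assumptions.
import time

token_json = flashfloodinfo_authenticate()                        # Example No. 5
token = json.loads(token_json)['token']
customer_id = flashflood_get_customer(token_json)                 # Example No. 7
devices = json.loads(get_aware_devices(token_json, customer_id))  # Example No. 1

end_ts = int(time.time() * 1000)               # epoch milliseconds
start_ts = end_ts - 6 * 60 * 60 * 1000         # last six hours (illustrative)
for device_id in devices:
    r = get_device_ts_data(token, device_id, start_ts, end_ts,
                           keys="precip,elevation", limit=1000)   # assumed keys
    print(device_id, r.json())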
Example No. 10
    def execute(self, context: 'Context') -> Any:
        from airflow.utils.operator_helpers import determine_kwargs

        http = HttpHook(self.method, http_conn_id=self.http_conn_id, auth_type=self.auth_type)

        self.log.info("Calling HTTP method")

        response = http.run(self.endpoint, self.data, self.headers, self.extra_options)
        if self.log_response:
            self.log.info(response.text)
        if self.response_check:
            kwargs = determine_kwargs(self.response_check, [response], context)
            if not self.response_check(response, **kwargs):
                raise AirflowException("Response check returned False.")
        if self.response_filter:
            kwargs = determine_kwargs(self.response_filter, [response], context)
            return self.response_filter(response, **kwargs)
        return response.text
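
# For reference, a minimal sketch of how an operator with this execute() method
# (Airflow's SimpleHttpOperator) is typically wired up, using response_filter to
# push parsed JSON to XCom.  The endpoint and task id are assumptions.
from airflow.providers.http.operators.http import SimpleHttpOperator

get_users = SimpleHttpOperator(
    task_id="get_users",
    http_conn_id="http_default",
    method="GET",
    endpoint="api/users",                                    # assumed endpoint
    response_check=lambda response: response.status_code == 200,
    response_filter=lambda response: response.json(),        # XCom gets parsed JSON
    log_response=True,
)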
Example No. 11
    def execute(self, context: Dict[str, Any]) -> Any:
        from airflow.utils.operator_helpers import make_kwargs_callable

        http = HttpHook(self.method, http_conn_id=self.http_conn_id)

        self.log.info("Calling HTTP method")

        response = http.run(self.endpoint, self.data, self.headers, self.extra_options)
        if self.log_response:
            self.log.info(response.text)
        if self.response_check:
            kwargs_callable = make_kwargs_callable(self.response_check)
            if not kwargs_callable(response, **context):
                raise AirflowException("Response check returned False.")
        if self.response_filter:
            kwargs_callable = make_kwargs_callable(self.response_filter)
            return kwargs_callable(response, **context)
        return response.text
Example No. 12
class BaseAPIOperator(BaseOperator):
    """
    Base Operator for API requests to a main endpoint that generates sub-endpoints for further requests.

    :param endpoint: The API endpoint to query.
    :type endpoint: str
    :param parser: Function that parses the endpoint response into a list of sub-endpoints.
                   Should return a list of strings.
    :type parser: function that takes a requests.Response object and returns a list of sub-endpoints
    :param response_count: Function that returns number of items in API response
    :type response_count: Callable[[requests.Response], int]
    :param number_of_batches: Number of batches used in the DAG Run.
    :type number_of_batches: int
    :param http_conn_id: Airflow Connection variable name for the base API URL.
    :type http_conn_id: str
    :param mongo_conn_id: Airflow Connection variable name for the MongoDB.
    :type mongo_conn_id: str
    :param response_valid: Function that checks if status code is valid. Defaults to 200 status only.
    :type response_valid: Callable[[requests.Response], bool]
    :param query_builder: Function that returns a Dictionary of query parameters.
    :type query_builder: Callable[[None], Dict[str, str]]
    :param header: Headers to be added to API request.
    :type header: dict of string key-value pairs
    :param options: Optional keyword arguments for the Requests library get function.
    :type options: dict of string key-value pairs
    :param log_response: Flag to allow for logging Request response. Defaults to False.
    :type log_response: bool
    """
    @apply_defaults
    def __init__(
        self,
        endpoint: str,
        parser: Callable[
            [requests.Response],
            list],  # Function that parses a response to gather specific endpoints
        response_count: Callable[
            [requests.Response],
            int],  # Determines the number of items from query
        number_of_batches: int,
        http_conn_id: str,
        mongo_conn_id: str,
        batch_name: str,
        response_valid: Callable[[requests.Response], bool] = None,
        query_builder: Callable[[None], Dict[str, str]] = None,
        header: Optional[Dict[str, str]] = None,
        options: Optional[Dict[str, Any]] = None,
        log_response: bool = False,
        **kwargs,
    ) -> None:

        # delegate to BaseOperator, we don't need to do anything else
        super().__init__(**kwargs)

        self.number_of_batches = number_of_batches

        # API endpoint information, we should only be making GET requests from here
        # Header is most likely unnecessary
        self.endpoint = endpoint
        self.method = "GET"
        self.query_builder = query_builder or self._default_query_builder
        self.header = header or {}

        self.http_conn_id = http_conn_id
        self.mongo_conn_id = mongo_conn_id
        self.batch_name = batch_name

        # Functions for operating on response data
        self.parser = parser
        self.response_count = response_count
        self.response_valid = response_valid or self._default_response_valid

        # Options is for Requests library functions
        self.options = options or {}

        self.log_response = log_response

        # these get instantiated on execute
        self.http = None
        self.mongo_conn = None

    # Override the execute method, we want any derived classes to override
    # _execute()
    def execute(self, context: Dict[str, Any]) -> Any:

        self.http = HttpHook(self.method, http_conn_id=self.http_conn_id)
        self.mongo_conn = MongoHook(self.mongo_conn_id)

        # generate query parameters
        self.query = self.query_builder()

        self.log.info(f"Connecting to: {self.http_conn_id}")

        return_val = self._execute(context)

        self._shutdown()

        return return_val

    def _execute(self, context: Dict[str, Any]) -> Any:
        raise NotImplementedError(
            "_execute() needs to be defined for subclasses.")

    def _call_once(self,
                   use_query: bool = False) -> Union[requests.Response, None]:
        """
        Execute a single API call.

        :param use_query: If True, include the internally built query parameters in the request.
        :type use_query: bool (defaults to False)
        """
        response = self.http.run(
            self.endpoint,
            self.query if use_query else {},
            self.header,
            self.options,
        )

        if self.log_response:
            self.log.info(response.url)

        if not self.response_valid(response):
            return None

        return self._to_json(response)

    def _to_json(self, response: requests.Response):
        try:
            return response.json()
        except JSONDecodeError:
            self.log.error(
                f"Failed to convert response to JSON: {response.url}")
            return None

    def _api_id_to_document(self, _id: str, name: str, batch_id: int):
        return {"api_id": str(_id), "batch_id": f"{name}{batch_id}"}

    def _default_query_builder(self) -> dict:
        return {}

    def _default_response_valid(self, response: requests.Response) -> bool:
        """Default response_valid() function. Returns True only on 200."""
        return response.status_code == 200

    def _shutdown(self) -> None:
        """Explicitly close MongoDB connection"""
        if self.mongo_conn:
            self.mongo_conn.close_conn()
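
# A hypothetical concrete subclass (not from the original repo): it parses the first
# response into sub-endpoint ids and spreads them over the configured batches in
# MongoDB.  The "ids_to_update"/"courts" collection names, and applying self.parser
# to the JSON payload that _call_once() returns, are assumptions.
class SeedBatchesOperator(BaseAPIOperator):

    def _execute(self, context: Dict[str, Any]) -> int:
        data = self._call_once(use_query=True)
        if data is None:
            self.log.error("Invalid or non-JSON response from %s", self.endpoint)
            return 0

        ids = self.parser(data)  # list of sub-endpoint ids
        collection = self.mongo_conn.get_collection("ids_to_update", "courts")
        documents = [
            self._api_id_to_document(_id, self.batch_name, i % self.number_of_batches)
            for i, _id in enumerate(ids)
        ]
        if documents:
            collection.insert_many(documents)
        return len(documents)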
Example No. 13
class TestHttpHook(unittest.TestCase):
    """Test get, post and raise_for_status"""
    def setUp(self):
        session = requests.Session()
        adapter = requests_mock.Adapter()
        session.mount('mock', adapter)
        self.get_hook = HttpHook(method='GET')
        self.get_lowercase_hook = HttpHook(method='get')
        self.post_hook = HttpHook(method='POST')

    @requests_mock.mock()
    def test_raise_for_status_with_200(self, m):

        m.get('http://test:8080/v1/test',
              status_code=200,
              text='{"status":{"status": 200}}',
              reason='OK')
        with mock.patch('airflow.hooks.base_hook.BaseHook.get_connection',
                        side_effect=get_airflow_connection):
            resp = self.get_hook.run('v1/test')
            self.assertEqual(resp.text, '{"status":{"status": 200}}')

    @requests_mock.mock()
    @mock.patch('requests.Session')
    @mock.patch('requests.Request')
    def test_get_request_with_port(self, mock_requests, request_mock,
                                   mock_session):
        from requests.exceptions import MissingSchema

        with mock.patch('airflow.hooks.base_hook.BaseHook.get_connection',
                        side_effect=get_airflow_connection_with_port):
            expected_url = 'http://test.com:1234/some/endpoint'
            for endpoint in ['some/endpoint', '/some/endpoint']:

                try:
                    self.get_hook.run(endpoint)
                except MissingSchema:
                    pass

                request_mock.assert_called_once_with(mock.ANY,
                                                     expected_url,
                                                     headers=mock.ANY,
                                                     params=mock.ANY)

                request_mock.reset_mock()

    @requests_mock.mock()
    def test_get_request_do_not_raise_for_status_if_check_response_is_false(
            self, m):

        m.get(
            'http://test:8080/v1/test',
            status_code=404,
            text='{"status":{"status": 404}}',
            reason='Bad request',
        )

        with mock.patch('airflow.hooks.base_hook.BaseHook.get_connection',
                        side_effect=get_airflow_connection):
            resp = self.get_hook.run('v1/test',
                                     extra_options={'check_response': False})
            self.assertEqual(resp.text, '{"status":{"status": 404}}')

    @requests_mock.mock()
    def test_hook_contains_header_from_extra_field(self, mock_requests):
        with mock.patch('airflow.hooks.base_hook.BaseHook.get_connection',
                        side_effect=get_airflow_connection):
            expected_conn = get_airflow_connection()
            conn = self.get_hook.get_conn()
            self.assertDictContainsSubset(json.loads(expected_conn.extra),
                                          conn.headers)
            self.assertEqual(conn.headers.get('bareer'), 'test')

    @requests_mock.mock()
    @mock.patch('requests.Request')
    def test_hook_with_method_in_lowercase(self, mock_requests, request_mock):
        from requests.exceptions import InvalidURL, MissingSchema

        with mock.patch('airflow.hooks.base_hook.BaseHook.get_connection',
                        side_effect=get_airflow_connection_with_port):
            data = "test params"
            try:
                self.get_lowercase_hook.run('v1/test', data=data)
            except (MissingSchema, InvalidURL):
                pass
            request_mock.assert_called_once_with(mock.ANY,
                                                 mock.ANY,
                                                 headers=mock.ANY,
                                                 params=data)

    @requests_mock.mock()
    def test_hook_uses_provided_header(self, mock_requests):
        conn = self.get_hook.get_conn(headers={"bareer": "newT0k3n"})
        self.assertEqual(conn.headers.get('bareer'), "newT0k3n")

    @requests_mock.mock()
    def test_hook_has_no_header_from_extra(self, mock_requests):
        conn = self.get_hook.get_conn()
        self.assertIsNone(conn.headers.get('bareer'))

    @requests_mock.mock()
    def test_hooks_header_from_extra_is_overridden(self, mock_requests):
        with mock.patch('airflow.hooks.base_hook.BaseHook.get_connection',
                        side_effect=get_airflow_connection):
            conn = self.get_hook.get_conn(headers={"bareer": "newT0k3n"})
            self.assertEqual(conn.headers.get('bareer'), 'newT0k3n')

    @requests_mock.mock()
    def test_post_request(self, mock_requests):
        mock_requests.post('http://test:8080/v1/test',
                           status_code=200,
                           text='{"status":{"status": 200}}',
                           reason='OK')

        with mock.patch('airflow.hooks.base_hook.BaseHook.get_connection',
                        side_effect=get_airflow_connection):
            resp = self.post_hook.run('v1/test')
            self.assertEqual(resp.status_code, 200)

    @requests_mock.mock()
    def test_post_request_with_error_code(self, mock_requests):
        mock_requests.post(
            'http://test:8080/v1/test',
            status_code=418,
            text='{"status":{"status": 418}}',
            reason='I\'m a teapot',
        )

        with mock.patch('airflow.hooks.base_hook.BaseHook.get_connection',
                        side_effect=get_airflow_connection):
            with self.assertRaises(AirflowException):
                self.post_hook.run('v1/test')

    @requests_mock.mock()
    def test_post_request_do_not_raise_for_status_if_check_response_is_false(
            self, mock_requests):
        mock_requests.post('http://*****:*****')

    @mock.patch('airflow.providers.http.hooks.http.requests.Session')
    def test_retry_on_conn_error(self, mocked_session):

        retry_args = dict(
            wait=tenacity.wait_none(),
            stop=tenacity.stop_after_attempt(7),
            retry=tenacity.retry_if_exception_type(
                requests.exceptions.ConnectionError),
        )

        def send_and_raise(unused_request, **kwargs):
            raise requests.exceptions.ConnectionError

        mocked_session().send.side_effect = send_and_raise
        # The job failed for some reason
        with self.assertRaises(tenacity.RetryError):
            self.get_hook.run_with_advanced_retry(endpoint='v1/test',
                                                  _retry_args=retry_args)
        self.assertEqual(self.get_hook._retry_obj.stop.max_attempt_number + 1,
                         mocked_session.call_count)

    @requests_mock.mock()
    def test_run_with_advanced_retry(self, m):

        m.get('http://*****:*****')

    @mock.patch('airflow.providers.http.hooks.http.HttpHook.get_connection')
    def test_http_connection(self, mock_get_connection):
        conn = Connection(conn_id='http_default',
                          conn_type='http',
                          host='localhost',
                          schema='http')
        mock_get_connection.return_value = conn
        hook = HttpHook()
        hook.get_conn({})
        self.assertEqual(hook.base_url, 'http://localhost')

    @mock.patch('airflow.providers.http.hooks.http.HttpHook.get_connection')
    def test_https_connection(self, mock_get_connection):
        conn = Connection(conn_id='http_default',
                          conn_type='http',
                          host='localhost',
                          schema='https')
        mock_get_connection.return_value = conn
        hook = HttpHook()
        hook.get_conn({})
        self.assertEqual(hook.base_url, 'https://localhost')

    @mock.patch('airflow.providers.http.hooks.http.HttpHook.get_connection')
    def test_host_encoded_http_connection(self, mock_get_connection):
        conn = Connection(conn_id='http_default',
                          conn_type='http',
                          host='http://localhost')
        mock_get_connection.return_value = conn
        hook = HttpHook()
        hook.get_conn({})
        self.assertEqual(hook.base_url, 'http://localhost')

    @mock.patch('airflow.providers.http.hooks.http.HttpHook.get_connection')
    def test_host_encoded_https_connection(self, mock_get_connection):
        conn = Connection(conn_id='http_default',
                          conn_type='http',
                          host='https://localhost')
        mock_get_connection.return_value = conn
        hook = HttpHook()
        hook.get_conn({})
        self.assertEqual(hook.base_url, 'https://localhost')

    def test_method_converted_to_uppercase_when_created_in_lowercase(self):
        self.assertEqual(self.get_lowercase_hook.method, 'GET')

    @mock.patch('airflow.providers.http.hooks.http.HttpHook.get_connection')
    def test_connection_without_host(self, mock_get_connection):
        conn = Connection(conn_id='http_default', conn_type='http')
        mock_get_connection.return_value = conn

        hook = HttpHook()
        hook.get_conn({})
        self.assertEqual(hook.base_url, 'http://')

    @parameterized.expand([
        'GET',
        'POST',
    ])
    @requests_mock.mock()
    def test_json_request(self, method, mock_requests):
        obj1 = {'a': 1, 'b': 'abc', 'c': [1, 2, {"d": 10}]}

        def match_obj1(request):
            return request.json() == obj1

        mock_requests.request(method=method,
                              url='//test:8080/v1/test',
                              additional_matcher=match_obj1)

        with mock.patch('airflow.hooks.base_hook.BaseHook.get_connection',
                        side_effect=get_airflow_connection):
            # will raise NoMockAddress exception if obj1 != request.json()
            HttpHook(method=method).run('v1/test', json=obj1)
Example No. 14
    def add_devices_to_midas(aware_devices, metadata, midas_aware_instruments):

        aware_devices = json.loads(aware_devices)
        midas_aware_instruments = json.loads(midas_aware_instruments)

        # print(f'aware_devices: {aware_devices}')
        instruments_dict = {}

        # Convert midas aware instruments list results to dict with aware_id as key
        for i in midas_aware_instruments:
            instruments_dict[i['aware_id']] = i

        project_id = "82c07c9a-9ec8-4ff5-850c-b1d74ffb5e14"
        metadata = json.loads(metadata)

        # print('--METaDATA--')
        # print(metadata)

        payload = []

        # Fake new gage not in MIDAS
        # aware_devices['d0574790-4fc3-11eb-a888-07a0a8e5af03'] = {'name': 'USACE00999', 'type': 'USACE', 'label': None, 'entityType': 'DEVICE'}

        for d_id, d_obj in aware_devices.items():

            #if not any(obj['name'] == f"AWARE Gage {d_obj['name']}" for obj in midas_aware_instruments):

            if d_id not in instruments_dict.keys():

                print('#' * 50)
                print(
                    f"AWARE Gage {d_obj['name']} NOT in MIDAS, preparing to add..."
                )
                instrument_obj = {}

                # Metadata may not be available.  It requires a timeseries call
                # and may not result in values if the window is not large enough
                try:
                    lat = float(metadata[d_id]['lat']['value'])
                    lon = float(metadata[d_id]['lon']['value'])
                except (KeyError, TypeError, ValueError):
                    logging.warning(
                        f"lat/lon not available for: {d_obj['name']}")
                    lat = 0.0
                    lon = 0.0

                instrument_obj['aware_id'] = d_id
                instrument_obj[
                    'status_id'] = "e26ba2ef-9b52-4c71-97df-9e4b6cf4174d"
                instrument_obj['status'] = "active"
                instrument_obj['status_time'] = datetime.now().strftime(
                    '%Y-%m-%dT%H:%M:%S.%fZ')
                # instrument_obj['slug']: f"aware-gage-{d_obj['name']}"
                instrument_obj['formula'] = None
                instrument_obj['name'] = d_obj['name']
                instrument_obj[
                    'type_id'] = "98a61f29-18a8-430a-9d02-0f53486e0984"
                instrument_obj['type'] = "Instrument"
                instrument_obj['geometry'] = {
                    "type": "Point",
                    "coordinates": [lon, lat]
                }
                instrument_obj['station'] = None
                instrument_obj['offset'] = None
                instrument_obj['project_id'] = project_id

                payload.append(instrument_obj)

                for k, v in instrument_obj.items():
                    print(f'{k}: {v}')

            else:
                print(f"{d_obj['name']} already in MIDAS, skipping...")

        print(json.dumps(payload))

        if len(payload) > 0:
            conn = midas.get_connection()
            h = HttpHook(http_conn_id=conn.conn_id, method='POST')
            endpoint = f'/projects/{project_id}/instruments?key_id={conn.login}&key={conn.password}'
            headers = {"Content-Type": "application/json"}
            r = h.run(endpoint=endpoint, json=payload, headers=headers)

        ########### Example payload ###################
        # [{
        #     "aware_id": "fdbe0c62-bcf7-49be-af7b-316af478dfc2",
        #     "status_id": "e26ba2ef-9b52-4c71-97df-9e4b6cf4174d",
        #     "status": "active",
        #     "status_time": "2021-01-21T19:51:55.261Z",
        #     "slug": "aware-gage-1",
        #     "formula": null,
        #     "name": "Demo Piezometer 2",
        #     "type_id": "98a61f29-18a8-430a-9d02-0f53486e0984",
        #     "type": "Instrument",
        #     "geometry": {
        #         "type": "Point",
        #         "coordinates": [
        #             -80.8,
        #             26.7
        #         ]
        #     },
        #     "station": null,
        #     "offset": null,
        #     "project_id": "82c07c9a-9ec8-4ff5-850c-b1d74ffb5e14"
        # }]

        return
Example No. 15
        file.write(data)
        return file.name


with airflow.DAG(dag_id='airbods',
                 start_date=datetime.datetime(
                     2021, 5, 17, tzinfo=datetime.timezone.utc)) as dag:
    # Datacake HTTP
    hook = HttpHook(http_conn_id='datacake_airbods')

    # List devices
    response = hook.run(endpoint=None,
                        json=dict(query=textwrap.dedent("""
query {
  allDevices(inWorkspace:"0bdfb2eb-6531-4afb-a842-ce6b51d3c980") {
    id
    serialNumber
    verboseName
  }
}
""")))
    devices = response.json()['data']['allDevices']

    # Iterate over devices
    for device in devices:
        # Download raw data for each device
        get_raw_data = GraphQLHttpOperator(
            http_conn_id='datacake_airbods',
            task_id='raw_{}'.format(device['id']),
            doc="Get raw data for {}".format(device['verboseName']),
            params=device,
            # Jinja escape characters for GraphQL syntax
Example No. 16
class HttpSensor(BaseSensorOperator):
    """
    Executes an HTTP GET statement and returns False on failure caused by
    404 Not Found or `response_check` returning False.

    HTTP Error codes other than 404 (like 403) or Connection Refused Error
    would raise an exception and fail the sensor itself directly (no more poking).
    To avoid failing the task for codes other than 404, the argument ``extra_options``
    can be passed with the value ``{'check_response': False}``. It will make the ``response_check``
    be executed for any HTTP status code.

    The response check can access the template context of the operator:

    .. code-block:: python

        def response_check(response, task_instance):
            # The task_instance is injected, so you can pull data from XCom
            # Other context variables such as dag, ds, execution_date are also available.
            xcom_data = task_instance.xcom_pull(task_ids="pushing_task")
            # In practice you would do something more sensible with this data.
            print(xcom_data)
            return True


        HttpSensor(task_id="my_http_sensor", ..., response_check=response_check)

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:HttpSensor`

    :param http_conn_id: The :ref:`http connection<howto/connection:http>` to run the
        sensor against
    :param method: The HTTP request method to use
    :param endpoint: The relative part of the full url
    :param request_params: The parameters to be added to the GET url
    :param headers: The HTTP headers to be added to the GET request
    :param response_check: A check against the 'requests' response object.
        The callable takes the response object as the first positional argument
        and optionally any number of keyword arguments available in the context dictionary.
        It should return True for 'pass' and False otherwise.
    :param extra_options: Extra options for the 'requests' library, see the
        'requests' documentation (options to modify timeout, ssl, etc.)
    """

    template_fields: Sequence[str] = ('endpoint', 'request_params', 'headers')

    def __init__(
        self,
        *,
        endpoint: str,
        http_conn_id: str = 'http_default',
        method: str = 'GET',
        request_params: Optional[Dict[str, Any]] = None,
        headers: Optional[Dict[str, Any]] = None,
        response_check: Optional[Callable[..., bool]] = None,
        extra_options: Optional[Dict[str, Any]] = None,
        **kwargs: Any,
    ) -> None:
        super().__init__(**kwargs)
        self.endpoint = endpoint
        self.http_conn_id = http_conn_id
        self.method = method
        self.request_params = request_params or {}
        self.headers = headers or {}
        self.extra_options = extra_options or {}
        self.response_check = response_check

        self.hook = HttpHook(method=method, http_conn_id=http_conn_id)

    def poke(self, context: 'Context') -> bool:
        from airflow.utils.operator_helpers import determine_kwargs

        self.log.info('Poking: %s', self.endpoint)
        try:
            response = self.hook.run(
                self.endpoint,
                data=self.request_params,
                headers=self.headers,
                extra_options=self.extra_options,
            )
            if self.response_check:
                kwargs = determine_kwargs(self.response_check, [response],
                                          context)
                return self.response_check(response, **kwargs)
        except AirflowException as exc:
            if str(exc).startswith("404"):
                return False

            raise exc

        return True
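
# A short sketch of the sensor described in the docstring above, passing
# extra_options={'check_response': False} so that non-404 error codes still reach
# response_check instead of failing the task.  The endpoint, connection id and the
# "state" payload field are assumptions.
wait_for_report = HttpSensor(
    task_id="wait_for_report",
    http_conn_id="http_default",
    endpoint="api/report/status",                        # assumed endpoint
    request_params={"run_date": "{{ ds }}"},
    extra_options={"check_response": False},             # don't fail on non-2xx
    response_check=lambda response: (
        response.status_code == 200
        and response.json().get("state") == "ready"      # assumed payload field
    ),
    poke_interval=60,
    timeout=60 * 30,
)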
Example No. 17
class HttpSensor(BaseSensorOperator):
    """
    Executes an HTTP GET statement and returns False on failure caused by
    404 Not Found or `response_check` returning False.

    HTTP Error codes other than 404 (like 403) or Connection Refused Error
    would fail the sensor itself directly (no more poking).

    The response check can access the template context of the operator:

        def response_check(response, task_instance):
            # The task_instance is injected, so you can pull data from XCom
            # Other context variables such as dag, ds, execution_date are also available.
            xcom_data = task_instance.xcom_pull(task_ids='pushing_task')
            # In practice you would do something more sensible with this data.
            print(xcom_data)
            return True

        HttpSensor(task_id='my_http_sensor', ..., response_check=response_check)

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:HttpSensor`

    :param http_conn_id: The connection to run the sensor against
    :type http_conn_id: str
    :param method: The HTTP request method to use
    :type method: str
    :param endpoint: The relative part of the full url
    :type endpoint: str
    :param request_params: The parameters to be added to the GET url
    :type request_params: a dictionary of string key/value pairs
    :param headers: The HTTP headers to be added to the GET request
    :type headers: a dictionary of string key/value pairs
    :param response_check: A check against the 'requests' response object.
        Returns True for 'pass' and False otherwise.
    :type response_check: A lambda or defined function.
    :param extra_options: Extra options for the 'requests' library, see the
        'requests' documentation (options to modify timeout, ssl, etc.)
    :type extra_options: A dictionary of options, where key is string and value
        depends on the option that's being modified.
    """

    template_fields = ('endpoint', 'request_params')

    @apply_defaults
    def __init__(self,
                 endpoint: str,
                 http_conn_id: str = 'http_default',
                 method: str = 'GET',
                 request_params: Optional[Dict] = None,
                 headers: Optional[Dict] = None,
                 response_check: Optional[Callable] = None,
                 extra_options: Optional[Dict] = None,
                 *args,
                 **kwargs):
        super().__init__(*args, **kwargs)
        self.endpoint = endpoint
        self.http_conn_id = http_conn_id
        self.request_params = request_params or {}
        self.headers = headers or {}
        self.extra_options = extra_options or {}
        self.response_check = response_check

        self.hook = HttpHook(method=method, http_conn_id=http_conn_id)

    def poke(self, context: Dict):
        self.log.info('Poking: %s', self.endpoint)
        try:
            response = self.hook.run(self.endpoint,
                                     data=self.request_params,
                                     headers=self.headers,
                                     extra_options=self.extra_options)
            if self.response_check:
                op_kwargs = PythonOperator.determine_op_kwargs(
                    self.response_check, context)
                return self.response_check(response, **op_kwargs)

        except AirflowException as exc:
            if str(exc).startswith("404"):
                return False

            raise exc

        return True