Example no. 1
    def _preprocess_values(self, e, original_attrs, col_names,
                           fiware_servicepath):
        values = []
        for cn in col_names:
            if cn == 'entity_type':
                values.append(e['type'])
            elif cn == 'entity_id':
                values.append(e['id'])
            elif cn == self.TIME_INDEX_NAME:
                values.append(e[self.TIME_INDEX_NAME])
            elif cn == FIWARE_SERVICEPATH:
                values.append(fiware_servicepath or '')
            else:
                # Normal attributes
                try:
                    attr = original_attrs[cn][0]
                    attr_t = original_attrs[cn][1]
                    mapped_type = self._compute_type(e['id'], attr_t, e[attr])

                    if SlfGeometry.is_ngsi_slf_attr(e[attr]):
                        ast = SlfGeometry.build_from_ngsi_dict(e[attr])
                        mapped_value = geocoding.slf.wktcodec.encode_as_wkt(
                            ast, srid=4326)
                    elif mapped_type == NGSI_TO_SQL[NGSI_GEOJSON]:
                        mapped_value = geocoding.geojson.wktcodec.encode_as_wkt(
                            e[attr]['value'], srid=4326)
                    elif mapped_type == NGSI_TO_SQL[NGSI_STRUCTURED_VALUE]:
                        mapped_value = pg8000.PGJsonb(e[attr]['value'])
                    elif mapped_type == NGSI_TO_SQL[NGSI_TEXT] \
                            and 'value' in e[attr] and e[attr]['value'] is not None:
                        mapped_value = str(e[attr]['value'])
                    elif mapped_type == PG_JSON_ARRAY:
                        mapped_value = pg8000.PGJsonb(e[attr]['value'])
                    elif 'type' in e[attr] and e[attr]['type'] == 'Property' \
                            and 'value' in e[attr] \
                            and isinstance(e[attr]['value'], dict) \
                            and '@type' in e[attr]['value'] \
                            and e[attr]['value']['@type'] == 'DateTime':
                        mapped_value = e[attr]['value']['@value']
                    elif 'type' in e[attr] \
                            and e[attr]['type'] == 'Relationship':
                        mapped_value = e[attr].get('value', None) or \
                                       e[attr].get('object', None)
                    else:
                        mapped_value = e[attr]['value']

                    values.append(mapped_value)
                except KeyError:
                    # this entity update does not have a value for the column
                    # so use None which will be inserted as NULL to the db.
                    values.append(None)
        return values
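The list returned above lines up one-to-one with col_names, so a caller can feed it straight into a parameterised INSERT. A rough sketch of that step follows; the table and column names are invented, and the ? placeholders assume pg8000.paramstyle has been set to 'qmark', as the queries in the later snippets suggest.

col_names = ['entity_type', 'entity_id', 'time_index']
values = ['AirQualityObserved', 'air-quality-1', '2019-04-01T00:00:00Z']

placeholders = ', '.join('?' for _ in col_names)
stmt = 'insert into etairqualityobserved ({}) values ({})'.format(
    ', '.join(col_names), placeholders)
# self.cursor.execute(stmt, values)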
Example no. 2
def _ngsi_array_to_db(attr):
    attr_v = attr.get('value', None)
    if isinstance(attr_v, list):
        return pg8000.PGJsonb(attr_v)
    logging.warning('{} cannot be cast to {} replaced with None'.format(
        attr.get('value', None), attr.get('type', None)))
    return None
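For reference, a minimal sketch of how the helper above behaves, assuming pg8000 and logging are already imported and the function is in scope; the attribute dicts are made up for illustration.

# A list value gets wrapped so pg8000 sends it as a jsonb parameter.
array_attr = {'type': 'StructuredValue', 'value': [1, 2, 3]}
wrapped = _ngsi_array_to_db(array_attr)   # -> pg8000.PGJsonb([1, 2, 3])

# Anything else is logged and replaced with None, which becomes NULL in the db.
scalar_attr = {'type': 'StructuredValue', 'value': 42}
assert _ngsi_array_to_db(scalar_attr) is None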
Example no. 3
def handler(event: SQSEvent, context: Dict[str, Any]) -> None:
    # read all our environment variables to throw errors early
    imagery = os.getenv('TILE_ENDPOINT')
    db = os.getenv('DATABASE_URL')
    prediction_endpoint = os.getenv('PREDICTION_ENDPOINT')

    assert (imagery)
    assert (db)
    assert (prediction_endpoint)

    # instantiate our DownloadAndPredict class
    dap = DownloadAndPredict(imagery=imagery,
                             db=db,
                             prediction_endpoint=prediction_endpoint)

    # get tiles from our SQS event
    tiles = dap.get_tiles(event)

    # construct a payload for our prediction endpoint
    tile_indices, payload = dap.get_prediction_payload(tiles)

    # send prediction request
    content = dap.post_prediction(payload)

    # save prediction request to db
    dap.save_to_db(tile_indices,
                   content['predictions'],
                   result_wrapper=lambda x: pg8000.PGJsonb(x))
Example no. 4
def handler(event: SQSEvent, context: Dict[str, Any]) -> None:
    # read all our environment variables to throw errors early
    bucket = os.getenv('BUCKET')
    db = os.getenv('DATABASE_URL')
    prediction_endpoint = os.getenv('PREDICTION_ENDPOINT')

    assert (bucket)
    assert (db)
    assert (prediction_endpoint)

    # instantiate our DownloadAndPredict class
    dap = S3_DownloadAndPredict(bucket=bucket,
                                db=db,
                                prediction_endpoint=prediction_endpoint)

    # construct a payload for our prediction endpoint
    s3_keys = [record['body'] for record in event['Records']]

    # send images from the S3 bucket for inference
    tile_indices, payload = dap.get_prediction_payload(s3_keys)

    # send prediction request
    content = dap.post_prediction(payload)

    # save prediction request to db
    dap.save_to_db(tile_indices,
                   content['predictions'],
                   result_wrapper=lambda x: pg8000.PGJsonb(x))
Example no. 5
def do_store():
    stmt = "insert into {} (table_name, entity_attrs) values (?, ?)" \
           " on conflict (table_name)" \
           " do update set entity_attrs = ?"
    stmt = stmt.format(METADATA_TABLE_NAME)
    entity_attrs_value = pg8000.PGJsonb(persisted_metadata)
    self.cursor.execute(
        stmt, (table_name, entity_attrs_value, entity_attrs_value))
Example no. 6
    def _preprocess_values(self, e, table, col_names, fiware_servicepath):
        values = []
        for cn in col_names:
            if cn == 'entity_type':
                values.append(e['type'])
            elif cn == 'entity_id':
                values.append(e['id'])
            elif cn == self.TIME_INDEX_NAME:
                values.append(e[self.TIME_INDEX_NAME])
            elif cn == FIWARE_SERVICEPATH:
                values.append(fiware_servicepath or '')
            else:
                # Normal attributes
                try:
                    mapped_type = table[cn]
                    ngsi_value = e[cn]['value']

                    if SlfGeometry.is_ngsi_slf_attr(e[cn]):
                        ast = SlfGeometry.build_from_ngsi_dict(e[cn])
                        mapped_value = geocoding.slf.wktcodec.encode_as_wkt(
                            ast, srid=4326)
                    elif mapped_type == NGSI_TO_SQL[NGSI_GEOJSON]:
                        mapped_value = geocoding.geojson.wktcodec.encode_as_wkt(
                            ngsi_value, srid=4326)
                    elif mapped_type == NGSI_TO_SQL[NGSI_STRUCTURED_VALUE]:
                        mapped_value = pg8000.PGJsonb(ngsi_value)
                    elif mapped_type == NGSI_TO_SQL[NGSI_TEXT] \
                            and ngsi_value is not None:
                        mapped_value = str(ngsi_value)
                    elif mapped_type == PG_JSON_ARRAY:
                        mapped_value = pg8000.PGJsonb(ngsi_value)
                    else:
                        mapped_value = ngsi_value

                    values.append(mapped_value)
                except KeyError:
                    # this entity update does not have a value for the column
                    # so use None which will be inserted as NULL to the db.
                    values.append(None)
        return values
Example no. 7
    def _update_metadata_table(self, table_name, metadata):
        """
        This method creates the METADATA_TABLE_NAME table (if it does not
        already exist), which stores, for each table_name (entity type), a
        translation table (dict) mapping the column names (derived from entity
        attributes) to the corresponding attribute metadata, such as the
        original attribute names and NGSI types.

        If such a table already exists, this method updates it when required.
        Required means either there was no metadata for that table_name, or
        the new metadata has entries not present in persisted_metadata.

        :param table_name: unicode
            The name of the table whose metadata will be updated

        :param metadata: dict
            The dict mapping each column name to its metadata. See original_attrs.
        """
        stmt = "create table if not exists {} " \
               "(table_name text primary key, entity_attrs jsonb)"
        op = stmt.format(METADATA_TABLE_NAME)
        self.cursor.execute(op)

        # if self.cursor.rowcount:  # NOTE. rowcount always -1; not supported
            # Table just created!
        #    persisted_metadata = {}
        # else:
        # Bring translation table!
        stmt = "select entity_attrs from {} where table_name = ?"
        self.cursor.execute(stmt.format(METADATA_TABLE_NAME), [table_name])

        # By design, one entry per table_name
        res = self.cursor.fetchall()
        persisted_metadata = res[0][0] if res else {}

        if metadata.keys() - persisted_metadata.keys():
            persisted_metadata.update(metadata)
            stmt = "insert into {} (table_name, entity_attrs) values (?, ?) " \
                   "on conflict (table_name) " \
                   "do update set entity_attrs = ?"
            stmt = stmt.format(METADATA_TABLE_NAME)
            entity_attrs_value = pg8000.PGJsonb(persisted_metadata)
            self.cursor.execute(stmt, (table_name, entity_attrs_value,
                                       entity_attrs_value))
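Loosely, this persists one JSONB row per entity table, mapping each column name to its original attribute name and NGSI type (the structure original_attrs uses in Example no. 1). A hypothetical sketch of the value that gets upserted follows; the attribute names are invented.

import pg8000

metadata = {
    'temperature': ['temperature', 'Number'],
    'location': ['location', 'geo:point'],
}

# The merged dict is wrapped with PGJsonb so the driver sends it as a jsonb
# parameter in the upsert above.
entity_attrs_value = pg8000.PGJsonb(metadata)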
Example no. 8
def handler(event: SQSEvent, context: Dict[str, Any]) -> None:
    # read all our environment variables to throw errors early
    imagery = os.getenv('TILE_ENDPOINT')
    db = os.getenv('DATABASE_URL')
    prediction_endpoint = os.getenv('PREDICTION_ENDPOINT')
    sh_instance_id = os.getenv('SH_INSTANCE_ID')

    assert (imagery)
    assert (db)
    assert (prediction_endpoint)
    assert (sh_instance_id)

    # instantiate our custom DownloadAndPredict class
    dap = SentinelHubDownloader(
        imagery=imagery,
        db=db,
        prediction_endpoint=prediction_endpoint,
        sentinel_wms_kwargs=dict(
            layer='MY-SENTINEL-HUB-LAYER',
            width=256,
            height=256,
            maxcc=0.20,
            instance_id=sh_instance_id,
            time=('2019-04-01', '2019-07-30'),
            time_difference=datetime.timedelta(days=21),
        ))

    # now that we've defined the behavior of our custom class, all the below
    # methods are identical to those in the base example

    # get tiles from our SQS event
    tiles = dap.get_tiles(event)

    # construct a payload for our prediction endpoint
    tile_indices, payload = dap.get_prediction_payload(tiles)

    # send prediction request
    content = dap.post_prediction(payload)

    # save prediction request to db
    dap.save_to_db(tile_indices,
                   content['predictions'],
                   result_wrapper=lambda x: pg8000.PGJsonb(x))
Example no. 9
    def save_to_db(self, tiles: List[Tile], results: List[Any],
                   result_wrapper: Optional[Callable] = None) -> None:
        db = urlparse(self.db)

        conn = pg8000.connect(
          user=db.username,
          password=db.password,
          host=db.hostname,
          database=db.path[1:],
          port=db.port
        )
        cursor = conn.cursor()

        for i, output in enumerate(results):
            # use a distinct name so the quadkey() helper isn't shadowed on
            # later iterations
            tile_quadkey = quadkey(tiles[i])
            # centroid = db.Column(Geometry('POINT', srid=4326))
            predictions = pg8000.PGJsonb(output)
            cursor.execute(
                "INSERT INTO mlenabler VALUES (null, %s, %s, %s) "
                "ON CONFLICT (id) DO UPDATE SET output = %s",
                (self.prediction_id, tile_quadkey, predictions, predictions))

        conn.commit()
        conn.close()
Example no. 10
def handler(event: SQSEvent, context: Dict[str, Any]) -> None:
    # read all our environment variables to throw errors early
    imagery = os.getenv('TILE_ENDPOINT')
    db = os.getenv('DATABASE_URL')
    prediction_endpoint = os.getenv('PREDICTION_ENDPOINT')
    model_image_size = os.getenv('MODEL_IMAGE_SIZE')

    assert(imagery)
    assert(db)
    assert(prediction_endpoint)
    assert(model_image_size)

    # instantiate our custom DownloadAndPredict class
    dap = SuperTileDownloader(
        imagery=imagery,
        db=db,
        prediction_endpoint=prediction_endpoint,
        model_image_size=int(model_image_size)
    )

    # now that we've defined the behavior of our custom class, all the below
    # methods are identical to those in the base example

    # get tiles from our SQS event
    tiles = dap.get_tiles(event)

    # construct a payload for our prediction endpoint
    tile_indices, payload = dap.get_prediction_payload(tiles)

    # send prediction request
    content = dap.post_prediction(payload)

    # save prediction request to db
    dap.save_to_db(
        tile_indices,
        content['predictions'],
        result_wrapper=lambda x: pg8000.PGJsonb(x)
    )
Example no. 11
def _to_db_ngsi_structured_value(data: dict) -> pg8000.PGJsonb:
    return pg8000.PGJsonb(data)
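A minimal usage sketch of the helper above; the connection details and table name are placeholders, and the %s placeholder relies on pg8000's default 'format' paramstyle.

import pg8000

conn = pg8000.connect(user='postgres', password='secret', database='quantumleap')
cursor = conn.cursor()

payload = {'type': 'Point', 'coordinates': [7.5, 45.1]}

# Wrapping the dict tells pg8000 to send it as a jsonb parameter rather than text.
cursor.execute(
    "insert into my_table (entity_attrs) values (%s)",
    (_to_db_ngsi_structured_value(payload),))
conn.commit()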