Example #1
def stark_import(mytimer: func.TimerRequest):
    """
    The main STARK import Azure Function routine.

    """

    utc_timestamp = (datetime.datetime.utcnow().replace(
        tzinfo=datetime.timezone.utc).isoformat())

    logging.info("Python stark timer trigger function started at %s",
                 utc_timestamp)

    # Get the data from the website
    status, error, energy_df = scrape_data()

    if not status:
        log_upload_event(CONST_STARK, "stark.co.uk", status, error,
                         SQL_CONNECTION_STRING)
    else:
        status, error = import_energy_data(energy_df, SQL_CONNECTION_STRING,
                                           SQL_DBNAME)

    logging.info(f"Log: {status} {error}")

    utc_timestamp = (datetime.datetime.utcnow().replace(
        tzinfo=datetime.timezone.utc).isoformat())

    logging.info("Python stark timer trigger function finished at %s",
                 utc_timestamp)
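
For a quick local run one could call the routine directly with a stub in place of the timer binding; the MockTimer class below is hypothetical and not part of the azure-functions SDK, which supplies a real func.TimerRequest at runtime.

# Minimal local-run sketch: MockTimer is a hypothetical stand-in for
# func.TimerRequest, which the Azure runtime normally supplies.
class MockTimer:
    past_due = False  # the only TimerRequest attribute a manual run needs

stark_import(MockTimer())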
Example #2
def advanticsys_import(blobin: func.InputStream):
    """
    The main advanticsys Azure Function routine.

    """

    logging.info(f"Starting advanticsys sensor data import process:\n"
                 f"Name: {blobin.name}\n"
                 f"Blob Size: {blobin.length} bytes")

    # reading in data as pandas dataframe
    data_str = str(blobin.read(), "utf-8")
    data_stream = StringIO(data_str)
    data_df = pd.read_csv(data_stream)

    # getting the environmental parameters
    user = os.environ["CROP_SQL_USER"].strip()
    password = os.environ["CROP_SQL_PASS"].strip()
    host = os.environ["CROP_SQL_HOST"].strip()
    port = os.environ["CROP_SQL_PORT"].strip()
    database = os.environ["CROP_SQL_DBNAME"].strip()

    # uploading data to the database
    status, log = import_data(data_df, CONST_ADVANTICSYS, user, password, host,
                              port, database)

    # Logging the advanticsys sensor data upload event
    conn_string = make_conn_string(SQL_ENGINE, user, password, host, port)

    log_status, log_err = log_upload_event(CONST_ADVANTICSYS, blobin.name,
                                           status, log, conn_string)

    if status:
        logging.info(
            f"SUCCESS: advanticsys sensor data import process finished:\n"
            f"Name: {blobin.name}\n"
            f"Blob Size: {blobin.length} bytes\n"
            f"Info: {log}\n"
            f"Log: {log_status} {log_err}")
    else:
        logging.error(
            f"ERROR: advanticsys sensor data import process failed:\n"
            f"Name: {blobin.name}\n"
            f"Blob Size: {blobin.length} bytes\n"
            f"Info: {log}\n"
            f"Log: {log_status} {log_err}")
Example #3
def import_zensie_trh_data(conn_string, database, dt_from, dt_to):
    """
    Uploads zensie temperature and relative humidity data to the CROP database.

    Arguments:
        conn_string: connection string
        database: the name of the database
        dt_from: date range from
        dt_to: date range to
    Returns:
        status, error
    """

    log = ""
    sensor_type = CONST_ZENSIE_TRH_SENSOR_TYPE

    success, log, engine = connect_db(conn_string, database)

    if not success:
        logging.info(log)
        return success, log

    # get the list of zensie trh sensors
    try:
        session = session_open(engine)
        zensie_sensor_list = get_zensie_sensors_list(session, sensor_type)
        session_close(session)

        if zensie_sensor_list is None or len(zensie_sensor_list) == 0:
            success = False
            log = "No sensors with sensor type {} were found.".format(
                sensor_type)

    except Exception:
        session_close(session)
        success = False
        log = "Failed to fetch sensors of type {} from the database.".format(
            sensor_type)

    if not success:
        logging.info(log)

        return log_upload_event(CONST_ZENSIE_TRH_SENSOR_TYPE, "Zensie API",
                                success, log, conn_string)

    for zensie_sensor in zensie_sensor_list:

        sensor_id = zensie_sensor["sensors_id"]
        sensor_check_id = zensie_sensor["sensors_device_id"]

        logging.info("sensor_id: {} | sensor_check_id: {}".format(
            sensor_id, sensor_check_id))

        if sensor_id > 0 and len(sensor_check_id) > 0:

            logging.info("sensor_id: {} | dt_from: {}, dt_to: {}".format(
                sensor_id, dt_from, dt_to))

            # Sensor data from Zensie
            sensor_success, sensor_error, api_data_df = get_api_sensor_data(
                CONST_CROP_30MHZ_APIKEY, sensor_check_id, dt_from, dt_to)

            logging.info(
                "sensor_id: {} | sensor_success: {}, sensor_error: {}".format(
                    sensor_id, sensor_success, sensor_error))

            if sensor_success:
                # Sensor data from database
                session = session_open(engine)
                db_data_df = get_zensie_trh_sensor_data(
                    session,
                    sensor_id,
                    dt_from + timedelta(hours=-1),
                    dt_to + timedelta(hours=1),
                )
                session_close(session)

                if len(db_data_df) > 0:
                    # Filtering only new data
                    new_data_df = api_data_df[
                        ~api_data_df.index.isin(db_data_df.index)]

                    logging.info("sensor_id: {} | len(db_data_df): {}".format(
                        sensor_id, len(db_data_df)))
                else:
                    new_data_df = api_data_df

                logging.info("sensor_id: {} | len(new_data_df): {}".format(
                    sensor_id, len(new_data_df)))

                if len(new_data_df) > 0:

                    start_time = time.time()

                    session = session_open(engine)
                    for idx, row in new_data_df.iterrows():

                        data = ReadingsZensieTRHClass(
                            sensor_id=sensor_id,
                            timestamp=idx,
                            temperature=row["Temperature"],
                            humidity=row["Humidity"],
                        )

                        session.add(data)

                    session.query(SensorClass).filter(
                        SensorClass.id == sensor_id
                    ).update({"last_updated": datetime.now()})

                    session_close(session)

                    elapsed_time = time.time() - start_time

                    logging.debug(
                        "sensor_id: {} | elapsed time importing data: {} s.".format(
                            sensor_id, elapsed_time))

                    upload_log = "New: {} (uploaded);".format(
                        len(new_data_df.index))
                    log_upload_event(
                        CONST_ZENSIE_TRH_SENSOR_TYPE,
                        "Zensie API; Sensor ID {}".format(sensor_id),
                        sensor_success,
                        upload_log,
                        conn_string,
                    )

            else:
                log_upload_event(
                    CONST_ZENSIE_TRH_SENSOR_TYPE,
                    "Zensie API; Sensor ID {}".format(sensor_id),
                    sensor_success,
                    sensor_error,
                    conn_string,
                )

    return True, None
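
A hypothetical invocation that imports the most recent day of readings, assuming the same SQL_CONNECTION_STRING and SQL_DBNAME constants seen in Example #1:

from datetime import datetime, timedelta

# Pull the last 24 hours of temperature/humidity readings (hypothetical call).
dt_to = datetime.utcnow()
dt_from = dt_to - timedelta(days=1)
status, error = import_zensie_trh_data(
    SQL_CONNECTION_STRING, SQL_DBNAME, dt_from, dt_to)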
Example #4
def import_energy_data(electricity_df, conn_string, database):
    """
    Uploads electricity data to the CROP database.

    Arguments:
        electricity_df: pandas dataframe containing electricity data
        conn_string: connection string
        database: the name of the database
    """

    stark_type_id = -1

    success, log, engine = connect_db(conn_string, database)
    if not success:
        return success, log

    # Check if the stark sensor type is in the database
    try:
        session = session_open(engine)

        stark_type_id = (
            session.query(TypeClass)
            .filter(TypeClass.sensor_type == CONST_STARK)
            .first()
            .id
        )

        session_close(session)

    except Exception:
        status = False
        log = "Sensor type {} was not found.".format(CONST_STARK)

        return log_upload_event(CONST_STARK, "stark.co.uk", status, log, conn_string)

    # Check if data sources are in the database
    data_sources = electricity_df["data_source"].unique()

    data_sources_dict = {}

    # re-open a session for the sensor ID lookups below
    session = session_open(engine)

    for data_source in data_sources:
        stark_sensor_id = -1

        try:
            stark_sensor_id = (
                session.query(SensorClass)
                .filter(SensorClass.device_id == str(data_source))
                .filter(SensorClass.type_id == stark_type_id)
                .first()
                .id
            )
        except Exception:
            session_close(session)

            status = False
            log = "{} sensor with device ID '{}' was not found.".format(
                CONST_STARK, str(data_source)
            )

            return log_upload_event(
                CONST_STARK, "stark.co.uk", status, log, conn_string
            )

        data_sources_dict[data_source] = stark_sensor_id

    session_close(session)

    # Uploading electricity readings data
    add_cnt = 0
    dup_cnt = 0

    try:
        session = session_open(engine)

        for _, row in electricity_df.iterrows():

            sensor_id = data_sources_dict[row["data_source"]]
            timestamp = row["timestamp"]
            electricity = row["electricity"]

            try:
                query_result = (
                    session.query(ReadingsEnergyClass)
                    .filter(ReadingsEnergyClass.sensor_id == sensor_id)
                    .filter(ReadingsEnergyClass.timestamp == timestamp)
                    .first()
                )

                if query_result is not None:
                    found = True
                    dup_cnt += 1
                else:
                    found = False
            except Exception:
                found = False

            if not found:

                data = ReadingsEnergyClass(
                    sensor_id=sensor_id,
                    timestamp=timestamp,
                    electricity_consumption=electricity,
                )
                session.add(data)

                add_cnt += 1

            session.query(SensorClass).filter(SensorClass.id == sensor_id).update(
                {"last_updated": datetime.utcnow()}
            )

        session_close(session)

        status = True
        log = "New: {} (uploaded); Duplicates: {} (ignored)".format(add_cnt, dulp_cnt)

        return log_upload_event(CONST_STARK, "stark.co.uk", status, log, conn_string)

    except Exception:
        session_close(session)

        status = False
        log = "Cannot insert new data to database"

        return log_upload_event(CONST_STARK, "stark.co.uk", status, log, conn_string)
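
The shape import_energy_data expects of electricity_df can be read off the row accesses above (data_source, timestamp, electricity). A minimal construction sketch with placeholder meter IDs and readings, assuming a conn_string and database from the surrounding module:

import pandas as pd

# Columns inferred from the function body; values are placeholders.
electricity_df = pd.DataFrame({
    "data_source": ["meter-01", "meter-01"],
    "timestamp": pd.to_datetime(["2021-01-01 00:30", "2021-01-01 01:00"]),
    "electricity": [12.4, 11.9],
})

status, error = import_energy_data(electricity_df, conn_string, database)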
Example #5
def upload_openweathermap_data(conn_string: str, database: str,
                               dt_from: datetime, dt_to: datetime):
    """
    Uploads openweathermap data to the CROP database.

    Arguments:
        conn_string: connection string
        database: the name of the database
        dt_from: date range from
        dt_to: date range to
    Returns:
        status, error
    """
    # connect to the DB to get weather data already there, so we don't duplicate
    success, log, engine = connect_db(conn_string, database)
    if not success:
        logging.error(log)
        return success, log
    session = session_open(engine)
    df_db = get_db_weather_data(session, dt_from, dt_to)
    session_close(session)

    # now get the Openweathermap API data
    success, error, df_api = get_openweathermap_data(dt_from, dt_to)
    if not success:
        # bail out early: df_api is not usable if the API call failed
        logging.error(error)
        return log_upload_event(CONST_API_WEATHER_TYPE, "Openweathermap API",
                                success, error, conn_string)

    # filter out the rows that are already in the db data
    new_data_df = df_api[~df_api.index.isin(df_db.index)]

    logging.info("new data with size len(new_data_df): {}\n\n".format(
        len(new_data_df)))
    if len(new_data_df) > 0:
        # this is the current time in seconds since epoch
        start_time: float = time.time()
        session = session_open(engine)
        for idx, row in new_data_df.iterrows():
            data = ReadingsWeatherClass(
                sensor_id=0,
                timestamp=idx,
                temperature=row["temperature"],
                rain_probability=None,  # not in openweathermap data
                rain=row["rain"],
                relative_humidity=row["relative_humidity"],
                wind_speed=row["wind_speed"],
                wind_direction=row["wind_direction"],
                air_pressure=row["air_pressure"],
                radiation=None,  # not in openweathermap data
                icon=row["icon"],
                source=row["source"],
            )
            session.add(data)
        session_close(session)

        elapsed_time = time.time() - start_time

        logging.debug(
            "openweathermap | elapsed time importing data: {} s.".format(
                elapsed_time))

        upload_log = "New: {} (uploaded);".format(len(new_data_df.index))
        log_upload_event(
            CONST_API_WEATHER_TYPE,
            "Openweathermap API",
            success,
            upload_log,
            conn_string,
        )

    else:
        log_upload_event(
            CONST_API_WEATHER_TYPE,
            "Openweathermap API",
            success,
            error,
            conn_string,
        )
    return True, None
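
The duplicate filter above relies on both dataframes being indexed by timestamp. A self-contained sketch of the idiom with made-up values:

import pandas as pd

df_db = pd.DataFrame(
    {"temperature": [10.0, 10.5]},
    index=pd.to_datetime(["2021-01-01 00:00", "2021-01-01 01:00"]))
df_api = pd.DataFrame(
    {"temperature": [10.5, 11.0]},
    index=pd.to_datetime(["2021-01-01 01:00", "2021-01-01 02:00"]))

# Keep only API rows whose timestamp is not already in the database.
new_rows = df_api[~df_api.index.isin(df_db.index)]  # -> just the 02:00 row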