def insert_location_data(engine):
    """
    Bulk inserts test location data.

    Arguments:
        engine: SQL engine object
    """

    test_csv = "locations.csv"
    loc_df = pd.read_csv(os.path.join(CONST_COREDATA_DIR, test_csv))

    assert not loc_df.empty

    # Creates/Opens a new connection to the db and binds the engine
    session = session_open(engine)

    # Check if table is empty and bulk inserts if it is
    first_entry = session.query(LocationClass).first()

    if first_entry is None:
        session.bulk_insert_mappings(LocationClass,
                                     loc_df.to_dict(orient="records"))

    # Both branches end with the same check, so assert once before
    # closing the session instead of duplicating the tail in if/else.
    assert session.query(LocationClass).count() == len(loc_df.index)

    session_close(session)
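A minimal usage sketch, assuming connect_db, SQL_CONNECTION_STRING and SQL_TEST_DBNAME as used in the later examples:

# Hypothetical driver: connect to the test database and seed the locations table.
status, log, engine = connect_db(SQL_CONNECTION_STRING, SQL_TEST_DBNAME)
assert status, log
insert_location_data(engine)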
Example #2
def insert_user_data(engine):
    """
    Bulk inserts users data.

    Arguments:
        engine: SQL engine object
    """

    users_df = pd.read_csv("users.csv")

    assert not users_df.empty

    # Creates/Opens a new connection to the db and binds the engine
    session = session_open(engine)

    for _, user_row in users_df.iterrows():

        data = UserClass(
            username=user_row["username"],
            email=user_row["email"],
            password=user_row["password"],
        )

        session.add(data)

    session_close(session)
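For larger files, the same upload can reuse the bulk-mapping approach from the previous example; a sketch, assuming the users.csv columns match the UserClass fields:

# Hypothetical bulk variant of insert_user_data.
session = session_open(engine)
session.bulk_insert_mappings(UserClass, users_df.to_dict(orient="records"))
session_close(session)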
Example #3
def import_data(pd_df, sensor_type, user, password, host, port, db_name):
    """
    This function will take the checked sensor data (pd_df)
    perform data checks and insert them into the db.
    -data: raw data from a sensor as a csv (or dataframe??)
    -sensor_type: type of sensor

    Parameters required to connect to the database:
    -user: my user name
    -password: my password
    -host: the host name of the server
    -port: the port number the server is listening on
    -db_name: my database name

    """

    connection_string = make_conn_string(SQL_ENGINE, user, password, host,
                                         port)

    # Try to connect to a database that exists
    success, log, engine = connect_db(connection_string, db_name)
    if not success:
        return success, log

    # Creates/Opens a new connection to the db and binds the engine
    session = session_open(engine)

    if sensor_type == CONST_ADVANTICSYS:
        # load advanticsys sensor data to db
        success, log = insert_advanticsys_data(session, pd_df)
        if not success:
            return success, log

    # TODO: add the other sensor types
    else:
        return False, "Sensor type does not exist"

    session_close(session)

    return True, log
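A usage sketch; the dataframe and credentials below are hypothetical:

# Hypothetical call with an advanticsys dataframe and local credentials.
success, log = import_data(adv_df, CONST_ADVANTICSYS, "crop_user",
                           "crop_password", "localhost", 5432, "crop_db")
if not success:
    print(log)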
Example #4
def log_upload_event(sensor_type, filename, status, log, connection_string):
    """
    Function will log the upload event in the database by capturing information
        suchas sensor_type, time(now), filename, status, log message.

    - sensor_type: the type of sensor(s) for which the data is being uploaded
    - filename: the name of the file uploaded
    - status: boolean
    - log: log message from the upload routine
    - connection_string: connecetion string to the database

    """

    # Try to connect to a database that exists
    success, error, engine = connect_db(connection_string, SQL_DBNAME)

    if not success:
        return success, error

    # Creates/Opens a new connection to the db and binds the engine
    session = session_open(engine)

    type_id, error = find_sensor_type_id(session, sensor_type)

    success = type_id > -1

    if not success:
        session_close(session)
        return success, error

    if status:
        status_msg = "OK"
    else:
        status_msg = "FAILED"

    event_log = DataUploadLogClass(type_id, filename, status_msg, log)

    session.add(event_log)

    session_close(session)

    return success, error
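A usage sketch; the filename and message are hypothetical:

# Hypothetical call recording a failed advanticsys upload.
success, error = log_upload_event(CONST_ADVANTICSYS, "data_20200101.csv",
                                  False, "Parsing failed",
                                  SQL_CONNECTION_STRING)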
Example #5
def insert_adv_data(engine):
    """
    Bulk inserts test advanticsys data

    Arguments:
        engine: SQL engine object
    """

    file_path = os.path.join(CONST_ADVANTICSYS_DIR, CONST_ADVANTICSYS_TEST_1)
    success, log, test_ingress_df = advanticsys_import(file_path)

    assert success, log
    assert isinstance(test_ingress_df, pd.DataFrame)

    # Creates/Opens a new connection to the db and binds the engine
    session = session_open(engine)

    # tests loading sensor data to db
    success, log = insert_advanticsys_data(session, test_ingress_df)
    session_close(session)
    assert success, log
Example #6
def test_insert_advanticsys_data():
    """
    Bulk inserts test advanticsys data

    Arguments:
        engine: SQL engine object
    """

    file_path = os.path.join(CONST_ADVANTICSYS_DIR, CONST_ADVANTICSYS_TEST_1)
    success, log, test_ingress_df = advanticsys_import(file_path)
    assert success, log
    assert isinstance(test_ingress_df, pd.DataFrame)

    # Try to connect to an engine that exists
    status, log, engine = connect_db(SQL_CONNECTION_STRING, SQL_TEST_DBNAME)
    assert status, log

    # trying to import the same data twice
    session = session_open(engine)
    success, log = insert_advanticsys_data(session, test_ingress_df)
    session_close(session)

    # This should pass and report 75 duplicate values
    assert success is True, log
    assert log == "New: 0 (uploaded); Duplicates: 75 (ignored)"

    file_path = os.path.join(CONST_ADVANTICSYS_DIR, CONST_ADVANTICSYS_TEST_10)
    success, log, test_ingress_df = advanticsys_import(file_path)

    assert success, log
    assert isinstance(test_ingress_df, pd.DataFrame)

    session = session_open(engine)
    success, log = insert_advanticsys_data(session, test_ingress_df)
    session_close(session)

    assert success is False, log
Example #7
def import_sensor_location(engine):
    """
    Bulk inserts sensor location data

    Arguments:
        engine: SQL engine object
    """

    test_csv = "sensor_location.csv"

    sensor_df = pd.read_csv(os.path.join(CONST_COREDATA_DIR, test_csv))
    assert not sensor_df.empty

    # Creates/Opens a new connection to the db and binds the engine
    session = session_open(engine)

    # Check if table is empty and bulk inserts if it is
    first_entry = session.query(SensorLocationClass).first()

    if first_entry is None:
        session.bulk_insert_mappings(SensorLocationClass,
                                     sensor_df.to_dict(orient="records"))

    # As above, assert once before closing instead of duplicating the
    # same tail in both branches.
    assert session.query(SensorLocationClass).count() == len(
        sensor_df.index)

    session_close(session)

    # Trying to upload location history data for a sensor that does not exist
    test_csv = "sensor_location_test_1.csv"

    sensor_df = pd.read_csv(
        os.path.join(CONST_TEST_DIR_DATA, CONST_SENSOR_LOCATION_TESTS,
                     test_csv))
    assert not sensor_df.empty

    session = session_open(engine)
    try:
        session.bulk_insert_mappings(SensorLocationClass,
                                     sensor_df.to_dict(orient="records"))
        result = True
    except Exception:
        session.rollback()
        result = False

    session_close(session)

    assert not result

    # Trying to upload location history data for a location that does not exist
    test_csv = "sensor_location_test_2.csv"

    sensor_df = pd.read_csv(
        os.path.join(CONST_TEST_DIR_DATA, CONST_SENSOR_LOCATION_TESTS,
                     test_csv))
    assert not sensor_df.empty

    session = session_open(engine)

    try:
        session.bulk_insert_mappings(SensorLocationClass,
                                     sensor_df.to_dict(orient="records"))
        result = True
    except Exception:
        session.rollback()
        result = False

    session_close(session)

    assert not result

    # Trying to upload location history data with an empty installation date
    test_csv = "sensor_location_test_3.csv"

    sensor_df = pd.read_csv(
        os.path.join(CONST_TEST_DIR_DATA, CONST_SENSOR_LOCATION_TESTS,
                     test_csv))
    assert not sensor_df.empty

    session = session_open(engine)

    try:
        session.bulk_insert_mappings(SensorLocationClass,
                                     sensor_df.to_dict(orient="records"))
        result = True
    except Exception:
        session.rollback()
        result = False

    session_close(session)

    assert not result
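The three negative tests above repeat the same insert-and-expect-failure pattern; a small helper would remove the duplication (the name attempt_bulk_insert is hypothetical):

def attempt_bulk_insert(engine, df):
    """Returns True if the bulk insert succeeded, False otherwise."""
    session = session_open(engine)
    try:
        session.bulk_insert_mappings(SensorLocationClass,
                                     df.to_dict(orient="records"))
        result = True
    except Exception:
        session.rollback()
        result = False
    session_close(session)
    return result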
Example #8
def import_energy_data(electricity_df, conn_string, database):
    """
    Uploads electricity data to the CROP database.

    Arguments:
        electricity_df: pandas dataframe containing electricity data
        conn_string: connection string
        database: the name of the database
    """

    stark_type_id = -1

    success, log, engine = connect_db(conn_string, database)
    if not success:
        return success, log

    # Check if the stark sensor type is in the database
    try:
        session = session_open(engine)

        stark_type_id = (
            session.query(TypeClass)
            .filter(TypeClass.sensor_type == CONST_STARK)
            .first()
            .id
        )

        session_close(session)

    except Exception:
        status = False
        log = "Sensor type {} was not found.".format(CONST_STARK)

        return log_upload_event(CONST_STARK, "stark.co.uk", status, log, conn_string)

    # Check if data sources are in the database
    data_sources = electricity_df["data_source"].unique()

    data_sources_dict = {}

    # The session above was closed, so open a new one for the sensor lookups
    session = session_open(engine)

    for data_source in data_sources:
        stark_sensor_id = -1

        try:
            stark_sensor_id = (
                session.query(SensorClass)
                .filter(SensorClass.device_id == str(data_source))
                .filter(SensorClass.type_id == stark_type_id)
                .first()
                .id
            )
        except Exception:
            session_close(session)

            status = False
            log = "{} sensor with device_id = '{}' was not found.".format(
                CONST_STARK, str(data_source)
            )

            return log_upload_event(
                CONST_STARK, "stark.co.uk", status, log, conn_string
            )

        data_sources_dict[data_source] = stark_sensor_id

    session_close(session)

    # Uploading electricity readings data
    add_cnt = 0
    dup_cnt = 0

    try:
        session = session_open(engine)

        for _, row in electricity_df.iterrows():

            sensor_id = data_sources_dict[row["data_source"]]
            timestamp = row["timestamp"]
            electricity = row["electricity"]

            try:
                query_result = (
                    session.query(ReadingsEnergyClass)
                    .filter(ReadingsEnergyClass.sensor_id == sensor_id)
                    .filter(ReadingsEnergyClass.timestamp == timestamp)
                    .first()
                )

                if query_result is not None:
                    found = True
                    dup_cnt += 1
                else:
                    found = False
            except Exception:
                found = False

            if not found:

                data = ReadingsEnergyClass(
                    sensor_id=sensor_id,
                    timestamp=timestamp,
                    electricity_consumption=electricity,
                )
                session.add(data)

                add_cnt += 1

            session.query(SensorClass).filter(SensorClass.id == sensor_id).update(
                {"last_updated": datetime.utcnow()}
            )

        session_close(session)

        status = True
        log = "New: {} (uploaded); Duplicates: {} (ignored)".format(add_cnt, dulp_cnt)

        return log_upload_event(CONST_STARK, "stark.co.uk", status, log, conn_string)

    except Exception:
        session_close(session)

        status = False
        log = "Cannot insert new data to database"

        return log_upload_event(CONST_STARK, "stark.co.uk", status, log, conn_string)
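A usage sketch; the dataframe is hypothetical and needs data_source, timestamp and electricity columns:

# Hypothetical call uploading a batch of electricity readings.
success, log = import_energy_data(electricity_df, SQL_CONNECTION_STRING,
                                  SQL_DBNAME)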
Example #9
def import_zensie_trh_data(conn_string, database, dt_from, dt_to):
    """
    Uploads zensie temperature and relative humidity data to the CROP database.

    Arguments:
        conn_string: connection string
        database: the name of the database
        dt_from: date range from
        dt_to: date range to
    Returns:
        status, error
    """

    log = ""
    sensor_type = CONST_ZENSIE_TRH_SENSOR_TYPE

    success, log, engine = connect_db(conn_string, database)

    if not success:
        logging.info(log)
        return success, log

    # get the list of zensie trh sensors
    try:
        session = session_open(engine)
        zensie_sensor_list = get_zensie_sensors_list(session, sensor_type)
        session_close(session)

        if zensie_sensor_list is None or len(zensie_sensor_list) == 0:
            success = False
            log = "No sensors with sensor type {} were found.".format(
                sensor_type)

    except Exception:
        session_close(session)
        success = False
        log = "No sensors with sensor type {} were found.".format(sensor_type)

    if not success:
        logging.info(log)

        return log_upload_event(CONST_ZENSIE_TRH_SENSOR_TYPE, "Zensie API",
                                success, log, conn_string)

    for zensie_sensor in zensie_sensor_list:

        sensor_id = zensie_sensor["sensors_id"]
        sensor_check_id = zensie_sensor["sensors_device_id"]

        logging.info("sensor_id: {} | sensor_check_id: {}".format(
            sensor_id, sensor_check_id))

        if sensor_id > 0 and len(sensor_check_id) > 0:

            logging.info("sensor_id: {} | dt_from: {}, dt_to: {}".format(
                sensor_id, dt_from, dt_to))

            # Sensor data from Zensie
            sensor_success, sensor_error, api_data_df = get_api_sensor_data(
                CONST_CROP_30MHZ_APIKEY, sensor_check_id, dt_from, dt_to)

            logging.info(
                "sensor_id: {} | sensor_success: {}, sensor_error: {}".format(
                    sensor_id, sensor_success, sensor_error))

            if sensor_success:
                # Sensor data from database
                session = session_open(engine)
                db_data_df = get_zensie_trh_sensor_data(
                    session,
                    sensor_id,
                    dt_from + timedelta(hours=-1),
                    dt_to + timedelta(hours=1),
                )
                session_close(session)

                if len(db_data_df) > 0:
                    # Filtering only new data
                    new_data_df = api_data_df[
                        ~api_data_df.index.isin(db_data_df.index)]

                    logging.info("sensor_id: {} | len(db_data_df): {}".format(
                        sensor_id, len(db_data_df)))
                else:
                    new_data_df = api_data_df

                logging.info("sensor_id: {} | len(new_data_df): {}".format(
                    sensor_id, len(new_data_df)))

                if len(new_data_df) > 0:

                    start_time = time.time()

                    session = session_open(engine)
                    for idx, row in new_data_df.iterrows():

                        data = ReadingsZensieTRHClass(
                            sensor_id=sensor_id,
                            timestamp=idx,
                            temperature=row["Temperature"],
                            humidity=row["Humidity"],
                        )

                        session.add(data)

                    session.query(SensorClass).\
                        filter(SensorClass.id == sensor_id).\
                        update({"last_updated": datetime.now()})

                    session_close(session)

                    elapsed_time = time.time() - start_time

                    logging.debug(
                        "sensor_id: {} | elapsed time importing data: {} s.".
                        format(sensor_id, elapsed_time))

                    upload_log = "New: {} (uploaded);".format(
                        len(new_data_df.index))
                    log_upload_event(
                        CONST_ZENSIE_TRH_SENSOR_TYPE,
                        "Zensie API; Sensor ID {}".format(sensor_id),
                        sensor_success,
                        upload_log,
                        conn_string,
                    )

            else:
                log_upload_event(
                    CONST_ZENSIE_TRH_SENSOR_TYPE,
                    "Zensie API; Sensor ID {}".format(sensor_id),
                    sensor_success,
                    sensor_error,
                    conn_string,
                )

    return True, None
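A usage sketch, assuming a one-day backfill window:

# Hypothetical backfill of the previous 24 hours of Zensie T/RH readings.
dt_to = datetime.utcnow()
dt_from = dt_to - timedelta(days=1)
success, error = import_zensie_trh_data(SQL_CONNECTION_STRING, SQL_DBNAME,
                                        dt_from, dt_to)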
Example #10

def issue_warnings():
    pass


def upload_warnings(session, warnings_df, type_id, priority, warning_log):
    """
    Inserts a dataframe of warnings into the database.

    Arguments:
        session: open SQLAlchemy session
        warnings_df: pandas dataframe of warnings, indexed by timestamp
        type_id: sensor type id for the warnings
        priority: warning priority
        warning_log: log message stored with each warning
    """

    for idx, _row in warnings_df.iterrows():

        data = DataWarningsClass(
            type_id=type_id,
            timestamp=idx,
            priority=priority,
            log=warning_log,
        )

        session.add(data)


if __name__ == "__main__":
    session = session_open(engine)

    check_issues_in_farm(session)

    session_close(session)
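A usage sketch for upload_warnings as rewritten above; all values are hypothetical:

# Hypothetical upload of a batch of high-priority warnings.
session = session_open(engine)
upload_warnings(session, warnings_df, type_id=1, priority="high",
                warning_log="Temperature above threshold")
session_close(session)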
Example #11
def upload_openweathermap_data(conn_string: str, database: str,
                               dt_from: datetime, dt_to: datetime):
    """
    Uploads openweathermap data to the CROP database.

    Arguments:
        conn_string: connection string
        database: the name of the database
        dt_from: date range from
        dt_to: date range to
    Returns:
        status, error
    """
    # connect to the DB to get weather data already there, so we don't duplicate
    success, log, engine = connect_db(conn_string, database)
    if not success:
        logging.error(log)
        return success, log
    session = session_open(engine)
    df_db = get_db_weather_data(session, dt_from, dt_to)
    session_close(session)

    # now get the Openweathermap API data
    success, error, df_api = get_openweathermap_data(dt_from, dt_to)
    if not success:
        logging.error(error)
        return success, error

    # filter out the rows that are already in the db data
    new_data_df = df_api[~df_api.index.isin(df_db.index)]

    logging.info("new data with size len(new_data_df): {}\n\n".format(
        len(new_data_df)))
    if len(new_data_df) > 0:
        # this is the current time in seconds since epoch
        start_time: float = time.time()
        session = session_open(engine)
        for idx, row in new_data_df.iterrows():
            data = ReadingsWeatherClass(
                sensor_id=0,
                timestamp=idx,
                temperature=row["temperature"],
                rain_probability=None,  # not in openweathermap data
                rain=row["rain"],
                relative_humidity=row["relative_humidity"],
                wind_speed=row["wind_speed"],
                wind_direction=row["wind_direction"],
                air_pressure=row["air_pressure"],
                radiation=None,  # not in openweathermap data
                icon=row["icon"],
                source=row["source"],
            )
            session.add(data)
        session_close(session)

        elapsed_time = time.time() - start_time

        logging.debug(
            "openweathermap | elapsed time importing data: {} s.".format(
                elapsed_time))

        upload_log = "New: {} (uploaded);".format(len(new_data_df.index))
        log_upload_event(
            CONST_API_WEATHER_TYPE,
            "Openweathermap API",
            success,
            upload_log,
            conn_string,
        )

    else:
        log_upload_event(
            CONST_API_WEATHER_TYPE,
            "Openweathermap API",
            success,
            error,
            conn_string,
        )
    return True, None
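A usage sketch, mirroring the Zensie example with a 48-hour window:

# Hypothetical backfill of the last 48 hours of weather readings.
dt_to = datetime.utcnow()
dt_from = dt_to - timedelta(hours=48)
success, error = upload_openweathermap_data(SQL_CONNECTION_STRING, SQL_DBNAME,
                                            dt_from, dt_to)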