def test_read(Client, read_type, size):
    """Read TAGS["Float32"] with the given read_type and verify shape and index.

    SNAPSHOT reads take no time range; every other read type is bounded by
    START_TIME/STOP_TIME with SAMPLE_TIME resolution.
    """
    if read_type == "SNAPSHOT":
        df = Client.read(
            TAGS["Float32"],
            read_type=getattr(ReaderType, read_type),
        )
    else:
        df = Client.read(
            TAGS["Float32"],
            start_time=START_TIME,
            end_time=STOP_TIME,
            ts=SAMPLE_TIME,
            read_type=getattr(ReaderType, read_type),
        )

    if read_type not in ["SNAPSHOT", "RAW"]:
        assert df.shape == (size, 1)
        assert df.index[0] == ensure_datetime_with_tz(START_TIME)
        assert df.index[-1] == df.index[0] + (size - 1) * pd.Timedelta(
            SAMPLE_TIME, unit="s"
        )
    elif read_type == "RAW":  # was `in "RAW"`: substring test, not equality
        # Weirdness for test-tag which can have two different results,
        # apparently depending on the day of the week, mood, lunar cycle...
        assert df.shape == (size, 1) or df.shape == (size - 1, 1)
        assert df.index[0] >= ensure_datetime_with_tz(START_TIME)
        assert df.index[-1] <= ensure_datetime_with_tz(STOP_TIME)
def test_generate_tag_read_query(read_type):
    """Check the ODBC query AspenHandlerODBC builds for each reader type."""
    start = ensure_datetime_with_tz(START_TIME)
    stop = ensure_datetime_with_tz(STOP_TIME)
    sample_ts = pd.Timedelta(SAMPLE_TIME, unit="s")

    if read_type == "SNAPSHOT":
        # Snapshot queries ignore the time range and sample interval.
        res = AspenHandlerODBC.generate_read_query(
            "thetag", None, None, None, None, getattr(ReaderType, read_type))
    else:
        res = AspenHandlerODBC.generate_read_query(
            "thetag", None, start, stop, sample_ts,
            getattr(ReaderType, read_type))

    def aggregate_query(field):
        # All aggregate reader types share the same shape; only the selected
        # column of the aggregates table differs.
        return (
            f'SELECT ISO8601(ts_start) AS "time", {field} AS "value" FROM aggregates WHERE '
            "name = 'thetag' AND (period = 600) AND (request = 1) "
            "AND (ts BETWEEN '2018-01-17T15:00:00Z' AND '2018-01-17T16:00:00Z') "
            "ORDER BY ts"
        )

    expected = {
        "RAW": (
            'SELECT ISO8601(ts) AS "time", value AS "value" FROM history WHERE '
            "name = 'thetag' AND (request = 4) "
            "AND (ts BETWEEN '2018-01-17T15:00:00Z' AND '2018-01-17T16:00:00Z') "
            "ORDER BY ts"
        ),
        "INT": (
            'SELECT ISO8601(ts) AS "time", value AS "value" FROM history WHERE '
            "name = 'thetag' AND (period = 600) AND (request = 7) "
            "AND (ts BETWEEN '2018-01-17T15:00:00Z' AND '2018-01-17T16:00:00Z') "
            "ORDER BY ts"
        ),
        "SNAPSHOT": (
            'SELECT ISO8601(IP_INPUT_TIME) AS "time", IP_INPUT_VALUE AS "value" '
            'FROM "thetag"'
        ),
    }
    for rt, field in [
        ("MIN", "min"),
        ("MAX", "max"),
        ("RNG", "rng"),
        ("AVG", "avg"),
        ("STD", "std"),
        ("VAR", "var"),
    ]:
        expected[rt] = aggregate_query(field)

    assert res == expected[read_type]
# ----- 예제 #3 (Example #3), score: 0 -----
def test_generate_tag_read_query(PIHandler, read_type):
    """Check the PI ODBC query PIHandler builds for each reader type."""
    start = utils.ensure_datetime_with_tz(START_TIME)
    stop = utils.ensure_datetime_with_tz(STOP_TIME)
    sample_ts = pd.Timedelta(SAMPLE_TIME, unit="s")

    if read_type == "SNAPSHOT":
        res = PIHandler.generate_read_query("thetag", None, None, None,
                                            getattr(ReaderType, read_type))
    else:
        res = PIHandler.generate_read_query("thetag", start, stop, sample_ts,
                                            getattr(ReaderType, read_type))

    def stepped_query(table, select="CAST(value as FLOAT32)"):
        # Interpolated and aggregate reads share this shape; only the PI
        # archive table (and, for VAR, the selected expression) differs.
        return (
            f"SELECT {select} AS value, time "
            f"FROM [piarchive]..[{table}] WHERE tag='thetag' "
            "AND (time BETWEEN '17-Jan-18 15:00:00' AND '17-Jan-18 16:00:00') "
            "AND (timestep = '60s') ORDER BY time"
        )

    expected = {
        "RAW": (
            "SELECT TOP 100000 CAST(value as FLOAT32) AS value, time "
            "FROM [piarchive]..[picomp2] WHERE tag='thetag' "
            "AND (time BETWEEN '17-Jan-18 15:00:00' AND '17-Jan-18 16:00:00') "
            "ORDER BY time"
        ),
        "INT": stepped_query("piinterp2"),
        "MIN": stepped_query("pimin"),
        "MAX": stepped_query("pimax"),
        "RNG": stepped_query("pirange"),
        "AVG": stepped_query("piavg"),
        "STD": stepped_query("pistd"),
        # VAR selects the square of the value from the stddev table.
        "VAR": stepped_query("pistd", select="POWER(CAST(value as FLOAT32), 2)"),
        "SNAPSHOT": (
            "SELECT CAST(value as FLOAT32) AS value, time "
            "FROM [piarchive]..[pisnapshot] WHERE tag='thetag'"
        ),
    }
    assert res == expected[read_type]
# ----- 예제 #4 (Example #4), score: 0 -----
def test_genreadquery_long_sampletime(PIHandler):
    """A sample time just over 24h must survive into the interval parameter."""
    long_ts_s = 86410  # seconds; deliberately more than one day
    begin = ensure_datetime_with_tz(START_TIME)
    end = ensure_datetime_with_tz(STOP_TIME)

    url, params = PIHandler.generate_read_query(
        PIHandler.tag_to_webid("alreadyknowntag"),
        begin,
        end,
        pd.Timedelta(long_ts_s, unit="s"),
        ReaderType.INT,
    )
    assert params["interval"] == f"{long_ts_s}s"
# ----- 예제 #5 (Example #5), score: 0 -----
def test_genreadquery_long_sampletime(AspenHandler):
    """A >24h sample time must be passed through unchanged in the <P> element."""
    begin = utils.ensure_datetime_with_tz("2020-06-24 17:00:00")
    end = utils.ensure_datetime_with_tz("2020-06-24 18:00:00")
    long_ts = pd.Timedelta(86401, unit="s")

    res = AspenHandler.generate_read_query(
        "ATCAI", None, begin, end, long_ts, ReaderType.INT
    )
    expected = (
        '<Q f="d" allQuotes="1"><Tag><N><![CDATA[ATCAI]]></N>'
        "<D><![CDATA[sourcename]]></D><F><![CDATA[VAL]]></F>"
        "<HF>0</HF><St>1593010800000</St><Et>1593014400000</Et>"
        "<RT>1</RT><S>0</S><P>86401</P><PU>3</PU></Tag></Q>"
    )

    assert res == expected
# ----- 예제 #6 (Example #6), score: 0 -----
def test_generate_read_query(PIHandler, read_type):  # TODO: Move away from test*connect
    """Check the PI Web API endpoint URL and query params for each reader type."""
    begin = ensure_datetime_with_tz(START_TIME)
    end = ensure_datetime_with_tz(STOP_TIME)
    sample_ts = pd.Timedelta(SAMPLE_TIME, unit="s")

    url, params = PIHandler.generate_read_query(
        PIHandler.tag_to_webid("alreadyknowntag"),
        begin,
        end,
        sample_ts,
        getattr(ReaderType, read_type),
    )
    # tag_to_webid above has populated the cache for this tag.
    webid = PIHandler.webidcache["alreadyknowntag"]

    if read_type != "SNAPSHOT":
        # Only historical reads carry an explicit time range.
        assert params["startTime"] == "01-Apr-20 09:05:00"
        assert params["endTime"] == "01-Apr-20 10:05:00"
        assert params["timeZone"] == "UTC"

    if read_type == "INT":
        assert url == f"streams/{webid}/interpolated"
        assert params["selectedFields"] == "Links;Items.Timestamp;Items.Value;Items.Good"
        assert params["interval"] == f"{SAMPLE_TIME}s"
    elif read_type in ["AVG", "MIN", "MAX", "RNG", "STD", "VAR"]:
        # VAR also requests StdDev from the server.
        summary_types = {
            "AVG": "Average",
            "MIN": "Minimum",
            "MAX": "Maximum",
            "RNG": "Range",
            "STD": "StdDev",
            "VAR": "StdDev",
        }
        assert url == f"streams/{webid}/summary"
        assert (
            params["selectedFields"]
            == "Links;Items.Value.Timestamp;Items.Value.Value;Items.Value.Good"
        )
        assert params["summaryType"] == summary_types[read_type]
        assert params["summaryDuration"] == f"{SAMPLE_TIME}s"
    elif read_type == "SNAPSHOT":
        assert url == f"streams/{webid}/value"
        assert params["selectedFields"] == "Timestamp;Value;Good"
        assert len(params) == 3
    elif read_type == "RAW":
        assert url == f"streams/{webid}/recorded"
        assert params["selectedFields"] == "Links;Items.Timestamp;Items.Value;Items.Good"
        assert params["maxCount"] == 10000
# ----- 예제 #7 (Example #7), score: 0 -----
def test_genreadquery_long_sampletime(PIHandler):
    """A >24h sample time must appear verbatim as the ODBC timestep."""
    begin = utils.ensure_datetime_with_tz(START_TIME)
    end = utils.ensure_datetime_with_tz(STOP_TIME)

    res = PIHandler.generate_read_query(
        "thetag", begin, end, pd.Timedelta(86401, unit="s"), ReaderType.INT
    )

    expected = (
        "SELECT CAST(value as FLOAT32) AS value, time "
        "FROM [piarchive]..[piinterp2] WHERE tag='thetag' "
        "AND (time BETWEEN '17-Jan-18 15:00:00' AND '17-Jan-18 16:00:00') "
        "AND (timestep = '86401s') ORDER BY time"
    )

    assert res == expected
def test_genreadquery_long_sampletime():
    """A >24h sample time must be reflected in the Aspen period field."""
    begin = ensure_datetime_with_tz(START_TIME)
    end = ensure_datetime_with_tz(STOP_TIME)
    long_ts = pd.Timedelta(86401, unit="s")

    res = AspenHandlerODBC.generate_read_query(
        "thetag", None, begin, end, long_ts, ReaderType.INT
    )

    # period appears to be the sample time in tenths of a second:
    # 86401 s -> 864010 (cf. 60 s -> 600 elsewhere in this file).
    expected = (
        'SELECT ISO8601(ts) AS "time", value AS "value" FROM history WHERE '
        "name = 'thetag' AND (period = 864010) AND (request = 7) "
        "AND (ts BETWEEN '2018-01-17T15:00:00Z' AND '2018-01-17T16:00:00Z') "
        "ORDER BY ts"
    )

    assert res == expected
# ----- 예제 #9 (Example #9), score: 0 -----
def test_read(Client, read_type, size):
    """Read TAGS["Float32"] and verify the resulting shape and index bounds."""
    if read_type == "SNAPSHOT":
        df = Client.read(TAGS["Float32"], read_type=getattr(ReaderType, read_type))
    else:
        df = Client.read(
            TAGS["Float32"],
            start_time=START_TIME,
            end_time=STOP_TIME,
            ts=SAMPLE_TIME,
            read_type=getattr(ReaderType, read_type),
        )
    assert df.shape == (size, 1)
    if read_type not in ["SNAPSHOT", "RAW"]:
        assert df.index[0] == ensure_datetime_with_tz(START_TIME)
        assert df.index[-1] == df.index[0] + (size - 1) * pd.Timedelta(
            SAMPLE_TIME, unit="s"
        )
    elif read_type == "RAW":  # was `in "RAW"`: substring test, not equality
        assert df.index[0] >= ensure_datetime_with_tz(START_TIME)
        assert df.index[-1] <= ensure_datetime_with_tz(STOP_TIME)
def test_cache_proper_fill_up(PIClientWeb):
    """Two adjacent reads must both land in the cache and be fetchable as one range."""
    PIClientWeb.cache = SmartCache(PI_DS)
    cache_file = PI_DS + ".h5"
    if os.path.exists(cache_file):
        os.remove(cache_file)  # start from an empty cache

    first = PIClientWeb.read(
        PI_TAG, PI_START_TIME, PI_END_TIME, TS, read_type=ReaderType.INT
    )
    second = PIClientWeb.read(
        PI_TAG, PI_START_TIME_2, PI_END_TIME_2, TS, read_type=ReaderType.INT
    )
    assert len(first) == 16
    assert len(second) == 16

    # The cache should now serve the full combined interval in one fetch.
    combined = PIClientWeb.cache.fetch(
        PI_TAG,
        ReaderType.INT,
        TS,
        ensure_datetime_with_tz(PI_START_TIME),
        ensure_datetime_with_tz(PI_END_TIME_2),
    )
    assert len(combined) == 32
# ----- 예제 #11 (Example #11), score: 0 -----
def test_ensure_is_datetime():
    """String inputs are parsed and localized to the given (or default) timezone."""
    oslo = timezone("Europe/Oslo")

    assert ensure_datetime_with_tz("10. jan. 2018 13:45:15") == oslo.localize(
        datetime.datetime(2018, 1, 10, 13, 45, 15)
    )
    assert ensure_datetime_with_tz("02.01.03 00:00:00") == oslo.localize(
        datetime.datetime(2003, 1, 2, 0, 0, 0)
    )
    assert ensure_datetime_with_tz("02.01.03 00:00:00") == ensure_datetime_with_tz(
        "2003-02-01 0:00:00am"
    )
    # The timezone can be given positionally or as the tz= keyword.
    assert ensure_datetime_with_tz(
        "02.01.03 00:00:00", "America/Sao_Paulo"
    ) == timezone("America/Sao_Paulo").localize(datetime.datetime(2003, 1, 2, 0, 0, 0))
    assert ensure_datetime_with_tz("02.01.03 00:00:00", tz="Brazil/East") == timezone(
        "Brazil/East"
    ).localize(datetime.datetime(2003, 1, 2, 0, 0, 0))
    # An already tz-aware input keeps its instant even when tz= is supplied.
    assert ensure_datetime_with_tz(
        timezone("Brazil/East").localize(datetime.datetime(2003, 1, 2, 0, 0, 0)),
        tz="Europe/Oslo",
    ) == timezone("Brazil/East").localize(datetime.datetime(2003, 1, 2, 0, 0, 0))
# ----- 예제 #12 (Example #12), score: 0 -----
def test_generate_tag_read_query(AspenHandler, read_type):
    """Check the Aspen XML query AspenHandler builds for each reader type."""
    begin = utils.ensure_datetime_with_tz("2020-06-24 17:00:00")
    end = utils.ensure_datetime_with_tz("2020-06-24 18:00:00")
    sample_ts = pd.Timedelta(SAMPLE_TIME, unit="s")
    res = AspenHandler.generate_read_query(
        "ATCAI", None, begin, end, sample_ts, getattr(ReaderType, read_type)
    )

    # Common leading part shared by all historical query types.
    header = ('<Q f="d" allQuotes="1"><Tag><N><![CDATA[ATCAI]]></N>'
              "<D><![CDATA[sourcename]]></D><F><![CDATA[VAL]]></F>"
              "<HF>0</HF><St>1593010800000</St><Et>1593014400000</Et>")

    def agg_query(rt_code):
        # All aggregate reader types share the same layout; only the Aspen
        # RT code differs.
        return (header + f"<RT>{rt_code}</RT><O>0</O><S>0</S><P>60</P><PU>3</PU>"
                "<AM>0</AM><AS>0</AS><AA>0</AA><DSA>0</DSA></Tag></Q>")

    expected = {
        "RAW": header + "<RT>0</RT><X>100000</X><O>0</O></Tag></Q>",
        "SHAPEPRESERVING": header + "<RT>2</RT><X>100000</X><O>0</O><S>0</S></Tag></Q>",
        "INT": header + "<RT>1</RT><S>0</S><P>60</P><PU>3</PU></Tag></Q>",
        "MIN": agg_query(14),
        "MAX": agg_query(13),
        "RNG": agg_query(15),
        "AVG": agg_query(12),
        "VAR": agg_query(18),
        "STD": agg_query(17),
        # Placeholder values — presumably these read types are never
        # parametrized for this test; confirm against the fixture.
        "COUNT": "whatever",
        "GOOD": "whatever",
        "BAD": "whatever",
        "TOTAL": "whatever",
        "SUM": "whatever",
        "SNAPSHOT": ('<Q f="d" allQuotes="1" rt="1593014400000" uc="0">'
                     "<Tag><N><![CDATA[ATCAI]]></N>"
                     "<D><![CDATA[sourcename]]></D><F><![CDATA[VAL]]></F>"
                     "<VS>1</VS><S>0</S></Tag></Q>"),
    }
    assert res == expected[read_type]
# ----- 예제 #13 (Example #13), score: 0 -----
def test_ensure_is_datetime_datetime():
    """Naive datetimes get localized to Europe/Oslo; aware ones compare equal as-is."""
    naive = datetime.datetime(2018, 1, 10, 13, 45, 15)
    aware = timezone("Europe/Oslo").localize(naive)

    assert ensure_datetime_with_tz(aware) == aware
    assert ensure_datetime_with_tz(naive) == aware
# ----- 예제 #14 (Example #14), score: 0 -----
def test_ensure_is_datetime_pd_timestamp():
    """pd.Timestamp inputs behave like datetimes: naive ones get Europe/Oslo."""
    naive = pd.Timestamp(2018, 1, 10, 13, 45, 15)
    aware = timezone("Europe/Oslo").localize(naive)

    assert ensure_datetime_with_tz(aware) == aware
    assert ensure_datetime_with_tz(naive) == aware