Example #1
    def test_ts_get_krls_predictor(self):
        t0=api.utctime_now()
        ta=api.TimeAxis(t0, api.deltahours(1), 30*24)
        data=np.sin(np.linspace(0, 2*np.pi, ta.size()))
        ts_data=api.TimeSeries(ta, data, api.POINT_INSTANT_VALUE)

        ts=api.TimeSeries("a")

        # an unbound (symbolic) ts cannot provide a predictor
        with self.assertRaises(Exception):
            ts.get_krls_predictor()

        fbi=ts.find_ts_bind_info()
        fbi[0].ts.bind(ts_data)
        ts.bind_done()

        pred=ts.get_krls_predictor(api.deltahours(3))

        ts_krls=pred.predict(ta)
        self.assertEqual(len(ts_krls), len(ts_data))
        ts_mse=pred.mse_ts(ts_data)
        self.assertEqual(len(ts_mse), len(ts_data))
        for i in range(len(ts_krls)):
            self.assertAlmostEqual(ts_krls.values[i], ts_data.values[i], places=1)
            self.assertAlmostEqual(ts_mse.values[i], 0, places=2)
        self.assertAlmostEqual(pred.predictor_mse(ts_data), 0, places=2)
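
The unbound/bind pattern above recurs in several of the examples below: api.TimeSeries("a") is a symbolic reference that must be bound to concrete data before it can be evaluated. A minimal sketch of the workflow, using only calls that appear in these examples (api and np imported as usual):

ta = api.TimeAxis(api.utctime_now(), api.deltahours(1), 24)
data = api.TimeSeries(ta, np.linspace(0.0, 1.0, ta.size()), api.POINT_INSTANT_VALUE)

expr = api.TimeSeries("a")*2.0        # expression over a symbolic, unbound reference
for bind_info in expr.find_ts_bind_info():
    bind_info.ts.bind(data)           # attach concrete data to each reference
expr.bind_done()                      # finish binding; expr is now evaluable
assert not expr.needs_bind()
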
Example #2
    def test_extend_vector_of_timeseries(self):
        t0=api.utctime_now()
        dt=api.deltahours(1)
        n=512

        tsvector=api.TsVector()

        ta=api.TimeAxisFixedDeltaT(t0 + 3*n*dt, dt, 2*n)

        tsvector.push_back(api.TimeSeries(
            ta=api.TimeAxisFixedDeltaT(t0, dt, 2*n),
            fill_value=1.0, point_fx=api.point_interpretation_policy.POINT_AVERAGE_VALUE))
        tsvector.push_back(api.TimeSeries(
            ta=api.TimeAxisFixedDeltaT(t0 + 2*n*dt, dt, 2*n),
            fill_value=2.0, point_fx=api.point_interpretation_policy.POINT_AVERAGE_VALUE))

        extension=api.TimeSeries(ta=ta, fill_value=8.0, point_fx=api.point_interpretation_policy.POINT_AVERAGE_VALUE)

        # extend after all time-series in the vector
        extended_tsvector=tsvector.extend_ts(extension)

        # assert first element
        for i in range(2*n):
            self.assertEqual(extended_tsvector[0](t0 + i*dt), 1.0)
        for i in range(n):
            self.assertTrue(math.isnan(extended_tsvector[0](t0 + (2*n + i)*dt)))
        for i in range(2*n):
            self.assertEqual(extended_tsvector[0](t0 + (3*n + i)*dt), 8.0)

        # assert second element
        for i in range(2*n):
            self.assertEqual(extended_tsvector[1](t0 + (2*n + i)*dt), 2.0)
        for i in range(n):
            self.assertEqual(extended_tsvector[1](t0 + (4*n + i)*dt), 8.0)

        tsvector_2=api.TsVector()
        tsvector_2.push_back(api.TimeSeries(
            ta=api.TimeAxisFixedDeltaT(t0 + 2*n*dt, dt, 4*n),
            fill_value=10.0, point_fx=api.point_interpretation_policy.POINT_AVERAGE_VALUE))
        tsvector_2.push_back(api.TimeSeries(
            ta=api.TimeAxisFixedDeltaT(t0 + 4*n*dt, dt, 4*n),
            fill_value=20.0, point_fx=api.point_interpretation_policy.POINT_AVERAGE_VALUE))

        # extend each element in tsvector by the corresponding element in tsvector_2
        extended_tsvector=tsvector.extend_ts(tsvector_2)

        # assert first element
        for i in range(2*n):
            self.assertEqual(extended_tsvector[0](t0 + i*dt), 1.0)
        for i in range(4*n):
            self.assertEqual(extended_tsvector[0](t0 + (2*n + i)*dt), 10.0)

        # assert second element
        for i in range(2*n):
            self.assertEqual(extended_tsvector[1](t0 + (2*n + i)*dt), 2.0)
        for i in range(4*n):
            self.assertEqual(extended_tsvector[1](t0 + (4*n + i)*dt), 20.0)
Example #3
    def test_trim_day(self):
        t = api.utctime_now()
        td = self.std.trim(t, api.Calendar.DAY)
        c = self.std.calendar_units(td)
        a = self.std.calendar_units(t)
        self.assertEqual(c.second, 0, 'trim day should set seconds to 0')
        self.assertEqual(c.minute, 0, 'trim day should set minutes to 0')
        self.assertEqual(c.hour, 0, 'trim day should set hours to 0')
        self.assertEqual(a.year, c.year, 'trim day should leave the year unchanged')
        self.assertEqual(a.month, c.month, 'trim day should leave the month unchanged')
        self.assertEqual(a.day, c.day, 'trim day should leave the day unchanged')
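
The same trim pattern works for any calendar unit. A short sketch (assuming the api import used throughout; Calendar.WEEK and Calendar.MONTH also appear in Examples #15 and #23):

utc = api.Calendar()
t = api.utctime_now()
start_of_day = utc.trim(t, api.Calendar.DAY)      # 00:00:00 of the current day
start_of_week = utc.trim(t, api.Calendar.WEEK)    # start of the current week
start_of_month = utc.trim(t, api.Calendar.MONTH)  # first instant of the current month
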
Example #5
    def test_basic_timeseries_math_operations(self):
        """
        Test that timeseries functionality is exposed, and briefly verify correctness
        of operators (the Shyft core does the rest of the testing, not repeated here).
        """
        t0=api.utctime_now()
        dt=api.deltahours(1)
        n=240
        ta=api.TimeAxis(t0, dt, n)

        a=api.TimeSeries(ta=ta, fill_value=3.0, point_fx=api.point_interpretation_policy.POINT_AVERAGE_VALUE)
        self.assertTrue(a)  # should evaluate to true
        b=api.TimeSeries(ta=ta, fill_value=1.0, point_fx=api.point_interpretation_policy.POINT_INSTANT_VALUE)
        b.fill(2.0)  # demo how to fill a point ts
        self.assertAlmostEqual((1.0 - b).values.to_numpy().max(), -1.0)
        self.assertAlmostEqual((b - 1.0).values.to_numpy().max(), 1.0)
        c=a + b*3.0 - a/2.0  # operator + * - /
        d=-a  # unary minus
        e=a.average(ta)  # average
        f=api.max(c, 300.0)
        g=api.min(c, -300.0)
        # h = a.max(c, 300) # class static method not supported
        h=c.max(300.0)
        k=c.min(-300)

        self.assertEqual(a.size(), n)
        self.assertEqual(b.size(), n)
        self.assertEqual(c.size(), n)
        self.assertAlmostEqual(c.value(0), 3.0 + 2.0*3.0 - 3.0/2.0)  # 7.5
        for i in range(n):
            self.assertAlmostEqual(c.value(i), a.value(i) + b.value(i)*3.0 - a.value(i)/2.0, delta=0.0001)
            self.assertAlmostEqual(d.value(i), - a.value(i), delta=0.0001)
            self.assertAlmostEqual(e.value(i), a.value(i), delta=0.00001)
            self.assertAlmostEqual(f.value(i), +300.0, delta=0.00001)
            self.assertAlmostEqual(h.value(i), +300.0, delta=0.00001)
            self.assertAlmostEqual(g.value(i), -300.0, delta=0.00001)
            self.assertAlmostEqual(k.value(i), -300.0, delta=0.00001)
        # now some more detailed tests for setting values
        b.set(0, 3.0)
        self.assertAlmostEqual(b.value(0), 3.0)
        # the expression c references b, so c.value(0) now evaluates with b(0)=3.0
        self.assertAlmostEqual(c.value(1), 7.5, delta=0.0001)  # unchanged: 3 + 2*3 - 1.5 = 7.5
        self.assertAlmostEqual(c.value(0), 10.5, delta=0.0001)  # 3 + 3*3 - 1.5 = 10.5
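
The last two assertions above demonstrate the central point: c is an expression that keeps references to a and b rather than a copy of their values, so it re-evaluates whenever the underlying series change. A minimal sketch of the same behavior (assuming the usual api import):

ta = api.TimeAxis(api.utctime_now(), api.deltahours(1), 3)
a = api.TimeSeries(ta=ta, fill_value=1.0, point_fx=api.point_interpretation_policy.POINT_AVERAGE_VALUE)
expr = a*2.0                             # expression node, not a materialized result
a.set(0, 10.0)                           # mutate the source series
assert abs(expr.value(0) - 20.0) < 1e-9  # the expression sees the new value
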
Example #6
    def test_timeseries_vector(self):
        c = api.Calendar()
        t0 = api.utctime_now()
        dt = api.deltahours(1)
        n = 240
        ta = api.Timeaxis(t0, dt, n)

        a = api.Timeseries(ta=ta, fill_value=3.0, point_fx=api.point_interpretation_policy.POINT_AVERAGE_VALUE)
        b = api.Timeseries(ta=ta, fill_value=2.0, point_fx=api.point_interpretation_policy.POINT_AVERAGE_VALUE)

        v = api.TsVector()
        v.append(a)
        v.append(b)

        self.assertEqual(len(v), 2)
        self.assertAlmostEqual(v[0].value(0), 3.0, msg="expect first ts to be 3.0")
        aa = api.Timeseries(ta=a.time_axis, values=a.values,
                            point_fx=api.point_interpretation_policy.POINT_AVERAGE_VALUE)  # copy construct (really copy the values!)
        a.fill(1.0)
        self.assertAlmostEqual(v[0].value(0), 1.0, msg="expect first ts to be 1.0, because the vector keeps a reference")
        self.assertAlmostEqual(aa.value(0), 3.0)
Example #7
    def test_timeseries_vector(self):
        c=api.Calendar()
        t0=api.utctime_now()
        dt=api.deltahours(1)
        n=240
        ta=api.TimeAxisFixedDeltaT(t0, dt, n)

        a=api.TimeSeries(ta=ta, fill_value=3.0, point_fx=api.point_interpretation_policy.POINT_AVERAGE_VALUE)
        b=api.TimeSeries(ta=ta, fill_value=2.0, point_fx=api.point_interpretation_policy.POINT_AVERAGE_VALUE)

        v=api.TsVector()
        v.append(a)
        v.append(b)

        self.assertEqual(len(v), 2)
        self.assertAlmostEqual(v[0].value(0), 3.0, msg="expect first ts to be 3.0")
        aa=api.TimeSeries(ta=a.time_axis, values=a.values,
                          point_fx=api.point_interpretation_policy.POINT_AVERAGE_VALUE)  # copy construct (really copy the values!)
        a.fill(1.0)
        self.assertAlmostEqual(v[0].value(0), 1.0, msg="expect first ts to be 1.0, because the vector keeps a reference")
        self.assertAlmostEqual(aa.value(0), 3.0)
Example #8
    def test_basic_timeseries_math_operations(self):
        """
        Test that timeseries functionality is exposed, and briefly verify correctness
        of operators (the Shyft core does the rest of the testing, not repeated here).
        """
        t0 = api.utctime_now()
        dt = api.deltahours(1)
        n = 240
        ta = api.Timeaxis2(t0, dt, n)

        a = api.Timeseries(ta=ta, fill_value=3.0, point_fx=api.point_interpretation_policy.POINT_AVERAGE_VALUE)
        b = api.Timeseries(ta=ta, fill_value=1.0)
        b.fill(2.0)  # demo how to fill a point ts
        c = a + b * 3.0 - a / 2.0  # operator + * - /
        d = -a  # unary minus
        e = a.average(ta)  # average
        f = api.max(c, 300.0)
        g = api.min(c, -300.0)
        h = a.max(c, 300)
        k = a.min(c, -300)

        self.assertEqual(a.size(), n)
        self.assertEqual(b.size(), n)
        self.assertEqual(c.size(), n)
        self.assertAlmostEqual(c.value(0), 3.0 + 2.0 * 3.0 - 3.0 / 2.0)  # 7.5
        for i in range(n):
            self.assertAlmostEqual(c.value(i), a.value(i) + b.value(i) * 3.0 - a.value(i) / 2.0, delta=0.0001)
            self.assertAlmostEqual(d.value(i), - a.value(i), delta=0.0001)
            self.assertAlmostEqual(e.value(i), a.value(i), delta=0.00001)
            self.assertAlmostEqual(f.value(i), +300.0, delta=0.00001)
            self.assertAlmostEqual(h.value(i), +300.0, delta=0.00001)
            self.assertAlmostEqual(g.value(i), -300.0, delta=0.00001)
            self.assertAlmostEqual(k.value(i), -300.0, delta=0.00001)
        # now some more detailed tests for setting values
        b.set(0, 3.0)
        self.assertAlmostEqual(b.value(0), 3.0)
        # the expression c references b, so c.value(0) now evaluates with b(0)=3.0
        self.assertAlmostEqual(c.value(1), 7.5, delta=0.0001)  # unchanged: 3 + 2*3 - 1.5 = 7.5
        self.assertAlmostEqual(c.value(0), 10.5, delta=0.0001)  # 3 + 3*3 - 1.5 = 10.5
Example #9
    def test_krls_ts(self):
        t0=api.utctime_now()
        ta=api.TimeAxis(t0, api.deltahours(1), 30*24)
        data=np.sin(np.linspace(0, 2*np.pi, ta.size()))
        ts_data=api.TimeSeries(ta, data, api.POINT_INSTANT_VALUE)

        ts=api.TimeSeries("a")
        ts_krls=ts.krls_interpolation(api.deltahours(3))

        ts_krls_blob=ts_krls.serialize()
        ts2_krls=api.TimeSeries.deserialize(ts_krls_blob)

        self.assertTrue(ts2_krls.needs_bind())
        fbi=ts2_krls.find_ts_bind_info()
        self.assertEqual(len(fbi), 1)
        fbi[0].ts.bind(ts_data)
        ts2_krls.bind_done()
        self.assertFalse(ts2_krls.needs_bind())

        self.assertEqual(len(ts2_krls), len(ts_data))
        for i in range(len(ts2_krls)):
            self.assertAlmostEqual(ts2_krls.values[i], ts_data.values[i], places=1)
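
Serialization preserves unbound references, which is exactly what the test above relies on: the expression is serialized before any data exists, and binding happens after deserialization. The round trip in compact form (same api import):

expr = api.TimeSeries("a").krls_interpolation(api.deltahours(3))
blob = expr.serialize()                   # expression blob, still unbound
expr2 = api.TimeSeries.deserialize(blob)
assert expr2.needs_bind()                 # references survive the round trip
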
Example #10
    def __init__(self, *args, **kwargs):
        super(DtssTestCase, self).__init__(*args, **kwargs)
        self.callback_count = 0
        self.find_count = 0
        self.ts_infos = TsInfoVector()
        self.rd_throws = False
        self.cache_reads = False
        self.cache_dtss = None
        utc = Calendar()
        t_now = utctime_now()
        self.stored_tsv = list()

        for i in range(30):
            self.ts_infos.append(
                TsInfo(name=fake_store_url('{0}'.format(i)),
                       point_fx=point_fx.POINT_AVERAGE_VALUE,
                       delta_t=deltahours(1),
                       olson_tz_id='',
                       data_period=UtcPeriod(utc.time(2017, 1, 1),
                                             utc.time(2018, 1, 1)),
                       created=t_now,
                       modified=t_now))
Example #11
    def test_rating_curve_ts(self):
        t0=api.utctime_now()
        ta=api.TimeAxis(t0, api.deltaminutes(30), 48*2)
        data=np.linspace(0, 10, ta.size())
        ts=api.TimeSeries(ta, data, api.POINT_INSTANT_VALUE)

        rcf1=api.RatingCurveFunction()
        rcf1.add_segment(0, 1, 0, 1)
        rcf1.add_segment(api.RatingCurveSegment(5, 2, 0, 1))

        rcf2=api.RatingCurveFunction()
        rcf2.add_segment(0, 3, 0, 1)
        rcf2.add_segment(api.RatingCurveSegment(8, 4, 0, 1))

        rcp=api.RatingCurveParameters()
        rcp.add_curve(t0, rcf1)
        rcp.add_curve(t0 + api.deltahours(24), rcf2)

        sts=api.TimeSeries("a")
        rcsts=sts.rating_curve(rcp)

        rcsts_blob=rcsts.serialize()
        rcsts_2=api.TimeSeries.deserialize(rcsts_blob)

        self.assertTrue(rcsts_2.needs_bind())
        fbi=rcsts_2.find_ts_bind_info()
        self.assertEqual(len(fbi), 1)
        fbi[0].ts.bind(ts)
        rcsts_2.bind_done()
        self.assertFalse(rcsts_2.needs_bind())

        self.assertEqual(len(rcsts_2), len(ts))
        for i in range(rcsts_2.size()):
            expected=(1*ts.get(i).v if ts.get(i).v < 5 else 2*ts.get(i).v) if ts.get(i).t < t0 + api.deltahours(24) else (
                3*ts.get(i).v if ts.get(i).v < 8 else 4*ts.get(i).v)
            self.assertEqual(rcsts_2.get(i).t, ts.get(i).t)
            self.assertEqual(rcsts_2.get(i).v, expected)
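
The expected values in the loop are consistent with each rating-curve segment mapping a water level h to a flow a*(h - b)**c, active from its lower level upwards; with b=0 and c=1 as above, the segments reduce to simple factors. A small check of that reading (hypothetical helper, not part of the shyft api):

def segment_flow(h, a, b, c):
    # hypothetical helper: flow for one rating-curve segment, as inferred from the test above
    return a*(h - b)**c

assert segment_flow(3.0, 1, 0, 1) == 3.0   # rcf1 below level 5: flow = 1*h
assert segment_flow(6.0, 2, 0, 1) == 12.0  # rcf1 from level 5:  flow = 2*h
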
Example #12
    def start_date(self):
        utc = api.Calendar()
        today = utc.trim(api.utctime_now(), api.Calendar.DAY)
        return today - api.Calendar.DAY  # yesterday
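
Since Calendar.DAY is a fixed time span, day-aligned timestamps can be shifted with plain arithmetic, as the return statement shows. Building a whole-day period from it (a sketch; UtcPeriod as seen in Example #10, assumed available on the api module):

utc = api.Calendar()
today = utc.trim(api.utctime_now(), api.Calendar.DAY)
yesterday = today - api.Calendar.DAY
last_day = api.UtcPeriod(yesterday, today)  # the most recent complete day (UtcPeriod assumed on api)
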
Example #13
    def test_a_time_series_vector(self):
        c=api.Calendar()
        t0=api.utctime_now()
        dt=api.deltahours(1)
        n=240
        ta=api.TimeAxisFixedDeltaT(t0, dt, n)

        a=api.TimeSeries(ta=ta, fill_value=3.0, point_fx=api.point_interpretation_policy.POINT_AVERAGE_VALUE)
        b=api.TimeSeries(ta=ta, fill_value=2.0, point_fx=api.point_interpretation_policy.POINT_AVERAGE_VALUE)
        c=api.TimeSeries(ta=ta, fill_value=10.0, point_fx=api.point_interpretation_policy.POINT_AVERAGE_VALUE)
        v=api.TsVector()
        v.append(a)
        v.append(b)

        self.assertEqual(len(v), 2)
        self.assertAlmostEqual(v[0].value(0), 3.0, msg="expect first ts to be 3.0")
        aa=api.TimeSeries(ta=a.time_axis, values=a.values,
                          point_fx=api.point_interpretation_policy.POINT_AVERAGE_VALUE)  # copy construct (really copy the values!)
        a.fill(1.0)
        self.assertAlmostEqual(v[0].value(0), 1.0, msg="expect first ts to be 1.0, because the vector keeps a reference")
        self.assertAlmostEqual(aa.value(0), 3.0)

        vt=v.values_at(t0).to_numpy()
        self.assertEqual(len(vt), len(v))
        v1=v[0:1]
        self.assertEqual(len(v1), 1)
        self.assertAlmostEqual(v1[0].value(0), 1.0)
        v_clone=api.TsVector(v)
        self.assertEqual(len(v_clone), len(v))
        del v_clone[-1]
        self.assertEqual(len(v_clone), 1)
        self.assertEqual(len(v), 2)
        v_slice_all=v.slice(api.IntVector())
        v_slice_1=v.slice(api.IntVector([1]))
        v_slice_12=v.slice(api.IntVector([0, 1]))
        self.assertEqual(len(v_slice_all), 2)
        self.assertEqual(len(v_slice_1), 1)
        self.assertAlmostEqual(v_slice_1[0].value(0), 2.0)
        self.assertEqual(len(v_slice_12), 2)
        self.assertAlmostEqual(v_slice_12[0].value(0), 1.0)

        # multiplication by scalar
        v_x_2a=v*2.0
        v_x_2b=2.0*v
        for i in range(len(v)):
            self.assertAlmostEqual(v_x_2a[i].value(0), 2*v[i].value(0))
            self.assertAlmostEqual(v_x_2b[i].value(0), 2*v[i].value(0))

        # division by scalar
        v_d_a=v/3.0
        v_d_b=3.0/v
        for i in range(len(v)):
            self.assertAlmostEqual(v_d_a[i].value(0), v[i].value(0)/3.0)
            self.assertAlmostEqual(v_d_b[i].value(0), 3.0/v[i].value(0))

        # addition by scalar
        v_a_a=v + 3.0
        v_a_b=3.0 + v
        for i in range(len(v)):
            self.assertAlmostEqual(v_a_a[i].value(0), v[i].value(0) + 3.0)
            self.assertAlmostEqual(v_a_b[i].value(0), 3.0 + v[i].value(0))

        # sub by scalar
        v_s_a=v - 3.0
        v_s_b=3.0 - v
        for i in range(len(v)):
            self.assertAlmostEqual(v_s_a[i].value(0), v[i].value(0) - 3.0)
            self.assertAlmostEqual(v_s_b[i].value(0), 3.0 - v[i].value(0))

        # multiplication vector by ts
        v_x_ts=v*c
        ts_x_v=c*v
        for i in range(len(v)):
            self.assertAlmostEqual(v_x_ts[i].value(0), v[i].value(0)*c.value(0))
            self.assertAlmostEqual(ts_x_v[i].value(0), c.value(0)*v[i].value(0))

        # division vector by ts
        v_d_ts=v/c
        ts_d_v=c/v
        for i in range(len(v)):
            self.assertAlmostEqual(v_d_ts[i].value(0), v[i].value(0)/c.value(0))
            self.assertAlmostEqual(ts_d_v[i].value(0), c.value(0)/v[i].value(0))

        # add vector by ts
        v_a_ts=v + c
        ts_a_v=c + v
        for i in range(len(v)):
            self.assertAlmostEqual(v_a_ts[i].value(0), v[i].value(0) + c.value(0))
            self.assertAlmostEqual(ts_a_v[i].value(0), c.value(0) + v[i].value(0))

        # sub vector by ts
        v_s_ts=v - c
        ts_s_v=c - v
        for i in range(len(v)):
            self.assertAlmostEqual(v_s_ts[i].value(0), v[i].value(0) - c.value(0))
            self.assertAlmostEqual(ts_s_v[i].value(0), c.value(0) - v[i].value(0))

        # vector mult vector
        va=v
        vb=2.0*v

        v_m_v=va*vb
        self.assertEqual(len(v_m_v), len(va))
        for i in range(len(va)):
            self.assertAlmostEqual(v_m_v[i].value(0), va[i].value(0)*vb[i].value(0))

        # vector div vector
        v_d_v=va/vb
        self.assertEqual(len(v_d_v), len(va))
        for i in range(len(va)):
            self.assertAlmostEqual(v_d_v[i].value(0), va[i].value(0)/vb[i].value(0))

        # vector add vector
        v_a_v=va + vb
        self.assertEqual(len(v_a_v), len(va))
        for i in range(len(va)):
            self.assertAlmostEqual(v_a_v[i].value(0), va[i].value(0) + vb[i].value(0))

        # vector sub vector
        v_s_v=va - vb
        self.assertEqual(len(v_s_v), len(va))
        for i in range(len(va)):
            self.assertAlmostEqual(v_s_v[i].value(0), va[i].value(0) - vb[i].value(0))

        # vector unary minus
        v_u=- va
        self.assertEqual(len(v_u), len(va))
        for i in range(len(va)):
            self.assertAlmostEqual(v_u[i].value(0), -va[i].value(0))

        # integral functions, just to verify exposure works, and one value is according to spec.
        ta2=api.TimeAxis(t0, dt*24, n//24)
        v_avg=v.average(ta2)
        v_int=v.integral(ta2)
        v_acc=v.accumulate(ta2)
        v_sft=v.time_shift(dt*24)
        self.assertIsNotNone(v_avg)
        self.assertIsNotNone(v_int)
        self.assertIsNotNone(v_acc)
        self.assertIsNotNone(v_sft)
        self.assertAlmostEqual(v_avg[0].value(0), 1.0)
        self.assertAlmostEqual(v_int[0].value(0), 86400.0)
        self.assertAlmostEqual(v_acc[0].value(0), 0.0)
        self.assertAlmostEqual(v_sft[0].time(0), t0 + dt*24)

        # min/max functions
        min_v_double=va.min(-1000.0)
        max_v_double=va.max(1000.0)
        self.assertAlmostEqual(min_v_double[0].value(0), -1000.0)
        self.assertAlmostEqual(max_v_double[0].value(0), +1000.0)
        min_v_double=api.min(va, -1000.0)
        max_v_double=api.max(va, +1000.0)
        self.assertAlmostEqual(min_v_double[0].value(0), -1000.0)
        self.assertAlmostEqual(max_v_double[0].value(0), +1000.0)
        # c = 10.0
        c1000=100.0*c
        min_v_double=va.min(-c1000)
        max_v_double=va.max(c1000)
        self.assertAlmostEqual(min_v_double[0].value(0), -c1000.value(0))
        self.assertAlmostEqual(max_v_double[0].value(0), c1000.value(0))
        min_v_double=api.min(va, -c1000)
        max_v_double=api.max(va, c1000)
        self.assertAlmostEqual(min_v_double[0].value(0), -c1000.value(0))
        self.assertAlmostEqual(max_v_double[0].value(0), c1000.value(0))

        v1000=va*1000.0
        min_v_double=va.min(-v1000)
        max_v_double=va.max(v1000)
        self.assertAlmostEqual(min_v_double[0].value(0), -v1000[0].value(0))
        self.assertAlmostEqual(max_v_double[0].value(0), v1000[0].value(0))
        min_v_double=api.min(va, -v1000)
        max_v_double=api.max(va, v1000)
        self.assertAlmostEqual(min_v_double[0].value(0), -v1000[0].value(0))
        self.assertAlmostEqual(max_v_double[0].value(0), v1000[0].value(0))

        # finally, test that exception is raised if we try to multiply two unequal sized vectors

        try:
            _=v_clone*va
            self.fail('expected an exception for an unequal-sized ts-vector op')
        except RuntimeError:
            pass

        # also test that empty vector + vector -> vector etc.
        va_2=va + api.TsVector()
        va_3=api.TsVector() + va
        va_4=va - api.TsVector()
        va_5=api.TsVector() - va
        va_x=api.TsVector() + api.TsVector()
        self.assertEqual(len(va_2), len(va))
        self.assertEqual(len(va_3), len(va))
        self.assertEqual(len(va_4), len(va))
        self.assertEqual(len(va_5), len(va))
        self.assertEqual(not va_x, True)
        self.assertEqual(not va_2, False)
        va_2_ok=False
        va_x_ok=True
        if va_2:
            va_2_ok=True
        if va_x:
            va_x_ok=False
        self.assertTrue(va_2_ok)
        self.assertTrue(va_x_ok)
Example #14
    def test_ts_extend(self):
        t0=api.utctime_now()
        dt=api.deltahours(1)
        n=512
        ta_a=api.TimeAxisFixedDeltaT(t0, dt, 2*n)
        ta_b=api.TimeAxisFixedDeltaT(t0 + n*dt, dt, 2*n)
        ta_c=api.TimeAxisFixedDeltaT(t0 + 2*n*dt, dt, 2*n)
        ta_d=api.TimeAxisFixedDeltaT(t0 + 3*n*dt, dt, 2*n)

        a=api.TimeSeries(ta=ta_a, fill_value=1.0, point_fx=api.point_interpretation_policy.POINT_AVERAGE_VALUE)
        b=api.TimeSeries(ta=ta_b, fill_value=2.0, point_fx=api.point_interpretation_policy.POINT_AVERAGE_VALUE)
        c=api.TimeSeries(ta=ta_c, fill_value=4.0, point_fx=api.point_interpretation_policy.POINT_AVERAGE_VALUE)
        d=api.TimeSeries(ta=ta_d, fill_value=8.0, point_fx=api.point_interpretation_policy.POINT_AVERAGE_VALUE)

        # default behavior: extend from end of a
        ac=a.extend(c)

        for i in range(2*n):  # values from first ts
            self.assertEqual(ac(t0 + i*dt), 1.0)
        for i in range(2*n):  # values from extension ts
            self.assertEqual(ac(t0 + (i + 2*n)*dt), 4.0)

        # default behavior: extend from end of a, fill gap with nan
        ad=a.extend(d)

        for i in range(2*n):  # values from first
            self.assertEqual(ad(t0 + i*dt), 1.0)
        for i in range(n):  # gap
            self.assertTrue(math.isnan(ad(t0 + (i + 2*n)*dt)))
        for i in range(2*n):  # extension
            self.assertEqual(ad(t0 + (i + 3*n)*dt), 8.0)

        # split at the first value of d instead of last of c
        cd=c.extend(d, split_policy=api.extend_split_policy.RHS_FIRST)

        for i in range(n):  # first, only until the extension start
            self.assertEqual(cd(t0 + (2*n + i)*dt), 4.0)
        for i in range(2*n):  # extension
            self.assertEqual(cd(t0 + (3*n + i)*dt), 8.0)

        # split at a given time step, and extend the last value through the gap
        ac=a.extend(c, split_policy=api.extend_split_policy.AT_VALUE, split_at=(t0 + dt*n//2),
                    fill_policy=api.extend_fill_policy.USE_LAST)

        for i in range(n//2):  # first, only until the given split value
            self.assertEqual(ac(t0 + i*dt), 1.0)
        for i in range(3*n//2):  # gap, uses last value before gap
            self.assertEqual(ac(t0 + (n//2 + i)*dt), 1.0)
        for i in range(2*n):  # extension
            self.assertEqual(ac(t0 + (2*n + i)*dt), 4.0)

        # split at the beginning of the ts to extend when the extension starts before it
        cb=c.extend(b, split_policy=api.extend_split_policy.AT_VALUE, split_at=(t0 + 2*n*dt))

        for i in range(n):  # don't extend before
            self.assertTrue(math.isnan(cb(t0 + (n + i)*dt)))
        for i in range(n):  # we split at the beginning => only values from extension
            self.assertEqual(cb(t0 + (2*n + i)*dt), 2.0)
        for i in range(n):  # no values after extension
            self.assertTrue(math.isnan(cb(t0 + (3*n + i)*dt)))

        # extend with ts starting after the end, fill the gap with a given value
        ad=a.extend(d, fill_policy=api.extend_fill_policy.FILL_VALUE, fill_value=5.5)

        for i in range(2*n):  # first
            self.assertEqual(ad(t0 + i*dt), 1.0)
        for i in range(n):  # gap, filled with 5.5
            self.assertEqual(ad(t0 + (2*n + i)*dt), 5.5)
        for i in range(2*n):  # extension
            self.assertEqual(ad(t0 + (3*n + i)*dt), 8.0)

        # check extend with a more exotic combination of time-axes (we had an issue with this...)
        a=api.TimeSeries(api.TimeAxis(0, 1, 10), fill_value=1.0, point_fx=api.POINT_AVERAGE_VALUE)
        b=api.TimeSeries(api.TimeAxis(api.Calendar(), 0, 1, 20), fill_value=2.0, point_fx=api.POINT_AVERAGE_VALUE)
        ab=a.extend(b)
        ba=b.extend(a, split_policy=api.extend_split_policy.AT_VALUE, split_at=a.time_axis.time(5))
        self.assertAlmostEqual(ab.value(0), 1.0)
        self.assertAlmostEqual(ab.value(11), 2.0)
        self.assertAlmostEqual(ba.value(0), 2.0)
        self.assertAlmostEqual(ab.value(7), 1.0)
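
To summarize the gap handling exercised above: when the extension starts after the end of the original series, the gap is NaN by default, can repeat the last value with USE_LAST, or take a constant with FILL_VALUE. A compact, self-contained sketch (same api import):

t0 = api.utctime_now()
dt = api.deltahours(1)
a = api.TimeSeries(ta=api.TimeAxisFixedDeltaT(t0, dt, 4), fill_value=1.0,
                   point_fx=api.point_interpretation_policy.POINT_AVERAGE_VALUE)
d = api.TimeSeries(ta=api.TimeAxisFixedDeltaT(t0 + 6*dt, dt, 4), fill_value=8.0,
                   point_fx=api.point_interpretation_policy.POINT_AVERAGE_VALUE)

nan_gap = a.extend(d)                                               # default: NaN in the gap
held = a.extend(d, fill_policy=api.extend_fill_policy.USE_LAST)     # repeat a's last value
const = a.extend(d, fill_policy=api.extend_fill_policy.FILL_VALUE,
                 fill_value=5.5)                                    # a chosen constant
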
Example #15
from statkraft.ltm.io.run_repository import RunRepository
import shyft.api as sa
from statkraft.ltm.scripting import plot_ts, plot_percentiles
import matplotlib.pyplot as plt

t0 = sa.utctime_now() - sa.Calendar.WEEK

rr = RunRepository()

run_id = rr.find_closest_operational_run(t0)

run = rr.recreate(run_id=run_id)

sts = run.model.areas["Sorland"].aggregated_hydro.bypass
time_axis = sa.TimeAxis(run.start_utc, sa.Calendar.DAY, 100)
ts_vec = sts.mean(time_axis=time_axis, unit='MW', years=[1931, 1945])

plot_ts(ts_vec)
Example #16
import matplotlib.pyplot as plt
import shyft.api as sa
from statkraft.ltm.io.run_repository import RunRepository
from statkraft.ltm.scripting import plot_ts

calendar = sa.Calendar("Europe/Oslo")
utc_start = calendar.time(2018, 7, 1)
time_axis = sa.TimeAxis(calendar, utc_start, calendar.QUARTER, 1)

now = sa.utctime_now()
then = now - calendar.WEEK

rr = RunRepository()
rid1 = rr.find_closest_operational_run(now)
rid2 = rr.find_closest_operational_run(then)

run1 = rr.recreate(run_id=rid1)
run2 = rr.recreate(run_id=rid2)

sp1 = run1.model.market.areas["NO2"].power_price.mean(time_axis=time_axis)
sp2 = run2.model.market.areas["NO2"].power_price.mean(time_axis=time_axis)

plot_ts(sp1 - sp2)
plt.legend([f"NO2 price difference: run[{rid1}] - run[{rid2}]"])
Example #18
    def setUp(self):
        self.c=api.Calendar()
        self.d=api.deltahours(1)
        self.n=24
        self.t=self.c.trim(api.utctime_now(), self.d)
        self.ta=api.Timeaxis(self.t, self.d, self.n)
Example #19
from statkraft.ltm.io.run_repository import RunRepository
import shyft.api as sa
from statkraft.ltm.state import quantity
from statkraft.ltm.scripting import plot_ts
import matplotlib.pyplot as plt
from statkraft.ltm.io.converter import to_pandas

rr = RunRepository()
t0 = sa.utctime_now() - sa.Calendar.DAY * 2
res = rr.search(labels=["operational", "norway"], created_from=t0)
areas = ["NO1", "NO2", "NO5"]
tsv = quantity(sa.TsVector(), "GWh")

tot_cons_list = []
legend_list = []
for key in res.keys():
    run_info = res[key]
    run = rr.recreate(run_id=key)
    legend_list.append(key)
    time_axis = sa.TimeAxis(sa.utctime_now(), sa.Calendar.DAY, 365)
    tot_cons = quantity(sa.TsVector(), "GWh")
    for key1 in run.model.market.areas.keys():
        this_area = run.model.market.areas[key1]
        if key1 in areas:
            cons = this_area.consumption.mean(unit="GWh", time_axis=time_axis)
            tot_cons += cons
    tot_cons_list.append(tot_cons)

diff_cons = tot_cons_list[0] - tot_cons_list[1]
tsv.extend(diff_cons.magnitude)
Example #20
    def test_speed(self):
        """
        the purpose of this test is to figure out the
        speed characteristics of qm.
        testcases of interest is
          12 (4x3)  forecast typical arome 4 times a day, use last 3 days, 1-3 hour dt, 14 days ahead
          100 historical scenarios
          time-axis wanted is
          next 14 days plus historical scenarios 3..60 weeks ahead

        expected performance:
          the first 14 days includes sorting 10 forcasts, 100 historical pr. timestep.
           the period after should be close to 'memcpy' performance.

        """
        # Arrange the inputs

        utc = Calendar()
        n_hist_ts = 100
        n_fc_days = 14
        n_hist_days = n_fc_days + 360
        n_fc_ts = 10
        t0 = utc.time(2017, 1, 1, 0, 0, 0)

        def generate_ts(ta: TimeAxis, n_fc: int) -> TsVector:
            fx_avg = ts_point_fx.POINT_AVERAGE_VALUE
            r = TsVector()
            w = 2 * math.pi / len(ta)
            for i in range(n_fc):
                a = np.random.ranf() * 20 - 10.0
                b = np.random.ranf() * 5.0
                v = dv([a + b * math.sin(w * i) for i in range(len(ta))])
                r.append(TimeSeries(ta, v, fx_avg))
            return r

        ta_hist = TimeAxis(t0, deltahours(1), 24 * n_hist_days)
        historical_scenario_ts = generate_ts(ta_hist, n_hist_ts)
        fc_set = TsVectorSet()
        fc_weight = dv()
        n_fc_sets = 4 * 2
        fc_every_dt = deltahours(6)  # six hours between each arome fc.
        dt_fc = deltahours(1)
        for i in range(n_fc_sets):
            t0_fc = t0 + fc_every_dt * i
            fc_set.append(
                generate_ts(TimeAxis(t0_fc, dt_fc, 24 * n_fc_days), n_fc_ts))
            fc_weight.append(float(3 + i))

        # interpolation_start= no_utctime
        # Act
        interpolated_quantiles = False
        qm_end_idx1 = 24 * (n_fc_days - 2)
        qm_end_idx2 = 24 * (n_fc_days - 1)
        n_ts = 0
        n_v = 0
        print(r"n_days\ttime_used[s]\n")
        tot_seconds = 0.0
        for h_days in range(n_fc_days + 10, n_hist_days, 30):
            ta_qm = TimeAxis(t0 + n_fc_sets * fc_every_dt, dt_fc, 24 * h_days)
            a0 = utctime_now()
            result = quantile_map_forecast(fc_set, fc_weight,
                                           historical_scenario_ts, ta_qm,
                                           ta_qm.time(qm_end_idx1),
                                           ta_qm.time(qm_end_idx2),
                                           interpolated_quantiles)
            self.assertIsNotNone(result)
            n_ts += len(result)
            n_v += len(result[0]) * len(result)
            a1 = utctime_now()
            tot_seconds += float(a1 - a0)
            print(f' {h_days}\t{float(a1-a0)}')
        print(
            f'Total of {n_ts} ts forecasted, {n_v/1000000} Mpts produced, Mb/s={8.0*n_v/1000000/tot_seconds}'
        )
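
Beyond the quantile mapping itself, the a0/a1 pattern in this test is the basic scripting use of utctime_now as a coarse stopwatch: take a timestamp before and after the work and subtract. A minimal sketch (import style as in Example #21):

from shyft.api import utctime_now

t_start = utctime_now()
total = sum(range(1_000_000))              # stand-in for the work being timed
elapsed_s = float(utctime_now() - t_start)
print(f'work took {elapsed_s} s')
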
Example #21
import socket
from datetime import datetime
import logging
import os
import traceback

from shyft.api import utctime_now  # To time the reading from SMG

from modules import r2_and_regression as reg
from modules import read_and_setup as rs

start_time = utctime_now()
logging_path = '../logging'

log_file = os.path.join(
    logging_path,  # folder with log files
    '{}#{}#{}#{}.log'.format(
        os.path.splitext(os.path.basename(__file__))[0],  # script file name
        socket.gethostname().lower(),  # host_name
        datetime.now().strftime('%Y%m%dT%H%M%S'),  # timestamp
        os.getpid()  # process ID
    ))
logging.basicConfig(
    filename=log_file,
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
logging.info('autojob started.')

try:
    auto_input = {}
    auto_input['tilsig'] = rs.read_and_setup('tilsig')
Example #22
import matplotlib.pyplot as plt
import shyft.api as sa
from statkraft.ltm.io.run_repository import RunRepository
from statkraft.ltm.scripting import plot_ts
from statkraft.ltm.state import quantity

rr = RunRepository()
rid = rr.find_closest_operational_run(sa.utctime_now())
run = rr.recreate(run_id=rid)

time_axis = sa.TimeAxis(run.start_utc, sa.Calendar.DAY, 52 * 7)

tsv = quantity(sa.TsVector(), "EUR/MWh")

legend_list = []
for area_name, area in run.model.areas.items():
    legend_list.append(area_name)
    tsv.extend(area.power_price.mean(time_axis=time_axis).magnitude)

plot_ts(tsv)
plt.legend(legend_list)
plt.show()
Example #23
from statkraft.ltm.io.run_repository import RunRepository
import shyft.api as sa
from statkraft.ltm.state import quantity
from statkraft.ltm.io.converter import to_pandas

rr = RunRepository()
t0 = sa.utctime_now() - sa.Calendar.DAY

run_id = rr.find_closest_operational_run(t0)
run = rr.recreate(run_id=run_id)
areas = ["SE1", "SE2", "SE3", "SE4"]
energy_inflow = quantity(sa.TsVector(), "GWh")
time_axis = sa.TimeAxis(sa.utctime_now(), sa.Calendar.MONTH, 24)

years = run.run_info.scenario_years

for key in run.model.areas.keys():
    area = run.model.areas[key]
    if area.market_price_area is not None and area.market_price_area.name in areas:
        storable_inflow = area.aggregated_hydro.storable_inflow(
            unit="GWh", time_axis=time_axis)
        non_storable_inflow = area.aggregated_hydro.nonstorable_inflow(
            unit="GWh", time_axis=time_axis)
        bypass = area.aggregated_hydro.bypass(unit="GWh", time_axis=time_axis)
        spill = area.aggregated_hydro.spillage(unit="GWh", time_axis=time_axis)
        energy_inflow += storable_inflow + non_storable_inflow - bypass - spill

df, pip = to_pandas(energy_inflow)
df = df.magnitude
df.columns = years + [df.columns[-1]]
df.to_csv("inflow_sweden.csv")
Example #24
    def test_utctime_now(self):
        a = api.utctime_now()
        x = dt.datetime.utcnow()
        b = self.utc.time(api.YMDhms(x.year, x.month, x.day,
                                     x.hour, x.minute, x.second))
        self.assertLess(abs(a - b), 2, 'Should be less than 2 seconds')
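
This test pins down the semantics: utctime_now returns a UTC timestamp in seconds, directly comparable with calendar-built times. Converting it to a Python datetime follows the pattern of Example #27 (with the int conversion seen in Example #31):

from datetime import datetime
import shyft.api as api

t = api.utctime_now()
d = datetime.utcfromtimestamp(int(t))   # seconds since epoch, interpreted as UTC
print(d.isoformat())
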
Example #26
from statkraft.ltm.io.run_repository import RunRepository
import shyft.api as sa
from statkraft.ltm.scripting import plot_ts, plot_percentiles
from matplotlib import pyplot as plt
from statkraft.ltm.io.converter import to_pandas
import pandas as pd

rr = RunRepository()
t0 = sa.utctime_now()
run_id = rr.find_closest_operational_run(t0)

run = rr.recreate(run_id=run_id)
time_axis = sa.TimeAxis(run.start_utc, sa.Calendar.DAY, 365)

prices = {}
df_tot = pd.DataFrame()

for area_name, area in run.model.areas.items():
    df, pip = to_pandas(area.power_price.mean(time_axis=time_axis))
    prices[area_name] = df.magnitude
Example #27
obs_series_f = "P:/projects/config_auto/observed_inflow_series.yml"

forecast_period = 864000  # seconds in 10 days

enki_root=os.path.join("P:/","projects")
config_file = os.path.join(enki_root, "config_auto","runner_configurations_auto.yaml")

# Using python datetime
#current_d = datetime.utcnow()
#log.write(current_d.strftime('%Y%m%dT%H%M')+'\n')
#current_d = current_d.replace(hour=0, minute=0, second=0, microsecond=0)-timedelta(hours=1)
#current_d_timestamp = int(round((current_d - datetime.utcfromtimestamp(0)).total_seconds()))

# Using enki Calendar
cal=api.Calendar()
current_d_timestamp = cal.trim(api.utctime_now(), api.Calendar.DAY) - 3600
#current_d = cal.toString(current_d_timestamp)
current_d = datetime.utcfromtimestamp(current_d_timestamp)
#log.write(cal.toString(current_d_timestamp)+'\n')

#log.close()

for model_name in model_names:
    for run_mode in run_modes:

        if run_mode == 'observed':
            config_name = model_name + '_observed'
            with open(config_file) as f:
                dct = yaml.safe_load(f)
            val=dct[config_name]
            #init_state_timestamp=int(round((val['start_datetime'] - datetime.utcfromtimestamp(0)).total_seconds())) # Python datetime
Example #28
from shyft import shyftdata_dir
from shyft import api

# points as in c++ test
print(" --------------------------------------------------------- ")
print(" --- Simple point test --- ")
print(" --- just some calculation to check api --- ")
print(" --------------------------------------------------------- ")
utc = api.Calendar()  # provides shyft built-in functionality for date/time handling

radparam = api.RadiationParameter(0.2, 1.0)
radcal = api.RadiationCalculator(radparam)
radresponse = api.RadiationResponse()
time = api.utctime_now()
radcal.net_radiation(radresponse, 40.4, time, 0.0, 0.0, 20.0, 40.0, 150.0, 0.0)
print(radresponse.net)

pmparam = api.PenmanMonteithParameter(0.12, 2.0, 2.0, 144.0, 0)
pmcalculator = api.PenmanMonteithCalculator(pmparam)
pmresponse = api.PenmanMonteithResponse()

temperature = 20.0
rhumidity = 40.0
elevation = 150.0
windspeed = 1.0

pmcalculator.reference_evapotranspiration(pmresponse, api.deltahours(24),
                                          radresponse.net, temperature,
                                          temperature, rhumidity, elevation,
                                          windspeed)
Example #29
def run_regression(auto_input,
                   variables: list = ['magasin', 'tilsig'],
                   regions: list = [
                       'NO1', 'NO2', 'NO3', 'NO4', 'NO5', 'SE1', 'SE2', 'SE3',
                       'SE4'
                   ],
                   jupyter: bool = False,
                   backup: bool = False,
                   loop: bool = False) -> None:
    """This function is the head function for the regression and it also deals with the outputs.

    Args:
        variable: Must be either 'magasin' or 'tilsig'
        regions: Must be one or more of the default regions
        jupyter: Set to Tru if the code is runned on Jupyter Notebooks
        backup: Set to True if you would rather use the backup input variables (input_variables_backup.txt) than the
                automatically updated variables from the last tuning (input_variables_from_tuning.txt).
        loop: Set to True if you want to do a Tuning to update input_variavles_from_tuning.txt and run the loop. This
                takes approximately half an hour.

    Tuning example:
        >> run_regression(auto_input, loop=True)
    Updating SMG on Jupyter Notebooks exaple:
        >> run_regression(auto_input, jupyter=True)
    """

    start_tuning = utctime_now()

    for variable in variables:

        if variable not in ['magasin', 'tilsig']:
            sys.exit("Variable must be either 'tilsig' or 'magasin'")

        df_week, MagKap, period, forecast_time, read_start = auto_input[
            variable]
        reg_end = (
            pd.to_datetime(time.strftime(forecast_time), format="%Y.%m.%d") -
            Timedelta(days=7)).strftime('%Y.%m.%d')

        if (0 <= today.weekday() <= 1) or (
                today.weekday() == 2 and today.hour < 14):  # True for tipping
            last_forecast = forecast_time
        else:
            last_forecast = reg_end

        df_cleaned = deletingNaNs(df_week.loc[:last_forecast])

        if loop:
            if variable == 'tilsig':
                print(
                    '\n\n---------------------------------------------------------------'
                )
                print(
                    '                        TILSIG                                 '
                )
                print(
                    '---------------------------------------------------------------\n'
                )
                max_kandidater = 196
                min_kandidater = 1

            else:
                print(
                    '\n\n---------------------------------------------------------------'
                )
                print(
                    '                        MAGASIN                                '
                )
                print(
                    '---------------------------------------------------------------\n'
                )
                max_kandidater = 171
                min_kandidater = 1

            max_weeks = 230
            min_weeks = 16
            print('max ant. kandidater: {}, min ant. kandidater: {}'.format(
                max_kandidater, min_kandidater))
            print('max ant. uker: {}, min ant. uker: {}'.format(
                max_weeks, min_weeks))

        for region in regions:
            print(
                '---------------------------------------------------------------'
            )
            print(
                '                          {}                                  '
                .format(region))
            print(
                '---------------------------------------------------------------'
            )
            if region not in [
                    'NO1', 'NO2', 'NO3', 'NO4', 'NO5', 'SE1', 'SE2', 'SE3',
                    'SE4'
            ]:
                sys.exit(
                    "Region must be one of: 'NO1', 'NO2', 'NO3', 'NO4', 'NO5', 'SE1', 'SE2', 'SE3', 'SE4'"
                )

            start_time_loop = utctime_now()
            fasit, fasit_key = make_fasit(variable, region, reg_end, period)
            print('Fasit er lest inn.\n')

            if fasit[fasit_key].isnull().any():
                print(
                    'OBS: Det mangler verdier på fasiten! Går videre til neste region i loopen..'
                )
                continue

            sorted_r2 = get_R2_sorted(variable, df_cleaned, fasit, fasit_key)

            if loop:
                max_p = 0.025

                # First loop: tuning the number of candidates for the best possible combined R2
                df_ant_kandidater = pd.DataFrame(columns=columns)
                for antall in range(min_kandidater, max_kandidater + 1, 1):
                    if antall > len(sorted_r2):
                        chosen_r2 = sorted_r2
                    else:
                        chosen_r2 = sorted_r2[:antall]
                    output = make_estimate(df_cleaned,
                                           fasit,
                                           fasit_key,
                                           last_forecast,
                                           first_period,
                                           max_p,
                                           chosen_r2,
                                           loop=True)
                    df_ant_kandidater = df_ant_kandidater.append(
                        {
                            columns[0]: output[0],
                            columns[1]: output[1],
                            columns[2]: output[2],
                            columns[3]: output[3],
                            columns[4]: output[4],
                            columns[5]: output[5],
                            columns[6]: output[6]
                        },
                        ignore_index=True)
                    if antall > len(sorted_r2):
                        print(
                            'Feilmelding: Ønsket antall kandidater overskrider maks (%i).\n'
                            % len(sorted_r2))
                        break
                idx_max = df_ant_kandidater.r2_samlet.idxmax(skipna=True)
                ant_kandidater_beste = int(
                    df_ant_kandidater.ant_kandidater.values[idx_max])
                print('Beste ant_kandidater loop 1: ', ant_kandidater_beste)

                # Second loop: tuning the length of the short regression for the best possible combined R2, using
                # the best number of candidates found in the first loop.
                df_short_period = pd.DataFrame(columns=columns)
                for short_period in range(min_weeks, max_weeks + 1, 1):
                    short_period = int(short_period)
                    final_chosen_r2 = sorted_r2[:ant_kandidater_beste]
                    output = make_estimate(df_cleaned,
                                           fasit,
                                           fasit_key,
                                           last_forecast,
                                           short_period,
                                           max_p,
                                           final_chosen_r2,
                                           loop=True)
                    df_short_period = df_short_period.append(
                        {
                            columns[0]: output[0],
                            columns[1]: output[1],
                            columns[2]: output[2],
                            columns[3]: output[3],
                            columns[4]: output[4],
                            columns[5]: output[5],
                            columns[6]: output[6]
                        },
                        ignore_index=True)
                idx_max = df_short_period.r2_samlet.idxmax(skipna=True)
                short_period_beste = int(
                    df_short_period.short_period.values[idx_max])
                print('Beste short_period loop 2: ', short_period_beste)

                # Get the best input variables from the loops and write them to input_variables_from_tuning.txt
                df_all_methods = pd.concat(
                    [df_ant_kandidater, df_short_period],
                    ignore_index=True,
                    sort=False)
                idx_max = df_all_methods.r2_samlet.idxmax(skipna=True)
                ant_kandidater_beste = int(
                    df_all_methods.ant_kandidater.values[idx_max])
                chosen_r2_beste = sorted_r2[:ant_kandidater_beste]
                short_period_beste = df_all_methods.short_period.values[
                    idx_max]
                ws.write_input_variables_to_file(region, variable, max_p,
                                                 ant_kandidater_beste,
                                                 short_period_beste)

                print('\nTuning for regionen tok %.0f minutter. \n' %
                      ((utctime_now() - start_time_loop) / 60))

            else:
                # getting the best variables from input_variables_from_tuning.txt or input_variables_backup.txt
                short_period_beste, max_p, ant_kandidater_beste, input_file = rs.get_input_variables_from_file(
                    variable, region, backup)
                chosen_r2_beste = sorted_r2[:ant_kandidater_beste]
                print("Input variables was read from: ", input_file)

            # Show results
            input1 = make_estimate(df_cleaned,
                                   fasit,
                                   fasit_key,
                                   last_forecast,
                                   short_period_beste,
                                   max_p,
                                   chosen_r2_beste,
                                   loop=False)
            input2 = fasit_key, ant_kandidater_beste, max_p, reg_end, read_start

            if not loop:
                # Write results from the regression to SMG.
                fasit, long_results, short_results, df_tot, chosen_p, chosen_r2, r2_modelled, prediction, tipping_df, short_period, nb_weeks_tipping = input1

                # write to SMG:
                ws.write_SMG_regresjon(variable, region, tipping_df)

                # write to SMG, virtual:
                ws.write_V_SMG_Regresjon(short_results, chosen_p, fasit_key,
                                         r2_modelled, MagKap)

            if jupyter:
                ws.show_result_jupyter(input1, input2)
            else:
                ws.show_result(input1, input2)

    print('---------------------------------------------------------------')
    print('                         SLUTT                                 ')
    print('---------------------------------------------------------------')
    print(
        '\nRegresjon for alle regioner og variabler brukte totalt %.0f minutter. \n'
        % ((utctime_now() - start_tuning) / 60))
Example #30
    def setUp(self):
        self.c=api.Calendar()
        self.d=api.deltahours(1)
        self.n=24
        self.t=self.c.trim(api.utctime_now(), self.d)
        self.ta=api.TimeAxisFixedDeltaT(self.t, self.d, self.n)
Example #31
    def t_now(self):
        return int(sa.utctime_now())