Example no. 1
0
def update_ts_temp_file(cache_dir, connection, id):
    """Refresh the cached ``<id>.hts`` file for time series *id*.

    If the cache file already exists and its data is still a prefix of
    what the database holds, only the newer records are appended;
    otherwise the whole file is rewritten atomically via a temp file.
    """
    afilename = os.path.join(cache_dir, '%d.hts' % (id, ))

    # A file smaller than 3 bytes cannot hold a valid record; rebuild it.
    needs_full_rewrite = (os.path.exists(afilename)
                          and os.path.getsize(afilename) < 3)

    # Cheap path: append only records logged after the cached last date.
    if os.path.exists(afilename) and not needs_full_rewrite:
        with ropen(afilename) as fileobject:
            last_line = fileobject.readline()
        lastdate = datetime_from_iso(last_line.split(',')[0])
        ts = Timeseries(id)
        ts.read_from_db(connection, bottom_only=True)
        if len(ts) > 0:
            db_start, db_end = ts.bounding_dates()
            if db_start > lastdate:
                # The database now starts after the cached data ends;
                # appending is impossible, fall through to a rebuild.
                needs_full_rewrite = True
            elif db_end > lastdate:
                lastindex = ts.index(lastdate)
                with open(afilename, 'a') as fileobject:
                    ts.write(fileobject, start=ts.keys()[lastindex + 1])

    # Create (or fully rewrite) the cache file when needed.
    if needs_full_rewrite or not os.path.exists(afilename):
        ts = Timeseries(id)
        ts.read_from_db(connection)
        if not os.path.exists(cache_dir):
            os.mkdir(cache_dir)
        # Write to a temporary file first and move it into place, so a
        # concurrent reader never observes a half-written cache file.
        tempfile_handle, tempfile_name = tempfile.mkstemp(dir=cache_dir)
        with os.fdopen(tempfile_handle, 'w') as afile:
            ts.write(afile)
        shutil.move(tempfile_name, afilename)
Example no. 2
0
 def get(self, request, pk, format=None):
     """Return the time series with primary key *pk* as plain text."""
     timeseries = Timeseries(id=int(pk))
     self.check_object_permissions(request, timeseries)
     timeseries.read_from_db(connection)
     buf = StringIO()
     timeseries.write(buf)
     return HttpResponse(buf.getvalue(), content_type="text/plain")
Example no. 3
0
 def get(self, request, pk, format=None):
     """Serve the requested time series to the client as text/plain."""
     series = Timeseries(id=int(pk))
     # Permission check happens before touching the database.
     self.check_object_permissions(request, series)
     series.read_from_db(connection)
     payload = StringIO()
     series.write(payload)
     return HttpResponse(payload.getvalue(), content_type="text/plain")
Example no. 4
0
def update_ts_temp_file(cache_dir, connection, id):
    """Refresh the cached ``<id>.hts`` file for time series *id*.

    Appends newly logged records to an existing cache file when
    possible; otherwise rewrites the whole file atomically through a
    temporary file.
    """
    full_rewrite = False

    afilename = os.path.join(cache_dir, '%d.hts' % (id,))
    # A file shorter than 3 bytes cannot contain a valid record.
    if os.path.exists(afilename):
        if os.path.getsize(afilename) < 3:
            full_rewrite = True

    # Update the file in the case of logged data, if this is possible.
    if os.path.exists(afilename) and not full_rewrite:
        with open(afilename, 'r') as fileobject:
            xr = xreverse(fileobject, 2048)
            # Use the builtin next() instead of the Python-2-only
            # .next() iterator method, so this also runs on Python 3.
            line = next(xr)
        lastdate = datetime_from_iso(line.split(',')[0])
        ts = Timeseries(id)
        ts.read_from_db(connection, bottom_only=True)
        if len(ts) > 0:
            db_start, db_end = ts.bounding_dates()
            if db_start > lastdate:
                # Database data now starts after the cached data; an
                # append is impossible, so rebuild from scratch.
                full_rewrite = True
            elif db_end > lastdate:
                lastindex = ts.index(lastdate)
                with open(afilename, 'a') as fileobject:
                    ts.write(fileobject, start=ts.keys()[lastindex + 1])

    # Check for temp file or else create it.
    if not os.path.exists(afilename) or full_rewrite:
        ts = Timeseries(id)
        ts.read_from_db(connection)
        if not os.path.exists(cache_dir):
            os.mkdir(cache_dir)
        # Write to a temporary file and move it into place so readers
        # never see a half-written cache file.
        tempfile_handle, tempfile_name = tempfile.mkstemp(dir=cache_dir)
        with os.fdopen(tempfile_handle, 'w') as afile:
            ts.write(afile)
        shutil.move(tempfile_name, afilename)
Example no. 5
0
    def update_one_timeseries(base_url, id, user=None, password=None):
        """Bring the cache file of one remote timeseries up to date.

        Loads the cached copy (if any), asks the Enhydris server for
        records after its end date, appends them, and saves the result.
        """
        if base_url[-1] != '/':
            base_url += '/'

        # Load whatever data the cache already has for this series.
        cache_filename = os.path.join(
            settings.BITIA_TIMESERIES_CACHE_DIR, '{}.hts'.format(id))
        cached_ts = Timeseries()
        if os.path.exists(cache_filename):
            with open(cache_filename) as f:
                try:
                    cached_ts.read_file(f)
                except ValueError:
                    # File may be corrupted; continue with empty time series
                    cached_ts = Timeseries()

        # Work out where the cached data ends.
        try:
            end_date = cached_ts.bounding_dates()[1]
        except TypeError:
            # Timeseries is totally empty; no start and end date
            end_date = datetime(1, 1, 1, 0, 0)
        start_date = end_date + timedelta(minutes=1)

        # Fetch everything newer than the cache and append it.
        session_cookies = enhydris_api.login(base_url, user, password)
        url = base_url + 'timeseries/d/{}/download/{}/'.format(
            id, start_date.isoformat())
        response = requests.get(url, cookies=session_cookies)
        response.raise_for_status()
        fetched_ts = Timeseries()
        fetched_ts.read_file(StringIO(response.text))
        cached_ts.append(fetched_ts)

        # Persist the merged series back to the cache file.
        with open(cache_filename, 'w') as f:
            cached_ts.write(f)
Example no. 6
0
    def test_update(self):
        """Cache two timeseries, push new records upstream, re-update,
        and verify both the cached file contents and the comments."""
        self.parms = json.loads(os.getenv('PTHELMA_TEST_ENHYDRIS_API'))
        common = {'base_url': self.parms['base_url'],
                  'user': self.parms['user'],
                  'password': self.parms['password'],
                  }
        timeseries_group = [
            dict(common, id=self.ts1_id, file='file1'),
            dict(common, id=self.ts2_id, file='file2'),
        ]

        def verify_cached(filename, expected):
            # Read a cached file, check its time step, and compare its
            # serialized form against the expected contents.
            with open(filename) as f:
                ts = Timeseries()
                ts.read_file(f)
                self.assertEqual(ts.time_step.length_minutes, 1440)
                self.assertEqual(ts.time_step.length_months, 0)
                c = StringIO()
                ts.write(c)
                self.assertEqual(c.getvalue().replace('\r', ''), expected)
            return ts

        # Cache the two timeseries
        cache = TimeseriesCache(timeseries_group)
        cache.update()

        # Check that the cached stuff is what it should be
        ts1_before = verify_cached('file1', self.timeseries1_top)
        ts2_before = verify_cached('file2', self.timeseries2_top)

        # Append a record to the database for each timeseries
        for ts_id, bottom in ((self.ts1_id, self.timeseries1_bottom),
                              (self.ts2_id, self.timeseries2_bottom)):
            ts = Timeseries(ts_id)
            ts.read(StringIO(bottom))
            enhydris_api.post_tsdata(self.parms['base_url'],
                                     self.cookies, ts)

        # Update the cache
        cache.update()

        # Check that the cached stuff is what it should be
        ts1_after = verify_cached('file1', self.test_timeseries1)
        ts2_after = verify_cached('file2', self.test_timeseries2)

        # Check that the time series comments are the same before and after
        self.assertEqual(ts1_before.comment, ts1_after.comment)
        self.assertEqual(ts2_before.comment, ts2_after.comment)