Example #1
    def test_consolidate(self):
        """Consolidating 2 files and checking variable"""

        datafolder = os.path.join(test_dir, 'data')
        new_csv = fluksoapi.consolidate_sensor(folder=datafolder,
                                               sensor='sensorD')

        ts1 = fluksoapi.load_file(
            os.path.join(
                datafolder,
                'FL12345678_sensorD_FROM_2014-01-07_08-02-00_TO_2014-01-08_08-01-00.csv'
            ))
        self.assertTrue(
            np.isnan(ts1['sensorD'].loc[dt.datetime(2014, 1, 8, 8, 0, 0,
                                                    tzinfo=pytz.UTC)]))
        ts2 = fluksoapi.load_file(
            os.path.join(
                datafolder,
                'FL12345678_sensorD_FROM_2014-01-07_16-02-00_TO_2014-01-08_16-01-00.csv'
            ))

        ts = fluksoapi.load_file(new_csv)
        self.assertEqual(ts.index[0], ts1.index[0])
        self.assertEqual(ts.index[-1], ts2.index[-1])
        self.assertEqual(ts['sensorD'].loc['2014-01-08 08:00:00'], 1120.0,
                         "Last file should overwrite identical indices")
        os.remove(new_csv)
Example #2
    def test_consolidate_day(self):
        """Consolidating 2 files for a single day and checking variable"""

        datafolder = os.path.join(test_dir, 'data')
        new_csv = fluksoapi.consolidate_sensor(folder=datafolder,
                                               sensor='sensorD',
                                               dt_day=dt.datetime(2014, 1, 7))

        ts1 = fluksoapi.load_file(
            os.path.join(
                datafolder,
                'FL12345678_sensorD_FROM_2014-01-07_08-02-00_TO_2014-01-08_08-01-00.csv'
            ))
        self.assertTrue(
            np.isnan(ts1['sensorD'].loc[dt.datetime(2014, 1, 8, 8, 0, 0,
                                                    tzinfo=pytz.UTC)]))
        ts2 = fluksoapi.load_file(
            os.path.join(
                datafolder,
                'FL12345678_sensorD_FROM_2014-01-07_16-02-00_TO_2014-01-08_16-01-00.csv'
            ))

        ts = fluksoapi.load_file(new_csv)
        self.assertEqual(ts.index[0], ts1.index[0])
        self.assertEqual(ts.index[-1],
                         dt.datetime(2014, 1, 8, 0, 0, 0, tzinfo=pytz.UTC))

        os.remove(new_csv)
Example #3
    def test_consolidate_single_file(self):
        """Consolidating a single file should NOT consolidate but should return the file"""

        datafolder = os.path.join(test_dir, 'data')
        new_csv = fluksoapi.consolidate_sensor(folder=datafolder,
                                               sensor='sensorS')

        self.assertEqual(
            new_csv,
            os.path.join(
                datafolder,
                'FL12345678_sensorS_FROM_2014-01-07_16-02-00_TO_2014-01-08_16-01-00.csv'
            ))
Example #4
    def test_consolidate_multiple(self):
        """Consolidate and return single filename if more than one file found"""

        datafolder = os.path.join(test_dir, 'data')
        csv_expected = os.path.join(
            datafolder,
            'FL12345678_sensorD_FROM_2014-01-07_08-02-00_TO_2014-01-08_16-01-00.csv'
        )
        self.assertEqual(csv_expected,
                         fluksoapi.consolidate_sensor(datafolder, 'sensorD'))

        os.remove(csv_expected)
Example #5
    def test_consolidate_with_hidden_file(self):
        """Consolidate should skip hidden file"""

        datafolder = os.path.join(test_dir, 'data')
        new_csv = fluksoapi.consolidate_sensor(folder=datafolder,
                                               sensor='sensorH')

        self.assertEqual(
            new_csv,
            os.path.join(
                datafolder,
                'FL12345678_sensorH_FROM_2014-01-07_12-02-00_TO_2014-01-08_16-01-00.csv'
            ))
        os.remove(new_csv)
Example #6
    def test_consolidate_single(self):
        """Return abspath if a single file found"""

        datafolder = os.path.join(test_dir, 'data')
        self.assertRaises(ValueError, fluksoapi.consolidate_sensor, datafolder,
                          'f81fb35a62f59a987d8eea3ffc845ed0')

        csv_expected = os.path.join(
            datafolder,
            'FL12345678_sensorS_FROM_2014-01-07_16-02-00_TO_2014-01-08_16-01-00.csv'
        )
        self.assertEqual(csv_expected,
                         fluksoapi.consolidate_sensor(datafolder, 'sensorS'))
Example #7
import os
import sys
import inspect
import datetime as dt
import zipfile

script_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
# add the path to opengrid to sys.path
sys.path.append(os.path.join(script_dir, os.pardir, os.pardir))
from opengrid.library import houseprint
from opengrid.library import fluksoapi

##############################################################################
path_to_data = os.path.abspath('/usr/local/data')
path_to_webserver = os.path.abspath('/var/www/private')

# consolidate for yesterday
yesterday = dt.datetime.now() - dt.timedelta(days=1)

# get all sensors
hp = houseprint.load_houseprint_from_file(os.path.join(script_dir, 'hp_anonymous.pkl'))
sensors = hp.get_sensors()
print("{} sensors found".format(len(sensors)))

# create an empty zip file with the date as filename
zipfilename = yesterday.strftime(format="%Y%m%d")
with zipfile.ZipFile(os.path.join(path_to_webserver, zipfilename+'.zip'), 'w') as myzip:
    for sensor in sensors:
        # create a csv with the data of the given day
        csv = fluksoapi.consolidate_sensor(path_to_data, sensor, file_type='csv', dt_day=yesterday)
        # add to myzip
        myzip.write(csv, arcname=os.path.split(csv)[-1])
        # and remove the file; the original files are kept
        os.remove(csv)