Example #1
0
def setup_package():
    """Nosetests package setup function (run once BEFORE any tests).
    See http://nose.readthedocs.org/en/latest/writing_tests.html#test-packages

    - Copies the original data files so teardown can restore them after testing.
    - Sets a non-interactive backend for matplotlib (even though we don't
      show any figures, it can import PyQt, for example).
    """
    from nilmtk.tests.testingtools import data_dir

    # Create backups of the data files
    from glob import glob
    from os.path import join, isdir
    import shutil  # 'os' was previously imported here but never used

    target_paths = glob(join(data_dir(), '*.h5'))
    target_paths.append(join(data_dir(), 'random_csv'))

    for original_fn in target_paths:
        # Back up each file/directory next to the original with a
        # '.original' suffix; teardown_package restores from these copies.
        target_fn = original_fn + '.original'
        if isdir(original_fn):
            shutil.copytree(original_fn, target_fn)
        else:
            shutil.copyfile(original_fn, target_fn)

    # Use the most basic Matplotlib backend so no GUI toolkit gets loaded
    import matplotlib
    matplotlib.use('AGG')
Example #2
0
def setup_package():
    """Nosetests package setup function (run once BEFORE tests start).
    See http://nose.readthedocs.org/en/latest/writing_tests.html#test-packages

    - Copies the original data files so they can be restored after testing.
    - Sets a non-interactive backend for matplotlib (even though we don't
      show any figures, it can import PyQt, for example).
    """
    from nilmtk.tests.testingtools import data_dir

    # Create backups of the data files
    from glob import glob
    from os.path import join, isdir
    import shutil  # note: dropped unused 'os' import

    target_paths = glob(join(data_dir(), '*.h5'))
    target_paths.append(join(data_dir(), 'random_csv'))

    for original_fn in target_paths:
        # '.original' copies are restored by teardown_package
        target_fn = original_fn + '.original'
        if isdir(original_fn):
            shutil.copytree(original_fn, target_fn)
        else:
            shutil.copyfile(original_fn, target_fn)

    # Use the most basic Matplotlib backend
    import matplotlib
    matplotlib.use('AGG')
Example #3
0
 def test_total_energy(self):
     """Compute total energy twice so the second call exercises the cache."""
     dataset = DataSet(join(data_dir(), 'random.h5'))
     elec = dataset.buildings[1].elec
     elec.total_energy()
     elec.total_energy()  # second call should be served from the cache
     elec.clear_cache()
     dataset.store.close()
Example #4
0
def create_random_hdf5():
    """Write random.h5: five submeters of random data plus metadata."""
    filename = join(data_dir(), 'random.h5')
    n_meters = 5

    store = pd.HDFStore(filename, 'w', complevel=9, complib='zlib')
    elec_meter_metadata = {}
    for meter_i in range(1, n_meters + 1):
        key = 'building1/elec/meter{:d}'.format(meter_i)
        print("Saving", key)
        store.put(key, create_random_df(), format='table')
        elec_meter_metadata[meter_i] = {
            'device_model': TEST_METER['model'],
            'submeter_of': 1,
            'data_location': key
        }

    # Save dataset-wide metadata
    store.root._v_attrs.metadata = {
        'meter_devices': {TEST_METER['model']: TEST_METER}}
    print(store.root._v_attrs.metadata)

    # Building metadata
    add_building_metadata(store, elec_meter_metadata)

    store.flush()
    store.close()
Example #5
0
def create_random_hdf5():
    """Build the random.h5 test fixture: 5 meters of random data."""
    path = join(data_dir(), 'random.h5')

    store = pd.HDFStore(path, 'w', complevel=9, complib='zlib')
    elec_meter_metadata = {}
    for i in [1, 2, 3, 4, 5]:
        key = 'building1/elec/meter{:d}'.format(i)
        print("Saving", key)
        store.put(key, create_random_df(), format='table')
        meta = {'device_model': TEST_METER['model'],
                'submeter_of': 1,
                'data_location': key}
        elec_meter_metadata[i] = meta

    # Save dataset-wide metadata
    store.root._v_attrs.metadata = {
        'meter_devices': {TEST_METER['model']: TEST_METER}}
    print(store.root._v_attrs.metadata)

    # Building metadata
    add_building_metadata(store, elec_meter_metadata)

    store.flush()
    store.close()
    def test_pipeline(self):
        """Run GoodSections over two meters and several chunk sizes,
        then exercise the good_sections cache."""
        meter1 = ElecMeter(store=self.datastore,
                           metadata=self.meter_meta,
                           meter_id=METER_ID)

        # load co_test.h5
        dataset = DataSet(join(data_dir(), 'co_test.h5'))
        meter2 = dataset.buildings[1].elec.mains()

        for meter in [meter1, meter2]:
            for chunksize in [None, 2**10, 2**29]:
                # None means "use the default chunk size"
                load_kwargs = (
                    {} if chunksize is None else {'chunksize': chunksize})

                src = meter.get_source_node(**load_kwargs)
                gs = GoodSections(src)
                gs.run()
                combined = gs.results  #.simple()
                meter.clear_cache()
                meter.good_sections(**load_kwargs)
                meter.good_sections(**load_kwargs)
                meter.clear_cache()

        dataset.store.close()
Example #7
0
def teardown_package():
    """Nosetests package teardown function (run when tests are done).
    See http://nose.readthedocs.org/en/latest/writing_tests.html#test-packages

    Closes remaining open HDF5 files to avoid warnings and resets the
    files in data_dir to their original states.
    """
    from nilmtk.tests.testingtools import data_dir
    import warnings  # imported locally so this function is self-contained

    # Workaround for open .h5 files on Windows
    from tables.file import _open_files
    _open_files.close_all()

    # Restore the original copies of the data files
    from glob import glob
    from os.path import join, isdir
    import os
    import shutil

    for original_fn in glob(join(data_dir(), '*.original')):
        target_fn = original_fn[:original_fn.rfind('.original')]
        try:
            if isdir(target_fn):
                shutil.rmtree(target_fn)
            else:
                os.remove(target_fn)

            os.rename(original_fn, target_fn)

        except OSError:
            # Narrowed from a bare 'except:' — filesystem failures are the
            # only errors these calls raise; best-effort restore, so warn
            # instead of failing the whole teardown.
            warnings.warn(
                'Could not restore file or directory "{}"'.format(target_fn))
 def test_total_energy(self):
     """Total energy is computed twice; the repeat hits the cache."""
     path = join(data_dir(), 'random.h5')
     dataset = DataSet(path)
     elec = dataset.buildings[1].elec
     elec.total_energy()
     elec.total_energy()  # test cache
     elec.clear_cache()
     dataset.store.close()
Example #9
0
def teardown_package():
    """Nosetests package teardown function (run when tests are done).
    See http://nose.readthedocs.org/en/latest/writing_tests.html#test-packages

    Uses git to reset data_dir after tests have run.
    """
    from nilmtk.tests.testingtools import data_dir
    import subprocess

    # NOTE(review): shell=True with an interpolated path — data_dir() is
    # project-controlled so this is acceptable here, but a list argv with
    # shell=False would be safer.
    cmd = "git checkout -- {}".format(data_dir())
    try:
        # Python-2 'print' statements converted to print() calls, matching
        # the rest of the test suite; unused 'output' binding dropped.
        subprocess.check_output(cmd, shell=True, cwd=data_dir())
    except Exception:
        print("Failed to run '{}'".format(cmd))
        raise
    else:
        print("Succeeded in running '{}'".format(cmd))
Example #10
0
def teardown_package():
    """Nosetests package teardown function (run when tests are done).
    See http://nose.readthedocs.org/en/latest/writing_tests.html#test-packages

    Uses git to reset data_dir after tests have run.
    """
    from nilmtk.tests.testingtools import data_dir
    import subprocess

    cmd = "git checkout -- {}".format(data_dir())
    try:
        # Converted legacy Python-2 'print' statements to print() calls
        # (a syntax error under Python 3); dropped the unused 'output' local.
        subprocess.check_output(cmd, shell=True, cwd=data_dir())
    except Exception:
        print("Failed to run '{}'".format(cmd))
        raise
    else:
        print("Succeeded in running '{}'".format(cmd))
Example #11
0
def create_energy_hdf5(simple=True):
    """Write energy.h5 (or energy_complex.h5 when simple=False): one
    building with a site meter and two chained submeters."""
    fname = 'energy.h5' if simple else 'energy_complex.h5'
    FILENAME = join(data_dir(), fname)

    df = power_data(simple=simple)

    meter_device = {
        'manufacturer': 'Test Manufacturer',
        'model': 'Energy Meter',
        'sample_period': 10,
        'max_sample_period': MAX_SAMPLE_PERIOD,
        # one measurement entry per (physical_quantity, ac_type) column
        'measurements': [
            {'physical_quantity': physical_quantity,
             'type': ac_type,
             'lower_limit': 0,
             'upper_limit': 6000}
            for physical_quantity, ac_type in df.columns.tolist()
        ]
    }

    store = pd.HDFStore(FILENAME, 'w', complevel=9, complib='zlib')

    # meter 1 is the site meter; 2 submeters 1; 3 submeters 2
    role_meta = {
        1: {'site_meter': True},
        2: {'submeter_of': 1},
        3: {'submeter_of': 2}
    }

    elec_meter_metadata = {}

    # Save sensor data
    for meter_i in (1, 2, 3):
        key = 'building1/elec/meter{:d}'.format(meter_i)
        print("Saving", key)
        store.put(key, df, format='table')
        meta = {'device_model': meter_device['model'], 'data_location': key}
        meta.update(role_meta[meter_i])
        elec_meter_metadata[meter_i] = meta

    # Save dataset-wide metadata
    store.root._v_attrs.metadata = {
        'meter_devices': {meter_device['model']: meter_device}
    }

    # Add building metadata
    add_building_metadata(store, elec_meter_metadata)

    store.flush()
    store.close()
Example #12
0
def create_co_test_hdf5():
    """Write co_test.h5: a site meter plus two appliance meters carrying
    deterministic square-wave power data for CO disaggregation tests."""
    FILENAME = join(data_dir(), 'co_test.h5')
    N_METERS = 3
    chunk = 1000
    N_PERIODS = 4 * chunk
    rng = pd.date_range('2012-01-01', freq='S', periods=N_PERIODS)

    # meter 1 = mains (element-wise sum of the two appliances below),
    # meter 2 = appliance 1, meter 3 = appliance 2
    patterns = OrderedDict([
        (1, [0, 200, 1000, 1200]),
        (2, [0, 200, 0, 200]),
        (3, [0, 0, 1000, 1000]),
    ])

    cols = measurement_columns([('power', 'active')])
    dfs = OrderedDict(
        (i, pd.DataFrame(data=np.array(pattern * chunk),
                         index=rng,
                         dtype=np.float32,
                         columns=cols))
        for i, pattern in patterns.items())

    store = pd.HDFStore(FILENAME, 'w', complevel=9, complib='zlib')
    elec_meter_metadata = {}
    for meter in range(1, N_METERS + 1):
        key = 'building1/elec/meter{:d}'.format(meter)
        print("Saving", key)
        store.put(key, dfs[meter], format='table')
        elec_meter_metadata[meter] = {
            'device_model': TEST_METER['model'],
            'submeter_of': 1,
            'data_location': key
        }

    # For mains meter, we need to specify that it is a site meter
    del elec_meter_metadata[1]['submeter_of']
    elec_meter_metadata[1]['site_meter'] = True

    # Save dataset-wide metadata
    store.root._v_attrs.metadata = {
        'meter_devices': {TEST_METER['model']: TEST_METER}
    }
    print(store.root._v_attrs.metadata)

    # Building metadata
    add_building_metadata(store, elec_meter_metadata)
    for key in store.keys():
        print(store[key])

    store.flush()
    store.close()
Example #13
0
def teardown_package():
    """Nosetests package teardown function (run when tests are done).
    See http://nose.readthedocs.org/en/latest/writing_tests.html#test-packages

    Uses git to reset data_dir after tests have run.
    """
    from nilmtk.tests.testingtools import data_dir
    import subprocess

    # Workaround for open .h5 files on Windows
    from tables.file import _open_files
    _open_files.close_all()

    checkout_cmd = "git checkout -- {}".format(data_dir())
    try:
        subprocess.check_output(checkout_cmd, shell=True, cwd=data_dir())
    except Exception:
        print("Failed to run '{}'".format(checkout_cmd))
        raise
    print("Succeeded in running '{}'".format(checkout_cmd))
Example #14
0
 def test_load(self):
     """Loading with chunksize / physical_quantity / ac_type filters
     returns the expected row counts and column levels."""
     filename = join(data_dir(), 'energy.h5')
     ds = DataSet(filename)
     elec = ds.buildings[1].elec
     df = next(elec.load())
     self.assertEqual(len(df), 13)
     df = next(elec.load(chunksize=5))
     self.assertEqual(len(df), 5)
     df = next(elec.load(physical_quantity='energy'))
     self.assertEqual(len(df), 13)
     self.assertEqual(df.columns.levels, [['energy'], ['reactive']])
     df = next(elec.load(ac_type='active'))
     self.assertEqual(df.columns.levels, [['power'], ['active']])
     # Close the HDF5 store (was leaked; sibling tests all close it)
     ds.store.close()
 def test_load(self):
     """Check row counts and column levels for several load() filters."""
     filename = join(data_dir(), 'energy.h5')
     ds = DataSet(filename)
     elec = ds.buildings[1].elec
     df = next(elec.load())
     self.assertEqual(len(df), 13)
     df = next(elec.load(chunksize=5))
     self.assertEqual(len(df), 5)
     df = next(elec.load(physical_quantity='energy'))
     self.assertEqual(len(df), 13)
     self.assertEqual(df.columns.levels, [['energy'], ['reactive']])
     df = next(elec.load(ac_type='active'))
     self.assertEqual(df.columns.levels, [['power'], ['active']])
     # Fix: release the open HDF5 file handle when the test is done
     ds.store.close()
Example #16
0
def create_energy_hdf5(simple=True):
    """Create the energy test fixture (energy.h5, or energy_complex.h5
    when simple=False) with three meters sharing one DataFrame."""
    fname = 'energy.h5' if simple else 'energy_complex.h5'
    FILENAME = join(data_dir(), fname)

    df = power_data(simple=simple)

    meter_device = {
        'manufacturer': 'Test Manufacturer',
        'model': 'Energy Meter',
        'sample_period': 10,
        'max_sample_period': MAX_SAMPLE_PERIOD,
        'measurements': []
    }

    # Describe every (physical_quantity, ac_type) column of the data
    for physical_quantity, ac_type in df.columns.tolist():
        measurement = {
            'physical_quantity': physical_quantity,
            'type': ac_type,
            'lower_limit': 0,
            'upper_limit': 6000
        }
        meter_device['measurements'].append(measurement)

    store = pd.HDFStore(FILENAME, 'w', complevel=9, complib='zlib')

    elec_meter_metadata = {}

    # Save sensor data
    for meter_i in (1, 2, 3):
        key = 'building1/elec/meter{:d}'.format(meter_i)
        print("Saving", key)
        store.put(key, df, format='table')
        meta = {'device_model': meter_device['model'], 'data_location': key}
        # meter 1 is the site meter; meters 2 and 3 form a submeter chain
        additional_meta = {
            1: {'site_meter': True},
            2: {'submeter_of': 1},
            3: {'submeter_of': 2}
        }
        meta.update(additional_meta[meter_i])
        elec_meter_metadata[meter_i] = meta

    # Save dataset-wide metadata
    store.root._v_attrs.metadata = {
        'meter_devices': {meter_device['model']: meter_device}}

    # Add building metadata
    add_building_metadata(store, elec_meter_metadata)

    store.flush()
    store.close()
Example #17
0
def create_co_test_hdf5():
    """Create co_test.h5: mains plus two appliance meters with repeating
    4-sample power patterns, for combinatorial-optimisation tests."""
    FILENAME = join(data_dir(), "co_test.h5")
    N_METERS = 3
    chunk = 1000
    N_PERIODS = 4 * chunk
    rng = pd.date_range("2012-01-01", freq="S", periods=N_PERIODS)

    dfs = OrderedDict()
    data = OrderedDict()

    # mains meter data
    data[1] = np.array([0, 200, 1000, 1200] * chunk)
    # appliance 1 data
    data[2] = np.array([0, 200, 0, 200] * chunk)
    # appliance 2 data
    data[3] = np.array([0, 0, 1000, 1000] * chunk)

    columns = measurement_columns([("power", "active")])
    for i in range(1, 4):
        dfs[i] = pd.DataFrame(data=data[i], index=rng,
                              dtype=np.float32, columns=columns)

    store = pd.HDFStore(FILENAME, "w", complevel=9, complib="zlib")
    elec_meter_metadata = {}
    for meter in range(1, N_METERS + 1):
        key = "building1/elec/meter{:d}".format(meter)
        print("Saving", key)
        store.put(key, dfs[meter], format="table")
        elec_meter_metadata[meter] = {
            "device_model": TEST_METER["model"],
            "submeter_of": 1,
            "data_location": key,
        }

    # For mains meter, we need to specify that it is a site meter
    del elec_meter_metadata[1]["submeter_of"]
    elec_meter_metadata[1]["site_meter"] = True

    # Save dataset-wide metadata
    store.root._v_attrs.metadata = {
        "meter_devices": {TEST_METER["model"]: TEST_METER}}
    print(store.root._v_attrs.metadata)

    # Building metadata
    add_building_metadata(store, elec_meter_metadata)
    for key in store.keys():
        print(store[key])

    store.flush()
    store.close()
Example #18
0
def create_energy_hdf5(simple=True):
    """Write the energy fixture file; simple=False writes the complex
    variant (energy_complex.h5) instead of energy.h5."""
    fname = "energy.h5" if simple else "energy_complex.h5"
    FILENAME = join(data_dir(), fname)

    df = power_data(simple=simple)

    meter_device = {
        "manufacturer": "Test Manufacturer",
        "model": "Energy Meter",
        "sample_period": 10,
        "max_sample_period": MAX_SAMPLE_PERIOD,
        # filled below, one entry per data column
        "measurements": [],
    }

    for physical_quantity, ac_type in df.columns.tolist():
        meter_device["measurements"].append({
            "physical_quantity": physical_quantity,
            "type": ac_type,
            "lower_limit": 0,
            "upper_limit": 6000,
        })

    store = pd.HDFStore(FILENAME, "w", complevel=9, complib="zlib")

    elec_meter_metadata = {}

    # Save sensor data (same DataFrame for each of the three meters)
    for meter_i in [1, 2, 3]:
        key = "building1/elec/meter{:d}".format(meter_i)
        print("Saving", key)
        store.put(key, df, format="table")
        meta = {"device_model": meter_device["model"], "data_location": key}
        additional_meta = {
            1: {"site_meter": True},
            2: {"submeter_of": 1},
            3: {"submeter_of": 2},
        }
        meta.update(additional_meta[meter_i])
        elec_meter_metadata[meter_i] = meta

    # Save dataset-wide metadata
    store.root._v_attrs.metadata = {
        "meter_devices": {meter_device["model"]: meter_device}}

    # Add building metadata
    add_building_metadata(store, elec_meter_metadata)

    store.flush()
    store.close()
Example #19
0
def create_random_hdf5():
    """Write random.h5 containing five random submeters and metadata."""
    FILENAME = join(data_dir(), "random.h5")
    N_METERS = 5

    store = pd.HDFStore(FILENAME, "w", complevel=9, complib="zlib")
    elec_meter_metadata = {}
    for meter in range(1, N_METERS + 1):
        key = "building1/elec/meter{:d}".format(meter)
        print("Saving", key)
        store.put(key, create_random_df(), format="table")
        elec_meter_metadata[meter] = {
            "device_model": TEST_METER["model"],
            "submeter_of": 1,
            "data_location": key,
        }

    # Save dataset-wide metadata
    store.root._v_attrs.metadata = {
        "meter_devices": {TEST_METER["model"]: TEST_METER}}
    print(store.root._v_attrs.metadata)

    # Building metadata
    add_building_metadata(store, elec_meter_metadata)

    store.flush()
    store.close()
 def setUpClass(cls):
     """Open the shared energy.h5 datastore once for the whole class."""
     cls.datastore = HDFDataStore(join(data_dir(), 'energy.h5'))
     ElecMeter.load_meter_devices(cls.datastore)
 def setUpClass(cls):
     """Open energy_complex.h5 and cache metadata for METER_ID's meter."""
     path = join(data_dir(), 'energy_complex.h5')
     cls.datastore = HDFDataStore(path)
     ElecMeter.load_meter_devices(cls.datastore)
     building_meta = cls.datastore.load_metadata('building1')
     cls.meter_meta = building_meta['elec_meters'][METER_ID.instance]
Example #22
0
 def setUpClass(cls):
     """Open the co_test.h5 dataset once for all tests in this class."""
     cls.dataset = DataSet(join(data_dir(), 'co_test.h5'))
 def setUpClass(cls):
     """Open energy_complex.h5 and stash the metadata for METER_ID."""
     cls.datastore = HDFDataStore(join(data_dir(), 'energy_complex.h5'))
     ElecMeter.load_meter_devices(cls.datastore)
     elec_meters = cls.datastore.load_metadata('building1')['elec_meters']
     cls.meter_meta = elec_meters[METER_ID.instance]
Example #24
0
 def setUpClass(cls):
     """Load the co_test dataset shared by the tests in this class."""
     path = join(data_dir(), 'co_test.h5')
     cls.dataset = DataSet(path)
Example #25
0
 def setUpClass(cls):
     """Point a CSVDataStore at the random_csv fixture and list its keys."""
     cls.datastore = CSVDataStore(join(data_dir(), 'random_csv'))
     # keys for meters 1-5 of building 1
     cls.keys = ['/building1/elec/meter' + str(i) for i in range(1, 6)]
Example #26
0
 def test_total_energy(self):
     """Total energy computes without error on the random dataset."""
     filename = join(data_dir(), 'random.h5')
     ds = DataSet(filename)
     ds.buildings[1].elec.total_energy()
     # Fix: close the underlying HDF5 store (was leaked; the sibling
     # test_total_energy variant closes it).
     ds.store.close()
Example #27
0
 def test_total_energy(self):
     """Total energy computes without error on the random dataset."""
     filename = join(data_dir(), 'random.h5')
     ds = DataSet(filename)
     ds.buildings[1].elec.total_energy()
     # Fix: release the open HDF5 file handle (previously leaked)
     ds.store.close()
Example #28
0
 def setUpClass(cls):
     """Open energy.h5 once and register its meter devices."""
     path = join(data_dir(), 'energy.h5')
     cls.datastore = HDFDataStore(path)
     ElecMeter.load_meter_devices(cls.datastore)
Example #29
-1
def teardown_package():
    """Nosetests package teardown function (run when tests are done).
    See http://nose.readthedocs.org/en/latest/writing_tests.html#test-packages

    Closes remaining open HDF5 files to avoid warnings and resets the
    files in data_dir to their original states.
    """
    from nilmtk.tests.testingtools import data_dir
    import warnings  # imported locally so the function is self-contained

    # Workaround for open .h5 files on Windows
    from tables.file import _open_files
    _open_files.close_all()

    # Restore the original copies of the data files
    from glob import glob
    from os.path import join, isdir
    import os
    import shutil

    for original_fn in glob(join(data_dir(), '*.original')):
        target_fn = original_fn[:original_fn.rfind('.original')]
        try:
            if isdir(target_fn):
                shutil.rmtree(target_fn)
            else:
                os.remove(target_fn)

            os.rename(original_fn, target_fn)

        except OSError:
            # Narrowed from a bare 'except:' — remove/rmtree/rename raise
            # OSError on failure; restoring is best-effort, so warn and
            # carry on with the remaining files.
            warnings.warn(
                'Could not restore file or directory "{}"'.format(target_fn))
Example #30
-1
    def test_pipeline(self):
        """Run the GoodSections pipeline for two meters across several
        chunk sizes and exercise the good_sections cache."""
        meter1 = ElecMeter(store=self.datastore,
                           metadata=self.meter_meta,
                           meter_id=METER_ID)

        # load co_test.h5
        dataset = DataSet(join(data_dir(), 'co_test.h5'))
        meter2 = dataset.buildings[1].elec.mains()

        chunksizes = [None, 2**10, 2**29]
        for meter in [meter1, meter2]:
            for chunksize in chunksizes:
                # None -> default chunking
                kwargs = {} if chunksize is None else {'chunksize': chunksize}

                gs = GoodSections(meter.get_source_node(**kwargs))
                gs.run()
                combined = gs.results.simple()
                meter.clear_cache()
                meter.good_sections(**kwargs)
                meter.good_sections(**kwargs)
                meter.clear_cache()

        dataset.store.close()