Example No. 1
def code():

    # CONVERT THE REDD DATASET TO NILMTK'S HDF5 FORMAT
    from nilmtk.dataset_converters import convert_redd
    convert_redd('/data/REDD/low_freq', '/data/REDD/redd.h5')

    # IMPORT HDF5 FORMAT INTO NILMTK
    from nilmtk import DataSet
    redd = DataSet('/data/REDD/redd.h5')
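
After converting, a quick sanity check on the new datastore can catch path problems early; the sketch below is a minimal illustration that only reuses the '/data/REDD/redd.h5' path from the example above.

# Minimal sanity check on the converted datastore (sketch)
from nilmtk import DataSet
redd = DataSet('/data/REDD/redd.h5')
print(redd.metadata)                   # dataset-level metadata written by the converter
print(list(redd.buildings.keys()))     # REDD contains houses 1-6
redd.store.close()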
Example No. 2
def code():


    # CONVERT THE REDD DATASET TO NILMTK'S HDF5 FORMAT
    from nilmtk.dataset_converters import convert_redd
    convert_redd('/data/REDD/low_freq', '/data/REDD/redd.h5')

    # IMPORT HDF5 FORMAT INTO NILMTK
    from nilmtk import DataSet
    redd = DataSet('/data/REDD/redd.h5')
Example No. 3
import os
import sys
import nilmtk
from nilmtk import DataSet
from nilmtk.dataset_converters import convert_redd
from nilmtk.utils import print_dict, dict_to_html

if os.sep == "/":
    convert_redd("../data/low_freq", "../data/redd.5h")
else:
    convert_redd(
        r'C:\Users\job heersink\Desktop\Projects\AS-NALM\data\low_freq',
        r"C:\Users\job heersink\Desktop\Projects\AS-NALM\data\redd.5h")
Example No. 4
# -*- coding: utf-8 -*-
"""
Created on Wed Mar 21 15:12:35 2018

@author: Raymond
"""

from nilmtk.dataset_converters import convert_redd

convert_redd('data/low_freq', 'redd.h5', format='HDF')
Example No. 5
from pathlib import Path
from nilmtk import DataSet
from nilm_models.gru.grudisaggregator import GRUDisaggregator
from nilmtk.dataset_converters import convert_redd
import tensorflow as tf
print("Num GPUs Available: ",
      len(tf.config.experimental.list_physical_devices('GPU')))

cwd = Path.cwd()
dataset_path = '..\\..\\experiments\\data\\low_freq'
full_path = cwd.joinpath(dataset_path)

if not Path('..\\..\\experiments\\data\\redd.h5').exists():
    convert_redd(str(full_path), '..\\..\\experiments\\data\\redd.h5')

redd = DataSet('..\\..\\experiments\\data\\redd.h5')

redd.set_window(end="30-4-2011")  #Use data only until 4/30/2011
train_elec = redd.buildings[1].elec

train_mains = train_elec.mains().all_meters()[
    0]  # The aggregated meter that provides the input
train_meter = train_elec.submeters()['fridge']

gru = GRUDisaggregator()

if not Path("model-redd5.h5").exists():
    gru.train(train_mains, train_meter, epochs=5, sample_period=1)
    gru.export_model("model-redd5.h5")
else:
    gru.import_model("model-redd5.h5")
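
The snippet above only trains (or reloads) the model; the sketch below shows how the held-out period could be selected for evaluation with the same nilmtk windowing calls. It is a hedged sketch: the actual disaggregation step depends on the custom GRUDisaggregator API and is not shown.

# Sketch: load the post-training window for testing (same dataset and meters as above)
test = DataSet('..\\..\\experiments\\data\\redd.h5')
test.set_window(start="30-4-2011")    # the complement of the training window
test_elec = test.buildings[1].elec
test_mains = test_elec.mains().all_meters()[0]
test_meter = test_elec.submeters()['fridge']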
Example No. 6
from nilmtk.dataset_converters import convert_redd, convert_eco
# Convert the REDD and ECO datasets to .h5
convert_redd('/net/linse8/no_backup_01/s1183/data/low_freq', '/net/linse8/no_backup_01/s1183/data/redd.h5')
convert_eco('/net/linse8/no_backup_01/s1183/data/eco', '/net/linse8/no_backup_01/s1183/data/eco.h5')
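
Once both conversions have run, each file can be opened with nilmtk's DataSet class; a minimal sketch reusing the output paths above:

from nilmtk import DataSet

redd = DataSet('/net/linse8/no_backup_01/s1183/data/redd.h5')
eco = DataSet('/net/linse8/no_backup_01/s1183/data/eco.h5')
print(list(redd.buildings.keys()), list(eco.buildings.keys()))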
Example No. 7
# This project trains using all the houses and using the method
from __future__ import print_function, division
import time
from matplotlib import rcParams
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
from six import iteritems
from nilmtk import DataSet, TimeFrame, MeterGroup, HDFDataStore
from nilmtk.legacy.disaggregate import CombinatorialOptimisation, FHMM
import nilmtk.utils
from datetime import datetime

from nilmtk.dataset_converters import convert_redd
convert_redd(r'low_freq', 'redd.h5')

# Load the data into memory
train = DataSet('redd.h5')
test = DataSet('redd.h5')

# Enumerate all the houses
buildings = [i for i in range(6)]

# The dates are interpreted by Pandas, prefer using ISO dates (yyyy-mm-dd)
train.set_window(end="2011-04-30")
test.set_window(start="2011-04-30")

# Lists that hold the data for every house
train_elec = [None for i in range(6)]
test_elec = [None for i in range(6)]
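
The snippet stops here; a minimal sketch of how those per-house lists could be filled follows, assuming the zero-based list index maps onto nilmtk's one-based REDD building numbers.

# Sketch: populate the per-house MeterGroups (assumed mapping: list index i -> building i + 1)
for i in buildings:
    train_elec[i] = train.buildings[i + 1].elec
    test_elec[i] = test.buildings[i + 1].elec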
Example No. 8
from nilmtk.dataset import DataSet
from nilmtk.dataset_converters import convert_redd


# Convert the REDD data into an HDF5 (.h5) file for later loading
convert_redd("C:/NILM/Data/REDD/low_freq/", "C:/NILM/Data/Output/redd.h5")

Example No. 9
from nilmtk.dataset_converters import convert_redd
from pathlib import Path

print(Path.cwd())

cwd = Path.cwd()
dataset_path = 'data\\low_freq'
full_path = cwd.joinpath(dataset_path)

if not Path('data\\redd.h5').exists():
    convert_redd(str(full_path), 'data\\redd.h5')

from nilmtk import DataSet
from nilmtk.utils import print_dict

redd = DataSet('data\\redd.h5')

print_dict(redd.metadata)

elec = redd.buildings[1].elec
print("\n All data from building 1  ----- \n")
print(elec)

fridge = elec['fridge']
print("\n All columns available for a fridge from Building 1   ----- \n")
print(fridge.available_columns())

df = next(fridge.load())
print("\n Df Head  ----- \n")
print(df.head())
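
load() can also be narrowed to a single measurement; the sketch below assumes the fridge meter reports active power (the available columns are printed above, so this is easy to verify).

# Sketch: load only the active-power column for the fridge
df_active = next(fridge.load(physical_quantity='power', ac_type='active'))
print(df_active.head())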
Example No. 10
from nilmtk.dataset_converters import convert_redd

convert_redd('C:\\Users\\dl50129\\Desktop\\nilmtk\\data\\REDD\\low_freq',
             'C:\\Users\\dl50129\\Desktop\\nilmtk\\data\\redd.h5',
             format='HDF')
convert_redd('C:\\Users\\dl50129\\Desktop\\nilmtk\\data\\REDD\\low_freq',
             'C:\\Users\\dl50129\\Desktop\\nilmtk\\data\\redd_csv',
             format='CSV')
Example No. 11
from nilmtk.dataset import DataSet
from nilmtk.dataset_converters import convert_redd


# Convert the REDD data into an HDF5 (.h5) file for later loading
convert_redd("C:/NILM/Data_Sets/low_freq/", "C:/NILM/Data_Sets/redd_data.h5")

Example No. 12
from nilmtk.dataset import DataSet
from nilmtk.dataset_converters import convert_redd

# Convert the REDD data into an HDF5 (.h5) file for later loading
convert_redd("C:/NILM/Data/REDD/low_freq/", "C:/NILM/Data/Output/redd.h5")
Example No. 13
from nilmtk.dataset_converters import convert_redd
from pathlib import Path
from nilmtk import DataSet
from nilm_models.dae.daedisaggregator import DAEDisaggregator

import tensorflow as tf
print("Num GPUs Available: ",
      len(tf.config.experimental.list_physical_devices('GPU')))

cwd = Path.cwd()
dataset_path = '..\\experiments\\data\\low_freq'
raw_data = cwd.joinpath(dataset_path).resolve()
nilmtk_h5_path = Path('..\\experiments\\data\\redd.h5').resolve()

if not nilmtk_h5_path.exists():
    convert_redd(str(raw_data), nilmtk_h5_path)

dae = DAEDisaggregator(256)
#if not Path('model-redd100.h5').exists():

redd = DataSet(nilmtk_h5_path)

redd.set_window(end="30-4-2011")  #Use data only until 4/30/2011
train_elec = redd.buildings[1].elec

train_mains = train_elec.mains().all_meters()[
    0]  # The aggregated meter that provides the input
train_meter = train_elec.submeters()[
    'fridge']  # The fridge meter that is used as the training target

if not Path("model-redd100.h5").exists():
Example No. 14
from bokeh.palettes import brewer
from bokeh.io import output_file, show
from bokeh.plotting import figure
from bokeh.embed import components

from flask import Flask, render_template

# # Data converter (returns: DataSet)
#
# ---

# In[ ]:

# Convert the data
# .dat ==> .h5
from nilmtk import DataSet                            # nilmtk imports used below
from nilmtk.dataset_converters import convert_redd
convert_redd('C:\\Users\\dlsrk\\Desktop\\nilm\\low_freq',
             'C:\\Users\\dlsrk\\Desktop\\nilm\\data\\redd.h5')
# Read the converted .h5 data
# redd = DataSet('C:\\Users\\Kim-Taesu\\Documents\\nilm\\data\\redd.h5')
redd = DataSet('C:\\Users\\dlsrk\\Desktop\\nilm\\data\\redd.h5')


# data load helper function
def getData(inputPath, convertOutputPath):
    convert_redd(inputPath, convertOutputPath)
    return DataSet(convertOutputPath)
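
A hedged usage of getData follows; the paths are placeholders, not taken from the notebook.

# Hypothetical call to getData (placeholder paths)
redd_from_helper = getData('C:\\path\\to\\low_freq',
                           'C:\\path\\to\\redd.h5')
print(redd_from_helper.metadata)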


# # Prepare the data for visualization
#
# ---
Example No. 15
    dataset_directory = Path.cwd() / 'data' / nd
    if dataset_directory.exists():
        break
dataset_directory_str = str(dataset_directory)
datastore_file = dataset_directory / ('%s.h5' % (dataset_name.lower()))
datastore_file_str = str(datastore_file)
# If the datastore does not exist yet (data not converted), convert it first
if not datastore_file.exists():
    if dataset_name.startswith('sortd'):
        ntkdsc.convert_sortd(dataset_directory_str, datastore_file_str)
    elif dataset_name.startswith('fortum'):
        ntkdsc.convert_fortum(dataset_directory_str, datastore_file_str)
    elif dataset_name == 'eco':
        ntkdsc.convert_eco(dataset_directory_str, datastore_file_str, 'CET')
    elif dataset_name == 'redd':
        ntkdsc.convert_redd(dataset_directory_str, datastore_file_str)
# Then load the dataset into memory
dataset = DataSet(datastore_file_str)
# Print basic info about the dataset
print('\n\n%s\n#### DATASET %s\n' % ('#' * 80, dataset_name))
print('\n== dataset.metadata')
print(dataset.metadata)

## Exploring dataset

for bkey in dataset.buildings:
    building = dataset.buildings[bkey]
    elec = building.elec
    print('\n== elec.meters')
    print(elec.meters)
    print('\n== elec.appliances')
Example No. 16
def getData(inputPath, convertOutputPath):
    convert_redd(inputPath, convertOutputPath)
    return DataSet(convertOutputPath)
from os.path import isfile, join
from six import iteritems

from nilmtk.dataset_converters import convert_redd
from nilmtk import DataSet
from nilmtk import global_meter_group

from const import *
from utils import number_list_duplicates


def main():

    # if not isfile(REDD_FILE):
    #     convert raw data into an HDF5 file
    convert_redd(join(REDD_DIR, 'low_freq'), REDD_FILE)

    redd = DataSet(REDD_FILE)

    # iterate over each building
    for id in range(1,7):
        # parse all building data and generate dataframe
        elec = redd.buildings[id].elec
        mains = next(elec.mains().load(sample_period=SAMPLE_PERIOD))

        # iterate over meters and gather time series
        meter_dict = {}
        for i, chunk in enumerate(elec.mains().load(sample_period=SAMPLE_PERIOD)):
Example No. 18
    def convert_dataset(self, folder, destination_file):
        # convert_greend(folder, destination_file)
        convert_redd(folder, destination_file)
Example No. 19
    def convert_dataset(self, folder, destination_file):
        # convert_greend(folder, destination_file)
        convert_redd(folder, destination_file)