# Notebook/script bootstrap: load the opengrid configuration and decide on
# the plotting backend depending on the environment (development vs. droplet).
import os
import sys
import pytz
import time
import inspect
import datetime as dt

import numpy as np
import pandas as pd
import tmpo

from opengrid import config
from opengrid.library import plotting
from opengrid.library import houseprint

c = config.Config()

# Resolve the local tmpo data path from the config, if the folder exists on
# disk.  Fall back to None (lets tmpo use its default location) when the
# config key is missing or the folder is absent.  The original code left
# path_to_tmpo_data unbound when the path did not exist, which would raise
# a NameError at first use; the explicit else-branch fixes that.
try:
    if os.path.exists(c.get('tmpo', 'data')):
        path_to_tmpo_data = c.get('tmpo', 'data')
    else:
        path_to_tmpo_data = None
except Exception:
    # Narrowed from a bare except: still best-effort, but no longer traps
    # KeyboardInterrupt/SystemExit.
    path_to_tmpo_data = None

# configuration for the plots
DEV = c.get('env', 'type') == 'dev'  # DEV is True if we are in development environment, False if on the droplet
print("Environment configured for development: {}".format(DEV))
if not DEV:
    # production environment: don't try to display plots
    import matplotlib
# -*- coding: utf-8 -*- """ General caching functionality. This module defines: 1. the Cache class 2. generic cache functions to store daily results Created on Thu Jan 7 09:34:04 2016 @author: roel """ import os import numpy as np import pandas as pd from opengrid import config cfg = config.Config() from opengrid.library import misc from opengrid.library import analysis class Cache(object): """ A class to handle daily aggregated data or intermediate results The file format for the data is result_sensor.csv """ def __init__(self, variable, folder=None): """ Create a cache object specifically for the specified variable Arguments
def synchronize(folder, unzip=True, consolidate=True, file_type='hdf'):
    """Download the latest zip-files from the opengrid droplet, unzip and consolidate.

    The files will be stored in folder/zip and unzipped and
    consolidated into folder/csv

    Parameters
    ----------
    folder : path
        The *data* folder, containing subfolders *zip* and *csv*
    unzip : [True]/False
        If True, unzip the downloaded files to folder/csv
    consolidate : [True]/False
        If True, all csv files in folder/csv will be consolidated to a
        single file per sensor
    file_type : str, default 'hdf'
        File format for the consolidated files, forwarded to
        consolidate_folder.

    Raises
    ------
    IOError
        If *folder* does not exist, or if a download fails.

    Notes
    -----
    This will only unzip the downloaded files and then consolidate all
    csv files in the csv folder.  If you want to rebuild the consolidated
    csv from all available data you can either delete all zip files and
    run this function or run _unzip(folder, consolidate=True) on the
    data folder.
    """
    t0 = time.time()
    if not os.path.exists(folder):
        raise IOError("Provide your path to the data folder where a zip and csv subfolder will be created.")

    from opengrid import config
    # Get the credentials and location of the private opengrid webserver.
    c = config.Config()
    pwd = c.get('opengrid_server', 'password')
    host = c.get('opengrid_server', 'host')
    port = c.get('opengrid_server', 'port')
    user = c.get('opengrid_server', 'user')
    URL = "".join(['http://', host, ':', port, '/'])

    # create a session to the private opengrid webserver
    session = requests.Session()
    session.auth = (user, pwd)
    resp = session.get(URL)

    # Make a list of all zipfiles advertised on the index page.
    # Bugfix: match against resp.text (str); resp.content is bytes on
    # Python 3 and raises a TypeError against a str pattern.
    pattern = '("[0-9]{8}.zip")'
    zipfiles = re.findall(pattern, resp.text)
    zipfiles = [x.strip('"') for x in zipfiles]
    zipfiles.append('all_data_till_20140711.zip')

    zipfolder = os.path.join(folder, 'zip')
    csvfolder = os.path.join(folder, 'csv')

    # create the folders if they don't exist
    for fldr in [zipfolder, csvfolder]:
        if not os.path.exists(fldr):
            os.mkdir(fldr)

    downloadfiles = []  # these are the successfully downloaded files
    for f in zipfiles:
        # download the file to zipfolder if it does not yet exist
        if not os.path.exists(os.path.join(zipfolder, f)):
            print("Downloading {}".format(f))
            with open(os.path.join(zipfolder, f), 'wb') as handle:
                # Bugfix: use the configured URL instead of the previously
                # hard-coded IP address 'http://95.85.34.168:8080/'.
                response = session.get(URL + f, stream=True)
                if not response.ok:
                    raise IOError('Something went wrong in downloading of {}'.format(f))
                for block in response.iter_content(1024):
                    if not block:
                        break
                    handle.write(block)
            downloadfiles.append(f)

    t1 = time.time()

    # Now unzip and/or consolidate
    if unzip:
        _unzip(folder, downloadfiles)
    t2 = time.time()

    if consolidate:
        consolidate_folder(csvfolder, file_type=file_type)
    t3 = time.time()

    print('Download time: {} s'.format(t1 - t0))
    print('Unzip time: {} s'.format(t2 - t1))
    print('Consolidate time: {} s'.format(t3 - t2))
    print('Total time: {} s'.format(t3 - t0))