Example #1
0
def process_by_basin(config, ohcs, flxs, basins, areas):
    """Run the Kalman filter for every basin and collect the results.

    Output cubes are created from the first OHC cube as a template; each
    unique basin index in *basins* (except the masked fill value) is then
    processed and merged into those cubes.
    """
    # Shape the output cubes like the first OHC field.
    template = create_output_template(config, ohcs[0])
    out_cubes = create_output_cubes(config, template)

    # Unique basin indices, with masked entries represented by fill_value.
    basin_ids = np.unique(basins.data.filled())
    fill_value = basins.data.fill_value

    for task_idx, basin_id in enumerate(basin_ids):
        if config.getboolean('output', 'print_stdout'):
            tools.print_progress('Processing basins', len(basin_ids),
                                 task_idx + 1, nbar=20)

        # Skip the fill value -- it marks masked (non-basin) grid points.
        if basin_id != fill_value:
            kout = process_basin(config, ohcs, flxs, basins, areas, basin_id)
            out_cubes = update_output_cubes(out_cubes, kout, basins, basin_id)

    return out_cubes
Example #2
0
def load_data():
    """Load the FER2013 data set from ``data/fer2013.csv``.

    Each row's ``pixels`` column is a space-separated string of pixel
    intensities which is parsed into one flattened image per row.

    Returns
    -------
    X : np.ndarray, shape (N, SIZE**2)
        Pixel intensities as floats, one image per row.
    Y : np.ndarray, shape (N,)
        Integer emotion labels from the ``emotion`` column.
    """
    print("load_data")
    data = pd.read_csv("data/fer2013.csv")
    # BUG FIX: Series.as_matrix() was removed in pandas 1.0; use to_numpy().
    X_data = data['pixels'].to_numpy()
    D = SIZE**2
    N = data.shape[0]

    X = np.zeros((N, D))
    Y = data['emotion'].to_numpy()

    for i in range(N):
        tools.print_progress(i,
                             N,
                             prefix='Progress:',
                             suffix='Complete',
                             bar_length=50)
        # Parse the space-separated pixel string into the i-th row.
        X[i] += np.array(X_data[i].split(), dtype=float)

    return X, Y
Example #3
0
def generate_encoded_data_file():
    """Encode the raw data set and write it to ``preprocessed/data.csv``.

    Loads X/Y via ``load_data_set``, serializes the fitted encoder, encodes
    every sample, appends the labels as the last column and saves the result
    as integers in CSV form.
    """
    print("generate_encoded_data_file")
    X, Y = load_data_set()
    Y = np.reshape(Y, (Y.shape[0], 1))
    encoder = get_encoder(X)
    # NOTE(review): this serializes the result of calling encoder() --
    # confirm that is intended rather than serializing `encoder` itself.
    serialize(encoder(), "encoder")

    # Empty-string template row fixes the column width; dropped again below.
    rows = [encode_string("", encoder)]
    for i in range(X.shape[0]):
        print_progress(i,
                       X.shape[0],
                       prefix='Progress:',
                       suffix='Complete',
                       bar_length=50)
        rows.append(encode_string(X[i], encoder))

    # PERF FIX: stack once instead of np.vstack inside the loop, which
    # re-copied the whole accumulated array on every iteration (O(n^2)).
    X_encoded = np.vstack(rows)[1:, :]
    X_encoded = np.hstack((X_encoded, Y))
    os.makedirs("preprocessed", exist_ok=True)
    np.savetxt("preprocessed/data.csv", X_encoded, delimiter=',', fmt="%d")
Example #4
0
def process_resolution(params, reslist, nvar, nfiles, currfile):
    '''
    Processes files for variable defined by params for all resolutions in reslist

    Parameters
    ----------
    params : list
        Row in variables table corresponding to variable processed in the call of this function
    reslist : list
        List of resolutions the variable should be processed at
    nvar : int
        Number of variables (needed for progress bar)
    nfiles : int
        Currently estimated total number of files for all variables (needed for progress bar)
    currfile : int
        Currently already processed number of files

    Returns
    -------
    (nfiles, currfile) : tuple of int
        Updated total-file estimate and processed-file counter so the
        caller can keep the progress bar consistent across variables.
    '''

    # Seasonal ("sem") output is handled in a separate step, so strip it
    # from reslist and remember it in a flag.
    if "sem" in reslist:
        seasonal = True
        reslist.remove("sem")
    else:
        seasonal = False

    log = logging.getLogger("cmorlight")

    # get cdf variable name
    var = params[config.get_config_value('index', 'INDEX_VAR')]
    varRCM = params[config.get_config_value('index', 'INDEX_RCM_NAME')]
    # create path to input files from basedir,model,driving_model
    in_dir = "%s/%s" % (tools.get_input_path(), params[config.get_config_value(
        'index', 'INDEX_RCM_NAME')])
    log.debug("Looking for input dir(1): %s" % (in_dir))

    if not os.path.isdir(in_dir):
        log.error(
            "Input directory does not exist(0): %s \n \t Change base path in .ini file or create directory! "
            % in_dir)
        return nfiles, currfile

    cores = config.get_config_value("integer", "multi", exitprog=False)
    multilst = []  # argument lists queued for one parallel batch
    seaslst = []   # (params, year) pairs queued for seasonal processing
    log.info("Used dir: %s" % (in_dir))
    for dirpath, dirnames, filenames in os.walk(in_dir, followlinks=True):
        if not nfiles:
            # estimate total number of files for the progress bar
            if config.get_config_value('boolean', 'limit_range'):
                nfiles = nvar * (
                    config.get_config_value('integer', 'proc_end') -
                    config.get_config_value('integer', 'proc_start') + 1)
            else:
                nfiles = nvar * len(filenames)
        if len(filenames) == 0:
            log.warning("No files found! Skipping this variable...")

        i = 0
        for f in sorted(filenames):
            if f[-3:] != ".nc":
                continue

            # year is encoded in the last underscore-separated token
            if var not in settings.var_list_fixed:
                year = f.split("_")[-1][:4]

            # use a per-year logger when processing in parallel
            if cores > 1 and var not in settings.var_list_fixed:
                logger = logging.getLogger("cmorlight_" + year)
                logger.info(
                    "\n###########################################################\n# Var in work: %s / %s\n###########################################################"
                    % (var, varRCM))
                logger.info("Start processing at: " +
                            str(datetime.datetime.now()))
            else:
                logger = logging.getLogger("cmorlight")

            # if limit_range is set: skip file if it is out of range
            if config.get_config_value(
                    'boolean',
                    'limit_range') and var not in settings.var_list_fixed:
                if int(year) < config.get_config_value(
                        'integer',
                        'proc_start') or int(year) > config.get_config_value(
                            'integer', 'proc_end'):
                    continue
                # Define first and last month of file
                if config.get_config_value('integer',
                                           "proc_start") == int(year):
                    firstlast = [
                        config.get_config_value('integer', "first_month"), 12
                    ]
                elif config.get_config_value('integer',
                                             "proc_end") == int(year):
                    firstlast = [
                        1, config.get_config_value('integer', "last_month")
                    ]
                else:
                    firstlast = [1, 12]

            else:
                firstlast = [1, 12]

            logger.info(
                "\n###########################################################"
            )
            # accept files named <var>_*, <var>.nc, <varRCM>_*, <varRCM>.nc
            # or "<varRCM up to the first 'p'>_*"
            if f.find("%s_" % var) == 0 or f.find(
                    "%s.nc" % var) == 0 or f.find(
                        "%s_" % varRCM) == 0 or f.find(
                            "%s.nc" % varRCM) == 0 or f.find(
                                "%s_" % varRCM[:varRCM.find('p')]) == 0:
                in_file = "%s/%s" % (dirpath, f)
                logger.log(35, "Input from: %s" % (in_file))
                if not os.access(in_file, os.R_OK):
                    logger.error("Could not read file '%s', no permission!" %
                                 in_file)
                else:
                    if var in settings.var_list_fixed:
                        tools.process_file_fix(params, in_file)

                    else:
                        if cores > 1:
                            multilst.append([
                                params, in_file, var, reslist, year, firstlast
                            ])
                            seaslst.append([params, year])

                        else:
                            reslist = tools.process_file(
                                params, in_file, var, reslist, year, firstlast)
                            if seasonal:
                                tools.proc_seasonal(params, year)
            else:
                # BUG FIX: the message said "does match" in the non-matching
                # branch and its %s placeholder was never filled in.
                logger.warning(
                    "File %s does not match the file name conventions for this variable. File not processed..."
                    % f)

            i = i + 1

            # process as many files simultaneously as there are cores specified
            if i == cores and multilst != []:
                log.info("Processing years %s to %s simultaneously" %
                         (seaslst[0][1], seaslst[-1][1]))
                pool = Pool(processes=cores)
                R = pool.map(process_file_unpack, multilst)
                pool.terminate()
                # seasonal processing:
                if seasonal:
                    pool = Pool(processes=cores)
                    pool.map(proc_seasonal_unpack, seaslst)
                    pool.terminate()

                currfile += len(multilst)
                # start new batch
                multilst = []
                seaslst = []
                i = 0
                # workers may have pruned reslist; take the updated copy
                reslist = R[0]

            if cores <= 1:
                currfile += 1

            # print progress bar
            tools.print_progress(currfile, nfiles)

    # process remaining files that did not fill a whole batch
    if len(multilst) != 0:
        log.info("Processing years %s to %s simultaneously" %
                 (seaslst[0][1], seaslst[-1][1]))
        pool = Pool(processes=len(multilst))
        R = pool.map(process_file_unpack, multilst)
        pool.terminate()
        # seasonal processing:
        if seasonal:
            pool = Pool(processes=cores)
            pool.map(proc_seasonal_unpack, seaslst)
            pool.terminate()

        # update currfile
        currfile += len(multilst)
        tools.print_progress(currfile, nfiles)

    log.info("Variable '%s' finished!" % (var))

    return nfiles, currfile
Example #5
0
import numpy as np
from PIL import Image

from settings import *
from tools import print_progress, sigmoid

# Authenticate against the VK API with the token from settings.
# NOTE(review): `vk`, `time` and `re` are not in the visible import block --
# presumably they arrive via `from settings import *`; confirm.
session = vk.AuthSession(access_token=TOKEN, )
vkapi = vk.API(session)

# First element of the history response is the total message count.
count_of_messages = vkapi.messages.getHistory(count=1, user_id=USER_ID)[0]
messages = []

print('Total count of messages: ', count_of_messages)

# Page through the message history, 200 messages per request.
for i in range(0, count_of_messages, 200):
    print_progress(i, count_of_messages)
    messages += vkapi.messages.getHistory(offset=i, count=200,
                                          user_id=USER_ID)[1:]
    time.sleep(0.4)  # pause between requests -- presumably API rate limiting
print('')

# Per-message "emoji score", indexed oldest-first (messages is reversed below).
m = [0] * count_of_messages
heatmap = np.zeros(count_of_messages)  # allocated here; filled outside this chunk

for i, msg in enumerate(messages[::-1]):
    if 'attachment' in msg.keys():
        # A sticker attachment scores a flat 5.
        if 'sticker' in msg['attachment'].keys():
            m[i] = 5
    else:
        # Otherwise count emoji code points in the message body.
        m[i] = len(re.findall(r'[\U0001f600-\U0001f650]', msg['body']))
Example #6
0
def combining(config, n, nmax):
    """Show the progress bar for combining synthetic profiles, if enabled."""
    # Guard clause: stay silent unless stdout printing is switched on.
    if not config.getboolean('options', 'print_stdout'):
        return
    tools.print_progress('Combining synthetic data', nmax, n)
Example #7
0
def extracting(config, n, nmax):
    """Show the progress bar for data extraction, if enabled."""
    # Guard clause: stay silent unless stdout printing is switched on.
    if not config.getboolean('options', 'print_stdout'):
        return
    tools.print_progress('Extracting synthetic data', nmax, n)
Example #8
0
def combining(config, n, nmax):
    """Render combining progress (step *n* of *nmax*) when stdout is enabled."""
    show_progress = config.getboolean('options', 'print_stdout')
    if show_progress:
        tools.print_progress('Combining synthetic data', nmax, n)
Example #9
0
def extracting(config, n, nmax):
    """Render extraction progress (step *n* of *nmax*) when stdout is enabled."""
    show_progress = config.getboolean('options', 'print_stdout')
    if show_progress:
        tools.print_progress('Extracting synthetic data', nmax, n)