# -*- coding: utf-8 -*-
# date: 2019-2-25
# Author: Joe-BU

import datetime
import imghdr
import os
import shutil
from PIL import Image
import numpy as np
import sys
from settings import *
from mylog import mylog as lg

preprocess_log = lg.init_logger(preprocess_log_path)


def crop(input_dir, filename, output_dir):
    # Load the source image and drop any alpha channel.
    img = Image.open(os.path.join(input_dir, filename))
    img = img.convert('RGB')
    q = np.asarray(img)
    # Two-stage crop: first cut an 800x800 region out of the full frame, then
    # keep a 400x400 window inside it (indices are relative to the first crop).
    q = q[0:800, 224:1024]
    q = q[100:500, 300:700]
    final = Image.fromarray(q.astype('uint8')).convert('RGB')
    final.save(os.path.join(output_dir, filename))
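

# A minimal usage sketch (not part of the original script): crop every image
# file found in a source directory; the directory arguments are placeholders.
def crop_all(input_dir, output_dir):
    for name in os.listdir(input_dir):
        if imghdr.what(os.path.join(input_dir, name)):  # skip non-image files
            crop(input_dir, name, output_dir)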


def wipeBaseMap(local_path, filename):
    img = Image.open(os.path.join(local_path, filename))
    width, height = img.size
Example #2
import numpy as np
import theano
from settings import *  # assumed source of deduce_log_path and the radar data/save dirs
from mylog import mylog as lg

# sparnn.utils.quick_logging_config('deep-weather.log')

def float_formatter(x):
    return "%.2f" % x


np.set_printoptions(formatter={'float_kind': float_formatter})
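# e.g. np.array([3.14159, 2.0]) now prints as [3.14 2.00]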
# Enable or disable parallel computation on the CPU with OpenMP
theano.config.openmp = True
theano.config.optimizer = "fast_compile"
# The number of user stack levels to keep for variables.
theano.config.traceback.limit = 100

my_log = lg.init_logger(deduce_log_path)
# dictMerged2=dict(dict1, **dict2)

radar_config = {
    'name': 'radar',
    'max': 14.0,
    'offset': 0.0,
    'cmap': 'radar',
    'cost_func': 'Fade',
    'level': 0,
    'data_dir': radar_data_dir,
    'save_dir': radar_save_dir
}
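
# A small sketch (not in the original file): 'offset' and 'max' are applied when
# frames are loaded, shifting and rescaling each frame into roughly [0, 1]
# (the data iterator below does exactly this with (X + offset) / max).
def normalize_frame(frame, config):
    return (frame + config['offset']) / config['max']
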
wind_config = {
    'name': 'wind',
    'level': 10,
Example #3
    def raw_input(self, config):
        filenames = load_range_filenames(config, self.mode)

        X, X_last = [], []
        if self.mode == 'train':
            load_lg = lg.init_logger(base_config['load_log_path'])
            load_lg.info(' <<< Start Image : ' +
                         str(filenames[0].split("/")[-1]))
            load_lg.info(' >>> E n d Image : ' +
                         str(filenames[-1].split("/")[-1]))
            load_lg.info(' ** Image Length : ' + str(len(filenames)))
            for filename in filenames:
                try:
                    X_hour = read_data(filename, config)
                    # print "reader done"
                    if X_hour is not None:
                        X.append(X_hour)
                        X_last = X_hour
                        if len(X) == len(filenames) / 4:
                            load_lg.info(' Load  25%  data --> ' +
                                         str(filename.split("/")[-1]))
                        elif len(X) == len(filenames) / 2:
                            load_lg.info(' Load  50%  data --> ' +
                                         str(filename.split("/")[-1]))
                        elif len(X) == (len(filenames) / 4 * 3):
                            load_lg.info(' Load  75%  data --> ' +
                                         str(filename.split("/")[-1]))
                    elif len(X_last) > 0:
                        # Reuse the most recent valid frame when this file yields no data.
                        X.append(X_last)

                except IOError:
                    print(filename + ' does not exist! ')
                    pass

        elif self.mode == 'predict':
            print filenames[0]
            print filenames[-1]
            for i, filename in enumerate(filenames):
                try:
                    X_hour = read_data(filename, config)
                    if X_hour is not None:
                        X.append(X_hour)
                        X_last = X_hour
                    elif len(X_last) > 0:
                        X.append(X_last)
                except IOError:
                    print filename + ' does not exist!'
                    pass
        else:
            pass
        X = np.array(X, dtype=np.dtype("float32"))
        X = np.reshape(X, (X.shape[0], 1, X[0].shape[0], X[0].shape[1]))
        X = (X + config['offset']) / config['max']
        print X.shape
        print X.max()
        if self.mode == 'train':
            load_lg.info('X shape: ' + str(X.shape) + ', X max: ' +
                         str(X.max()) + ', X min: ' + str(X.min()))

        clips = [[] for i in range(2)]
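        # clips[0][k] will hold [start, length] of the k-th input window into X;
        # clips[1][k] the matching target window (length 0 in predict mode).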
        minibatch_size = config['minibatch_size']
        input_seq_length = config['input_seq_length']
        output_seq_length = config['output_seq_length']

        if self.mode == 'train':
            for x in range(X.shape[0] -
                           (input_seq_length + output_seq_length)):
                clips[0].append([x, input_seq_length])
                clips[1].append([x + input_seq_length, output_seq_length])
        elif self.mode == 'predict':
            for x in range(X.shape[0] / (input_seq_length)):
                clips[0].append([(input_seq_length + output_seq_length) * x,
                                 input_seq_length])
                clips[1].append([(input_seq_length + output_seq_length) * x +
                                 input_seq_length, 0])
        clips = np.array(clips, dtype=np.dtype("int32"))
        dims = np.array([[1, X[0].shape[1], X[0].shape[2]]],
                        dtype=np.dtype("int32"))
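        # Hedged illustration (not in the original) of how a consumer reads one
        # sample back out of the returned dict; `k` is a hypothetical clip index:
        #   start, length = clips[0][k]
        #   inputs = X[start:start + length]
        #   t_start, t_length = clips[1][k]
        #   targets = X[t_start:t_start + t_length]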
        return {'input_raw_data': X, 'dims': dims, "clips": clips}
Example #4
# -*- coding: utf-8 -*-
# Author: Joe BU
# Date: 2019-3-26 18:00:00

from model_config import base_config
import os
from datetime import datetime, timedelta
import sys

sys.path.append(base_config['append_path'])
from mylog import mylog as lg

runtime_log = lg.init_logger(base_config['runtime_log_path'])


def main(t1):
    #start_time = datetime(2018, 6, 30, 10, 0, 0)
    #end_time = datetime(2018, 6, 30, 11, 0, 0)
    start_time = t1
    end_time = start_time
    while start_time <= end_time:
        now = start_time.strftime("%Y-%m-%d %H:%M")
        print start_time
        runtime_log.info(' Predict time: ' + now)

        try:
            # os.system() returns the child's exit status and does not raise on a
            # non-zero exit, so the except below only catches launch failures.
            os.system(
                '/data/anaconda2/bin/python /data/python_scripts/Pre_processor.py '
                + start_time.strftime("%Y%m%d%H%M"))
            runtime_log.info(' 1. Pre_process Done!')
        except Exception as e:
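            # Assumed handler body (the original is truncated here): log the failure.
            runtime_log.error(' 1. Pre_process failed: ' + str(e))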
Example #5
import os
import os.path
import theano.tensor as TT
import numpy as np

#logger = logging.getLogger(__name__)
from sparnn.models import Model
from sparnn.utils import *
from model_config import base_config
import utils
from mylog import mylog as lg
# "autosave_mode": "best"
train_log = lg.init_logger(base_config['train_log_path'])
process_log = lg.init_logger(base_config['process_log_path'])


class Optimizer(object):
    def __init__(self,
                 model,
                 train_data_iterator0,
                 train_data_iterator1,
                 train_data_iterator2,
                 train_data_iterator3,
                 train_data_iterator4,
                 train_data_iterator5,
                 valid_data_iterator,
                 test_data_iterator,
                 hyper_param,