Example No. 1
0
def test():
    """Seed the service with sample data, then exercise cart updates forever.

    Creates customers, products, inventories and carts through the HTTP
    API, then loops indefinitely PUT-ing every cart back to hammer the
    update path (this function never returns).
    """

    def _post_then_fetch(resource, payload, wait=1):
        # POST the payload, give the (eventually-consistent) service time
        # to process it, then GET the stored entities back so we have the
        # server-assigned ids (e.g. 'entity_id') for follow-up requests.
        http_cmd_req('{}/{}'.format(BASE_URL, resource), payload)
        time.sleep(wait)
        rsp = request.urlopen('{}/{}'.format(BASE_URL, resource))
        return get_result(rsp)

    logging.info("creating customers ...")
    customers = _post_then_fetch('customers', create_customers(10))

    logging.info("creating products ...")
    products = _post_then_fetch('products', create_products(100))

    logging.info("creating inventory ...")
    inventories = _post_then_fetch(
        'inventories',
        create_inventories(
            [product['entity_id'] for product in products], 100))

    logging.info("creating carts ...")
    # Carts reference customers and products, so they are created last;
    # give the service a little longer (2s) to settle.
    carts = _post_then_fetch(
        'carts', create_carts(10, customers, products), wait=2)

    while True:

        logging.info("updating carts ...")

        for cart in carts:
            http_cmd_req('{}/cart/{}'.format(BASE_URL, cart['entity_id']),
                         cart,
                         _method='PUT')
Example No. 2
0
def run(metal, provider, period, start=None, end=None, evaluate=False,
        window=60, epochs=5, batch_size=32):
    """Train an LSTM on historical prices and forecast `period` future days.

    Args:
        metal: asset identifier, passed through to `get_data`.
        provider: data-provider identifier, passed through to `get_data`.
        period: number of future days to predict.
        start, end: optional date bounds forwarded to `get_data`.
        evaluate: if True, skip forecasting and return
            `(None, get_evaluate(model, x_train, y_train))`.
        window: look-back length (timesteps per sample). Default 60
            preserves the original hard-coded behavior.
        epochs, batch_size: training hyper-parameters (defaults preserve
            the original 5 / 32).

    Returns:
        `get_result(data, new_data)` where `new_data` is a DataFrame of
        predicted prices indexed by future calendar dates, or the
        `(None, evaluation)` tuple when `evaluate` is True.
    """
    # Fetch the series and scale it into [0, 1] — LSTMs train poorly on
    # raw price magnitudes.
    data = get_data(metal, provider, start, end)
    dataset = data.values
    training_data_len = math.ceil(len(dataset) * 0.8)  # 80/20 split
    scaler = MinMaxScaler(feature_range=(0, 1))
    scaled_data = scaler.fit_transform(dataset)
    train_data = scaled_data[0:training_data_len, :]

    # Build sliding windows: each sample is `window` consecutive values,
    # the label is the value immediately following the window.
    x_train = np.array([train_data[i - window:i, 0]
                        for i in range(window, len(train_data))])
    y_train = np.array([train_data[i, 0]
                        for i in range(window, len(train_data))])
    # LSTM expects (samples, timesteps, features).
    x_train = np.reshape(x_train, (x_train.shape[0], x_train.shape[1], 1))

    # Two stacked LSTM layers feeding a small dense regression head.
    # NOTE(review): 'acc' is an odd metric for a regression loss — kept
    # as-is since it only affects logging, not training.
    model = Sequential()
    model.add(
        LSTM(50, return_sequences=True, input_shape=(x_train.shape[1], 1)))
    model.add(LSTM(50, return_sequences=False))
    model.add(Dense(25))
    model.add(Dense(1))
    model.compile(optimizer='adam', loss='mean_squared_error', metrics=['acc'])
    model.fit(x_train, y_train, epochs=epochs, batch_size=batch_size)

    if evaluate:
        return None, get_evaluate(model, x_train, y_train)

    # Seed forecasting with windows drawn from the tail of the series.
    test_data = scaled_data[training_data_len - window:]
    x_test = np.array([test_data[i - window:i, 0]
                       for i in range(window, len(test_data))])
    x_test = np.reshape(x_test, (x_test.shape[0], x_test.shape[1], 1))
    predictions = np.array([])
    last = x_test[-1]

    # Recursive multi-step forecast: roll the window forward one day at a
    # time, feeding each prediction back in as the newest observation.
    for _ in range(period):
        curr_prediction = model.predict(np.array([last]))
        last = np.concatenate([last[1:], curr_prediction])
        predictions = np.concatenate([predictions, curr_prediction[0]])

    # Undo the MinMax scaling and attach future calendar dates.
    predictions = scaler.inverse_transform([predictions])[0]
    dicts = []
    curr_date = date.today()

    for i in range(period):
        curr_date = curr_date + timedelta(days=1)
        dicts.append({'Predictions': predictions[i], "Date": curr_date})

    new_data = pd.DataFrame(dicts).set_index("Date")

    return get_result(data, new_data)