コード例 #1
0
def start_day(market_open, market_close):
    """Run the market-hours data-collection loop.

    Announces the session start, takes an immediate reading, then keeps
    servicing the scheduler (which re-collects every 5 minutes) until the
    current time falls outside the [market_open, market_close] window.
    """
    print("MARKET STARTED")
    print(dt.now())

    # Take one reading right away, then every 5 minutes thereafter.
    collect_data()
    schedule.every(5).minutes.do(collect_data)

    # Poll the scheduler once per second until the market closes.
    while time_is_between(market_open, market_close):
        schedule.run_pending()
        time.sleep(1)
コード例 #2
0
import numpy as np
import joblib
from keras.models import load_model
from data_collector import series_to_supervised, collect_data, read_dataset, get_filename
from datetime import datetime

# Load the trained model and the scaler that was fit during training.
loaded_model = load_model('model/saved_model.model')
scaler = joblib.load('model/scaler.save')

# Market pair / exchange / granularity the model was trained on.
from_symbol = 'BTC'
to_symbol = 'USD'
exchange = 'Bitstamp'
datetime_interval = 'day'

# Refresh today's raw data file, then read it back by its dated filename.
collect_data(from_symbol, to_symbol, exchange, datetime_interval)
current_datetime = datetime.now().date().isoformat()
original_data = read_dataset(
    get_filename(from_symbol, to_symbol, exchange, datetime_interval,
                 current_datetime))

# Reframe the time series as supervised learning:
# num_past_days lagged observations predict 1 future step.
num_past_days = 20
num_features = len(original_data.columns)
num_obs = num_features * num_past_days
scaled = scaler.transform(original_data.values)
data = series_to_supervised(scaled, n_in=num_past_days, n_out=1)
values = data.values

# Hold out the last 70 days for evaluation; everything earlier is training.
n_train_days = len(data) - 70
コード例 #3
0
from link_collector import collect_links
from data_collector import collect_data
from save_file import save


# Scrape every department under the catalog and persist one nested
# record per department: {category: {department_url: item_data}}.
url = 'https://www.ru-chipdip.by/catalog'
header = {'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) OPR/55.0.2994.44'}
links = collect_links(url, header)
for key in links:
    for dept in links[key]:
        item_data = collect_data(dept, header)
        # BUG FIX: the original wrote {key: {dept: { item_data }}}, which
        # wraps item_data in a SET literal — that changes the stored shape
        # and raises TypeError whenever item_data is unhashable (e.g. a
        # dict). Store the collected data itself.
        data_dict = {key: {dept: item_data}}
        save(data_dict)
コード例 #4
0
def collect_data():
    """Collect a new data point and store it.

    Thin wrapper: delegates to the module-level data_collector and hands
    its result straight back to the caller.
    """
    result = data_collector.collect_data()
    return result