Example #1
def update_signal_recalculate():

    dthour = request.args.get('datetime')
    du = DateUtil()
    specific_hour_dt = du.parse_time_string(dthour)
    predictions = []
    coins = generate_coins(specific_hour_dt)
    coininfo = CoinInfo()

    for coin in coins:

        earliest_date = dbconn.get_earliest_date_in_db(coin)
        print(coin.name, "earliest date", earliest_date)

        one_day_before = specific_hour_dt + timedelta(days=-1)
        coin.loadtime = one_day_before.strftime("%Y-%m-%d")
        print("checking pred in DB for ", coin, "at", specific_hour_dt)

        tsfrom = (specific_hour_dt +
                  timedelta(hours=-1)).strftime("%Y-%m-%d %H:00:00")
        tsto = specific_hour_dt.strftime("%Y-%m-%d %H:00:00")
        print(tsfrom, tsto)

        coininfo.do_prepare(coin, specific_hour_dt)
        predictions = do_predict(coin, predictions, specific_hour_dt)

    return jsonify({'predictions': predictions})
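
Example #1 reads request.args and returns jsonify(), so it is evidently a Flask view function. A minimal sketch of how it might be wired up, assuming an app object and URL path that do not appear in the source:

# Sketch only: the app object, the route path and the example timestamp are
# assumptions; request, jsonify and the view function come from Example #1.
from flask import Flask, request, jsonify

app = Flask(__name__)
app.add_url_rule('/update_signal_recalculate', 'update_signal_recalculate',
                 update_signal_recalculate)

# e.g. GET /update_signal_recalculate?datetime=2017-08-25 13:00:00
# (in whatever format DateUtil.parse_time_string accepts)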
Example #2
def read_all_scraped_retweet(self, coin, tmpdir=''):
    # Read every scraped retweet CSV for the given coin, concatenate them,
    # and drop duplicate retweet_ids.
    coin_name = coin.name
    retweet_dir = './data/' + tmpdir + 'altcoin-tweets/' + coin_name + '/'
    print("reading RETWEET files from: " + retweet_dir)
    ci = CoinInfo()
    dflist = []
    retweet_files = ci.list_retweetfiles(retweet_dir)
    for retweetfile in retweet_files:
        print("reading in: " + retweetfile)
        df = pd.read_csv(retweetfile)
        dflist.append(df)
    df = pd.concat(dflist)
    print("collected retweets: " + str(df['retweet_id'].count()))
    print("dropping duplicate tweets: ")
    df = df.drop_duplicates('retweet_id')
    print("collected tweets: " + str(df['retweet_id'].count()))
    df.reset_index(inplace=True)
    return df
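
For context, a hypothetical call to the method above, assuming it lives on the TweetIO class imported in Example #5 and that scraped CSVs exist under ./data/altcoin-tweets/neo/:

# Hypothetical usage; the TweetIO home of this method and the coin object are
# assumptions. Any object with a .name attribute satisfies the method.
from types import SimpleNamespace
from twitter.tweetio import TweetIO   # import path as in Example #5

tweetio = TweetIO()
coin = SimpleNamespace(name='neo')
retweets = tweetio.read_all_scraped_retweet(coin)
print(len(retweets), "deduplicated retweets")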
Example #3
def get_signal():
    predictions = []
    try:

        du = DateUtil()
        last_round_hour = du.parse_time_string(du.last_round_hour())

        coins = generate_coins(last_round_hour)
        coininfo = CoinInfo()

        for coin in coins:
            pred = dbconn.check_prediction_in_db_last_hour(coin)
            if pred is None:
                coininfo.do_prepare(coin, last_round_hour)
                predictions = do_predict(coin, predictions, last_round_hour)
            else:
                predictions.append(pred)
        return jsonify({'predictions': predictions})
    except Exception as e:
        print("Exception: ", e)
        return jsonify({'exception': predictions})
Example #4
def fill_past_signals():

    predictions = []
    coins = generate_coins(datetime.now())

    coininfo = CoinInfo()
    du = DateUtil()
    for coin in coins:

        earliest_date = dbconn.get_earliest_date_in_db(coin)
        print(coin.name, "earliest date", earliest_date)

        start_datetime = du.parse_time_string(
            du.round_datetime_down(earliest_date))
        last_round_hour = du.parse_time_string(du.last_round_hour())
        # start 24 hours after the earliest record so there is enough history for the prediction
        curr_datetime = start_datetime + timedelta(hours=+24)
        while (curr_datetime < last_round_hour):
            coin.reset_data_frames()
            one_day_before = curr_datetime + timedelta(days=-1)
            coin.loadtime = one_day_before.strftime("%Y-%m-%d")

            curr_datetime = curr_datetime + timedelta(hours=+1)
            print("checking pred in DB for ", coin, "at", curr_datetime)

            tsfrom = (curr_datetime +
                      timedelta(hours=-1)).strftime("%Y-%m-%d %H:00:00")
            tsto = curr_datetime.strftime("%Y-%m-%d %H:00:00")
            print(tsfrom, tsto)

            pred = dbconn.check_prediction_in_db(coin, tsfrom, tsto)
            print("pred:", pred)

            if pred is None:
                coininfo.do_prepare(coin, curr_datetime)
                predictions = do_predict(coin, predictions, curr_datetime)
            else:
                predictions.append(pred)

    return jsonify({'predictions': predictions})
Example #5
from coins.coininfo import CoinInfo
from coins.coinprice import CoinPrice
from twitter.statistics import Statistics
from twitter.tweetio import TweetIO
from twitter.sentiment import SentimentAnalyzer
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.ticker as mticker
# matplotlib.finance was removed in matplotlib 2.2; newer installs provide
# candlestick_ohlc via the separate mpl_finance package instead.
from matplotlib.finance import candlestick_ohlc
import matplotlib.dates as mdates
import datetime as dt

print('Main starts plotting')
cinfo = CoinInfo()
coinlist = cinfo.list_coins('./data/altcoin-1hour')

## choosing neo from the coin list
coin = coinlist[18]
coin.ico = "2016-05-01"

tweetio = TweetIO()
coin.read_from_storeage("prepare2")

df_ohlc = coin.pricehourly.copy()
df_ohlc = df_ohlc[['datetime', 'open', 'high', 'low', 'close']]
#df_ohlc=df_ohlc.drop(['time1','volumefrom','volumeto'],axis=1)
fromperiod = '2017-08-01'
toperiod = '2017-08-25'
df_ohlc = df_ohlc[(df_ohlc['datetime'] >= fromperiod)
                  & (df_ohlc['datetime'] < toperiod)]
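
The imports above (candlestick_ohlc, mdates, mticker) suggest the filtered df_ohlc is plotted as a candlestick chart next. A sketch of that usual continuation, with the width and colour choices as assumptions rather than the project's actual plotting code:

# Convert the datetime column to matplotlib date numbers and draw candlesticks.
df_plot = df_ohlc.copy()
df_plot['datetime'] = pd.to_datetime(df_plot['datetime']).map(mdates.date2num)

fig, ax = plt.subplots()
candlestick_ohlc(ax,
                 df_plot[['datetime', 'open', 'high', 'low', 'close']].values,
                 width=0.02, colorup='g', colordown='r')
ax.xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m-%d'))
ax.xaxis.set_major_locator(mticker.MaxNLocator(8))
fig.autofmt_xdate()
plt.title(coin.name + ' hourly OHLC')   # assumes list_coins() sets coin.name
plt.show()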
Example #6
from keras.layers import Dense, Dropout, Activation
from keras.layers import LSTM, Conv1D, MaxPooling1D
from keras.layers import Embedding
from sklearn.model_selection import train_test_split
import keras.backend as K
import math
import os
from datetime import datetime

# pandas (pd), CoinInfo and Coin are used below; their imports are not part of
# this excerpt.

print('Main starts searching for best model')

pd.set_option("display.max_rows", 100)
pd.set_option("display.max_columns", 100)

cinfo = CoinInfo()
#coinlist=cinfo.list_coins('./data/altcoin-1hour')

## choosing coin
coin = Coin()
#coin.path="./data/altcoin-1hour/neo.csv"
#coin.name="neo"
#coin.ico="2017-05-01"
coin.path = "./data/altcoin-1hour/omg.csv"
coin.name = "omg"
coin.ico = "2017-09-01"

starttime = datetime.now()
print(str(starttime))

coin.read_from_storeage("prepare2")
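
The Keras imports (Conv1D, MaxPooling1D, LSTM, Dense, Dropout) and train_test_split point to a 1-D convolution + LSTM regressor over hourly feature windows. A minimal sketch of such a model; the window length, feature count and every hyperparameter are assumptions, and X/y stand for whatever arrays the prepared coin data provides:

# Minimal Conv1D + LSTM sketch matching the imports above (assumed shapes).
from keras.models import Sequential

timesteps, n_features = 24, 8          # assumed: 24-hour window, 8 features
model = Sequential()
model.add(Conv1D(32, 3, activation='relu', input_shape=(timesteps, n_features)))
model.add(MaxPooling1D(2))
model.add(LSTM(64))
model.add(Dropout(0.2))
model.add(Dense(1))                    # e.g. next-hour price or return
model.compile(optimizer='adam', loss='mse')
model.summary()

# X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, shuffle=False)
# model.fit(X_train, y_train, epochs=10, batch_size=32,
#           validation_data=(X_test, y_test))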