Example No. 1
    def test_tweetio_read_db2(self):
        coin = Coin()

        coin.name = 'omg'
        yesterday = date.today() + timedelta(days=-1)
        coin.loadtime = yesterday.strftime("%Y-%m-%d")
        coin.hashtags = ['omg', 'omisego']

        tio = TweetIO()
        df = tio.read_db_tweet_last_n_hour_by_coin(coin)
        print(df)
Example No. 2
    def test_date_time_binance3(self):
        bapi = BinanceApi()
        coin = Coin()
        coin.name = 'OMG'
        one_day_before = datetime.now() + timedelta(days=-1)
        coin.loadtime = one_day_before.strftime("%Y-%m-%d")
        spech = datetime.now() + timedelta(hours=-1)

        print("spec_closed_hour: ")
        pricehourly = bapi.collect_coindata(coin, spech)
        print(pricehourly)
Example No. 3
    def test_tweetio_read_train(self):

        pd.set_option("display.max_rows",100)
        pd.set_option("display.max_columns",100)

        coin = Coin()
        coin.name = 'neo'

        coin.read_from_storeage('train')
        print(coin.data_to_predict)

        print("test case finishes.")
Example No. 4
    def test_date_time_binance2(self):
        bapi = BinanceApi()
        coin = Coin()
        coin.name = 'OMG'
        spech = datetime.now() + timedelta(hours=-1)

        print("spec_closed_hour: ")
        json_spec_hour = bapi.get_last_n_hour_by_specific_hour_by_coin(
            coin, spech, 1)
        self.print_unix_as_date(json_spec_hour[0][0])
        self.print_unix_as_date(json_spec_hour[0][6])
        print(json_spec_hour)
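
The print_unix_as_date helper used above is not shown in this excerpt. A minimal sketch, assuming the values are Unix timestamps in milliseconds (as Binance klines report open/close times), could look like this:

    def print_unix_as_date(self, unix_ms):
        # Assumption: unix_ms is a millisecond Unix timestamp; convert to seconds first.
        print(datetime.fromtimestamp(unix_ms / 1000).strftime("%Y-%m-%d %H:%M:%S"))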
Example No. 5
    def test_tweetio_read_db3(self):
        tio = TweetIO()
        coin = Coin()
        coin.name = 'neo'
        nowdt = datetime.now()

        dfnow1 = tio.read_db_tweet_last_n_hour_by_specific_hour_by_coin(
            coin, nowdt + timedelta(hours=-1))

        print("dfnow1:")
        print(dfnow1)

        print("test case finishes.")
Example No. 6
    def list_coins(self, dir):
        ret = list()
        for dirname, dirnames, filenames in os.walk(dir):
            filenames.sort()
            # collect every CSV file in the tree as a Coin
            for filename in filenames:
                if filename.endswith('.csv'):
                    path = os.path.join(dirname, filename)
                    name = filename[:-4]
                    c = Coin()
                    c.path = path
                    c.name = name
                    ret.append(c)
        return ret
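
A short usage sketch for list_coins, reusing the directory and CoinInfo class that appear in Example No. 15 below:

cinfo = CoinInfo()
coinlist = cinfo.list_coins('./data/altcoin-1hour')
for c in coinlist:
    # each Coin carries the CSV path and the file name (minus '.csv') as its name
    print(c.name, c.path)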
Example No. 7
    def test_tweetio_read_predict(self):

        pd.set_option("display.max_rows",100)
        pd.set_option("display.max_columns",100)

        tio = TweetIO()
        coin = Coin()
        coin.name = 'omg'
        nowdt = datetime.now()
        argtime='2018-04-14_16-00-00'

        coin.read_from_storeage('rt','runtime/'+argtime+'/')
        print(coin.data_to_predict)

        print("test case finishes.")
Example No. 8
    def test_tweetio_read_db1(self):
        tio = TweetIO()
        coin = Coin()
        coin.name = 'neo'
        nowdt = datetime.now()

        dfnow1 = tio.read_db_retweet_last_n_hour_by_specific_hour_by_coin(
            coin, nowdt)
        dfnow2 = tio.read_db_retweet_last_n_hour_by_coin(coin)
        print("dfnow1:")
        print(dfnow1)
        print("dfnow2:")
        print(dfnow2)

        print("equality check: ")
        self.assertTrue(dfnow1.equals(dfnow2))

        print("test case finishes.")
Example No. 9
    def test_increase_one_hour(self):
        cointrain = CoinTrain()
        #df=pd.DataFrame(index=['year', 'month','day','hour'],columns=[i],data=)
        coin = Coin()
        coin.path = "./data/altcoin-1hour/neo.csv"
        coin.name = "neo"
        coin.ico = "2017-12-31"

        coin.read_from_storeage("prepare2")

        gtdf = coin.gtdf
        idx = gtdf.index
        gtdf.index = idx.set_names(['year', 'month', 'day', 'hour'])
        print(gtdf['sum_neumulfollower'].tail(27))
        gtdf = cointrain.increase_by_one_hour(gtdf)

        print(gtdf['sum_neumulfollower'].tail(27))
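
increase_by_one_hour itself is not shown here. One way the shift could be done (a sketch, not necessarily CoinTrain's actual implementation) is to rebuild timestamps from the four index levels, add an hour, and decompose them again:

import pandas as pd

def shift_index_by_one_hour(df):
    # Rebuild datetimes from the (year, month, day, hour) MultiIndex, add one hour,
    # and split them back into the same four index levels.
    ts = pd.to_datetime(df.index.to_frame(index=False)) + pd.Timedelta(hours=1)
    shifted = df.copy()
    shifted.index = pd.MultiIndex.from_arrays(
        [ts.dt.year, ts.dt.month, ts.dt.day, ts.dt.hour],
        names=['year', 'month', 'day', 'hour'])
    return shifted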
Example No. 10
from coins.coininfo import CoinInfo
from coins.coinprice import CoinPrice
from twitter.statistics import Statistics
from twitter.tweetio import TweetIO
from twitter.sentiment import SentimentAnalyzer
from coins.coin import Coin
import pandas as pd

print('Main starts')
#cinfo=CoinInfo()
#coinlist=cinfo.list_coins('./data/altcoin-1hour')

## choosing coin: ada
#coin=coinlist[19]
coin = Coin()
coin.path = "./data/altcoin-1hour/ada.csv"
coin.name = "ada"
coin.ico = "2017-10-01"
#coin.ico="2016-02-17"

tweetio = TweetIO()
coin.read_from_storeage("prepare1")
print(coin.tweets.columns)
print(coin.retweets.columns)
df = tweetio.sort_and_clip(coin.tweets, coin.ico)
coin.tweets = df

## MULTIPLYING RETWEETS FOLLOWERS

print("multiplying nr. of retweet followers by sentiments.")
sentanalyzer = SentimentAnalyzer()
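
The multiplication announced above weights retweet follower counts by sentiment. A minimal sketch of that weighting, assuming hypothetical 'followers' and 'sentiment' columns (the project's real column names and the SentimentAnalyzer API are not visible in this excerpt):

# Hypothetical column names; the real schema is not shown in this excerpt.
coin.retweets['weighted_followers'] = (
    coin.retweets['followers'] * coin.retweets['sentiment'])
print(coin.retweets['weighted_followers'].head())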
Example No. 11
def generate_coins(specific_hour):
    coins = []

    #OMG
    coin = Coin()
    coin.name = 'omg'
    coin.treshold = 0.65
    one_day_before = specific_hour + timedelta(days=-1)
    coin.loadtime = one_day_before.strftime("%Y-%m-%d")
    coin.hashtags = ['omg', 'omisego']
    coins.append(coin)

    #NEO
    coin = Coin()
    coin.name = 'neo'
    coin.treshold = 0.51
    one_day_before = specific_hour + timedelta(days=-1)
    coin.loadtime = one_day_before.strftime("%Y-%m-%d")
    coin.hashtags = ['neo']
    #coins.append(coin)

    #XVG
    coin = Coin()
    coin.name = 'xvg'
    coin.treshold = 0.55
    one_day_before = specific_hour + timedelta(days=-1)
    coin.loadtime = one_day_before.strftime("%Y-%m-%d")
    coin.hashtags = ['xvg', 'verge']
    #coins.append(coin)

    return coins
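
generate_coins expects a datetime for specific_hour (one day is subtracted from it to build loadtime). A usage sketch:

from datetime import datetime

coins = generate_coins(datetime.now())
for coin in coins:
    print(coin.name, coin.treshold, coin.loadtime, coin.hashtags)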
Example No. 12
from sklearn.model_selection import train_test_split
import keras.backend as K
import math
import os
from datetime import datetime

print('Main starts searching for best model')

pd.set_option("display.max_rows", 100)
pd.set_option("display.max_columns", 100)

cinfo = CoinInfo()
#coinlist=cinfo.list_coins('./data/altcoin-1hour')

## choosing coin
coin = Coin()
#coin.path="./data/altcoin-1hour/neo.csv"
#coin.name="neo"
#coin.ico="2017-05-01"
coin.path = "./data/altcoin-1hour/omg.csv"
coin.name = "omg"
coin.ico = "2017-09-01"

starttime = datetime.now()
print(str(starttime))

coin.read_from_storeage("prepare2")

print(coin.pricehourly.head())
possibleraise = [1.5, 1.1]
possibledeclineratio = [-0.5, -0.667]
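
The two lists at the end suggest a parameter sweep. A sketch of how the candidate pairs might be enumerated (the actual search loop is not part of this excerpt):

import itertools

for aimraise, declineratio in itertools.product(possibleraise, possibledeclineratio):
    print("evaluating candidate:", aimraise, declineratio)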
Example No. 13
from keras.layers import LSTM, Conv1D, MaxPooling1D
from keras.layers import Dense, Dropout
from keras.layers import Embedding
from sklearn.model_selection import train_test_split
import keras.backend as K

print('Main starts plotting')

pd.set_option("display.max_rows", 100)
pd.set_option("display.max_columns", 100)

cinfo = CoinInfo()
#coinlist=cinfo.list_coins('./data/altcoin-1hour')

## choosing coin
coin = Coin()
coin.path = "./data/altcoin-1hour/neo.csv"
coin.name = "neo"
coin.ico = "2017-05-01"

tweetio = TweetIO()
coin.read_from_storeage("prepare2")

#print(coin.pricehourly.head())

cointrain = CoinTrain()
X = cointrain.create_buy_sig(coin, aimraise=10, declinelimit=-5, offset=12)
X_gtdf = cointrain.increase_by_one_hour(coin.gtdf)
X_grtdf = cointrain.increase_by_one_hour(coin.grtdf)

#Converting X to the same multi index type
Example No. 14
def generate_coins(specific_hour):
    coins = []

    #OMG
    coin = Coin()
    coin.name = 'omg'
    coin.treshold = 0.81
    one_day_before = specific_hour + timedelta(days=-1)
    coin.loadtime = one_day_before.strftime("%Y-%m-%d")
    coin.hashtags = ['omg', 'omisego']
    coin.target = 1.7
    coin.stoploss = coin.target / (-0.5)
    coin.timelimit = 3
    coin.modelfile='omg/found_omg_bsig34.0_prec0.7083333333333334_tre64_aimr1.7_decr-0' \
                   '.5_offs3_actmet_tanh_keras_model.h5'
    coin.scalerfile = 'omg/found_omg_bsig34.0_prec0.7083333333333334_tre64_aimr1.7_decr-0.5_offs3_actmet_tanh_scaler.pkl'
    coins.append(coin)

    #NEO
    coin = Coin()
    coin.name = 'neo'
    coin.treshold = 0.72
    one_day_before = specific_hour + timedelta(days=-1)
    coin.loadtime = one_day_before.strftime("%Y-%m-%d")
    coin.hashtags = ['neo']
    coin.target = 1.7
    coin.stoploss = coin.target / (-0.667)
    coin.timelimit = 3
    coin.modelfile = 'neo/found_neo_bsig18.0_prec0.782608695652174_tre72_aimr1.7_decr-0.667_offs4_actmet_relu_keras_model.h5'
    coin.scalerfile = 'neo/found_neo_bsig18.0_prec0.782608695652174_tre72_aimr1.7_decr-0.667_offs4_actmet_relu_scaler.pkl'
    coins.append(coin)

    #XVG
    coin = Coin()
    coin.name = 'xvg'
    coin.treshold = 0.99
    one_day_before = specific_hour + timedelta(days=-1)
    coin.loadtime = one_day_before.strftime("%Y-%m-%d")
    coin.hashtags = ['xvg', 'verge']
    #coins.append(coin)

    #LSK
    coin = Coin()
    coin.name = 'lsk'
    coin.treshold = 0.61
    one_day_before = specific_hour + timedelta(days=-1)
    coin.loadtime = one_day_before.strftime("%Y-%m-%d")
    coin.hashtags = ['lsk', 'lisk']
    #coins.append(coin)

    #ADA
    coin = Coin()
    coin.name = 'ada'
    coin.treshold = 0.56
    one_day_before = specific_hour + timedelta(days=-1)
    coin.loadtime = one_day_before.strftime("%Y-%m-%d")
    coin.hashtags = ['ada', 'cardano']
    #coins.append(coin)

    #GNT
    coin = Coin()
    coin.name = 'gnt'
    coin.treshold = 0.55  #buysig 3, at prec 1.0!
    one_day_before = specific_hour + timedelta(days=-1)
    coin.loadtime = one_day_before.strftime("%Y-%m-%d")
    coin.hashtags = ['gnt', 'golem']
    #coins.append(coin)

    return coins
Example No. 15
from coins.coininfo import CoinInfo
from coins.coin import Coin
from twitter.statistics import Statistics
from twitter.tweetio import TweetIO
from twitter.sentiment import SentimentAnalyzer
from twitter.tweepy import TwitterApi
from twitter.tweetcollector import TweetCollector

print('Main starts')
cinfo = CoinInfo()
coinlist = cinfo.list_coins('./data/altcoin-1hour')

## choosing coin: ada
coin = Coin()
coin.name = "ada"
coin.ico = "2017-10-01"
#coin.ico="2016-02-17"

tweetio = TweetIO()
print("read already scraped  retweets:")
df = tweetio.read_all_scraped_retweet(coin)
setattr(coin, 'retweets', df)
print("coin.retweets.head()")
print(coin.retweets.tail())
print("retweets done...")

tapi = TwitterApi()
tweetcollector = TweetCollector(tapi)
print("read already scraped tweets:")
df = tweetio.read_all_scraped_tweet(coin)
print("before filter: ", len(df.index))