def test_date_time_binance_diff(self):
    """Print the hour difference BinanceApi computes between two datetimes."""
    api = BinanceApi()
    later = datetime.strptime("2018-06-01 09:48:02", "%Y-%m-%d %H:%M:%S")
    earlier = datetime.strptime("2018-05-01", "%Y-%m-%d")
    hours_apart = api.get_diff_in_hours(later, earlier)
    print('diff: ', hours_apart)
def test_dateutil1(self):
    """Print the Binance server time, then the integer form of now minus 2h."""
    util = DateUtil()
    api = BinanceApi()
    print(api.get_server_time())
    # Two hours back from the local clock, converted by the project helper.
    two_hours_back = datetime.now() + timedelta(hours=-2)
    encoded = util.binance_datetime_int(two_hours_back)
    print("binancedt_int ", encoded)
def test_date_time_binance3(self):
    """Collect hourly price data for OMG with a loadtime of one day ago."""
    api = BinanceApi()
    coin = Coin()
    coin.name = 'OMG'
    # loadtime is a date string for yesterday.
    yesterday = datetime.now() + timedelta(days=-1)
    coin.loadtime = yesterday.strftime("%Y-%m-%d")
    last_closed_hour = datetime.now() + timedelta(hours=-1)
    print("spec_closed_hour: ")
    hourly_prices = api.collect_coindata(coin, last_closed_hour)
    print(hourly_prices)
def test_date_time_binance2(self):
    """Fetch the last closed hour candle for OMG and print its timestamps."""
    api = BinanceApi()
    coin = Coin()
    coin.name = 'OMG'
    last_closed_hour = datetime.now() + timedelta(hours=-1)
    print("spec_closed_hour: ")
    candles = api.get_last_n_hour_by_specific_hour_by_coin(coin, last_closed_hour, 1)
    # Indices 0 and 6 of a candle row hold unix timestamps (open/close time).
    self.print_unix_as_date(candles[0][0])
    self.print_unix_as_date(candles[0][6])
    print(candles)
def do_prepare(coin, specific_hour):
    """Build the full per-coin feature set for one specific hour.

    Pipeline: load tweets/retweets/users from the DB, filter and sort tweets,
    merge user follower data onto tweets, run sentiment analysis, weight
    sentiments by follower counts, group by hour, attach hourly prices from
    Binance, then persist the coin to storage and return it.

    NOTE(review): relies on module-level names (pd, TweetIO, TweetCollector,
    SentimentAnalyzer, BinanceApi, PHASE) — confirm they are imported/defined
    at file level. Mutates `coin` in place (tweets, retweets, pricehourly).
    """
    # Widen pandas display so debug prints of the frames are readable.
    pd.set_option('display.max_rows', 99)
    pd.set_option('precision', 10)
    pd.set_option('display.width', 1000)
    #Getting Tweets from DB
    tweetio = TweetIO()
    df = tweetio.read_db_tweet_last_n_hour_by_specific_hour_by_coin(
        coin, specific_hour)
    coin.tweets = df
    print("tweets from DB: ")
    print(len(df))
    #Filter and sort tweets
    #tapi = TwitterApi() we dont need twitter connection:
    # Collector is constructed without a Twitter API handle: only its local
    # filtering logic is used here, no network access.
    tweetcollector = TweetCollector(None)
    df = tweetcollector.filter_tweets(coin.tweets)
    df = tweetio.sort_and_clip(df, coin.loadtime)
    coin.tweets = df
    print("tweets>: ")
    print(len(df))
    #Collect retweets, users
    rdf = tweetio.read_db_retweet_last_n_hour_by_specific_hour_by_coin(
        coin, specific_hour)
    setattr(coin, 'retweets', rdf)
    print("retweets>: ")
    #print(rdf)
    udf = tweetio.read_db_referenced_users(coin)
    print("Users>: ")
    print(len(udf))
    #tweetcollector.collect_all_users(coin, tapi, tmpdir=tmpd)
    ##PREPARE 1
    #df = tweetio.read_all_scraped_retweet(coin, tmpd)
    ## MERGING TWEET FOLLOWERS
    #tweetio.read_users_for_tweets(coin, tmpd)
    print("nr. of tweets before merge:")
    print(len(coin.tweets))
    # Inner join: tweets without a matching user row are dropped here,
    # hence the before/after length prints.
    coin.tweets = coin.tweets.merge(udf, left_on='user_history_row_id',
                                    right_on='user_row_id', how='inner')
    print("nr. of tweets after merge:")
    print(len(coin.tweets))
    sid = SentimentAnalyzer()
    #only with multicore CPU:
    #dfsents = sid.paralellanalyse(coin.tweets)
    #this with singlecore CPU:
    dfsents = sid.analysedf(coin.tweets)
    # print(dfsents.head())
    print("coin.tweets ready.")
    setattr(coin, 'tweets', dfsents)
    # PREPARE 2
    #df = tweetio.sort_and_clip(coin.tweets, coin.ico)
    #coin.tweets = df
    ## MULTIPLYING RETWEETS FOLLOWERS
    print("multiplying nr. of retweet followers by sentiments.")
    sentanalyzer = SentimentAnalyzer()
    # Order matters: retweets must be merged with tweets before the
    # follower-weighted sentiment multiplications below.
    sentanalyzer.merge_tweets_with_retweets(coin)
    sentanalyzer.sent_mul_tweet_followers(coin)
    sentanalyzer.sent_mul_retweet_followers(coin)
    print(len(coin.retweets))
    #print(coin.retweets.head())
    ## GROUPING RETWEETS BY HOUR
    print("grouping retweets by hour basis")
    sentanalyzer.group_retweet_by_hour(coin)
    #print(coin.grtdf.head())
    print("grouping tweets by hour basis")
    sentanalyzer.group_tweet_by_hour(coin)
    #print(coin.gtdf.head())
    print("RETWEET S")
    #print(coin.retweets)
    print(len(coin.retweets))
    print("TWEETS")
    #print(coin.tweets)
    print(len(coin.tweets))
    print("USERS")
    #print(udf)
    print(len(udf))
    ## Setting in prices:
    bapi = BinanceApi()
    coin_price = bapi.collect_coindata(coin, specific_hour)
    setattr(coin, 'pricehourly', coin_price)
    # NOTE(review): "storeage" is the project's own (misspelled) method name.
    coin.save_to_storeage(PHASE, tmpdir='runtime/')
    return coin
def test_date_time_binance1(self):
    """Print the hour distance between local clock and Binance server."""
    api = BinanceApi()
    distance = api.get_nr_of_hour_distance_from_server()
    print(distance)