Example #1
0
import datetime
import sys, traceback
sys.path.append("..")  # Remove in production - KTZ
import ujson
import json
import pandas as pd
import websocket
import logging
import os
from CommonServices.EmailService import EmailSender
from CommonServices.LogCreater import CreateLogger
from CommonServices import ImportExtensions

logObj = CreateLogger()
logger = logObj.createLogFile(dirName="ETFLiveAnalysis/",
                              logFileName="-TradesLiveLog.log",
                              loggerName="TradesLiveLog")


try:
    import thread
except ImportError:
    import _thread as thread
import time
from MongoDB.PerMinDataOperations import PerMinDataOperations
import asyncio


def on_message(ws, message):
    # Called by websocket-client for every incoming batch of events.
    start = time.time()
    responses = ujson.loads(message)
    # dataQ = [response for response in responses if response['ev'] == 'Q']
    # Keep only the minute-aggregate ('AM') events; quotes ('Q') are not used here.
    dataAM = [response for response in responses if response['ev'] == 'AM']
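
# Illustrative sketch (not from the source): how a batch of Polygon-style events,
# like the one handled by on_message above, can be decoded and the minute-aggregate
# ('AM') records flattened into a DataFrame. The sample payload and its field names
# (sym/o/c/v/s/e) are assumptions used only for this sketch.
import ujson
import pandas as pd

sample_message = ujson.dumps([
    {"ev": "AM", "sym": "SPY", "o": 450.10, "c": 450.32, "v": 12000,
     "s": 1609459200000, "e": 1609459260000},
    {"ev": "Q", "sym": "SPY", "bp": 450.20, "ap": 450.25},
])

events = ujson.loads(sample_message)
dataAM = [event for event in events if event["ev"] == "AM"]  # minute aggregates only
dfAM = pd.DataFrame(dataAM)  # one row per ticker per minute
print(dfAM[["sym", "o", "c", "v", "e"]])
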
Example #2
0
import sys, traceback
sys.path.append('..')
from datetime import datetime, timedelta
import CommonServices.ImportExtensions
from CommonServices.EmailService import EmailSender
import getpass
from MongoDB.MongoDBConnections import MongoDBConnectors
from CommonServices.LogCreater import CreateLogger
logObj = CreateLogger()
logger = logObj.createLogFile(dirName='ETFLiveAnalysis/',
                              logFileName='-DeleteScriptLog.log',
                              loggerName='DeleteScriptLogger')
sys_username = getpass.getuser()
if sys_username == 'ubuntu':
    readWriteConnection = MongoDBConnectors(
    ).get_pymongo_readWrite_production_production()
else:
    readWriteConnection = MongoDBConnectors().get_pymongo_devlocal_devlocal()
db = readWriteConnection.ETF_db


def delete_old_live_data_from_collection(collectionName):
    try:
        # LAST TIMESTAMP PRESENT IN THE COLLECTION
        if collectionName == db.TradePerMinWS:
            last = collectionName.find({}, {
                'e': 1,
                '_id': 0
            }).sort([('e', -1)]).limit(1)
            last_ts = list(last)[0]['e']
        elif collectionName == db.ArbitragePerMin:
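
# Illustrative sketch (not from the source): the "find the newest timestamp, then
# purge everything older" pattern that delete_old_live_data_from_collection follows.
# The one-day retention window and the delete_many call are assumptions; they are
# not taken from the truncated function above.
def purge_older_than_one_day(collection, ts_field='e'):
    newest = list(collection.find({}, {ts_field: 1, '_id': 0})
                  .sort([(ts_field, -1)]).limit(1))
    if not newest:
        return 0  # nothing to do for an empty collection
    cutoff = newest[0][ts_field] - 24 * 60 * 60 * 1000  # epoch ms, one day back
    return collection.delete_many({ts_field: {'$lt': cutoff}}).deleted_count
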
Example #3
0
import time
import pandas as pd

# Custom Imports
from CommonServices.EmailService import EmailSender
from CommonServices.LogCreater import CreateLogger
from CommonServices.MultiProcessingTasks import CPUBonundThreading
from CommonServices import ImportExtensions
from MongoDB.PerMinDataOperations import PerMinDataOperations
from MongoDB.Schemas import trade_per_min_WS
from ETFLiveAnalysisProdWS.Helper.CalculationHelper import LiveHelper, tradestruct
from functools import partial

logObj = CreateLogger()
logger = logObj.createLogFile(dirName="ETFLiveAnalysis/",
                              logFileName="-CalculateArbPerMinLog.log",
                              loggerName="CalculateArbPerMinLog")


class ArbPerMin():
    def __init__(self, etflist, etfdict, tickerlist):
        self.etflist = etflist  # Only used once per day
        self.etfdict = etfdict  # Only used once per day
        self.tickerlist = tickerlist
        self.trade_dict = {}  # One copy kept for the whole day, mapping {ticker: trade object}
        self.TradesDataDfPreviousMin = None
        self.TradesDataDfCurrentMin = None
        self.helperobj = LiveHelper()

    def calculation_for_each_etf(self, tradedf, etf):
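
# Illustrative sketch (not from the source): fanning calculation_for_each_etf out
# over many ETFs. The class above imports functools.partial and CPUBonundThreading
# for this; since that helper's signature is not shown, concurrent.futures is used
# here as a stand-in purely for illustration.
from concurrent.futures import ThreadPoolExecutor
from functools import partial

def run_for_all_etfs(arbobj, tradedf, etflist, max_workers=8):
    # Freeze the shared trades DataFrame so each worker call only needs the ETF name.
    work = partial(arbobj.calculation_for_each_etf, tradedf)
    with ThreadPoolExecutor(max_workers=max_workers) as pool:
        return list(pool.map(work, etflist))
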
Example #4
0
import sys, traceback

sys.path.append('../..')
from datetime import datetime, timedelta
import CommonServices.ImportExtensions
from CommonServices.EmailService import EmailSender
import getpass
from MongoDB.MongoDBConnections import MongoDBConnectors
from CommonServices.LogCreater import CreateLogger

logObj = CreateLogger()
logger = logObj.createLogFile(dirName='HistoricalArbitrage/',
                              logFileName='-HistoricDeleteScriptLog.log',
                              loggerName='HistoricDeleteScriptLogger')
sys_username = getpass.getuser()
if sys_username == 'ubuntu':
    readWriteConnection = MongoDBConnectors(
    ).get_pymongo_readWrite_production_production()
else:
    readWriteConnection = MongoDBConnectors().get_pymongo_devlocal_devlocal()
db = readWriteConnection.ETF_db


def delete_old_live_data_from_collection(collectionName):
    try:
        # LAST TIMESTAMP PRESENT IN THE COLLECTION
        last = collectionName.find({}, {
            'dateForData': 1,
            '_id': 0
        }).sort([('dateForData', -1)]).limit(1)
        last_date = list(last)[0]['dateForData']
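
# Illustrative sketch (not from the source): turning the newest 'dateForData' value
# found above into a retention cutoff. Whether dateForData is stored as a datetime
# or a 'YYYY-MM-DD' string is not shown, so both are handled; the 30-day window and
# the delete_many call in the trailing comment are assumptions.
from datetime import datetime, timedelta

def retention_cutoff(last_date, days_to_keep=30):
    if isinstance(last_date, str):
        last_date = datetime.strptime(last_date, '%Y-%m-%d')  # assumed string format
    return last_date - timedelta(days=days_to_keep)

# e.g. collectionName.delete_many({'dateForData': {'$lt': retention_cutoff(last_date)}})
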
Example #5
0
import datetime
import pandas as pd
import schedule
import time

# Custom Imports
# from CommonServices.EmailService import EmailSender
from ETFLiveAnalysisProdWS.TickListsGenerator import ListsCreator
from ETFLiveAnalysisProdWS.CalculatePerMinArb import ArbPerMin
from CommonServices.LogCreater import CreateLogger
from CommonServices import ImportExtensions
from MongoDB.PerMinDataOperations import PerMinDataOperations
from MongoDB.SaveArbitrageCalcs import SaveCalculatedArbitrage

logObj = CreateLogger()
logger = logObj.createLogFile(dirName="ETFLiveAnalysis/",
                              logFileName="-PerMinCaller.log",
                              loggerName="PerMinCallerLogs")


class PerMinAnalysis():
    def __init__(self):
        self.perMinDataObj = PerMinDataOperations()
        self.spreadDF = pd.DataFrame()

    def get_ts_for_fetching_data(self):
        #######################################################
        # UTC Timestamps for pulling data from QuotesLiveData DB, below:
        #######################################################
        end_dt = datetime.datetime.now().replace(second=0, microsecond=0)
        end_dt_ts = int(end_dt.timestamp() * 1000)
        start_dt = end_dt - datetime.timedelta(minutes=1)
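
# Illustrative sketch (not from the source): the one-minute window that
# get_ts_for_fetching_data computes, packaged as a helper, plus the kind of range
# query it could feed. The timestamp field name 't' and the target collection are
# assumptions.
import datetime

def one_minute_window(now=None):
    end_dt = (now or datetime.datetime.now()).replace(second=0, microsecond=0)
    start_dt = end_dt - datetime.timedelta(minutes=1)
    return int(start_dt.timestamp() * 1000), int(end_dt.timestamp() * 1000)

start_ts, end_ts = one_minute_window()
# e.g. db.TradePerMinWS.find({'t': {'$gte': start_ts, '$lt': end_ts}})
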
Example #6
0
import sys
import socket

sys.path.append("../")

import time
import traceback
from functools import partial
from itertools import chain
from CommonServices.MultiProcessingTasks import multi_processing_method, CPUBonundThreading
from PerSecLive.Helpers import *
from MongoDB.MongoDBConnections import MongoDBConnectors
from pymongo import ASCENDING, DESCENDING
from CommonServices.LogCreater import CreateLogger

logObj = CreateLogger()
logger = logObj.createLogFile(dirName="PerSecLive/",
                              logFileName="-PerSecLiveCalcLog.log",
                              loggerName="PerSecLiveCalcLog")

sys_private_ip = socket.gethostbyname(socket.gethostname())
if sys_private_ip == '172.31.76.32':
    connection = MongoDBConnectors(
    ).get_pymongo_readWrite_production_production()
else:
    connection = MongoDBConnectors().get_pymongo_devlocal_devlocal()
collection = connection.ETF_db.PerSecLiveArbitrage
collection.create_index([("ETFName", ASCENDING), ("End_Time", DESCENDING)])


def main_runner(etf_list, _date, ticker_list=None):
    ####################################################################################################################
    # CONSTANT TIME BLOCK
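
# Illustrative sketch (not from the source): reading back the newest per-second
# arbitrage records for one ETF, which is the query shape the ("ETFName", ASCENDING),
# ("End_Time", DESCENDING) index created above serves. The ETF name and limit are
# illustrative only.
from pymongo import DESCENDING

def latest_records_for_etf(arb_collection, etf_name, limit=60):
    cursor = (arb_collection
              .find({'ETFName': etf_name})
              .sort([('End_Time', DESCENDING)])
              .limit(limit))
    return list(cursor)

# e.g. latest_records_for_etf(collection, 'SPY')
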
Example #7
0
import sys
import json
import datetime
import traceback

sys.path.append("..")
from CommonServices.MakeCSV import CSV_Maker
import pandas as pd
from CalculateETFArbitrage.Helpers.LoadEtfHoldings import LoadHoldingsdata
from CommonServices.MultiProcessingTasks import CPUBonundThreading
from MongoDB.Schemas import etfholdings_collection
from CommonServices.LogCreater import CreateLogger

logObj = CreateLogger()
logger = logObj.createLogFile(dirName="ETFLiveAnalysis/",
                              logFileName="-TickListGenerator.log",
                              loggerName="TickListGenerator")


class ListsCreator():
    def convertDataToDict(self, df, etfname):
        df.drop(columns=['TickerName'], inplace=True)
        df.rename(columns={
            'TickerSymbol': 'symbol',
            'TickerWeight': 'weight'
        },
                  inplace=True)
        res = {}
        res['HoldingsList'] = df['symbol'].to_list()
        res['ETFHoldingsData'] = {etfname: [df.to_dict()]}
        return res
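
# Illustrative sketch (not from the source): exercising convertDataToDict with a tiny
# holdings frame. The column names match the ones renamed above; the ETF symbol and
# holdings are made up.
import pandas as pd

holdings = pd.DataFrame({
    'TickerName': ['Apple Inc', 'Microsoft Corp'],
    'TickerSymbol': ['AAPL', 'MSFT'],
    'TickerWeight': [6.5, 6.1],
})
result = ListsCreator().convertDataToDict(holdings, 'QQQ')
# result['HoldingsList']    -> ['AAPL', 'MSFT']
# result['ETFHoldingsData'] -> {'QQQ': [{'symbol': {0: 'AAPL', 1: 'MSFT'},
#                                        'weight': {0: 6.5, 1: 6.1}}]}
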
Example #8
0
import sys
import socket

sys.path.append('..')

from functools import partial
from CalculateETFArbitrage.Helpers.LoadEtfHoldings import LoadHoldingsdata
from PolygonTickData.PolygonCreateURLS import PolgonDataCreateURLS
from MongoDB.MongoDBConnections import MongoDBConnectors
from CalculateETFArbitrage.TradesQuotesRunner import TradesQuotesProcesses
from pymongo import ASCENDING, DESCENDING
from CommonServices.LogCreater import CreateLogger
from PerSecLive.Helpers import get_timestamp_ranges_1sec, get_local_time_for_date

logObj = CreateLogger()
logger = logObj.createLogFile(dirName="PerSecLive/",
                              logFileName="-PerSecLiveDataFetchLog.log",
                              loggerName="PerSecLiveDataFetchLog")


class FetchAndSaveHistoricalPerSecData():
    def __init__(self, etf_name=None, date_=None):
        sys_private_ip = socket.gethostbyname(socket.gethostname())
        if sys_private_ip == '172.31.76.32':
            self.connection = MongoDBConnectors(
            ).get_pymongo_readWrite_production_production()
        else:
            self.connection = MongoDBConnectors(
            ).get_pymongo_devlocal_devlocal()
        self.per_sec_live_trades = self.connection.ETF_db.PerSecLiveTrades
        self.per_sec_live_trades.create_index([("Symbol", ASCENDING),
                                               ("t", DESCENDING)])
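
# Illustrative sketch (not from the source): pulling one symbol's trades for a time
# window from PerSecLiveTrades, the query shape the ("Symbol", ASCENDING),
# ("t", DESCENDING) index created above supports. The millisecond bounds and the
# DataFrame conversion are illustrative only.
import pandas as pd

def trades_frame_for_symbol(trades_collection, symbol, start_ms, end_ms):
    docs = list(trades_collection
                .find({'Symbol': symbol, 't': {'$gte': start_ms, '$lt': end_ms}},
                      {'_id': 0})
                .sort([('t', 1)]))
    return pd.DataFrame(docs)  # empty DataFrame if nothing matched

# e.g. trades_frame_for_symbol(per_sec_live_trades, 'SPY', 1609459200000, 1609459260000)
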