コード例 #1
0
import traceback
import time
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

from CommonServices.EmailService import EmailSender
from CommonServices.RetryDecor import retry
from CommonServices.LogCreater import CreateLogger
from CommonServices.WebdriverServices import masterclass

logger = CreateLogger().createLogFile(dirName='HoldingsScraperLogs/',
                                      logFileName="-HoldingsDataLogs.log",
                                      loggerName='TickerListLogger')


class Download523TickersList():
    def __init__(self):
        # Selenium WebDriver handle; populated later by webdriver_login_etfdb().
        self.driver = None

    @retry(Exception, total_tries=2, initial_wait=2, backoff_factor=2)
    def webdriver_login_etfdb(self):
        """Start a WebDriver session, log in to etfdb, and keep the driver.

        The @retry decorator re-runs the whole sequence once on any
        exception (2s initial wait, doubling backoff).
        """
        services = masterclass()
        services.initialisewebdriver()
        services.logintoetfdb()
        # Hold on to the live driver so later page loads reuse the session.
        self.driver = services.driver

    @retry(Exception, total_tries=2, initial_wait=2, backoff_factor=2)
    def open_webpage_for_list(self, url):
        try:
            self.driver.get(url)
コード例 #2
0
import datetime
import sys, traceback
sys.path.append("..")  # Remove in production - KTZ
import ujson
import json
import pandas as pd
import websocket
import logging
import os
from CommonServices.EmailService import EmailSender
from CommonServices.LogCreater import CreateLogger
from CommonServices import ImportExtensions

logObj = CreateLogger()
logger = logObj.createLogFile(dirName="ETFLiveAnalysis/",logFileName="-TradesLiveLog.log",loggerName="TradesLiveLog")


try:
    import thread
except ImportError:
    import _thread as thread
import time
from MongoDB.PerMinDataOperations import PerMinDataOperations
import asyncio


def on_message(ws, message):
    start = time.time()
    responses = ujson.loads(message)
    #dataQ = [response for response in responses if response['ev'] == 'Q']
    dataAM = [response for response in responses if response['ev'] == 'AM']
コード例 #3
0
import os  # was missing: os.getcwd()/os.path are used below in this module
import pathlib
import sys  # Remove in production - KTZ
import traceback

sys.path.append("..")  # Remove in production - KTZ

import pandas as pd
from datetime import datetime
from datetime import timedelta

from CommonServices.Holidays import HolidayCheck
from CommonServices.EmailService import EmailSender
from CommonServices.MakeCSV import CSV_Maker
from CalculateETFArbitrage.CalculateHistoricalArbitrage import ArbitrageCalculation
from MongoDB.SaveArbitrageCalcs import SaveCalculatedArbitrage
from MongoDB.FetchArbitrage import FetchArbitrage
from CommonServices.LogCreater import CreateLogger

# Separate event and error logs; filemode='a' appends across runs.
logger = CreateLogger().createLogFile(dirName="HistoricalArbitrage/", logFileName="-ArbEventLog.log", loggerName="ArbEventLogger",
                                      filemode='a')
logger2 = CreateLogger().createLogFile(dirName="HistoricalArbitrage/", logFileName="-ArbErrorLog.log", loggerName="ArbErrorLogger",
                                       filemode='a')


class HistoricalArbitrageRunnerClass():
    def __init__(self):
        self.etflist = []
        self.etfwhichfailed = []
        self.date = (datetime.now() - timedelta(days=1)).strftime("%Y-%m-%d")
        self.rootpath = pathlib.Path(os.getcwd())
        while str(self.rootpath).split('/')[-1] != 'ETFAnalyzer':
            self.rootpath = self.rootpath.parent

        self.csv_file_path = os.path.abspath(os.path.join(self.rootpath, 'CSVFiles/250M_WorkingETFs.csv'))
コード例 #4
0
import sys, traceback
sys.path.append('..')
from datetime import datetime, timedelta
import CommonServices.ImportExtensions
from CommonServices.EmailService import EmailSender
import getpass
from MongoDB.MongoDBConnections import MongoDBConnectors
from CommonServices.LogCreater import CreateLogger
logObj = CreateLogger()
logger = logObj.createLogFile(dirName='ETFLiveAnalysis/',
                              logFileName='-DeleteScriptLog.log',
                              loggerName='DeleteScriptLogger')
# Select the Mongo connection by OS user: 'ubuntu' marks the production host,
# anything else falls back to the local dev instance.
sys_username = getpass.getuser()
if sys_username == 'ubuntu':
    readWriteConnection = MongoDBConnectors(
    ).get_pymongo_readWrite_production_production()
else:
    readWriteConnection = MongoDBConnectors().get_pymongo_devlocal_devlocal()
db = readWriteConnection.ETF_db


def delete_old_live_data_from_collection(collectionName):
    try:
        # LAST TIMESTAMP PRESENT IN THE COLLECTION
        if collectionName == db.TradePerMinWS:
            last = collectionName.find({}, {
                'e': 1,
                '_id': 0
            }).sort([('e', -1)]).limit(1)
            last_ts = list(last)[0]['e']
        elif collectionName == db.ArbitragePerMin:
コード例 #5
0
import traceback
import datetime
import time
import pandas as pd

# Custom Imports
from CommonServices.EmailService import EmailSender
from CommonServices.LogCreater import CreateLogger
from CommonServices.MultiProcessingTasks import CPUBonundThreading
from CommonServices import ImportExtensions
from MongoDB.PerMinDataOperations import PerMinDataOperations
from MongoDB.Schemas import trade_per_min_WS
from ETFLiveAnalysisProdWS.Helper.CalculationHelper import LiveHelper, tradestruct
from functools import partial

logObj = CreateLogger()
logger = logObj.createLogFile(dirName="ETFLiveAnalysis/",
                              logFileName="-CalculateArbPerMinLog.log",
                              loggerName="CalculateArbPerMinLog")


class ArbPerMin():
    def __init__(self, etflist, etfdict, tickerlist):
        """Hold per-day ETF metadata and per-minute trade state.

        etflist / etfdict: refreshed once per day (see inline notes).
        tickerlist: constituent tickers whose trades are tracked.
        """
        self.etflist = etflist  # Only used once per day
        self.etfdict = etfdict  # Only used once per day
        self.tickerlist = tickerlist
        self.trade_dict = {
        }  # Maintains only 1 copy throughout the day and stores {Ticker : trade objects}
        # Previous/current-minute trade snapshots; None until data arrives.
        self.TradesDataDfPreviousMin = None
        self.TradesDataDfCurrentMin = None
        self.helperobj = LiveHelper()
コード例 #6
0
import os
import pathlib
import getpass
import traceback
import pandas as pd
from datetime import datetime
from mongoengine.errors import NotUniqueError

from CommonServices.LogCreater import CreateLogger
from CommonServices.EmailService import EmailSender
from MongoDB.Schemas import etfholdings_collection

logger = CreateLogger().createLogFile(dirName='HoldingsScraperLogs/', logFileName="-HoldingsDataLogs.log",
                                      loggerName='DataCleanFeed')


class PullandCleanData:

    def __init__(self):
        """Locate the 'ETFAnalyzer' project root and prepare paths.

        Walks up from the current working directory until a directory
        named 'ETFAnalyzer' is found. Raises FileNotFoundError instead
        of looping forever when it is absent (the original loop never
        terminated in that case: at the filesystem root, parent == self).
        """
        self.rootpath = pathlib.Path(os.getcwd())
        while str(self.rootpath).split('/')[-1] != 'ETFAnalyzer':
            if self.rootpath.parent == self.rootpath:
                # Reached the filesystem root without finding the project dir.
                raise FileNotFoundError(
                    "'ETFAnalyzer' directory not found above " + os.getcwd())
            self.rootpath = self.rootpath.parent
        # Daily dump directory, e.g. <root>/ETFDailyData/20240101
        self.savingpath = os.path.abspath(os.path.join(self.rootpath, 'ETFDailyData' + '/' + datetime.now().strftime("%Y%m%d")))
        self.system_username = getpass.getuser()
        # Mongo collection handle for ETF holdings (project schema object).
        self.coll = etfholdings_collection

    def readfilesandclean(self, etfname, etfdescdf):
        try:
            # Trying Dict Comprehension for file checking and loading to check etf file in directory
            x = {f.split('-')[0]: f for f in os.listdir(self.savingpath)}
            if etfname in x.keys():
コード例 #7
0
import sys, traceback

sys.path.append('../..')
from datetime import datetime, timedelta
import CommonServices.ImportExtensions
from CommonServices.EmailService import EmailSender
import getpass
from MongoDB.MongoDBConnections import MongoDBConnectors
from CommonServices.LogCreater import CreateLogger

logObj = CreateLogger()
logger = logObj.createLogFile(dirName='HistoricalArbitrage/',
                              logFileName='-HistoricDeleteScriptLog.log',
                              loggerName='HistoricDeleteScriptLogger')
# 'ubuntu' OS user implies the production box; otherwise use local dev Mongo.
sys_username = getpass.getuser()
if sys_username == 'ubuntu':
    readWriteConnection = MongoDBConnectors(
    ).get_pymongo_readWrite_production_production()
else:
    readWriteConnection = MongoDBConnectors().get_pymongo_devlocal_devlocal()
db = readWriteConnection.ETF_db


def delete_old_live_data_from_collection(collectionName):
    try:
        # LAST TIMESTAMP PRESENT IN THE COLLECTION
        last = collectionName.find({}, {
            'dateForData': 1,
            '_id': 0
        }).sort([('dateForData', -1)]).limit(1)
        last_date = list(last)[0]['dateForData']
コード例 #8
0
import sys, traceback
sys.path.append("..")
import datetime
import pandas as pd
from CommonServices.MultiProcessingTasks import CPUBonundThreading
from CommonServices.ThreadingRequests import IOBoundThreading
from CommonServices.LogCreater import CreateLogger
logger = CreateLogger().createLogFile(dirName='ETFLiveAnalysis/',
                                      logFileName='-QuotesLiveFetchLog.log',
                                      loggerName='QuotesLiveFetch')

from PolygonTickData.PolygonCreateURLS import PolgonDataCreateURLS
from MongoDB.PerMinDataOperations import PerMinDataOperations


class QuotesLiveFetcher():
    def __init__(self):
        """Prepare the ETF watch list and the matching last-quote URLs."""
        # Hand-picked high-volume ETFs; the full 250M list is kept for reference:
        # self.etflist = pd.read_csv('../CSVFiles/250M_WorkingETFs.csv').columns.to_list()
        self.etflist = [
            'SPY', 'VOO', 'QQQ', 'IVV', 'IJR', 'VO', 'VGT', 'XLK', 'XLF',
            'SCHX'
        ]
        # One Polygon last-quote URL per ticker, in the same order as etflist.
        self.getUrls = []
        for ticker in self.etflist:
            self.getUrls.append(PolgonDataCreateURLS().PolygonLastQuotes(ticker))

    def getDataFromPolygon(self, methodToBeCalled=None, getUrls=None):
        # Calling IO Bound Threading to fetch data for URLS
        if methodToBeCalled == None or getUrls == None:
            logger.debug('Either methodToBeCalled or getUrls not supplied')
コード例 #9
0
import getpass  # was missing: getpass.getuser() is used below in this module

from selenium.common.exceptions import NoSuchElementException
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
from urllib3.exceptions import NewConnectionError, MaxRetryError

from CommonServices.LogCreater import CreateLogger
from CommonServices.EmailService import EmailSender
from CommonServices.RetryDecor import retry
from CommonServices.WebdriverServices import masterclass
from MongoDB.MongoDBConnections import MongoDBConnectors

logger = CreateLogger().createLogFile(dirName='HoldingsScraperLogs/',
                                      logFileName="-HoldingsDataLogs.log",
                                      loggerName='DownloadHoldingsLogger')


class PullHoldingsListClass(object):
    def __init__(self):
        """Choose a MongoDB connection based on the current OS user."""
        self.system_username = getpass.getuser()
        # 'ubuntu' identifies the production host.
        if self.system_username == 'ubuntu':
            ''' Production to Production readWrite '''
            self.conn = MongoDBConnectors(
            ).get_pymongo_readWrite_production_production()
        else:
            ''' Dev Local to Production Read Only '''
            # self.conn = MongoDBConnectors().get_pymongo_readonly_devlocal_production()
            ''' Dev Local to Dev Local readWrite '''
            self.conn = MongoDBConnectors().get_pymongo_devlocal_devlocal()
コード例 #10
0
import socket
import sys

sys.path.append("../")

import time
import traceback
from functools import partial
from itertools import chain
from CommonServices.MultiProcessingTasks import multi_processing_method, CPUBonundThreading
from PerSecLive.Helpers import *
from MongoDB.MongoDBConnections import MongoDBConnectors
from pymongo import ASCENDING, DESCENDING
from CommonServices.LogCreater import CreateLogger

logObj = CreateLogger()
logger = logObj.createLogFile(dirName="PerSecLive/",
                              logFileName="-PerSecLiveCalcLog.log",
                              loggerName="PerSecLiveCalcLog")

# Pick the Mongo connection by host: the hard-coded private IP identifies the
# production box; anything else falls back to the local dev instance.
# NOTE(review): the IP is environment-specific - confirm it is still current.
sys_private_ip = socket.gethostbyname(socket.gethostname())
if sys_private_ip == '172.31.76.32':
    connection = MongoDBConnectors(
    ).get_pymongo_readWrite_production_production()
else:
    connection = MongoDBConnectors().get_pymongo_devlocal_devlocal()
collection = connection.ETF_db.PerSecLiveArbitrage
# Compound index so per-ETF, latest-End_Time-first queries stay fast.
collection.create_index([("ETFName", ASCENDING), ("End_Time", DESCENDING)])


def main_runner(etf_list, _date, ticker_list=None):
コード例 #11
0
import datetime
import json
import pandas as pd
import schedule
import time

# Custom Imports
# from CommonServices.EmailService import EmailSender
from ETFLiveAnalysisProdWS.TickListsGenerator import ListsCreator
from ETFLiveAnalysisProdWS.CalculatePerMinArb import ArbPerMin
from CommonServices.LogCreater import CreateLogger
from CommonServices import ImportExtensions
from MongoDB.PerMinDataOperations import PerMinDataOperations
from MongoDB.SaveArbitrageCalcs import SaveCalculatedArbitrage

logObj = CreateLogger()
logger = logObj.createLogFile(dirName="ETFLiveAnalysis/",
                              logFileName="-PerMinCaller.log",
                              loggerName="PerMinCallerLogs")


class PerMinAnalysis():
    def __init__(self):
        # Per-minute Mongo data access helper (project service).
        self.perMinDataObj = PerMinDataOperations()
        # Spread data holder; starts as an empty DataFrame.
        self.spreadDF = pd.DataFrame()

    def get_ts_for_fetching_data(self):
        #######################################################
        # UTC Timestamps for pulling data from QuotesLiveData DB, below:
        #######################################################
        end_dt = datetime.datetime.now().replace(second=0, microsecond=0)
コード例 #12
0
import logging
from flask import has_request_context, request
from logging.handlers import RotatingFileHandler
from CommonServices.LogCreater import CreateLogger


class RequestFormatter(logging.Formatter):
    """logging.Formatter that injects Flask request context into records.

    Sets ``url`` and ``remote_addr`` attributes on every record so the
    format string can reference them; both are None outside a request.
    """

    def format(self, record):
        inside_request = has_request_context()
        # Populate the extra fields the format string expects.
        record.url = request.url if inside_request else None
        record.remote_addr = request.remote_addr if inside_request else None
        return super().format(record)


def return_formatter():
    """Build the request-aware formatter used by the server logger."""
    fmt = ('[%(asctime)s] %(levelname)s %(remote_addr)s requested '
           '%(url)s in %(module)s: %(message)s')
    return RequestFormatter(fmt)


# Module-level server logger wired with the request-aware formatter;
# filemode='a' appends across restarts.
custom_server_logger = CreateLogger().createLogFile(
    dirName="Server/",
    logFileName="-ServerLog.log",
    loggerName="ServerLogger",
    filemode='a',
    user_format=return_formatter())
コード例 #13
0
import sys
import json
import datetime
import traceback

sys.path.append("..")
from CommonServices.MakeCSV import CSV_Maker
import pandas as pd
from CalculateETFArbitrage.Helpers.LoadEtfHoldings import LoadHoldingsdata
from CommonServices.MultiProcessingTasks import CPUBonundThreading
from MongoDB.Schemas import etfholdings_collection
from CommonServices.LogCreater import CreateLogger

logObj = CreateLogger()
logger = logObj.createLogFile(dirName="ETFLiveAnalysis/",
                              logFileName="-TickListGenerator.log",
                              loggerName="TickListGenerator")


class ListsCreator():
    def convertDataToDict(self, df, etfname):
        df.drop(columns=['TickerName'], inplace=True)
        df.rename(columns={
            'TickerSymbol': 'symbol',
            'TickerWeight': 'weight'
        },
                  inplace=True)
        res = {}
        res['HoldingsList'] = df['symbol'].to_list()
        res['ETFHoldingsData'] = {etfname: [df.to_dict()]}
        return res
コード例 #14
0
import sys  # was missing: sys.path is touched on the next line

sys.path.append("..")  # Remove in production - KTZ
import json
import asyncio
from aiohttp import ClientSession
import requests
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.retry import Retry

from MongoDB.SaveFetchQuotesData import MongoTradesQuotesData
from PolygonTickData.Helper import Helper

from CommonServices.LogCreater import CreateLogger

logger = CreateLogger().createLogFile(dirName='HistoricalArbitrage/',
                                      logFileName='-ArbEventLog.log',
                                      loggerName='TradesQuotesEventLogger')


class FetchPolygonData(object):
    """Fetch and Store Methods for Trades/Quotes Data from Polygon.io"""
    def __init__(self,
                 date=None,
                 end_time='21:00:00',
                 end_time_loop='20:00:00',
                 polygon_method=None,
                 symbol_status=None,
                 collection_name=None,
                 insert_into_collection=None):
        self.helperObj = Helper()
        self.date = date
コード例 #15
0
import os
import pathlib  # was missing: pathlib.Path is used below in holdingsProcess
from datetime import datetime
from time import perf_counter

import pandas as pd

# Use absolute import paths
from CommonServices.LogCreater import CreateLogger
from HoldingsDataScripts.Download523TickersList import Download523TickersList
from HoldingsDataScripts.DownloadHoldings import DownloadsEtfHoldingsData
from HoldingsDataScripts.DataCleanFeed import PullandCleanData
from CommonServices.EmailService import EmailSender
from CommonServices.DirectoryRemover import Directory_Remover
from CommonServices.MultiProcessingTasks import multi_processing_method

# Wall-clock start for the whole holdings run.
t1_start = perf_counter()
logger = CreateLogger().createLogFile(dirName='HoldingsScraperLogs/',
                                      logFileName="-HoldingsDataLogs.log",
                                      loggerName='HoldingsLogger')


class holdingsProcess():
    def __init__(self):
        self.rootpath = pathlib.Path(os.getcwd())
        while str(self.rootpath).split('/')[-1] != 'ETFAnalyzer':
            self.rootpath = self.rootpath.parent

        self.csv_file_path = os.path.abspath(
            os.path.join(self.rootpath, 'CSVFiles/250M_WorkingETFs.csv'))
        self.ticker_description_path = os.path.abspath(
            os.path.join(
                self.rootpath, 'ETFDailyData/ETFTickersDescription/' +
                datetime.now().strftime("%Y%m%d") +
コード例 #16
0
import sys, traceback
sys.path.append('../..')
from CommonServices.ImportExtensions import *
import pandas as pd
import getpass, datetime
from pymongo import *
from MongoDB.MongoDBConnections import MongoDBConnectors
from FlaskAPI.Components.ETFArbitrage.ETFArbitrageMain import AnalyzeArbitrageDataForETF
from CommonServices.LogCreater import CreateLogger

logger = CreateLogger().createLogFile(dirName='HistoricalArbitrage/',
                                      logFileName='-PNLLog.log',
                                      loggerName='PNLLogger')


class CalculateAndSavePnLData():
    def __init__(self):
        """Open a MongoDB connection and bind the arbitrage collection."""
        self.sysUserName = getpass.getuser()
        if self.sysUserName == 'ubuntu':
            self.connforthis = MongoDBConnectors(
            ).get_pymongo_readWrite_production_production()
        else:
            self.connforthis = MongoDBConnectors(
            ).get_pymongo_readonly_devlocal_production()
        # NOTE(review): this unconditional reassignment clobbers BOTH branches
        # above, so the dev-local connection is always used. Looks like leftover
        # debug code - confirm intent before removing (removal would switch
        # production back to the readWrite production connection).
        self.connforthis = MongoDBConnectors().get_pymongo_devlocal_devlocal()

        self.arbitragecollection = self.connforthis.ETF_db.ArbitrageCollectionNew

    ########################################################################################
    # Use to populate DB for the first time, for all dates which are not present
    ########################################################################################