def __init__(self, args_topic, args_kafkadetails, args_symbol, args_marketdate, arg_prevdate):
    """Initialise shared agent state, the algo exit deadline and the Kafka consumer.

    Args:
        args_topic: Kafka topic name; also used as the custom logger name.
        args_kafkadetails: broker/Kafka connection details, stored on the shared agent object.
        args_symbol: instrument symbol this agent works on.
        args_marketdate: trading date formatted as 'YYYYMMDD'.
        arg_prevdate: previous trading date formatted as 'YYYYMMDD'.
    """
    # Publish the run configuration on the shared agent object so the rest
    # of the process can read it.
    agentObj.log = log.setup_custom_logger(args_topic)
    agentObj.topic = args_topic
    agentObj.kafka = args_kafkadetails
    agentObj.market_date = args_marketdate
    agentObj.symbol = args_symbol
    agentObj.prev_market_date = arg_prevdate

    # The original if/else branches were identical except for the exit time
    # string, so they are collapsed into one. Live runs (backtest is False)
    # exit at 09:45; backtests run until 15:15.
    # NOTE(review): the attribute is named exit_algo_315pm even when the
    # cut-off is 09:45 — confirm the live exit time is intentional.
    exit_time = ' 09:45:00' if agentObj.backtest is False else ' 15:15:00'
    self.exit_algo = datetime.datetime.strptime(
        agentObj.market_date + exit_time, '%Y%m%d %H:%M:%S')
    self.exit_algo_315pm = self.exit_algo.timestamp()

    # Rolling store of raw ticks.
    self.tick_df = pd.DataFrame(None, columns=['Timestamp', 'Price'])

    # Consumer for JSON-encoded OHLC messages from the local broker.
    self.ohlc_consumer = KafkaConsumer(
        bootstrap_servers=['localhost:9092'],
        auto_offset_reset='earliest',
        enable_auto_commit=True,
        value_deserializer=lambda x: loads(x.decode('utf-8')))
import sys, os
sys.path.append(os.getcwd()[:os.getcwd().find("TickAlgoAgent") + len("TickAlgoAgent")])
from src.main.algo_agent_object import AlgoAgentObjects as abObj
from src.loghandler import log
import pandas as pd
import traceback
import math

# Init Logging Facilities
logger = log.setup_custom_logger('AlgoAgent')


def load_indicators():
    """Recompute each configured indicator once enough bars have accumulated.

    For every indicator section in the shared config (abObj.parser), the
    indicator function is called only when the fast-minute DataFrame holds at
    least that indicator's interval worth of rows; otherwise it is skipped.
    NOTE(review): moving_average / exponential_moving_average / macd /
    average_directional_movement_index are not defined in this chunk —
    presumably sibling functions in this module; verify.
    """
    try:
        # MA: needs 'interval' rows before it can be computed.
        if len(abObj.fast_min_pd_DF) >= int(abObj.parser.get('ma', 'interval')):
            moving_average(int(abObj.parser.get('ma', 'interval')))
        # EMA: same gating on its own configured interval.
        if len(abObj.fast_min_pd_DF) >= int(abObj.parser.get(
                'ema', 'interval')):
            exponential_moving_average(int(abObj.parser.get('ema', 'interval')))
        # MACD: gated on the slow interval (the longer of the two windows).
        if len(abObj.fast_min_pd_DF) >= int(
                abObj.parser.get('macd', 'slow_interval')):
            macd(int(abObj.parser.get('macd', 'fast_interval')),
                 int(abObj.parser.get('macd', 'slow_interval')))
        # ADX: takes both the DI interval and the ADX smoothing interval.
        if len(abObj.fast_min_pd_DF) >= int(abObj.parser.get(
                'adx', 'interval')):
            average_directional_movement_index(
                int(abObj.parser.get('adx', 'interval')),
                int(abObj.parser.get('adx', 'interval_ADX')))
from src.main.algo_bot_objects import AlgoBotObjects as abObj
from src.dataframe import slow_indicators as indi_obj
from src.loghandler import log
import traceback
import time
import pandas as pd

logger = log.setup_custom_logger('root')


class SlowDF(object):
    """Builds the slow-interval (multi-minute) OHLC DataFrame from raw ticks."""

    def __init__(self):
        pass

    @staticmethod
    def generate_slow_min_df(ticks):
        """Resample accumulated slow-interval ticks into OHLC bars.

        Args:
            ticks: incoming tick payload.
                NOTE(review): `ticks` is unused in the visible body — the
                data is read from the shared abObj.slow_min_ticks instead;
                confirm the parameter is needed.
        """
        def get_ohlc():
            try:
                # Epoch-second ticks -> tz-aware index in exchange-local time.
                data = pd.DataFrame(abObj.slow_min_ticks, columns=['time', 'price'])
                data['time'] = pd.to_datetime(data['time'], unit='s', utc=True)
                data = data.set_index('time')
                data = data.tz_convert(tz='Asia/Kolkata')
                ti = data.loc[:, ['price']]
                # Resample prices into abObj.slow_min-minute OHLC bars.
                slow_min_bars = ti.price.resample(str(abObj.slow_min) + 'min').ohlc()
                for index, row in slow_min_bars.iterrows():
                    # print('*', row)
                    # NOTE(review): DataFrame.append is deprecated/removed in
                    # modern pandas; migrate to pd.concat when upgrading.
                    abObj.slow_min_pd_DF = abObj.slow_min_pd_DF.append(
                        row, sort=False)
                    # Only the first resampled bar is appended before the
                    # loop exits — presumably only the earliest completed
                    # bar is wanted per call; confirm.
                    break
import sys, os
import json
sys.path.append(os.getcwd()[:os.getcwd().find("TickStream") + len("TickStream")])
from src.loghandler import log
from src.main.tick_stream_object import TickStreamObjects as tsobj
from src.broker.ib_service.ib_services import IBService

log = log.setup_custom_logger("tickstream")

# Path to the JSON file mapping NSE symbols to Interactive Brokers symbols.
SYM_MAP_PATH = tsobj.get_with_base_path("common", "ib_script_map",
                                        tsobj.get_value("common", "masters"))


class streamer(object):
    """Streams market data from Interactive Brokers for subscribed scripts."""

    # Shared IB service instance used by all methods of this class.
    ibobj = IBService()

    def get_ib_symbol_from_map(self, nse_symbol):
        """Translate an NSE symbol to its IB symbol via the JSON map.

        Returns the mapped IB symbol, or the NSE symbol unchanged when no
        mapping entry exists. Returns None (implicitly) if reading/parsing
        the map file raises.
        """
        try:
            # NOTE(review): the file handle from open() is never closed —
            # should use a `with` block.
            map_json = json.loads(open(str(SYM_MAP_PATH)).read())
            for sym in map_json:
                if sym['NSE_Symbol'] == nse_symbol:
                    return sym['IB_Symbol']
            # No entry found: fall back to the NSE symbol itself.
            return nse_symbol
        except Exception as ex:
            log.error("No mapping value found for NSE Symbol:%s" % nse_symbol)
            # NOTE(review): `traceback` is not imported in this module's
            # visible imports — this line will raise NameError when the
            # except path is taken; add `import traceback`.
            log.error(traceback.format_exc())

    def start_streaming(self):
        """Connect to IB and subscribe every configured contract."""
        try:
            self.ibobj.connect_ib(self.ibobj)
            for contract in tsobj.ib_subcribed_scripts:
                self.ibobj.subscribe_contract(contract, self.ibobj)
import sys
import os
import argparse
import json
import random
import subprocess
import traceback
sys.path.append(os.getcwd()[:os.getcwd().find("HRHD_Worker") + len("HRHD_Worker/")])
from src.hrhd_worker.hrhd_object import HRHDObjects as hrhdObj
from src.libs.firebase_utils import FireBaseUtils
from src.loghandler import log

logger = log.setup_custom_logger('hrhd')

nse_dict_list = None


class Hrhd:
    """HRHD worker entry object; loads NSE symbol lists from config files."""

    def __init__(self):
        logger.info("** Initiating....")

    @staticmethod
    def nifty_list(list_config_name):
        """Load and return the NSE symbol dict from the named config file.

        Args:
            list_config_name: config key naming the JSON list file under the
                "common" section.

        Returns:
            The parsed dict, or None (after logging the traceback) when the
            file cannot be read or parsed.
        """
        try:
            config_path = hrhdObj.get_with_base_path("common", list_config_name)
            with open(config_path, 'r') as fp:
                return json.load(fp)
        except Exception:
            # Best-effort: log and fall through to an implicit None.
            logger.error(traceback.format_exc())