def download(doamin="IN", interval: TCandleType = TCandleType.DAY_1, period=50) -> typing.Tuple[bool, typing.Optional[DataFrame]]:
    """Download OHLC candles for every symbol of a domain via yfinance.

    A redis flag ("download_progress_<interval>") acts as a best-effort guard
    so only one download per candle interval runs at a time.

    Args:
        doamin: market/domain key passed to getSymbolList (misspelling kept
            for backward compatibility with keyword callers).
        interval: candle granularity to fetch.
        period: amount of history to request, converted to a yfinance period
            string by covert_to_period_from_duration.

    Returns:
        (True, DataFrame) on success; (False, None) when a download for this
        interval is already in progress or when yfinance raises.
    """
    key = "download_progress_" + interval.value
    # NOTE(review): get-then-set is not atomic — two workers could still race
    # past this guard. A redis SETNX/lock would close the gap; confirm before
    # changing, as the current behavior is deliberate best-effort.
    if (dredis.get(key) == "1"):
        danalytics.reportAction(
            "ignore_duplicate_fetch_download_already_progress")
        return (False, None)
    data = None
    dredis.set(key, "1")
    try:
        ticker = [x for x in getSymbolList(doamin).keys()]
        data = yf.download(tickers=ticker,
                           period=covert_to_period_from_duration(
                               interval, period),
                           interval=interval.value,
                           group_by='ticker',
                           auto_adjust=False,
                           prepost=False,
                           threads=True,
                           proxy=None,
                           rounding=True)
    except Exception as e:
        dlog.ex(e)
        return (False, None)
    finally:
        # Always clear the in-progress flag, even on failure.
        dredis.set(key, "0")
    # yfinance sometimes returns a duplicated last row — keep the latest one.
    data = data[~data.index.duplicated(keep='last')]
    return (True, data)
def taskBuildIndicatorAll():
    """Download data, build indicators for every supported candle type, and
    persist the results as pkl files."""
    pingCelery()
    danalytics.reportAction("taskBuildIndicatorAll_started")
    for candle_type in SUPPORTED_CANDLE:
        dglobaldata.downloadAndBuildIndicator("IN", candle_type)
    danalytics.reportAction("taskBuildIndicatorAll_ended")
    # Compute Summary
    # dhighlights.taskComputeSummary()
    # Reload the freshly written data into memory.
    dglobaldata.checkLoadLatestData()
    buildTaskSuccess("Complated all snap shot", None)
def wrapper(*args, **kwargs):
    """Log entry/exit (and optionally report to analytics) around func."""
    started_at = time.time()
    name = func.__name__
    if remote_logging:
        danalytics.reportAction(name + "_started")
    dlog.d("\n\n>>>>>>>>>>>>>>>> STARTING {} <<<<<<<<<<<<<".format(name))
    result = func(*args, **kwargs)
    elapsed = time.time() - started_at
    dlog.d("\n>>>>>>>>>>>>>>>> ENDING {}, Time taken: {} sec <<<<<<<<<<<<<\n\n".format(
        name, elapsed))
    if remote_logging:
        danalytics.reportAction(name + "_ended")
    return result
def wrapper(*args, **kwargs):
    """Run func, reporting start/success/exception to analytics.

    Never propagates exceptions: failures are logged, reported, and turned
    into a RetHelper exception payload for the caller.
    """
    name = func.__name__
    try:
        danalytics.reportAction(f"worker_task_start_{name}")
        result = func(*args, **kwargs)
        danalytics.reportAction(f"worker_task_success_{name}")
        return result
    except Exception as e:
        dlog.ex(e, "Unknown error in the server")
        danalytics.reportAction(f"worker_task_exception_{name}")
        danalytics.reportException(e)
        return RetHelper.buildException(e)
import os

from myapp.factories.application import create_application
from myapp.factories.celery import configure_cache, configure_celery
from myapp.core import dlog
from myapp.core import danalytics

# Application bootstrap: build the Flask app, wire celery + cache, then bring
# up analytics. `application` must stay a module-level global because the VS
# Code launcher (and any WSGI server) looks it up by name.
application = create_application()
configure_celery(application)
configure_cache(application)

danalytics.init()
danalytics.reportAction("boot_complate")


def run():
    """Run the development server; host and debug mode come from env vars."""
    flask_host = os.environ.get('FLASK_HOST', '127.0.0.1')
    # Default "1" preserves the historical debug=True behavior. Set
    # FLASK_DEBUG=0 for anything production-like: Werkzeug's debug mode
    # allows arbitrary code execution from the browser.
    flask_debug = os.environ.get('FLASK_DEBUG', '1') == '1'
    application.run(host=flask_host, debug=flask_debug)


if __name__ == '__main__':
    run()
def computeIndicator(df: DataFrame, ticker, domain: str):
    """Attach derived per-ticker columns to the multi-indexed frame in place.

    Adds lower-case price/volume columns, percentage changes, volatility
    measures, EMA/SMA/WMA for every window in `all_range`, and talib studies
    (RSI, Bollinger bands, SAR, ATR/NATR/TRANGE) under (ticker, column) keys.

    Args:
        df: multi-column DataFrame as produced by yf.download(group_by='ticker');
            assumes (ticker, 'Open'/'Close'/'High'/'Low'/'Volume') columns exist.
        ticker: ticker symbol used as the first level of the column index.
        domain: market/domain key for getSymbolList.
    """
    # Hoisted: one symbol-list lookup instead of two.
    symbol_info = getSymbolList(domain)[ticker]
    df[ticker, 'name'] = symbol_info['name']
    df[ticker, 'symbol'] = symbol_info['symbol']
    df[ticker, 'open'] = np.round(df[ticker, 'Open'], 2)
    df[ticker, 'date'] = df[ticker].index
    df[ticker, 'date_iso'] = df[ticker, "date"].apply(
        lambda x: x.strftime('%Y-%m-%dT%H:%M:%SZ'))
    df[ticker, 'close'] = np.round(df[ticker, 'Close'], 2)
    df[ticker, 'high'] = np.round(df[ticker, 'High'], 2)
    df[ticker, 'low'] = np.round(df[ticker, 'Low'], 2)
    df[ticker, 'volume'] = np.round(df[ticker, 'Volume'], 2)

    # Percentage change vs the previous candle.
    df[ticker, 'close_change_percentage'] = fixRound(
        (df[ticker, 'close'] - df[ticker, 'close'].shift(1)) / df[ticker, 'close'].shift(1) * 100)
    df[ticker, 'volume_change_percentage'] = fixRound(
        (df[ticker, 'volume'] - df[ticker, 'volume'].shift(1)) / df[ticker, 'volume'].shift(1) * 100)

    # Volatility: absolute and relative high/low spread.
    df[ticker, 'high_low_gap'] = df[ticker, 'high'] - df[ticker, 'low']
    df[ticker, 'high_low_gap_percentage'] = np.round(
        (df[ticker, 'high'] - df[ticker, 'low']) / df[ticker, 'close'] * 100, 2)

    # Moving averages for every configured window. The loop variable was
    # previously named `range`, shadowing the builtin — renamed.
    for window in all_range:
        df[ticker, "ema_{}".format(window)] = np.round(
            df[ticker, "close"].ewm(span=window, adjust=False).mean(), 2)
        df[ticker, "sma_{}".format(window)] = np.round(
            df[ticker, "close"].rolling(window).mean(), 2)
        try:
            df[ticker, "wma_{}".format(window)] = talib.WMA(
                df[ticker, "close"], timeperiod=window)
        except Exception as e:
            dlog.ex(e, "not able compute wma", showStack=False)
            df[ticker, "wma_{}".format(window)] = 0

    # talib can raise (e.g. too few rows), so compute the studies inside a
    # guard and fall back to -1 sentinels on failure.
    try:
        df[ticker, 'rsi_14'] = np.round(
            talib.RSI(df[ticker, 'close'].values, 14), 2)
        df[ticker, 'rsi_18'] = talib.RSI(df[ticker, 'close'].values, 18)
        # Bollinger bands (5-period only; 15/60 were previously disabled).
        df[ticker, 'bb_up_5'], df[ticker, 'bb_mid_5'], df[ticker, 'bb_down_5'] = talib.BBANDS(
            df[ticker, 'close'], timeperiod=5)
        df[ticker, 'sar'] = talib.SAR(df[ticker, 'high'], df[ticker, 'low'],
                                      acceleration=0.02, maximum=0.2)
        df[ticker, 'atr_14'] = talib.ATR(df[ticker, 'high'], df[ticker, 'low'],
                                         df[ticker, 'close'], timeperiod=14)
        df[ticker, 'natr_14'] = talib.NATR(df[ticker, 'high'], df[ticker, 'low'],
                                           df[ticker, 'close'], timeperiod=14)
        df[ticker, 'tr_14'] = talib.TRANGE(df[ticker, 'high'], df[ticker, 'low'],
                                           df[ticker, 'close'])
    except Exception as e:
        # BUGFIX: the log message had no {} placeholder, so .format(ticker)
        # silently dropped the ticker from the log line.
        dlog.ex(e, "Not able to calculate RSI for Ticker {}".format(ticker),
                showStack=False)
        danalytics.reportAction("talib_exception_for_{}".format(ticker))
        df[ticker, 'rsi_14'] = -1
        df[ticker, 'rsi_18'] = -1
        df[ticker, 'bb_up_5'], df[ticker, 'bb_mid_5'], df[ticker, 'bb_down_5'] = (-1, -1, -1)
        df[ticker, 'sar'] = -1
        df[ticker, 'atr_14'] = -1
        df[ticker, 'natr_14'] = -1
        df[ticker, 'tr_14'] = -1
from myapp.core import (danalytics, dglobaldata, dhighlights, dindicator,
                        dlog, dplot, dredis, dstorage)
from myapp.core.ddecorators import (decrTaskCommonAction, decrTLogFunction,
                                    make_exception_safe)
from myapp.core.ddownload import download
from myapp.core.dnetwork import pingCelery
from myapp.core.dtypes import TCandleType
from myapp.core.rootConfig import SUPPORTED_CANDLE
from myapp.core.sync import SUPPORTED_CHART_DURATION, getSymbolList
from myapp.core.timex import getCurTimeStr
from myapp.extensions import celery

# Log might needs to be inited for worker
danalytics.init()
danalytics.reportAction("task_boot_complete")


def buildTaskSuccess(msg: str, out: Any):
    """Uniform success envelope returned by worker tasks."""
    return {"status": "success", "msg": msg, "out": out}


@celery.task(name="tasks.simple_task")
@decrTLogFunction(remote_logging=True)
@decrTaskCommonAction
def simple_task(argument: str) -> str:
    """Demo task: sleep a random 5-10 seconds, then build a greeting string."""
    # NOTE(review): `random` and `time` are used but not imported in this
    # visible chunk — presumably imported elsewhere in the module; verify.
    sleep_for = random.randrange(5, 11)
    print("Going to sleep for {} seconds...".format(sleep_for))
    time.sleep(sleep_for)
    hello = "Hello '{}' from task! We have slept for {} seconds".format(
        str(argument), sleep_for)
    # NOTE(review): the body appears truncated here; a `return hello` likely
    # follows outside this view (the annotation promises -> str) — confirm.