Example #1
def wrapper(*args, **kwargs):
    try:
        return func(*args, **kwargs)
    except Exception as e:
        dlog.ex(e, "Unknown error in the server")
        danalytics.reportException(e)
        return RetHelper.buildException(e)
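This wrapper is only the inner half of a decorator; the enclosing factory is not shown. A minimal sketch of what the full decorator likely looks like, assuming dlog, danalytics, and RetHelper are the project's own logging, analytics, and response helpers (the name catch_server_errors is hypothetical):

import functools

def catch_server_errors(func):
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as e:
            dlog.ex(e, "Unknown error in the server")  # project logger (assumed)
            danalytics.reportException(e)              # project analytics (assumed)
            return RetHelper.buildException(e)         # error-shaped response (assumed)
    return wrapper

# Usage (hypothetical):
# @catch_server_errors
# def handle_request(payload): ...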
Example #2
def download(domain="IN",
             interval: TCandleType = TCandleType.DAY_1,
             period=50) -> typing.Tuple[bool, DataFrame]:
    key = "download_progress_" + interval.value
    if dredis.get(key) == "1":
        danalytics.reportAction(
            "ignore_duplicate_fetch_download_already_progress")
        return (False, None)
    data = None
    dredis.set(key, "1")
    try:
        tickers = list(getSymbolList(domain).keys())
        data = yf.download(tickers=tickers,
                           period=covert_to_period_from_duration(
                               interval, period),
                           interval=interval.value,
                           group_by='ticker',
                           auto_adjust=False,
                           prepost=False,
                           threads=True,
                           proxy=None,
                           rounding=True)
    except Exception as e:
        dlog.ex(e)
        return (False, None)
    finally:
        dredis.set(key, "0")
    # yfinance sometimes returns a duplicate last row, so drop it.
    data = data[~data.index.duplicated(keep='last')]
    return (True, data)
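A quick usage sketch (hedged: TCandleType is the project's own enum, and dredis must be reachable for the duplicate-fetch lock to work):

# Hypothetical call site: fetch 50 daily candles for the "IN" symbol list.
ok, df = download(domain="IN", interval=TCandleType.DAY_1, period=50)
if not ok:
    dlog.d("download skipped or failed; another fetch may be in progress")
else:
    dlog.d("downloaded {} rows".format(len(df)))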
Example #3
        def wrapper(*args, **kwargs):
            print(args)
            print(kwargs)
            func_name = func.__name__

            cache_key_loading = "{}_loading".format(cache_key)
            cache_key_ts = "{}_ts".format(cache_key)
            # Return the cached value if one exists (unless the caller opts out).
            if not kwargs.get('ignore_cache'):
                cache = dredis.getPickle(cache_key, None)
                if cache:
                    return cache
            # Check global lock
            if dredis.get(cache_key_loading) == "1":
                raise Exception(
                    "{} is locked by smart cache".format(func_name))
            # Lock
            dredis.set(cache_key_loading, "1")
            # try/except/finally guarantees the lock is released.
            res = None
            try:
                # Execute
                res = func(*args, **kwargs)
                dredis.setPickle(cache_key, res)
                dredis.set(cache_key_ts, time.time())
            except Exception as e:
                dlog.ex(e, "exception happened while executing: {}".format(func_name))
            finally:
                # Unlock even if the function raised.
                dredis.set(cache_key_loading, "0")
            return res
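As with Example #1, only the inner wrapper is shown. A minimal sketch of the enclosing factory, assuming it receives the Redis cache_key as an argument (the name smart_cache is hypothetical):

import functools
import time

def smart_cache(cache_key):
    """Hypothetical factory around the wrapper above: caches the result of
    func in Redis under cache_key, stamps <cache_key>_ts with the compute
    time, and guards execution with a <cache_key>_loading flag so only one
    caller recomputes at a time."""
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            ...  # body as in Example #3
        return wrapper
    return decorator

# Usage (hypothetical):
# @smart_cache("indicator_history_IN")
# def getIndicatorHistory(domain, ignore_cache=False): ...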
Example #4
def getLastNIndicatorInJson(domain, df: DataFrame, limit=15):
    final_result = {}
    try:
        df = df.tail(limit)
        result = df.to_json(orient="records")
        parsed = json.loads(result)
        # Guard against frames shorter than `limit` rows.
        limit = min(limit, len(parsed))
        for offset in range(limit):
            row = parsed[offset]
            # Offset keys run ..., -2, -1, 0, with 0 as the most recent row.
            offset_key = (limit - offset - 1) * -1
            for x in row.keys():
                pair = literal_eval(x)
                symbol = pair[0]
                indicator = pair[1]
                value = row[x]
                if symbol not in final_result:
                    final_result[symbol] = {}
                if offset_key not in final_result[symbol]:
                    final_result[symbol][offset_key] = {}
                final_result[symbol][offset_key][indicator] = value
        # print(json.dumps(final_result, indent=4))
    except Exception as e:
        dlog.ex(e)
        danalytics.reportException(e)
    # Optionally add some more info:
    # for x in final_result.keys():
    #    final_result[x]['sector'] = getSymbolList(domain=domain)[x]['sector']
    return final_result
Example #5
def performScreen(domain: str, condition: str, columns=[], sort_by: str = None, limit: int = None):
    dlog.d('[INFO] Running screen for condition: {}, columns: {}'.format(
        condition, columns))
    indicatorHistory = getIndicatorHistory(domain)
    result = []
    sl = 0
    columns = [resolveIndicatorExpression(c) for c in columns]
    condition = resolveCondition(condition)
    last_error = ''
    try:
        for symbol in indicatorHistory.keys():
            indicator_map = indicatorHistory[symbol]
            try:
                if eval(condition):
                    sl += 1
                    symbol_info = getSymbolList(domain)[symbol]
                    selected_one = {}
                    selected_one['symbol'] = symbol
                    selected_one['name'] = symbol_info['name']
                    selected_one['sector'] = symbol_info['sector']
                    selected_one['close'] = str(
                        np.round(indicator_map['1d'][0]['close'], 2))
                    selected_one['volume'] = str(
                        np.round(indicator_map['1d'][0]['volume'], 2))
                    selected_one['rsi_14'] = str(
                        np.round(indicator_map['1d'][0]['rsi_14'], 2))
                    selected_one['change'] = str(
                        np.round(indicator_map['1d'][0]['close_change_percentage'], 2))
                    # extra col
                    for col in columns:
                        selected_one[col[0]] = str(
                            np.round(eval(col[1]), 2))
                    # add the user-defined data
                    result.append(fixDict(selected_one))
            except Exception as e:
                last_error = 'Are you passing the right filter? {}'.format(
                    condition)
                dlog.ex(e, showStack=False)
                dlog.e(
                    "Filter evaluation failed for symbol: {}".format(symbol))
                # Ignore this symbol and continue.

    except Exception as e:
        raise e
    # (None values are filtered out during sorting below.)

    # Do sort
    if sort_by:
        sort_key = sort_by.replace("-", "")
        reverse = sort_by[0] != "-"
        result = [x for x in result if x.get(sort_key) is not None]
        result.sort(key=lambda x: float(x.get(sort_key)), reverse=reverse)

    # Do limit
    if limit:
        result = result[:limit]
    return {'result': result, 'last_error': last_error}
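Because condition is evaluated with eval against the indicator_map in scope, a call site looks like this (hedged: the indicator names mirror the keys built above, and the expression syntax assumes resolveCondition passes it through largely unchanged):

# Hypothetical call: RSI-oversold screen on daily candles, top 10 by volume.
out = performScreen(
    domain="IN",
    condition="indicator_map['1d'][0]['rsi_14'] < 30",
    columns=[],
    sort_by="volume",
    limit=10)
for row in out['result']:
    print(row['symbol'], row['rsi_14'])

Note that eval on a caller-supplied string is a code-injection risk unless the expression is sanitized upstream, presumably in resolveCondition.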
Example #6
def loadDataForCandle(candle_type: TCandleType):
    try:
        _candleTypeToDataFrameMap[
            candle_type.value] = dstorage.load_data_to_disk(
                dstorage.get_default_path_for_candle(candle_type))
        timetracker.mark_last_data_update_ts(candle_type)
        dlog.d("updating data...")
    except Exception as e:
        dlog.ex(e, "not able to load data from storage.")
Example #7
def buildTechnicalIndicators(df: DataFrame, domain: str):
    # Forward-fill NaNs: a single NaN would otherwise break the indicator math.
    df.ffill(inplace=True)
    for ticker in getSymbolList(domain).keys():
        try:
            computeIndicator(df, ticker, domain)
        except Exception as e:
            dlog.ex(e, "Not able to process data frame for {}".format(ticker))
    dlog.d("Indicator compution done proper way")
    return df
Example #8
def taskBuildIndicator(domain: str, candle_type: str):
    try:
        pingCelery()
        _candle_type = TCandleType(candle_type)
        dglobaldata.downloadAndBuildIndicator(domain, _candle_type)
        # Compute Summary
        if _candle_type == TCandleType.DAY_1:
            pass
            # dhighlights.taskComputeSummary()
        buildTaskSuccess("taskBuildIndicator Done", None)
    except Exception as e:
        dlog.d("Got exception in taskBuildIndicator")
        dlog.ex(e)
Example #9
def wrapper(*args, **kwargs):
    func_name = func.__name__
    try:
        danalytics.reportAction("worker_task_start_{}".format(func_name))
        ret = func(*args, **kwargs)
        danalytics.reportAction("worker_task_success_{}".format(func_name))
        return ret
    except Exception as e:
        dlog.ex(e, "Unknown error in the server")
        danalytics.reportAction(
            "worker_task_exception_{}".format(func_name))
        danalytics.reportException(e)
        return RetHelper.buildException(e)
Example #10
def reportAction(tag: str, extra: dict = None):
    extra = extra or {}  # avoid a shared mutable default argument
    dlog.d("logging remote action with tag: " + tag)
    if isDebug():
        dlog.d("ignore remote log in debug mode")
        return
    try:
        simplestore_post(
            url="{}/api/analytics/action".format(SIMPLESTORE_ENDPOINT),
            data={
                "app_id": APP_ID,
                "session": session,
                "tag": tag,
                "extra": extra
            })
    except Exception as ex:
        dlog.ex(ex)
Example #11
def reportException(ex: Exception, location=""):
    if isDebug():
        dlog.d("ignore remote log in debug mode")
        return
    try:
        res = simplestore_post(
            url="{}/api/analytics/exception".format(SIMPLESTORE_ENDPOINT),
            data={
                "app_id": APP_ID,
                "session": session,
                "location": traceback.format_exc().split("\n")[-4].strip(),
                "type": "exception",
                "stack": traceback.format_exc(),
                "args": str(ex.args)
            })
        dlog.d(str(res))
    except Exception as e:  # don't shadow the `ex` being reported
        dlog.ex(e)
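Since reportException builds its payload from traceback.format_exc(), it only yields a useful stack when called from inside an except block while the exception is still active:

try:
    risky_operation()  # hypothetical
except Exception as e:
    reportException(e)  # format_exc() sees the active exception here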
Example #12
def wrapper(*args, **kwargs):
    print(args)
    print(kwargs)
    func_name = func.__name__
    cache_key_loading = "{}_loading".format(cache_key)
    if dredis.get(cache_key_loading) == "1":
        raise Exception(
            "{} is already in progress".format(func_name))
    # Lock
    dredis.set(cache_key_loading, "1")
    res = None
    try:
        # Execute
        res = func(*args, **kwargs)
    except Exception as e:
        dlog.ex(e, "exception happened while executing: {}".format(func_name))
    finally:
        # Unlock even if the function raised.
        dredis.set(cache_key_loading, "0")
    return res
Example #13
def getLatestDataInJson(domain, df: DataFrame):
    final_result = {}
    try:
        df = df.tail(1)
        result = df.to_json(orient="records")
        parsed = json.loads(result)
        for x in parsed[0].keys():
            pair = literal_eval(x)
            symbol = pair[0]
            indicator = pair[1]
            value = parsed[0][x]
            if symbol not in final_result:
                final_result[symbol] = {}
            final_result[symbol][indicator] = value
        # print(json.dumps(final_result, indent=4))
    except Exception as e:
        dlog.ex(e)
        danalytics.reportException(e)
    # Attach sector info to each symbol.
    symbol_list = getSymbolList(domain=domain)
    for x in final_result.keys():
        final_result[x]['sector'] = symbol_list[x]['sector']
    return final_result
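The DataFrame's column labels arrive as stringified (symbol, indicator) tuples, which is why literal_eval is used to split them. A small standalone illustration (the symbol "REL" is taken from the comment in Example #16):

from ast import literal_eval

# A column label like "('REL', 'close')" parses back into a tuple.
symbol, indicator = literal_eval("('REL', 'close')")
# final_result then nests as final_result['REL']['close'] = value,
# with a 'sector' entry attached to each symbol afterwards.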
Example #14
def init():
    if isDebug():
        dlog.d("ignore remote log in debug mode")
        return
    global session
    if session != '':
        dlog.d("Session already exists")
        return
    try:
        data: dict = simplestore_post(
            url="{}/api/analytics/launch".format(SIMPLESTORE_ENDPOINT),
            data={
                "app_id": APP_ID,
                "app_version": "1.0",
                "device_os": "web",
                "device_id": "null",
                "device_api": "null"
            })
        session = data.get('out')[0].get('session')
        dlog.d("Remote log is inited with session:{}".format(session))
    except Exception as ex:
        dlog.ex(ex)
Example #15
def computeIndicator(df: DataFrame, ticker, domain: str):
    # Attach name/symbol metadata; lower-case OHLCV copies follow << Validated
    df[ticker, 'name'] = getSymbolList(domain)[ticker]['name']
    df[ticker, 'symbol'] = getSymbolList(domain)[ticker]['symbol']

    # df[ticker, 'sector'] = getSymbolList()[ticker]['sector']
    df[ticker, 'open'] = np.round(df[ticker, 'Open'], 2)
    df[ticker, 'date'] = df[ticker].index
    df[ticker, 'date_iso'] = df[ticker, "date"].apply(
        lambda x: x.strftime('%Y-%m-%dT%H:%M:%SZ'))
    df[ticker, 'close'] = np.round(df[ticker, 'Close'], 2)
    df[ticker, 'high'] = np.round(df[ticker, 'High'], 2)
    df[ticker, 'low'] = np.round(df[ticker, 'Low'], 2)
    df[ticker, 'volume'] = np.round(df[ticker, 'Volume'], 2)

    # define changes
    df[ticker, 'close_change_percentage'] = fixRound(
        (df[ticker, 'close'] - df[ticker, 'close'].shift(1)) /
        df[ticker, 'close'].shift(1) * 100)
    df[ticker, 'volume_change_percentage'] = fixRound(
        (df[ticker, 'volume'] - df[ticker, 'volume'].shift(1)) /
        df[ticker, 'volume'].shift(1) * 100)

    # Volatility
    df[ticker, 'high_low_gap'] = df[ticker, 'high'] - df[ticker, 'low']
    df[ticker, 'high_low_gap_percentage'] = np.round(
        (df[ticker, 'high'] - df[ticker, 'low']) / df[ticker, 'close'] * 100,
        2)

    # Moving averages for each window size << Validated
    for window in all_range:
        df[ticker, "ema_{}".format(window)] = np.round(
            df[ticker, "close"].ewm(span=window, adjust=False).mean(), 2)
        df[ticker, "sma_{}".format(window)] = np.round(
            df[ticker, "close"].rolling(window).mean(), 2)
        try:
            df[ticker, "wma_{}".format(window)] = talib.WMA(
                df[ticker, "close"], timeperiod=window)
        except Exception as e:
            dlog.ex(e, "not able to compute WMA", showStack=False)
            df[ticker, "wma_{}".format(window)] = 0
        # validated
        # macd and RSI
        # df[ticker, 'macd'] = talib.MACD(df[ticker, 'close'].as_matrix())
        # df[ticker, "macd_macd"], df[ticker, "macd_macdsignal"], df[ticker, "macd_macdhist"] = talib.MACD(
        #    df.close, fastperiod=12, slowperiod=26, signalperiod=9)
        # TA-Lib can raise here, so stay on the safe side.
    try:
        df[ticker, 'rsi_14'] = np.round(
            talib.RSI(df[ticker, 'close'].values, 14), 2)
        df[ticker, 'rsi_18'] = talib.RSI(df[ticker, 'close'].values, 18)

        # band
        df[ticker, 'bb_up_5'], df[ticker, 'bb_mid_5'], df[ticker, 'bb_down_5'] = \
            talib.BBANDS(df[ticker, 'close'], timeperiod=5)
        # df[ticker, 'bb_up_15'], df[ticker, 'bb_mid_15'], df[ticker, 'bb_down_15'] = talib.BBANDS(
        #    df[ticker, 'close'], timeperiod=15)
        # df[ticker, 'bb_up_60'], df[ticker, 'bb_mid_60'], df[ticker, 'bb_down_60'] = talib.BBANDS(
        #    df[ticker, 'close'], timeperiod=60)

        df[ticker, 'sar'] = talib.SAR(df[ticker, 'high'],
                                      df[ticker, 'low'],
                                      acceleration=0.02,
                                      maximum=0.2)

        df[ticker, 'atr_14'] = talib.ATR(df[ticker, 'high'],
                                         df[ticker, 'low'],
                                         df[ticker, 'close'],
                                         timeperiod=14)
        df[ticker, 'natr_14'] = talib.NATR(df[ticker, 'high'],
                                           df[ticker, 'low'],
                                           df[ticker, 'close'],
                                           timeperiod=14)
        df[ticker, 'tr_14'] = talib.TRANGE(df[ticker, 'high'],
                                           df[ticker, 'low'],
                                           df[ticker, 'close'])
    except Exception as e:
        dlog.ex(e,
                "Not able to calculate RSI for ticker {}".format(ticker),
                showStack=False)
        danalytics.reportAction("talib_exception_for_{}".format(ticker))
        df[ticker, 'rsi_14'] = -1
        df[ticker, 'rsi_18'] = -1
        df[ticker, 'bb_up_5'] = -1
        df[ticker, 'bb_mid_5'] = -1
        df[ticker, 'bb_down_5'] = -1
        df[ticker, 'sar'] = -1
        df[ticker, 'atr_14'] = -1
        df[ticker, 'natr_14'] = -1
        df[ticker, 'tr_14'] = -1
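TA-Lib functions operate on numeric double arrays (hence .values for RSI above) and return arrays aligned to the input, with leading NaNs until the lookback window fills. A minimal standalone check, assuming the talib package is installed:

import numpy as np
import talib

close = np.random.random(100) * 100  # synthetic prices, illustration only
rsi = talib.RSI(close, timeperiod=14)
print(np.isnan(rsi[:14]).all())  # True: the first 14 slots are lookback
print(rsi[-1])                   # latest RSI value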
Example #16
def downloadAndBuildIndicator(domain, candle_type: TCandleType):
    # Optimization
    if not shouldBuildIndicator(domain, candle_type):
        dlog.d("Ignore rebuilding shouldBuildIndicator")
        return

    # Locking
    lockkey = "downloadAndBuildindicator_{}_{}".format(domain,
                                                       candle_type.value)
    if dredis.get(lockkey) == "1":
        dlog.d("downloadAndBuildIndicator locked for key {}".format(lockkey))
        raise Exception("downloadAndBuildIndicator is progress")
    dredis.set(lockkey, "1")

    try:
        dlog.d("downloadAndBuildIndicator start")

        dlog.d("downloadAndBuildIndicator download start")
        ret_value, download_data = ddownload.download(domain,
                                                      interval=candle_type)
        if ret_value is False:
            dlog.d("Download fails")
            return {
                "status": "error",
                "msg": "something goes wrong",
                "out": None
            }

        dlog.d("downloadAndBuildIndicator building start")
        processed_df = dindicator.buildTechnicalIndicators(
            download_data, domain)

        # DO NOT STORE AS A FILE
        # dlog.d("downloadAndBuildIndicator: saving to storage start")
        # path_to_store = dstorage.get_default_path_for_candle(candle_type)
        # dstorage.store_data_to_disk(processed_df, path_to_store)

        dlog.d("downloadAndBuildIndicator: building indicator history map")
        # Building Indicator map for O(1) looks up.
        # This will be a 4d map
        # map[REL][1d][-1][close]...
        last15SlotIndicator = getLastNIndicatorInJson(domain, processed_df)
        indicator_history_key = "indicator_history_{}".format(domain)
        olddata = dredis.getPickle(indicator_history_key)
        if not olddata:
            olddata = {}
        for key in last15SlotIndicator.keys():
            if key not in olddata:
                olddata[key] = {}
            olddata[key][candle_type.value] = last15SlotIndicator.get(key)
        dredis.setPickle(indicator_history_key, olddata)
        dlog.d(
            "downloadAndBuildIndicator: saved indicator history to {}".format(
                indicator_history_key))

        dlog.d("downloadAndBuildIndicator: saving to redis start")
        dredis.setPickle(
            "indicator_data_{}_{}".format(domain, candle_type.value), {
                'data': getLatestDataInJson(domain, processed_df),
                'timestamp': getCurTimeStr()
            })

        # update market data
        if candle_type == TCandleType.DAY_1:
            saveMarketDataFormDayDF(domain, download_data)

        # Set TimeStamp key
        dredis.set(
            "indicator_timestamp_{}_{}".format(domain, candle_type.value),
            getCurTimeStr())

        dlog.d("downloadAndBuildIndicator ends")
        return {
            "status": "success",
            "msg": "Completed snapshot pipeline",
            "out": None
        }
    except Exception as e:
        dlog.d("downloadAndBuildIndicator Exception happened")
        danalytics.reportException(e, "Exception in downloadAndBuildIndicator")
        dlog.ex(e)
        raise
    finally:
        # Always release the lock, on success or failure.
        dredis.set(lockkey, "0")