Exemplo n.º 1
0
 def wrapper(*args, **kwargs):
     """Run ``func`` under a simple redis lock so at most one call runs
     at a time; a concurrent caller gets an Exception instead.

     Exceptions raised by ``func`` are logged and swallowed (the wrapper
     then returns None), and the lock is always released.
     """
     print(args)
     print(kwargs)
     func_name = func.__name__
     cache_key_loading = "{}_loading".format(cache_key)
     # NOTE(review): this get+set pair is not atomic — two callers could
     # both pass the check before either sets the flag. Confirm acceptable.
     if dredis.get(cache_key_loading) == "1":
         raise Exception(
             "{} is already in progress".format(func_name))
     # Lock
     dredis.set(cache_key_loading, "1")
     res = None
     try:
         # Execute
         res = func(*args, **kwargs)
     except Exception as e:
         # Best-effort: log and fall through; the caller then sees None.
         dlog.ex(e, "exception happened while executing:{}".format(func_name))
     finally:
         # Unlock in finally so the flag is cleared even if logging raises
         # or a non-Exception (e.g. KeyboardInterrupt) escapes the try.
         dredis.set(cache_key_loading, "0")
     return res
Exemplo n.º 2
0
        def wrapper(*args, **kwargs):
            """Smart-cache wrapper: return the pickled cached result when
            present, otherwise run ``func`` under a redis lock, cache the
            result plus a timestamp, and always release the lock.

            Pass ``ignore_cache=True`` to bypass the cache read.
            Exceptions from ``func`` are logged and swallowed (returns None).
            """
            print(args)
            print(kwargs)
            func_name = func.__name__

            cache_key_loading = "{}_loading".format(cache_key)
            cache_key_ts = "{}_ts".format(cache_key)
            # Serve from cache unless the caller explicitly bypasses it.
            if kwargs.get('ignore_cache') != True:
                cache = dredis.getPickle(cache_key, None)
                if cache:
                    return cache
            # Check global lock
            if dredis.get(cache_key_loading) == "1":
                raise Exception(
                    "{} is locked by smart cache".format(func_name))
            # Lock
            # NOTE(review): get+set is not atomic — verify races are OK here.
            dredis.set(cache_key_loading, "1")
            res = None
            try:
                # Execute, then persist result and its build timestamp.
                res = func(*args, **kwargs)
                dredis.setPickle(cache_key, res)
                dredis.set(cache_key_ts, time.time())
            except Exception as e:
                # Best-effort: log and fall through; caller then sees None.
                dlog.ex(e, "exception happened while executing:{}".format(func_name))
            finally:
                # Unlock in finally so the lock cannot leak on unexpected
                # exits (logging failure, KeyboardInterrupt, etc.).
                dredis.set(cache_key_loading, "0")
            return res
Exemplo n.º 3
0
def mark_dataload_end(candle_type: TCandleType):
    """Clear the download-in-progress flag for *candle_type* and record
    the completion time (epoch seconds) in redis."""
    candle = candle_type.value
    dredis.set("download_start_{}".format(candle), "0")
    dredis.set("download_start_{}_ts".format(candle), int(time.time()))
Exemplo n.º 4
0
def mark_dataload_start(candle_type: TCandleType):
    """Flag in redis that a data download for *candle_type* has begun."""
    flag_key = "download_start_{}".format(candle_type.value)
    dredis.set(flag_key, "1")
Exemplo n.º 5
0
from myapp.core.ddecorators import trace_perf
from myapp.core.dlog import stack
from myapp.core.dtypes import TCandleType
from myapp.core.rootConfig import SUPPORTED_CANDLE
from myapp.core.sync import getSymbolList
from pandas.core.frame import DataFrame

### Here is how to test the download on your own:
"""
    import yfinance as yf
yf.download("TCS.NS")
"""

# Reset all download locks at import time (e.g. after a server restart).
for candle_type in SUPPORTED_CANDLE:
    lock_key = "download_progress_{}".format(candle_type.value)
    dredis.set(lock_key, "0")
dlog.d("Reset download locks")


@trace_perf
def download(doamin="IN",
             interval: TCandleType = TCandleType.DAY_1,
             period=50) -> typing.Tuple[bool, DataFrame]:
    key = "download_progress_" + interval.value
    if (dredis.get(key) == "1"):
        danalytics.reportAction(
            "ignore_duplicate_fetch_download_already_progress")
        return (False, None)
    data = None
    dredis.set(key, "1")
    try:
Exemplo n.º 6
0
def downloadAndBuildIndicator(domain, candle_type: TCandleType):
    """Download candles for *domain*/*candle_type*, rebuild the technical
    indicators and persist everything to redis.

    Returns None when no rebuild is needed, otherwise a status dict of the
    form {"status", "msg", "out"}. Raises if another build already holds
    the redis lock, and re-raises (after reporting) any pipeline failure.
    """
    # Optimization: skip the whole pipeline when there is nothing to build.
    if not shouldBuildIndicator(domain, candle_type):
        dlog.d("Ignore rebuilding shouldBuildIndicator")
        return

    # Locking: allow only one concurrent build per (domain, candle_type).
    lockkey = "downloadAndBuildindicator_{}_{}".format(domain,
                                                       candle_type.value)
    if dredis.get(lockkey) == "1":
        dlog.d("downloadAndBuildIndicator locked for key {}".format(lockkey))
        raise Exception("downloadAndBuildIndicator is progress")
    dredis.set(lockkey, "1")

    try:
        dlog.d("downloadAndBuildIndicator start")

        dlog.d("downloadAndBuildIndicator download start")
        ret_value, download_data = ddownload.download(domain,
                                                      interval=candle_type)
        if ret_value is False:
            dlog.d("Download fails")
            return {
                "status": "error",
                "msg": "something goes wrong",
                "out": None
            }

        dlog.d("downloadAndBuildIndicator building start")
        processed_df = dindicator.buildTechnicalIndicators(
            download_data, domain)

        # NOTE: deliberately NOT stored to disk — redis is the only store.

        dlog.d("downloadAndBuildIndicator: building indicator history map")
        # Building Indicator map for O(1) looks up.
        # This will be a 4d map
        # map[REL][1d][-1][close]...
        last15SlotIndicator = getLastNIndicatorInJson(domain, processed_df)
        indicator_history_key = "indicator_history_{}".format(domain)
        olddata = dredis.getPickle(indicator_history_key)
        if not olddata:
            olddata = {}
        # Merge this candle_type's slots into the existing per-key history.
        for key in last15SlotIndicator.keys():
            if key not in olddata:
                olddata[key] = {}
            olddata[key][candle_type.value] = last15SlotIndicator.get(key)
        dredis.setPickle(indicator_history_key, olddata)
        dlog.d(
            "downloadAndBuildIndicator: saved indicator history to {}".format(
                indicator_history_key))

        dlog.d("downloadAndBuildIndicator: saving to redis start")
        dredis.setPickle(
            "indicator_data_{}_{}".format(domain, candle_type.value), {
                'data': getLatestDataInJson(domain, processed_df),
                'timestamp': getCurTimeStr()
            })

        # Update market data — day candles double as the market snapshot.
        if candle_type == TCandleType.DAY_1:
            saveMarketDataFormDayDF(domain, download_data)

        # Set TimeStamp key
        dredis.set(
            "indicator_timestamp_{}_{}".format(domain, candle_type.value),
            getCurTimeStr())

        dlog.d("downloadAndBuildIndicator ends")
        return {
            "status": "success",
            "msg": "Completed snapshot pipeline",
            "out": None
        }
    except Exception as e:
        dlog.d("downloadAndBuildIndicator Exception happened")
        danalytics.reportException(e, "Exception in downloadAndBuildIndicator")
        dlog.ex(e)
        # Bare raise preserves the original traceback.
        raise
    finally:
        # Single unlock point: runs on success, failure and early return.
        dredis.set(lockkey, "0")
Exemplo n.º 7
0
def saveMarketDataFormDayDF(domain: str, df: DataFrame):
    """Persist the latest market snapshot for *domain* to redis, together
    with a freshness timestamp key."""
    dlog.d("Saving market data...")
    payload = {"data": getLatestDataInJson(domain, df)}
    dredis.setPickle("market_data_{}".format(domain), payload)
    dredis.set("market_ts_{}".format(domain), getCurTimeStr())
Exemplo n.º 8
0
                final_result[symbol][offset_key][indicator] = value
        # print(json.dumps(final_result, indent=4))
    except Exception as e:
        dlog.ex(e)
        danalytics.reportException(e)
    # More some info.
    # for x in final_result.keys():
    #    final_result[x]['sector'] = getSymbolList(domain=domain)[x]['sector']
    return final_result


# Reset all downloadAndBuildIndicator locks at import time — needed so a
# server restart does not leave stale "in progress" flags behind.
for candle_type in SUPPORTED_CANDLE:
    for domain in SUPPORTED_DOMAIN:
        lock_name = "downloadAndBuildindicator_{}_{}".format(
            domain, candle_type.value)
        dredis.set(lock_name, "0")
dlog.d("Reset downloadAndBuildIndicator locks")


def getLastUpdatedTimeStamp(domain: str):
    """Return a dict mapping "<domain>-<candle>" to the last indicator
    build timestamp stored in redis ("Data not found" when absent)."""
    return {
        '{}-{}'.format(domain, ct.value): dredis.get(
            "indicator_timestamp_{}_{}".format(domain, ct.value),
            "Data not found")
        for ct in SUPPORTED_CANDLE
    }


# It will create a cache for the latest data frame