Example 1
 def wrapper(*args, **kwargs):
     """Invoke the wrapped ``func``; on failure log, report and return a structured error."""
     try:
         result = func(*args, **kwargs)
     except Exception as err:
         # Best-effort reporting: the caller always gets a payload, never an exception.
         dlog.ex(err, "Unknown error in the server")
         danalytics.reportException(err)
         return RetHelper.buildException(err)
     return result
Example 2
def getLastNIndicatorInJson(domain, df: DataFrame, limit=15):
    """Convert the last ``limit`` rows of an indicator DataFrame into a nested dict.

    Columns are expected to be stringified tuples like ``"('SYMBOL', 'indicator')"``
    (parsed with ``literal_eval``). The result maps
    ``symbol -> offset -> indicator -> value`` where offset 0 is the most
    recent row, -1 the one before it, and so on.

    Args:
        domain: Unused here (kept for interface compatibility with callers).
        df: Wide indicator DataFrame with tuple-string column names.
        limit: Maximum number of trailing rows to convert.

    Returns:
        The nested dict described above; empty on any parsing failure
        (errors are logged and reported, not raised).
    """
    final_result = {}
    try:
        df = df.tail(limit)
        result = df.to_json(orient="records")
        parsed = json.loads(result)
        # FIX: iterate over the rows we actually have. The original used
        # range(limit), which raised IndexError (silently swallowed below)
        # whenever the frame held fewer than ``limit`` rows, truncating the
        # result mid-loop.
        row_count = len(parsed)
        for offset in range(row_count):
            row = parsed[offset]
            # Key => 0, -1, -2, ... (0 == most recent row)
            offset_key = (row_count - offset - 1) * -1
            for x in row.keys():
                pair = literal_eval(x)
                symbol = pair[0]
                indicator = pair[1]
                value = row[x]
                final_result.setdefault(symbol, {}) \
                    .setdefault(offset_key, {})[indicator] = value
    except Exception as e:
        dlog.ex(e)
        danalytics.reportException(e)
    return final_result
Example 3
 def wrapper(*args, **kwargs):
     """Run the wrapped ``func``, emitting start/success/exception analytics events."""
     name = func.__name__
     try:
         danalytics.reportAction("worker_task_start_{}".format(name))
         result = func(*args, **kwargs)
         danalytics.reportAction("worker_task_success_{}".format(name))
         return result
     except Exception as err:
         # Any failure (including in reporting itself) is wrapped, never raised.
         dlog.ex(err, "Unknown error in the server")
         danalytics.reportAction("worker_task_exception_{}".format(name))
         danalytics.reportException(err)
         return RetHelper.buildException(err)
Example 4
def getLatestDataInJson(domain, df: DataFrame):
    """Convert the last row of an indicator DataFrame into a per-symbol dict.

    Columns are expected to be stringified tuples like ``"('SYMBOL', 'indicator')"``.
    The result maps ``symbol -> indicator -> value`` and, when any symbols were
    parsed, each symbol additionally gets a ``'sector'`` entry looked up from
    ``getSymbolList``.

    Args:
        domain: Passed through to ``getSymbolList`` for the sector lookup.
        df: Wide indicator DataFrame with tuple-string column names.

    Returns:
        The nested dict described above; empty if parsing fails (errors are
        logged and reported, not raised).
    """
    final_result = {}
    try:
        df = df.tail(1)
        result = df.to_json(orient="records")
        parsed = json.loads(result)
        row = parsed[0]
        for x in row.keys():
            pair = literal_eval(x)
            symbol = pair[0]
            indicator = pair[1]
            final_result.setdefault(symbol, {})[indicator] = row[x]
    except Exception as e:
        dlog.ex(e)
        danalytics.reportException(e)
    # Attach sector metadata. FIX: the symbol list is loop-invariant — fetch it
    # once instead of once per symbol (the original re-fetched the full list on
    # every iteration).
    if final_result:
        symbol_list = getSymbolList(domain=domain)
        for x in final_result.keys():
            final_result[x]['sector'] = symbol_list[x]['sector']
    return final_result
Example 5
def downloadAndBuildIndicator(domain, candle_type: TCandleType):
    """Download candles for ``domain`` and rebuild its technical-indicator caches.

    Pipeline: download -> build indicators -> store a 15-slot indicator
    history map and the latest snapshot in redis -> stamp the refresh time.
    Guarded by a redis flag so only one build per (domain, candle_type) runs
    at a time.

    Returns:
        None when the build is skipped; otherwise a status dict with keys
        ``status``, ``msg`` and ``out``.

    Raises:
        Exception: if the lock is already held, or if any pipeline step fails
        (re-raised after being reported).
    """
    # Optimization: skip entirely when nothing needs rebuilding.
    if not shouldBuildIndicator(domain, candle_type):
        dlog.d("Ignore rebuilding shouldBuildIndicator")
        return

    # Locking. NOTE(review): get-then-set is not atomic, so two workers could
    # still race past this check — confirm whether dredis exposes SETNX.
    lockkey = "downloadAndBuildindicator_{}_{}".format(domain,
                                                       candle_type.value)
    if dredis.get(lockkey) == "1":
        dlog.d("downloadAndBuildIndicator locked for key {}".format(lockkey))
        raise Exception("downloadAndBuildIndicator is progress")
    dredis.set(lockkey, "1")

    try:
        dlog.d("downloadAndBuildIndicator start")

        dlog.d("downloadAndBuildIndicator download start")
        ret_value, download_data = ddownload.download(domain,
                                                      interval=candle_type)
        if ret_value is False:
            dlog.d("Download fails")
            return {
                "status": "error",
                "msg": "something goes wrong",
                "out": None
            }

        dlog.d("downloadAndBuildIndicator building start")
        processed_df = dindicator.buildTechnicalIndicators(
            download_data, domain)

        # Deliberately NOT persisted to disk — redis is the only store here.

        dlog.d("downloadAndBuildIndicator: building indicator history map")
        # 4-d map for O(1) lookups: map[symbol][candle_type][offset][indicator],
        # e.g. map[REL][1d][-1][close].
        last15SlotIndicator = getLastNIndicatorInJson(domain, processed_df)
        indicator_history_key = "indicator_history_{}".format(domain)
        olddata = dredis.getPickle(indicator_history_key) or {}
        for key in last15SlotIndicator.keys():
            if key not in olddata:
                olddata[key] = {}
            olddata[key][candle_type.value] = last15SlotIndicator.get(key)
        dredis.setPickle(indicator_history_key, olddata)
        dlog.d(
            "downloadAndBuildIndicator: saved indicator history to {}".format(
                indicator_history_key))

        dlog.d("downloadAndBuildIndicator: saving to redis start")
        dredis.setPickle(
            "indicator_data_{}_{}".format(domain, candle_type.value), {
                'data': getLatestDataInJson(domain, processed_df),
                'timestamp': getCurTimeStr()
            })

        # Market-level data is derived from daily candles only.
        if candle_type == TCandleType.DAY_1:
            saveMarketDataFormDayDF(domain, download_data)

        # Stamp when this indicator set was last refreshed.
        dredis.set(
            "indicator_timestamp_{}_{}".format(domain, candle_type.value),
            getCurTimeStr())

        dlog.d("downloadAndBuildIndicator ends")
        return {
            "status": "success",
            "msg": "Completed snapshot pipeline",
            "out": None
        }
    except Exception as e:
        dlog.d("downloadAndBuildIndicator Exception happened")
        danalytics.reportException(e, "Exception in downloadAndBuildIndicator")
        dlog.ex(e)
        # FIX: bare raise preserves the original traceback (was ``raise e``).
        raise
    finally:
        # Single unlock point: ``finally`` runs on success, early return and
        # exception alike. The original also unlocked inside the success path
        # and the except handler — both redundant and now removed.
        dredis.set(lockkey, "0")