import typing

import yfinance as yf
from pandas import DataFrame

# dredis / dlog / danalytics paths follow the myapp.core layout used elsewhere
# in this repo; getSymbolList and covert_to_period_from_duration are local
# helpers whose modules are not shown, so their imports are omitted here.
from myapp.core import danalytics, dlog, dredis
from myapp.core.dtypes import TCandleType


def download(domain="IN", interval: TCandleType = TCandleType.DAY_1, period=50) -> typing.Tuple[bool, DataFrame]:
    key = "download_progress_" + interval.value
    if dredis.get(key) == "1":
        danalytics.reportAction("ignore_duplicate_fetch_download_already_progress")
        return (False, None)
    data = None
    dredis.set(key, "1")
    try:
        ticker = [x for x in getSymbolList(domain).keys()]
        data = yf.download(tickers=ticker,
                           period=covert_to_period_from_duration(interval, period),
                           interval=interval.value,
                           group_by='ticker',
                           auto_adjust=False,
                           prepost=False,
                           threads=True,
                           proxy=None,
                           rounding=True)
    except Exception as e:
        dlog.ex(e)
        return (False, None)
    finally:
        dredis.set(key, "0")
    # Sometimes yfinance returns a duplicate last row, so drop it.
    data = data[~data.index.duplicated(keep='last')]
    return (True, data)
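
# covert_to_period_from_duration is not shown in this module. A minimal
# sketch of what it could look like, assuming it maps a candle interval plus
# a desired number of candles to a yfinance period string (the name prefix,
# per-interval minute values, and thresholds below are illustrative, not the
# actual implementation):
def _sketch_covert_to_period_from_duration(interval: TCandleType, period: int) -> str:
    # Rough minutes-per-candle for each supported interval (assumed values).
    minutes_per_candle = {
        "1m": 1, "5m": 5, "15m": 15, "1h": 60, "1d": 60 * 24,
    }.get(interval.value, 60 * 24)
    total_days = (minutes_per_candle * period) / (60 * 24)
    # Pick the smallest yfinance period string that covers the requested span.
    for days, label in [(1, "1d"), (5, "5d"), (30, "1mo"), (90, "3mo"),
                        (180, "6mo"), (365, "1y"), (730, "2y")]:
        if total_days <= days:
            return label
    return "max"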
def wrapper(*args, **kwargs):
    print(args)   # debug output
    print(kwargs)
    func_name = func.__name__
    cache_key_loading = "{}_loading".format(cache_key)
    cache_key_ts = "{}_ts".format(cache_key)
    # Serve from cache unless the caller explicitly bypasses it.
    if kwargs.get('ignore_cache') != True:
        cache = dredis.getPickle(cache_key, None)
        if cache:
            return cache
    # Check the global lock.
    if dredis.get(cache_key_loading) == "1":
        raise Exception("{} is locked by smart cache".format(func_name))
    # Lock.
    dredis.set(cache_key_loading, "1")
    # try/except so the lock below is always released.
    res = None
    try:
        # Execute and cache the result.
        res = func(*args, **kwargs)
        dredis.setPickle(cache_key, res)
        dredis.set(cache_key_ts, time.time())
    except Exception as e:
        dlog.ex(e, "exception happened while executing: {}".format(func_name))
    # Unlock.
    dredis.set(cache_key_loading, "0")
    return res
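
# This wrapper is the inner function of a decorator factory; the enclosing
# function is not shown. A minimal sketch of how it could be wired up,
# assuming a factory named smart_cache (the name is illustrative):
import functools

def smart_cache(cache_key: str):
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            ...  # body as above, closing over func and cache_key
        return wrapper
    return decorator

# Usage: the decorated function runs at most once concurrently, and its
# pickled result is served from redis on subsequent calls, e.g.
# @smart_cache("symbol_list_IN")
# def getSymbolList(domain): ...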
def getLastUpdatedTimeStamp(domain: str):
    result = {}
    for candle_type in SUPPORTED_CANDLE:
        result['{}-{}'.format(domain, candle_type.value)] = dredis.get(
            "indicator_timestamp_{}_{}".format(domain, candle_type.value),
            "Data not found")
    return result
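
# Example of the returned shape, assuming SUPPORTED_CANDLE contains the
# 1-day and 5-minute candle types (keys and values here are illustrative):
# getLastUpdatedTimeStamp("IN")
# -> {"IN-1d": "2021-01-04 09:30:00", "IN-5m": "Data not found"}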
def should_fetch_data(candle_type: TCandleType) -> bool:
    last_mem_ts = _last_update_ts.get(candle_type.value)
    # No in-memory timestamp yet: data was never loaded in this process.
    if last_mem_ts == 0 or last_mem_ts is None:
        return True
    # Refetch if redis has recorded a download newer than what this
    # process has in memory.
    last_redis_ts = int(
        dredis.get("download_start_{}_ts".format(candle_type.value), "0"))
    return last_redis_ts > int(last_mem_ts)
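
# Illustrative flow tying this check to the markers defined below (the
# actual call sites are assumed):
# a downloader process finishes and stamps redis,
#   mark_dataload_end(TCandleType.DAY_1)    # writes download_start_1d_ts = now
# a web process that loaded data earlier then sees the newer stamp:
#   should_fetch_data(TCandleType.DAY_1)    # -> True, time to reload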
def mayGetLatestStockData(domain: str, reload, sync: str):
    if reload == "1":
        dlog.d("taskDownloadLatestMarketData: submitting task")
        if sync == "1":
            tasks.taskDownloadLatestMarketData(domain)
        else:
            tasks.taskDownloadLatestMarketData.delay(domain)
        return
    last_update = dredis.get("market_ts_{}".format(domain), None)
    if last_update is None or last_update == 'None':
        dlog.d("No last update - submitting task")
        tasks.taskDownloadLatestMarketData.delay(domain)
    elif IfTimeIs5MinOld(last_update):
        dlog.d("data is 5 min old... submitting task")
        # Submission is currently disabled in this branch:
        # tasks.taskDownloadLatestMarketData.delay(domain)
    else:
        dlog.d("Data is already there")
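
# IfTimeIs5MinOld is not shown here. A minimal sketch, assuming last_update
# is a "%Y-%m-%d %H:%M:%S" string as written by getCurTimeStr (both the
# helper's logic and the timestamp format are assumptions):
from datetime import datetime, timedelta

def _sketch_IfTimeIs5MinOld(last_update: str) -> bool:
    ts = datetime.strptime(last_update, "%Y-%m-%d %H:%M:%S")
    return datetime.now() - ts > timedelta(minutes=5)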
def wrapper(*args, **kwargs):
    print(args)   # debug output
    print(kwargs)
    func_name = func.__name__
    cache_key_loading = "{}_loading".format(cache_key)
    if dredis.get(cache_key_loading) == "1":
        raise Exception("{} is already in progress".format(func_name))
    # Lock.
    dredis.set(cache_key_loading, "1")
    # try/except so the lock below is always released.
    res = None
    try:
        # Execute.
        res = func(*args, **kwargs)
    except Exception as e:
        dlog.ex(e, "exception happened while executing: {}".format(func_name))
    # Unlock.
    dredis.set(cache_key_loading, "0")
    return res
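
# This is the lock-only variant of the smart-cache wrapper above: same
# redis flag protocol, no result caching. A sketch of applying it, assuming
# a factory named run_once_lock (the name is illustrative):
# @run_once_lock("download_IN")
# def taskDownloadLatestMarketData(domain): ...
#
# Design note: a try/finally would release the lock even if the logging
# call itself raised, e.g.
#     try:
#         res = func(*args, **kwargs)
#     finally:
#         dredis.set(cache_key_loading, "0")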
import time
from typing import Dict

from myapp.core import dlog, dredis
from myapp.core.dtypes import TCandleType
from myapp.core.rootConfig import SUPPORTED_CANDLE

# Per-process snapshot of the last download timestamp for each candle type,
# seeded from redis at import time.
_last_update_ts: Dict[str, int] = {}
for x in SUPPORTED_CANDLE:
    _last_update_ts[x.value] = int(
        dredis.get("download_start_{}_ts".format(x.value), "0"))


def reportNAN(data):
    dlog.e("NAN found for the item: {}".format(data))


def mark_dataload_start(candle_type: TCandleType):
    dredis.set("download_start_{}".format(candle_type.value), "1")


def mark_dataload_end(candle_type: TCandleType):
    dredis.set("download_start_{}".format(candle_type.value), "0")
    dredis.set("download_start_{}_ts".format(candle_type.value),
               int(time.time()))


def is_dataload_start(candle_type: TCandleType):
    # True while a download marked by mark_dataload_start is still running.
    return dredis.get("download_start_{}".format(candle_type.value)) == "1"


def mark_last_data_update_ts(candle_type: TCandleType):
    # Record in process memory when this process last refreshed its data.
    # (Body reconstructed from how _last_update_ts is read in
    # should_fetch_data; assumed.)
    _last_update_ts[candle_type.value] = int(time.time())
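
# Illustrative lifecycle of these markers around one download run (the
# wiring is assumed; the actual call sites live in the task/pipeline code):
# mark_dataload_start(TCandleType.DAY_1)        # download_start_1d = "1"
# try:
#     ok, df = ddownload.download("IN", interval=TCandleType.DAY_1)
#     mark_last_data_update_ts(TCandleType.DAY_1)
# finally:
#     mark_dataload_end(TCandleType.DAY_1)      # download_start_1d = "0" + ts stamp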
def downloadAndBuildIndicator(domain, candle_type: TCandleType):
    # Optimization: skip the rebuild when nothing has changed.
    if not shouldBuildIndicator(domain, candle_type):
        dlog.d("Ignore rebuilding shouldBuildIndicator")
        return

    # Locking
    lockkey = "downloadAndBuildindicator_{}_{}".format(domain, candle_type.value)
    if dredis.get(lockkey) == "1":
        dlog.d("downloadAndBuildIndicator locked for key {}".format(lockkey))
        raise Exception("downloadAndBuildIndicator is in progress")
    dredis.set(lockkey, "1")
    try:
        dlog.d("downloadAndBuildIndicator start")
        dlog.d("downloadAndBuildIndicator download start")
        ret_value, download_data = ddownload.download(domain, interval=candle_type)
        if ret_value is False:
            dlog.d("Download fails")
            return {"status": "error", "msg": "something went wrong", "out": None}

        dlog.d("downloadAndBuildIndicator building start")
        processed_df = dindicator.buildTechnicalIndicators(download_data, domain)

        # DO NOT STORE AS FILE
        # dlog.d("downloadAndBuildIndicator: saving to storage start")
        # path_to_store = dstorage.get_default_path_for_candle(candle_type)
        # dstorage.store_data_to_disk(processed_df, path_to_store)

        dlog.d("downloadAndBuildIndicator: building indicator history map")
        # Build an indicator map for O(1) lookups. This is a 4-d map:
        # map[SYMBOL][candle][slot][field], e.g. map["REL"]["1d"][-1]["close"].
        last15SlotIndicator = getLastNIndicatorInJson(domain, processed_df)
        indicator_history_key = "indicator_history_{}".format(domain)
        olddata = dredis.getPickle(indicator_history_key)
        if not olddata:
            olddata = {}
        for key in last15SlotIndicator.keys():
            if key not in olddata:
                olddata[key] = {}
            olddata[key][candle_type.value] = last15SlotIndicator.get(key)
        dredis.setPickle(indicator_history_key, olddata)
        dlog.d("downloadAndBuildIndicator: saved indicator history to {}".format(
            indicator_history_key))

        dlog.d("downloadAndBuildIndicator: saving to redis start")
        dredis.setPickle(
            "indicator_data_{}_{}".format(domain, candle_type.value),
            {'data': getLatestDataInJson(domain, processed_df),
             'timestamp': getCurTimeStr()})

        # Update market data.
        if candle_type == TCandleType.DAY_1:
            saveMarketDataFormDayDF(domain, download_data)

        # Set the timestamp key.
        dredis.set("indicator_timestamp_{}_{}".format(domain, candle_type.value),
                   getCurTimeStr())

        dlog.d("downloadAndBuildIndicator ends")
        return {"status": "success", "msg": "Completed snapshot pipeline", "out": None}
    except Exception as e:
        dlog.d("downloadAndBuildIndicator Exception happened")
        danalytics.reportException(e, "Exception in downloadAndBuildIndicator")
        dlog.ex(e)
        raise e
    finally:
        # Always release the lock: on success, early return, or exception.
        dredis.set(lockkey, "0")
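
# Example invocation and the redis keys it populates (key names taken from
# the function body above; the "IN"/DAY_1 arguments are illustrative):
# downloadAndBuildIndicator("IN", TCandleType.DAY_1)
#   indicator_history_IN       -> pickled map[SYMBOL][candle][slot][field]
#   indicator_data_IN_1d       -> pickled {'data': ..., 'timestamp': ...}
#   indicator_timestamp_IN_1d  -> human-readable build time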