async def store_json_results_in_redis_and_s3(
    data: Union[
        Dict[str, set],
        Dict[str, str],
        List[
            Union[
                Dict[str, Union[Union[str, int], Any]],
                Dict[str, Union[Union[str, None, int], Any]],
            ]
        ],
        str,
        Dict[str, list],
    ],
    redis_key: str = None,
    redis_data_type: str = "str",
    s3_bucket: str = None,
    s3_key: str = None,
    json_encoder=None,
):
    """
    Stores data in Redis and S3, depending on configuration

    :param redis_data_type: "str" or "hash", depending on how we're storing data in Redis
    :param data: Python dictionary or list that will be encoded in JSON for storage
    :param redis_key: Redis Key to store data to
    :param s3_bucket: S3 bucket to store data
    :param s3_key: S3 key to store data
    :return:
    """
    last_updated_redis_key = config.get(
        "store_json_results_in_redis_and_s3.last_updated_redis_key",
        "STORE_JSON_RESULTS_IN_REDIS_AND_S3_LAST_UPDATED",
    )
    function = f"{__name__}.{sys._getframe().f_code.co_name}"
    last_updated = int(time.time())
    stats.count(
        f"{function}.called",
        tags={"redis_key": redis_key, "s3_bucket": s3_bucket, "s3_key": s3_key},
    )
    if redis_key:
        if redis_data_type == "str":
            # Strings are stored as-is; everything else is JSON-encoded first.
            if isinstance(data, str):
                red.set(redis_key, data)
            else:
                red.set(
                    redis_key, json.dumps(data, cls=SetEncoder, default=json_encoder)
                )
        elif redis_data_type == "hash":
            # Guard against an empty mapping: redis-py's hmset raises
            # DataError when given no fields, so skip the write instead.
            if data:
                red.hmset(redis_key, data)
        else:
            raise UnsupportedRedisDataType("Unsupported redis_data_type passed")
        # Track when this key was last refreshed in a shared "last updated" hash.
        red.hset(last_updated_redis_key, redis_key, last_updated)
    if s3_bucket and s3_key:
        data_for_s3 = {"last_updated": last_updated, "data": data}
        put_object(
            Bucket=s3_bucket,
            Key=s3_key,
            Body=json.dumps(
                data_for_s3, cls=SetEncoder, default=json_encoder, indent=2
            ).encode(),
        )
async def store_json_results_in_redis_and_s3(
    data: Union[
        Dict[str, set],
        Dict[str, str],
        List[
            Union[
                Dict[str, Union[Union[str, int], Any]],
                Dict[str, Union[Union[str, None, int], Any]],
            ]
        ],
        str,
        Dict[str, list],
    ],
    redis_key: str = None,
    redis_data_type: str = "str",
    s3_bucket: str = None,
    s3_key: str = None,
    json_encoder=None,
    s3_expires: int = None,
):
    """
    Persist data to Redis and/or S3, depending on which destinations the
    caller supplies.

    :param s3_expires: Epoch time integer for when the written S3 object should expire
    :param redis_data_type: "str" or "hash", depending on how we're storing data in Redis
    :param data: Python dictionary or list that will be encoded in JSON for storage
    :param redis_key: Redis Key to store data to
    :param s3_bucket: S3 bucket to store data
    :param s3_key: S3 key to store data
    :return:
    """
    timestamp_hash_key = config.get(
        "store_json_results_in_redis_and_s3.last_updated_redis_key",
        "STORE_JSON_RESULTS_IN_REDIS_AND_S3_LAST_UPDATED",
    )
    metric_prefix = f"{__name__}.{sys._getframe().f_code.co_name}"
    now = int(time.time())
    stats.count(
        f"{metric_prefix}.called",
        tags={"redis_key": redis_key, "s3_bucket": s3_bucket, "s3_key": s3_key},
    )

    # An S3 key without an explicit bucket falls back to the default bucket
    # from configuration, if one is defined.
    if s3_key and not s3_bucket:
        s3_bucket = config.get("consoleme_s3_bucket")

    if redis_key:
        if redis_data_type == "str":
            # Strings go in verbatim; anything else is JSON-encoded first.
            payload = (
                data
                if isinstance(data, str)
                else json.dumps(data, cls=SetEncoder, default=json_encoder)
            )
            red.set(redis_key, payload)
        elif redis_data_type == "hash":
            # Empty mappings are skipped — redis rejects a hash write with
            # no fields.
            if data:
                red.hmset(redis_key, data)
        else:
            raise UnsupportedRedisDataType("Unsupported redis_data_type passed")
        # Record when this key was last refreshed.
        red.hset(timestamp_hash_key, redis_key, now)

    if not (s3_bucket and s3_key):
        return

    extra_put_kwargs = {}
    if isinstance(s3_expires, int):
        extra_put_kwargs["Expires"] = datetime.utcfromtimestamp(s3_expires)
    body = json.dumps(
        {"last_updated": now, "data": data},
        cls=SetEncoder,
        default=json_encoder,
        indent=2,
    ).encode()
    # Keys ending in ".gz" are written gzip-compressed.
    if s3_key.endswith(".gz"):
        body = gzip.compress(body)
    put_object(Bucket=s3_bucket, Key=s3_key, Body=body, **extra_put_kwargs)