Example #1
def up2cloud(filename, abs_filepath, need_rm=True, pic_host=""):
    ckey = "up2cloud"
    thekey = filename
    rcli = RedisClient().get_cli()
    thetoken = rcli.get(ckey)
    if thetoken:
        thetoken = json.loads(thetoken)
    else:
        q = Auth(access_key, secret_key)
        # filename to store the object under after upload
        # generate the upload token; an expiration time etc. can be specified
        ttl = 3600
        thetoken = q.upload_token(g_bucket_name, None, ttl)
        rcli.set(ckey, json.dumps(thetoken), ex=ttl - 600)
    # local path of the file to upload
    ret, info = put_file(thetoken, thekey, abs_filepath)
    uri = ""
    if ret and ret["key"] == thekey and ret["hash"] == etag(abs_filepath):
        uri = json.loads(info.text_body)["key"]
    if not uri:
        g_stdlogging.error("[up2cloud]%s %s" % (ret, info))
        raise unknown_err
    # print(abs_filepath)
    if need_rm:
        os.remove(abs_filepath)
    if not pic_host:
        pic_host = app.config["PIC_HOST"]
    return pic_host + "/" + uri
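A minimal usage sketch for the helper above, assuming the surrounding module already defines access_key, secret_key, g_bucket_name and a Flask app with PIC_HOST configured (the filename and local path below are hypothetical):

url = up2cloud("avatar_123.png", "/tmp/avatar_123.png")
# returns pic_host + "/avatar_123.png"; the local file is removed after a
# successful upload unless need_rm=False is passed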
Example #2
def get_user_info_by_auth_redis():
    auth_key = get_auth_key()
    if not auth_key:
        return None
    cli = RedisClient().get_cli()
    # res = cli.get(auth_key)
    res = cli.get(g_auth_key_pre + auth_key)
    if not res:
        return None
    return pickle.loads(res)
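Example #2 only shows the read side of the auth cache. A sketch of the write side it presumes, with a hypothetical helper name and TTL (the real login code is not part of this example):

def cache_user_info(auth_key, user_info, ttl=86400):
    # hypothetical counterpart: pickle the user object under the same
    # g_auth_key_pre-prefixed key the reader above looks up
    cli = RedisClient().get_cli()
    cli.set(g_auth_key_pre + auth_key, pickle.dumps(user_info), ex=ttl)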
Example #3
        def wrapper(*args, **kwargs):
            # cache-key prefix: package path + class name (for non-class callables
            # this is the first argument's type, e.g. <class 'str'> or int) + method name
            arg_part = ""
            if len(args) >= 1:
                arg_part = str(args[0].__class__)
            key_pre = str(inspect.getmodule(
                func)) + ":" + arg_part + ":" + func.__name__ + ":" + str(ver)
            r_need_cache = kwargs.pop("r_need_cache", True)
            r_del_cache = kwargs.pop("r_del_cache", False)
            r_timeout = kwargs.pop("r_timeout", 0)
            # default values of keyword arguments cannot be read here; to include
            # them in the cache key they must be passed explicitly
            tmp_dict = {}
            if exclude_list:
                for key, val in kwargs.items():
                    if key not in exclude_list:
                        tmp_dict[key] = val
                kwargs = tmp_dict
            sorted_values = sorted(kwargs.items(), key=lambda val: val[0])
            kwdata = urlencode(sorted_values)
            # plain function: build the key from the positional arguments
            input_key = ""
            for item in args:
                tmp = str(item.__class__)
                if "." in tmp:
                    input_key += tmp + "|"
                else:
                    input_key += str(item) + "|"
            input_key += kwdata

            # if not tmp_args or tmp_args[0].__class__.__module__ == "builtins":
            #     input_key = "^^".join(tmp_args) + ":" + kwdata
            # # 实例或类方法
            # else:
            #     input_key = "^^".join(tmp_args[1:]) + ":" + kwdata
            if len(input_key) > 255:
                input_key = hashlib.md5(input_key.encode("utf-8")).hexdigest()
            key = key_pre + "|" + input_key
            # print(key)
            cli = RedisClient().get_cli()
            if r_del_cache:
                cli.delete(key)
            if r_need_cache:
                res = cli.get(key)
                if not res:
                    res = func(*args, **kwargs)
                    if r_timeout:
                        cli.set(key, pickle.dumps(res), ex=r_timeout)
                    else:
                        cli.set(key, pickle.dumps(res), ex=timeout)
                else:
                    res = pickle.loads(res)
            else:
                res = func(*args, **kwargs)
            return res
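Example #3 is only the inner wrapper of a caching decorator; func, ver, timeout and exclude_list come from an enclosing closure that is not shown. A sketch of that scaffolding under those assumptions (the decorator name redis_cache and the sample calls are hypothetical):

import functools

def redis_cache(ver=1, timeout=300, exclude_list=None):   # hypothetical name/signature
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            ...  # the wrapper body shown above goes here
        return wrapper
    return decorator

# decorated callers can then steer the cache per call via the popped kwargs:
# get_store(store_id, r_del_cache=True)    # drop the cached entry first
# get_store(store_id, r_need_cache=False)  # bypass the cache entirely
# get_store(store_id, r_timeout=60)        # override the default expiry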
Example #4
def freq_control_by_key(key, duration=600, max_times=10):
    cli = RedisClient().get_cli()
    now_times = cli.get(key)
    if not now_times:
        cli.incr(key)
        cli.expire(key, duration)
    else:
        now_times = int(now_times)
        if now_times >= max_times:
            return False
        else:
            cli.incr(key)
    return True
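A usage sketch for the counter-based limiter above (the key format and error object are hypothetical). Note that the read-then-incr is not atomic, so under heavy concurrency the count can overshoot max_times slightly:

key = "sms_limit:%s" % phone_number
if not freq_control_by_key(key, duration=600, max_times=10):
    raise too_freq_err   # hypothetical: reject once the per-key budget is spent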
Example #5
    def __init__(self,
                 flag,
                 ver=4,
                 id_name="id",
                 ex=300,
                 need_cache=True,
                 db=None):
        if db:
            self.db = db
        else:
            self.db = get_db()
        self.rcli = RedisClient().get_cli()
        self.flag = flag
        self.ver = ver
        self.id_name = id_name
        self.need_cache = need_cache
        self.ex = ex
Example #6
def freq_control_coupon_by_key(key, coupon, duration=600, max_size=10):
    cli = RedisClient().get_cli()
    coupon_size = 0
    try:
        coupon_size = cli.scard(key)
    except Exception as e:
        logging.error("[freq_control_coupon_by_key]%s" % e)
    if not coupon_size:
        cli.sadd(key, coupon)
        cli.expire(key, duration)
    elif coupon_size >= max_size:
        return False
    else:
        cli.sadd(key, coupon)
    return True
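A usage sketch for the set-based variant above, which caps how many distinct coupons a key can collect within the window (sadd is idempotent, so re-adding the same coupon does not consume budget); the key format and response shape are hypothetical:

key = "coupon_limit:%s" % user_id
if not freq_control_coupon_by_key(key, coupon_code, duration=600, max_size=10):
    return {"code": -1, "msg": "coupon limit reached"}   # hypothetical response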
Example #7
def up2cloud_video_part(filename, abs_filepath, need_rm=True, pic_host=""):
    ckey = "up2cloud_video_part1"
    thekey = filename
    rcli = RedisClient().get_cli()
    thetoken = rcli.get(ckey)
    # NOTE: the cached-token branch is short-circuited by the "False", so a
    # fresh token carrying the current policy is generated on every call
    if False and thetoken:
        thetoken = json.loads(thetoken)
    else:
        q = Auth(access_key, secret_key)
        # filename to store the object under after upload
        # generate the upload token; an expiration time and policy can be specified
        ttl = 3600
        pat = urlsafe_base64_encode("videopart-$(count)")
        policy = {
            # "persistentOps": "vsample/jpg/ss/1/t/4/s/480x360/interval/1/pattern/dmZyYW1lLSQoY291bnQp",
            "persistentOps":
            "segment/mp4/segtime/5/pattern/%s" % pat,
            "persistentNotifyUrl":
            "https://balimiao.cn/front/qiniu/video_part_callback"
        }
        thetoken = q.upload_token(g_bucket_name, None, ttl, policy)
        rcli.set(ckey, json.dumps(thetoken), ex=ttl - 600)
    # local path of the file to upload
    ret, info = put_file(thetoken, thekey, abs_filepath)
    uri = ""
    if ret and ret["key"] == thekey and ret["hash"] == etag(abs_filepath):
        uri = json.loads(info.text_body)["key"]
    if not uri:
        g_stdlogging.error("[up2cloud_video]%s %s" % (ret, info))
        raise unknown_err
    # print(abs_filepath)
    if need_rm:
        os.remove(abs_filepath)
    if not pic_host:
        pic_host = app.config["PIC_HOST"]
    return pic_host + "/" + uri
Example #8
class QueryUtil():
    def __init__(self,
                 flag,
                 ver=4,
                 id_name="id",
                 ex=300,
                 need_cache=True,
                 db=None):
        if db:
            self.db = db
        else:
            self.db = get_db()
        self.rcli = RedisClient().get_cli()
        self.flag = flag
        self.ver = ver
        self.id_name = id_name
        self.need_cache = need_cache
        self.ex = ex

    def make_key(self, tid):
        return str(self.flag) + "|" + str(self.ver) + "|" + str(
            self.id_name) + "|" + str(tid)

    def make_1n_key(self, tid):
        return str(self.flag) + "|1n|" + str(self.ver) + "|" + str(
            self.id_name) + "|" + str(tid)

    def del_by_idlist(self, id_list):
        pipe = self.rcli.pipeline(transaction=False)
        for tid in id_list:
            id_key = self.make_key(tid)
            pipe.delete(id_key)
        pipe.execute()

    def del_1n_by_idlist(self, id_list):
        pipe = self.rcli.pipeline(transaction=False)
        for tid in id_list:
            id_key = self.make_1n_key(tid)
            pipe.delete(id_key)
        pipe.execute()

    '''
    Only handles the 1:1 case (one row per id).
    id_list : [1,12,222]
    sql_tmpl : "select * from t_store where id in (%s)"
    '''

    def mget_by_idlist(self, id_list, sql_tmpl):
        final_dict = {}
        all_list = []
        cache_dict = {}
        db_dict = {}
        id_list = get_uniq_list(id_list)
        if id_list:
            for item in id_list:
                thetype = type(item)
                break
            id_key_list = []
            for tid in id_list:
                id_key = self.make_key(tid)
                id_key_list.append(id_key)
            res_list = [None for i in id_list]
            if self.need_cache:
                res_list = self.rcli.mget(id_key_list)
            db_id_list = []
            for key, item in enumerate(res_list):
                if not item:
                    db_id_list.append(id_list[key])
                else:
                    cache_dict[id_list[key]] = pickle.loads(item)
            if db_id_list:
                db_id_list_str = ",".join([str(i) for i in db_id_list])
                sql = sql_tmpl % (db_id_list_str, )
                db_dict = self.mget_by_sql(sql)["dict"]
                if self.need_cache:
                    pipe = self.rcli.pipeline(transaction=False)
                    for key, info in db_dict.items():
                        pipe.set(self.make_key(key),
                                 pickle.dumps(info),
                                 ex=self.ex)
                    pipe.execute()
            all_tables = merge_two_dicts(cache_dict, db_dict)
            for key, item in all_tables.items():
                final_dict[thetype(key)] = item
            for tid in id_list:
                if tid in final_dict:
                    all_list.append(final_dict[tid])
        return {"dict": final_dict, "list": all_list}

    '''
    Handles the 1:n case (multiple rows per id).
    id_list : [1,12,222]
    sql_tmpl : "select * from t_a_b_rel where aid in (%s)"
    '''

    def mget_by_idlist_1n(self, id_list, sql_tmpl):
        final_dict = {}
        all_list = []
        cache_dict = {}
        db_dict = {}
        id_list = get_uniq_list(id_list)
        if id_list:
            for item in id_list:
                thetype = type(item)
                break
            id_key_list = []
            for tid in id_list:
                id_key = self.make_1n_key(tid)
                id_key_list.append(id_key)
            res_list = [None for i in id_list]
            if self.need_cache:
                res_list = self.rcli.mget(id_key_list)
            db_id_list = []
            for key, item in enumerate(res_list):
                if not item:
                    db_id_list.append(id_list[key])
                else:
                    cache_dict[id_list[key]] = pickle.loads(item)
            # print(db_id_list,cache_dict)
            cont_dict = {}
            if db_id_list:
                db_id_list_str = ",".join([str(i) for i in db_id_list])
                sql = sql_tmpl % (db_id_list_str, )
                mid_res = self.mget_by_sql(sql)
                db_info_list = mid_res["list"]
                for item in db_info_list:
                    if item[self.id_name] in cont_dict:
                        cont_dict[item[self.id_name]].append(item)
                    else:
                        cont_dict[item[self.id_name]] = [item]
                if self.need_cache:
                    pipe = self.rcli.pipeline(transaction=False)
                    for key, info in cont_dict.items():
                        pipe.set(self.make_1n_key(key),
                                 pickle.dumps(info),
                                 ex=self.ex)
                    pipe.execute()
            all_tables = merge_two_dicts(cache_dict, cont_dict)
            for key, item in all_tables.items():
                final_dict[thetype(key)] = item
            for tid in id_list:
                if tid in final_dict:
                    all_list.append(final_dict[tid])
        return {"dict": final_dict, "list": all_list}

    def mget_by_sql(self, sql):
        tdict = {}
        tlist = []
        cursor = self.db.execute_sql(sql)
        for row in cursor.fetchall():
            table = dict()
            for column, value in zip(cursor.description, row):
                column_name = column[0]
                if type(value) == datetime.datetime:
                    value = get_ts8dt(value)
                table[column_name] = value
            tdict[table[self.id_name]] = table
            tlist.append(table)
        return {"dict": tdict, "list": tlist}
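A usage sketch for QueryUtil (the ids are hypothetical; t_store is the table from the docstring above). Rows are cached per id under flag|ver|id_name|id for ex seconds, and mget_by_idlist only hits the database for ids missing from Redis:

store_util = QueryUtil("t_store", ver=4, id_name="id", ex=300)
res = store_util.mget_by_idlist(
    [1, 12, 222], "select * from t_store where id in (%s)")
stores_by_id = res["dict"]   # {1: {...}, 12: {...}, ...}
stores = res["list"]         # rows ordered like the deduplicated input id_list
# after writing to t_store, invalidate the affected per-id cache entries:
store_util.del_by_idlist([1, 12])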