def scrape(symbol, start_str='1 day ago UTC', end_str=None):
    """Fetch 5-minute klines for *symbol* from Binance and append them to the DB.

    Args:
        symbol: trading pair symbol, e.g. "BTCUSDT"; also used (lower-cased)
            as the destination table name.
        start_str: start of the window, in the format Binance accepts.
        end_str: optional end of the window.
    """
    klines = np.array(
        client.get_historical_klines(symbol,
                                     Client.KLINE_INTERVAL_5MINUTE,
                                     start_str=start_str,
                                     end_str=end_str,
                                     limit=1000))
    # BUG fix: the original only guarded np.delete and still attempted the
    # DataFrame build + DB insert on an empty result; bail out early instead.
    if len(klines) == 0:
        logger.warning(f"no klines returned for {symbol}, nothing inserted")
        return
    # remove everything besides Time+OHLC+Volume (drop columns 6..11)
    klines = np.delete(klines, np.s_[6:12], 1)
    # cast to dataframe
    df = np_array_to_ohlc_df(klines)
    # defining table name
    table_name = symbol.lower()
    # save to db; replace_into upserts rows that already exist
    df.to_sql(table_name,
              engine,
              if_exists='append',
              index=False,
              chunksize=500,
              dtype={
                  "ts": DateTime,
                  "open": Numeric,
                  "high": Numeric,
                  "low": Numeric,
                  "close": Numeric,
                  "volume": Numeric
              },
              method=replace_into)
    quantity = len(klines)
    logger.success(f"klines inserted into DB, quantity: {quantity}")
def dango():
    """Return every row of the 小团子酱杨雪 table wrapped in a Result.

    Returns:
        Result.success(rows) on success, Result.error(400, ...) on any failure.
    """
    try:
        data = fetch('SELECT * FROM 小团子酱杨雪')
        logger.success('查询 "小团子酱 杨雪" 完成')
        return Result.success(data)
    except Exception:
        # BUG fix: the failure path was logged with logger.success and used a
        # bare except; log at error level and catch only Exception.
        logger.error('查询 "小团子酱 杨雪" 失败')
        return Result.error(400, '获取数据失败')
def get_rontgen_data(id):
    """Fetch the Redis hash stored under *id*, or None when missing/disabled.

    Args:
        id: short code previously produced by get_rontgen_code.

    Returns:
        The stored mapping, or None when the id does not exist, the data is
        disabled (expired / used up), or Redis access fails.
    """
    if not id_is_exist(id):
        return None
    try:
        data = redis_client.hgetall(id)
        logger.success(f"<{id}>取出数据成功")
        # data_is_disable also bumps the use counter as a side effect
        if data_is_disable(id, data):
            logger.warning(f"<{id}>数据不可用")
            return None
        return data
    except Exception:
        # was a bare except; Redis errors or a malformed hash yield None
        return None
def search(self):
    """Run the full search pipeline for self.key and upload results to the DB.

    Creates the per-keyword table, walks every result page (collecting BVs and
    per-video data), then commits everything. All failures are logged; nothing
    is raised to the caller.
    """
    try:
        if self.create_database() == sql_config.CREATE_SUCCESS:
            pages = self.get_key_pages()
            logger.success(f'查询 "{self.key}" 共有 {pages} 页')
            for page in range(1, pages + 1):
                logger.success(f'正在查询 "{self.key}" :第 {page} 页')
                self.get_bv(page)
                self.get_data()
                logger.success(f'"{self.key}" 第{page}页数据查询完成')
            logger.success(f'获取 "{self.key}" 所有稿件信息成功')
            if self.commit_data():
                logger.success(f'搜索关键字 "{self.key}" 并上传到数据库成功')
            else:
                logger.error(f'上传 "{self.key}" 数据时失败')
    except Exception:
        # was a bare except (would also swallow KeyboardInterrupt/SystemExit)
        logger.error(f'搜索 "{self.key}" 失败')
def get_aip(API_KEY, SECRET_KEY, tex, cuid='kagarise', ctp=1, lan='zh', spd=5,
            pit=5, vol=5, per=103, aue=3):
    """Synthesize *tex* with the Baidu AIP TTS API and upload the mp3 to COS.

    Args:
        API_KEY / SECRET_KEY: Baidu OAuth credentials.
        tex: text to synthesize.
        cuid/ctp/lan/spd/pit/vol/per/aue: AIP voice parameters, passed through.

    Returns:
        The public COS URL of the uploaded mp3, or None on failure.
    """
    tok = get_access_token(client_id=API_KEY, client_secret=SECRET_KEY)
    try:
        # NOTE(review): this sends the *refresh_token* as the request token;
        # Baidu TTS normally expects 'access_token' — confirm against
        # get_access_token's return value before changing.
        access_token = tok['refresh_token']
        logger.success('获取token成功')
    except (TypeError, KeyError):
        access_token = None
        logger.error('获取token失败')
    params = {
        'tex': tex,
        'tok': access_token,
        'cuid': cuid,
        'ctp': ctp,
        'lan': lan,
        'spd': spd,
        'pit': pit,
        'vol': vol,
        'per': per,
        'aue': aue
    }
    result = requests.post(aip_api, params=params)
    # BUG fix: requests.post returns a Response, never a dict, so the old
    # `not isinstance(result, dict)` check was always true and JSON error
    # bodies were saved as .mp3 files. The API returns audio on success and a
    # JSON error object on failure, so branch on the Content-Type header.
    if 'json' not in result.headers.get('Content-Type', ''):
        file_path = f'source/aip/{int(time())}.mp3'
        with open(file_path, 'wb') as f:
            f.write(result.content)
        upload_local_file(bucket_name=cos_config['BucketName'],
                          local_file_path=file_path,
                          file_path=file_path)
        # remove the local copy once it has been uploaded
        if os.path.exists(file_path):
            os.remove(file_path)
        logger.success(f"请求aip:{tex}成功")
        return f'{cos_config["Prefix"]}{file_path}'
    else:
        logger.error(f"请求aip:{tex}失败")
        return None
def data_is_disable(id, data):
    """Record one use of code *id* and report whether it is now disabled.

    Side effect: writes the incremented use_times back to Redis.

    Args:
        id: the short code key in Redis.
        data: the hash previously read for *id*; expected keys: use_times,
            typ ('times'/'minute'/'hour'/'day'), create_time, num.

    Returns:
        True when the code must no longer be used (already used for a
        single-use code, past its time window, or the usage bump could not
        be persisted — fail closed).
    """
    new_data = data.copy()
    new_data['use_times'] = int(new_data['use_times']) + 1
    try:
        redis_client.hset(id, mapping=new_data)
    except Exception:
        # was a bare except; persisting the bump failed, so disable the code
        return True
    logger.success(f"<{id}>更新数据成功")
    typ = data['typ']
    if typ == 'times':
        # single-use code: any prior use disables it
        return int(data['use_times']) > 0
    # seconds per unit for the time-limited types
    unit_seconds = {'minute': 60, 'hour': 3600, 'day': 86400}
    if typ in unit_seconds:
        age = int(time()) - int(data['create_time'])
        return age > unit_seconds[typ] * int(data['num'])
    return False
def create_database(self):
    """Create the per-keyword result table (named after self.key) if absent.

    Returns:
        sql_config.CREATE_SUCCESS when the table was created,
        sql_config.TABLE_EXIST when it already existed,
        sql_config.CREATE_FAIL on any error.
    """
    try:
        table_name = self.key.replace(' ', '')
        # To re-run a search from scratch, a DROP could be enabled here:
        # sql = f'DROP TABLE IF EXISTS {table_name};'
        # commit(sql)
        # WARNING: table_name is interpolated directly into SQL (injection
        # risk), though no substantive information is returned.
        sql = f'SELECT TABLE_NAME FROM information_schema.TABLES WHERE TABLE_SCHEMA = "bilibili" AND TABLE_NAME = "{table_name}"'
        result = fetch(sql)
        if not result:
            logger.info('创建数据库成功')
            sql = f'''CREATE TABLE {table_name} (
                id INT NOT NULL AUTO_INCREMENT PRIMARY KEY COMMENT 'ID',
                url VARCHAR(255) NOT NULL COMMENT '链接',
                title VARCHAR(255) NOT NULL COMMENT '标题',
                date VARCHAR(255) NOT NULL COMMENT '发布日期',
                time VARCHAR(255) NOT NULL COMMENT '发布时间',
                view INT NOT NULL COMMENT '播放',
                danmu INT NOT NULL COMMENT '弹幕',
                love INT NOT NULL COMMENT '点赞',
                coin INT NOT NULL COMMENT '投币',
                collect INT NOT NULL COMMENT '收藏',
                share INT NOT NULL COMMENT '分享',
                info VARCHAR(255) NOT NULL COMMENT '简介',
                tags VARCHAR(255) NOT NULL COMMENT '标签',
                create_time TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间',
                update_time TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT '更新时间'
                )ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4;
                '''
            commit(sql)
            return sql_config.CREATE_SUCCESS
        else:
            logger.success('数据库已存在,无需创建')
            return sql_config.TABLE_EXIST
    except Exception:
        # was a bare except
        logger.error('创建数据库失败')
        return sql_config.CREATE_FAIL
def get_hand_painting(url):
    """Download the image at *url*, render a pencil-sketch version, upload it.

    The sketch is produced by lighting the grey-level gradient field from a
    fixed direction (classic relief-shading trick).

    Args:
        url: source image URL.

    Returns:
        Public COS URL of the generated jpg, or None on any failure.
    """
    try:
        logger.success(f'start handle url:{url}')
        img = requests.get(url)
        img = BytesIO(img.content)
        a = np.asarray(Image.open(img).convert('L')).astype('float')
        depth = 10.  # relief depth (0-100)
        grad = np.gradient(a)  # gradient of the grey-scale image
        grad_x, grad_y = grad  # horizontal / vertical components
        grad_x = grad_x * depth / 100.
        grad_y = grad_y * depth / 100.
        A = np.sqrt(grad_x**2 + grad_y**2 + 1.)
        uni_x = grad_x / A
        uni_y = grad_y / A
        uni_z = 1. / A
        vec_el = np.pi / 2.2  # elevation angle of the light source (radians)
        vec_az = np.pi / 4.  # azimuth angle of the light source (radians)
        dx = np.cos(vec_el) * np.cos(vec_az)  # light influence on the x axis
        dy = np.cos(vec_el) * np.sin(vec_az)  # light influence on the y axis
        dz = np.sin(vec_el)  # light influence on the z axis
        b = 255 * (dx * uni_x + dy * uni_y + dz * uni_z)  # normalized lighting
        b = b.clip(0, 255)
        im = Image.fromarray(b.astype('uint8'))  # rebuild the image
        file_path = f'source/hand_painting/{int(time())}.jpg'
        im.save(file_path)
        upload_local_file(bucket_name=cos_config['BucketName'],
                          local_file_path=file_path,
                          file_path=file_path)
        # remove the local copy once uploaded
        if os.path.exists(file_path):
            os.remove(file_path)
        img_url = f'{cos_config["Prefix"]}{file_path}'
        logger.success(f'finish handle url:{url}\n img_url is {img_url}')
        return img_url
    except Exception:
        # was a bare except; bad URL, non-image payload or upload failure
        return None
def get_yourls(url):
    """Shorten *url* through the YOURLS API.

    Args:
        url: target URL; an http:// scheme is prepended when missing.

    Returns:
        The decoded JSON response dict (contains 'shorturl'), or None on error.
    """
    if not url.startswith(('http://', 'https://')):
        url = "http://" + url
    api = "http://yourls.zzuli.love/yourls-api.php"
    timestamp = str(int(time.time()))
    secret = "755abd5c4d"  # YOURLS signature token
    signature = hashlib.md5((timestamp + secret).encode('utf8')).hexdigest()
    params = {
        'timestamp': timestamp,
        'signature': signature,
        'action': "shorturl",
        'format': "json",
        'url': url
    }
    try:
        # SECURITY fix: the response body was parsed with eval(), which
        # executes arbitrary expressions supplied by the remote server.
        # Parse it as JSON instead.
        data = requests.get(api, params=params).json()
        logger.success(f'{url}被处理为{data["shorturl"]}')
        return data
    except Exception:
        logger.error(f"短链接请求错误:{url}")
        return None
def get_rontgen_code(tex, num, typ, pwd):
    """Create a fresh short code and persist its payload in Redis.

    Args:
        tex: the text/content to store.
        num: quantity for the chosen limit type.
        typ: limit type ('times'/'minute'/'hour'/'day').
        pwd: optional password; stored as '' when None.

    Returns:
        The generated id string, or None when id generation or the Redis
        write fails.
    """
    id_length = 6
    # 'code' instead of 'id' so the builtin is not shadowed
    code = create_id(id_length)
    try:
        # re-roll until an unused id is found
        while id_is_exist(code):
            code = create_id(id_length)
    except Exception:
        # was a bare except
        return None
    logger.success(f"生成id<{code}>")
    data = {
        'tex': tex,
        'num': num,
        'typ': typ,
        'pwd': '' if pwd is None else pwd,
        'create_time': int(time()),
        'use_times': 0
    }
    try:
        redis_client.hset(code, mapping=data)
        logger.success(f"<{code}>存放数据成功")
        return code
    except Exception:
        # was a bare except
        return None