async def authenticate(self, request):
    """Starlette authentication backend entry point.

    Protects ``/api`` routes. Returns ``None`` (anonymous) for non-API
    paths, the login endpoint and CORS preflight requests; otherwise
    returns ``(AuthCredentials, User)`` or raises ``AuthenticationError``.

    Two credential sources:
      * HTTP requests: JWT from the ``X-Auth-Pacs`` header (query-string
        ``token`` as fallback). If JWT validation fails, the raw value is
        retried as a shared-file key, which only grants access to that
        file's endpoints and ``/api/ws_token``.
      * WebSocket requests: JWT from the query string only.
    """
    path = request.url.path
    if not path.startswith('/api'):
        return
    if path == '/api/login':
        return
    # CORS preflight carries no credentials.
    if request.scope.get('method') == 'OPTIONS':
        return
    data = None
    if request.url.scheme != 'ws':
        auth = request.headers.get('X-Auth-Pacs')
        if not auth:
            auth = request.query_params.get('token')
        if not auth:
            raise AuthenticationError('Invalid auth')
        credentials = auth
        try:
            data = jwt.decode(credentials, config['secret'],
                              algorithms=['HS256'])
            async with get_conn() as conn:
                active = await Users(conn).is_active(data['id'])
                if not active:
                    # NOTE(review): this is raised inside the try block, so
                    # the broad `except` below catches it and falls through
                    # to the shared-key path — confirm that is intended.
                    raise AuthenticationError('Deactivated user')
        except Exception as e:
            # try share file key
            async with get_conn() as conn:
                file_id = await SharedFiles(conn).check(credentials)
                if file_id and (path.startswith(f'/api/files/{file_id}')
                                or path.startswith(f'/api/ws_token')):
                    # Share keys act as a pseudo-user id with no admin rights.
                    data = {'id': credentials, 'admin': False}
                else:
                    raise AuthenticationError('Invalid auth')
    else:
        # WebSocket handshake: headers are unavailable to browser clients,
        # so the token must come via the query string.
        token = request.query_params.get('token')
        try:
            data = jwt.decode(token, config['secret'], algorithms=['HS256'])
        except Exception as e:
            raise AuthenticationError('Invalid auth')
        data = {'id': data['id'], 'admin': data['admin']}
    if not data:
        raise AuthenticationError('Invalid auth')
    return AuthCredentials(["authenticated"]), User(data)
async def get(self, request):
    """Export the metadata of the requested files as a CSV download.

    Expects a ``?ids=1,2,3`` query parameter. Columns are the union of all
    meta keys, sorted for a stable order; missing values are blank.

    Fixes: ``set([])`` → ``set()``; ``list``+``sort`` → ``sorted``; the CSV
    file is now opened with ``newline=''`` as required by the csv module
    (prevents blank rows on platforms with CRLF line endings).
    """
    ids = [int(i) for i in request.query_params['ids'].split(',')]
    columns = set()
    rows = []
    async with get_conn() as conn:
        master = await Replica(conn).master()
        for file_id in ids:
            file = await ReplicaFiles(conn).get_file_from_replica(
                master['id'], file_id)
            columns.update(file['meta'].keys())
            rows.append(file['meta'])
    # Stable, deterministic column order so repeated exports are comparable.
    columns = sorted(columns)
    tmp_csv = f'/tmp/{uuid4()}.csv'
    with open(tmp_csv, 'w', newline='') as csvfile:
        csvwriter = csv.writer(csvfile)
        csvwriter.writerow(columns)
        for r in rows:
            csvwriter.writerow([r.get(c, '') for c in columns])
    return FileResponse(tmp_csv)
async def post(self, request):
    """Handle a DICOM upload: parse, dedupe by hash, store on the master.

    Returns an empty JSON object on success, or an API error when no
    master replica is configured.
    """
    form = await request.form()
    upload = form['file']
    fobj = upload.file
    async with get_conn() as conn:
        async with conn.transaction():
            master = await Replica(conn).master()
            if not master:
                return api_error('No master set')
            # Parse before hashing, preserving the original call order on
            # the shared file object.
            meta = parse_dcm(fobj)
            digest = hash_file(fobj)
            record = {
                'name': os.path.basename(upload.filename),
                'master': master['id'],
                'hash': digest,
            }
            record.update(meta)
            stored = await Files(conn).insert_or_select(record)
            storage = await Storage.get(master)
            copied = await storage.copy(fobj, stored)
            await ReplicaFiles(conn).add(
                master['id'],
                [{'id': stored['id'], **copied}],
            )
    return UJSONResponse({})
async def get_file_by_id(request):
    """Resolve the ``id`` path parameter to a file row.

    Raises HTTP 404 when the file is unknown or flagged as deleted.
    """
    file_id = int(request.path_params['id'])
    async with get_conn() as conn:
        file = await Files(conn).get_extra(file_id)
        if file and not file['deleted']:
            return file
    raise HTTPException(status_code=404)
async def post(self, request):
    """Create a time-limited share key for a file and return it."""
    file_id = get_id(request)
    payload = await request.json()
    async with get_conn() as conn:
        share_key = await SharedFiles(conn).share(file_id, payload['duration'])
    return UJSONResponse({'key': share_key})
async def sync():
    """Run the replication loop until interrupted.

    Sets up the DB, attaches a LISTEN handler for ``events``, then loops:
    run ``do_sync``, log any failure (to stdout and, best-effort, to the
    DB log), sleep one second. On KeyboardInterrupt the listener
    connection is closed and teardown runs before the loop exits.

    Fix: the original called the blocking ``time.sleep(1)`` inside an
    ``async def``, stalling the event loop (and the LISTEN handler) for
    the whole second; replaced with ``await asyncio.sleep(1)``.
    """
    global listener_conn, work
    import asyncio  # local import: only needed for the non-blocking sleep
    await setup()
    listener_conn = await create_conn()
    await listener_conn.add_listener('events', db_event)
    while True:
        work = True
        try:
            await do_sync()
        except Exception:
            print(traceback.format_exc())
            # Best effort: also record the failure in the DB log, but never
            # let logging itself kill the loop.
            try:
                async with get_conn() as conn:
                    await Log(conn).add(traceback.format_exc())
            except Exception:
                print(traceback.format_exc())
        try:
            await asyncio.sleep(1)
        except KeyboardInterrupt:
            if listener_conn:
                await listener_conn.close()
            await teardown()
            break
async def get(self, request):
    """Admin-only: return a page of log entries as JSON."""
    is_admin(request)
    offset = request.path_params.get('offset')
    limit = request.path_params.get('limit')
    async with get_conn() as conn:
        entries = await Log(conn).get_logs(offset=offset, limit=limit)
    return UJSONResponse({'data': [dict(entry) for entry in entries]})
async def post(self, request):
    """Admin-only: reset a user's password and return the new one."""
    is_admin(request)
    payload = await request.json()
    async with get_conn() as conn:
        new_password = await Users(conn).new_pswd(payload['id'])
    return UJSONResponse({'password': new_password})
async def post(self, request):
    """Admin-only: deactivate the user named in the JSON body."""
    is_admin(request)
    payload = await request.json()
    async with get_conn() as conn:
        await Users(conn).deactivate(payload['id'])
    return UJSONResponse({})
async def delete(self, request):
    """Admin-only: remove the replica named by the ``id`` path parameter."""
    is_admin(request)
    target = int(request.path_params['id'])
    async with get_conn() as conn:
        await Replica(conn).delete(target)
    return UJSONResponse({})
def addAnalyseCount(uid):
    """Increment the analyse counter on the user's url task, if one exists.

    Looks up the first Urltask row for *uid*, bumps its counter, and
    commits through the connection wrapper's safeAction.
    """
    db = get_conn()
    sess = db.Session()
    task = sess.query(Urltask).filter(Urltask.user_id == uid).first()
    print('增加分析次数', task)
    if task:
        print(task.analyse_count, ' ')
        task.analyse_count += 1
        print(task.analyse_count)
    db.safeAction(sess)
async def index(replica):
    """Walk a replica's storage and reconcile what it finds with the DB.

    Marks the replica ``indexing``, iterates every object reported by the
    storage backend, ensures each has a hash and a ``files`` row (creating
    rows only on the master), and records the object in ``replica_files``.
    When nothing is left to sync and indexing was not interrupted, the
    replica is marked ``ok``. The module-global ``work`` flag allows the
    sync driver to interrupt a long index run.
    """
    global work
    replica_id = replica['id']
    async with get_conn() as conn:
        await Replica(conn).update_status(replica_id, 'indexing')
        storage = await Storage.get(replica)
        indexing_interrupted = False
        async for d in storage.index():
            if not work:
                indexing_interrupted = True
                break
            loc = None
            # Ensure a content hash: fetch the object locally and hash it
            # if the backend did not supply one.
            if not d.get('hash'):
                loc = await storage.fetch(d)
            if not d.get('hash'):
                d['hash'] = hash_file(loc)
            f = await Files(conn).get(d)
            if not f:
                # Unknown file: only the master may create new DB rows;
                # non-master replicas skip it.
                if not replica['master']:
                    continue
                if not loc:
                    loc = await storage.fetch(d)
                try:
                    dcm_data = parse_dcm(loc)
                except Exception as e:
                    # Unparseable as DICOM — ignore the object entirely.
                    continue
                d.update(dcm_data)
                d['master'] = replica['id']
                f = await Files(conn).add(d)
            d['id'] = f['id']
            # 'meta' is DB-only payload; drop it before replica bookkeeping.
            try:
                del d['meta']
            except KeyError:
                pass
            if replica['master']:
                await ReplicaFiles(conn).add(
                    replica_id,
                    [d],
                )
            else:
                await ReplicaFiles(conn).index(replica_id, d)
        files = await ReplicaFiles(conn).get_for_sync(replica)
        if len(files) == 0 and not indexing_interrupted:
            await Replica(conn).update_status(replica_id, 'ok')
async def post(self, request):
    """Persist per-user viewer tool state for a file; echo the payload."""
    file_id = get_id(request)
    payload = await request.json()
    if 'tools_state' in payload:
        async with get_conn() as conn:
            await Files(conn).update_tools_state(
                file_id,
                request.user.id,
                payload['tools_state'],
            )
    return UJSONResponse(payload)
async def post(self, request):
    """Change the authenticated user's password.

    Returns an empty JSON object, or {'error': ...} with HTTP 400 when the
    domain layer rejects the new password.
    """
    payload = await request.json()
    async with get_conn() as conn:
        try:
            await Users(conn).change_password(request.user, payload['password'])
        except ApiException as e:
            return UJSONResponse({'error': str(e)}, status_code=400)
    return UJSONResponse({})
async def post(self, request):
    """Admin-only: create a user; respond with its generated password."""
    is_admin(request)
    payload = await request.json()
    async with get_conn() as conn:
        created = await Users(conn).add_user(payload['username'], payload['admin'])
    return UJSONResponse({
        'password': created['password'],
        'username': payload['username'],
    })
async def get(self, request):
    """Admin-only: list users, optionally filtered by username substring."""
    is_admin(request)
    name_filter = request.path_params.get('q')
    offset = request.path_params.get('offset')
    limit = request.path_params.get('limit')
    async with get_conn() as conn:
        users = await Users(conn).get_users(
            offset=offset, limit=limit, username=name_filter)
    return UJSONResponse({'data': [Users.to_json(u) for u in users]})
async def reindex_main():
    """Rebuild the search index from scratch over every file row."""
    await setup(sync_db=True)
    await reset_index()
    async with get_conn() as conn:
        for row in await Files(conn).get_all():
            await index_file(row)
    await teardown()
async def post(self, request):
    """Admin-only: update a replica — promote to master and/or set delay."""
    is_admin(request)
    payload = await request.json()
    replica_id = int(request.path_params['id'])
    async with get_conn() as conn:
        if 'master' in payload:
            await Replica(conn).set_master(replica_id)
        if 'delay' in payload:
            await Replica(conn).update_delay(replica_id, payload['delay'])
    return UJSONResponse({})
async def get(self, request):
    """Serve a file's bytes from the master replica's storage.

    Raises HTTP 404 when the id is missing/falsy or the file is not known
    to the master replica.

    Fix: the original constructed the storage backend before checking
    whether the file exists; the 404 check is hoisted so missing files
    never touch the storage layer.
    """
    file_id = get_id(request)
    if not file_id:
        raise HTTPException(status_code=404)
    async with get_conn() as conn:
        master = await Replica(conn).master()
        file = await ReplicaFiles(conn).get_file_from_replica(
            master['id'], file_id)
        if not file:
            raise HTTPException(status_code=404)
        storage = await Storage.get(master)
        return await storage.serve(file)
async def delete(self, request):
    """Admin-only: remove a file from master storage and mark it deleted.

    Runs inside a transaction; returns an API error if no master is set.
    """
    is_admin(request)
    async with get_conn() as conn:
        async with conn.transaction():
            master = await Replica(conn).master()
            if not master:
                return api_error('No master set')
            file = await get_file_by_id(request)
            storage = await Storage.get(master)
            await storage.delete(file)
            await Files(conn).delete(file['id'], master['id'])
    return UJSONResponse({})
async def post(self, request):
    """Authenticate a user; return id, admin flag and a signed JWT.

    Responds with {'error': ...} and HTTP 400 on bad credentials.
    """
    payload = await request.json()
    async with get_conn() as conn:
        try:
            user = await Users(conn).login(payload['username'],
                                           payload['password'])
        except ApiException as e:
            return UJSONResponse({'error': str(e)}, status_code=400)
    return UJSONResponse({
        'id': user['id'],
        'admin': user['admin'],
        'token': gen_token(user),
    })
async def post(self, request):
    """Admin-only: register a new replica.

    The very first replica automatically becomes the master; afterwards
    the new replica is linked to the existing master's file set.
    """
    is_admin(request)
    payload = await request.json()
    async with get_conn() as conn:
        async with conn.transaction():
            replica = Replica(conn)
            new_id = await replica.add(payload['type'], payload)
            master = await replica.master()
            if master:
                master_id = master['id']
            else:
                # No master yet: promote the replica we just created.
                master_id = new_id
                await replica.set_master(new_id)
            await ReplicaFiles(conn).add_replica(new_id, master_id)
    return UJSONResponse({'id': new_id})
async def store(ds, data):
    """Store an incoming DICOM dataset on the master replica.

    Lazily initialises the app on first call. Returns True on success;
    on any failure the traceback is printed, the error is recorded in the
    DB log, and False is returned.
    """
    global initialized
    if not initialized:
        await setup()
        initialized = True
    async with get_conn() as conn:
        try:
            meta = get_meta(ds)
            async with conn.transaction():
                master = await Replica(conn).master()
                record = {
                    'name': str(uuid.uuid4()) + '.dcm',
                    'master': master['id'],
                    'hash': hash_file(data),
                }
                record.update(meta)
                f = await Files(conn).insert_or_select(record)
                storage = await Storage.get(master)
                copied = await storage.copy(data, f)
                await ReplicaFiles(conn).add(
                    master['id'],
                    [{'id': f['id'], **copied}],
                )
        except Exception as e:
            print(traceback.format_exc())
            await Log(conn).add(str(e))
            return False
    return True
async def get(self, request):
    """Bundle the files named in ``?ids=...`` into a zip and return it.

    Each entry's archive name is ``patient_studyid_series_name``.

    Fix: the original built parts as ``str(file['study_id']) or 'empty'``
    — ``str(None)`` is the truthy string ``'None'``, so the ``'empty'``
    fallback was unreachable for missing values. The fallback now applies
    to the raw (falsy) value before stringification.
    """
    ids = [int(i) for i in request.query_params['ids'].split(',')]
    files = []
    async with get_conn() as conn:
        master = await Replica(conn).master()
        storage = await Storage.get(master)
        for file_id in ids:
            file = await ReplicaFiles(conn).get_file_from_replica(
                master['id'], file_id)
            file['tmp'] = await storage.fetch(file)
            file['arcname'] = '_'.join([
                str(file['patient_id']),
                str(file['study_id'] or 'empty'),
                str(file['series_number'] or 'empty'),
                file['name'],
            ])
            files.append(file)
    zipname = f'/tmp/{uuid4()}.zip'
    await BackgroundTask(zip_files, files, zipname)()
    return FileResponse(zipname)
def crawling(self, url, uid):
    """Celery task: crawl a douyin feed, reporting progress via task state.

    Streams posts from ``Douyin.getPost``, pushing a PROGRESS state update
    per page, then clears the per-user cache entry and returns a summary
    dict.

    Fixes: ``nickname`` was referenced at return time but only assigned
    inside the loop, raising NameError when the feed yielded nothing —
    it is now pre-initialised; locals no longer shadow the ``sum`` and
    ``id`` builtins.
    """
    conn = get_conn()
    print('启动爬取任务: ')
    douyin = Douyin(conn)
    total = 0
    step = 1
    data = []
    nickname = ''  # fallback when the feed yields no items
    for item in douyin.getPost(url):
        info, nickname = handleItem(item)
        data.append(item)
        cur = len(item.get('aweme_list', []))
        total += cur
        self.update_state(state='PROGRESS',
                          meta={
                              'result': info,
                              'status': '爬取中...',
                              'end': 0,
                              'id': step,
                              'cur': cur,
                              'nickname': nickname
                          })
        step += 1
    result = '''
    共爬取{}条数据
    '''.format(total)
    print('爬取完成,清除该任务', uid)
    cache.delete(uid)
    return {
        'result': [],
        'status': '爬取结束!!!' + result,
        'end': 1,
        'id': step,
        'cur': 0,
        'data': data,
        'nickname': nickname
    }
async def get_patient_by_id(request):
    """Fetch extended patient data for the ``id`` path parameter."""
    patient_id = get_id(request)
    async with get_conn() as conn:
        patient = await Patient(conn).get_extra(patient_id)
    return patient
async def do_sync(): global work # index unindexed files async with get_conn() as conn: files = await Files(conn).unindexed() for f in files: if not work: return await index_file(f) replicas = {} master = {} async with get_conn() as conn: data = await Replica(conn).get_all() for d in data: if d['master']: master = dict(d) replicas[d['id']] = dict(d) for r in replicas.values(): storage = await Storage.get(r) r['storage'] = storage if r['master']: master['storage'] = storage for r in replicas.values(): if not work: return if r['status'] == 'indexing': await index(r) if r['master']: continue offset = 0 while True: async with get_conn() as conn: data = await ReplicaFiles(conn).get_for_sync(r, offset) if len(data): await Replica(conn).update_status(r['id'], 'syncing') for d in data: if not work: return if d['status'] == Status.deleted: rf = await ReplicaFiles(conn).get_file_from_replica( r['id'], d['file_id'], ) await r['storage'].delete(rf) await ReplicaFiles(conn).delete(r['id'], d['file_id']) else: rfm = await ReplicaFiles(conn).get_file_from_replica( master['id'], d['file_id'], ) local_loc = await master['storage'].fetch(rfm) ret = await r['storage'].copy(local_loc, rfm) await ReplicaFiles(conn).index(r['id'], { 'id': d['file_id'], **ret }) if len(data) < 1000: if len(data): async with get_conn() as conn: await Replica(conn).update_status(r['id'], 'ok') break offset += 1000
async def get(self, request):
    """Admin-only: list every configured replica."""
    is_admin(request)
    async with get_conn() as conn:
        all_replicas = await Replica(conn).get_all()
    return UJSONResponse({'data': all_replicas})
def get_newrank():
    """Run the NewRank ranking job against the shared DB connection."""
    NewRank(get_conn()).run()
import random from db.conn import get_conn import pickle import threading lock = threading.Lock() CACHE_CONFIG = { # try 'filesystem' if you don't want to setup redis 'CACHE_TYPE': 'redis', 'CACHE_REDIS_URL': REDIS_URL, } cache = Cache() cache.init_app(server, config=CACHE_CONFIG) # server.db = Mymysql() server.db = get_conn() #stars, posts, users, comments, urltasks def random_time(len): return int(random.random()*len) MAX_EXPIRES = 6*30*24*60*60 @cache.memoize(timeout=random_time(300)) def global_store_rows(table): # simulate expensive query print('正在更新数量数据...') # engine = create_engine(MYSQL_URL, pool_recycle=2400, pool_size=20, max_overflow=10) engine = server.db.engine if table == 'urltasks':