def add_task():
    with servicer.db_fn() as db:
        params = {
            'id': uuid(),
            'user': openid,
            'url': 'http://example.com',
            'article_id': uuid(),
        }
        db.execute(
            'INSERT INTO tasks VALUES ($id, $user, $url, $article_id)',
            params)
def test_capture(servicer, ctx, login, pool, monkeypatch):
    class WorkerStub:
        def Crawl(self, req):
            # Simulate a worker that answers every URL except those
            # containing 'fail', which silently get no response.
            for url in req.urls:
                if 'fail' not in url:
                    yield wo.CrawlResponse(url=url)

    monkeypatch.setattr(servicer, 'worker_stub', lambda: WorkerStub())

    results = []
    canceled = False

    def inner():
        # Subscribe to the task stream and record every snapshot it emits.
        for tasks in servicer.GetActiveTasks(co.Empty(), ctx):
            if canceled:
                return
            results.append(tasks.tasks)

    pool.submit(inner)
    assert wait_until(lambda: len(results) == 1)

    servicer.Capture(
        sc.CaptureRequest(article_id=uuid(),
                          urls=['url1', 'url2', 'url3_fail', 'url4']),
        ctx)
    assert wait_until(lambda: len(results) == 6)

    # The sixth snapshot should contain only the failed URL's pending task.
    tasks = results[5]
    print(tasks)
    assert len(tasks) == 1
    assert 'fail' in tasks[0].url

    # Wake the stream one more time so `inner` sees `canceled` and exits.
    canceled = True
    sem = servicer.task_event.listeners.pop()
    sem.release()
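The assertions above poll through a wait_until helper that is not shown in this excerpt. A minimal sketch, assuming a wait_until(predicate) signature; the timeout and interval defaults here are purely illustrative:

import time

def wait_until(predicate, timeout=5.0, interval=0.01):
    # Poll `predicate` until it is truthy or `timeout` seconds elapse;
    # return its final value so callers can simply `assert wait_until(...)`.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        if predicate():
            return True
        time.sleep(interval)
    return predicate()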
def _async_action():
    successful_urls = set()
    for res in worker.Crawl(wo.CrawlRequest(urls=urls)):
        url = res.url
        task_id = tasks[url]
        content = res.content
        logging.info(f'Capture succeeded for {url} of type {content.type}')
        timestamp = unix_time()
        _id = uuid()
        ledger_key = ledger.add(content.hash)
        with self.db_fn() as db:
            # If the snapshot already exists (because this snapshot has
            # multiple pieces of data attached), ignore the duplicate insert.
            db.execute(
                "INSERT OR IGNORE INTO snapshots VALUES (?,?,?,?,?,?)",
                (_id, article_id, url, timestamp, False, None),
            )
            db.execute(
                "INSERT INTO data VALUES (?,?,?,?,?)",
                (_id, content.type, content.data.decode(), content.hash, ledger_key),
            )
            db.execute('DELETE FROM tasks WHERE id = ?', (task_id,))
        successful_urls.add(url)
        self.task_event.notify()

    # The worker has hung up; any URL without a response failed.
    for url, task_id in tasks.items():
        if url not in successful_urls:
            logging.info(f'Capture failed for {url}')
            self._add_notification(openid, f'Snapshot capture failed: {url}',
                                   is_error=True)
            with self.db_fn() as db:
                db.execute('DELETE FROM tasks WHERE id = ?', (task_id,))
            self.task_event.notify()
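_async_action closes over worker, urls, tasks, article_id, and openid, which only makes sense if it is defined inside the Capture handler and dispatched to a background executor. The following is a rough sketch of that surrounding wiring, offered purely as an assumption for context: self.pool and the co.Empty return type are guesses, while self.worker_stub(), self.task_event, and the tasks-table column order are taken from the code and tests above.

def Capture(self, request, context):
    openid = context.openid
    article_id = request.article_id
    urls = list(request.urls)
    worker = self.worker_stub()

    # Record one pending task per URL so GetActiveTasks can stream them.
    tasks = {}
    with self.db_fn() as db:
        for url in urls:
            task_id = uuid()
            tasks[url] = task_id
            db.execute('INSERT INTO tasks VALUES (?,?,?,?)',
                       (task_id, openid, url, article_id))
    self.task_event.notify()

    def _async_action():
        ...  # body as shown above

    # Crawl in the background so the RPC can return immediately (assumed executor).
    self.pool.submit(_async_action)
    return co.Empty()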
def _add_notification(self, openid, msg, is_error):
    with self.db_fn() as db:
        params = {
            'id': uuid(),
            'user': openid,
            'type': 1 if is_error else 0,
            'created_at': unix_time(),
            'has_read': 0,
            'content': msg,
        }
        db.execute(
            'INSERT INTO notifications VALUES '
            '($id, $user, $type, $created_at, $has_read, $content)',
            params)
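The $name placeholders above bind by key against the params dict (SQLite accepts $name alongside :name and ?). For reference, a hypothetical notifications schema consistent with that insert; the column names and constraints are assumptions, not the project's actual DDL:

# Hypothetical DDL matching the named insert above.
NOTIFICATIONS_DDL = """
CREATE TABLE IF NOT EXISTS notifications (
    id         TEXT PRIMARY KEY,
    user       TEXT NOT NULL,
    type       INTEGER NOT NULL,        -- 0 = info, 1 = error
    created_at INTEGER NOT NULL,        -- unix timestamp
    has_read   INTEGER NOT NULL DEFAULT 0,
    content    TEXT NOT NULL
)
"""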
def CreateArticle(self, request, context):
    openid = context.openid
    title = request.title
    _id = uuid()
    timestamp = unix_time()
    with self.db_fn() as db:
        db.execute(
            "INSERT INTO articles VALUES (?,?,?,?)",
            (_id, openid, timestamp, title)
        )
    return co.Article(id=_id, title=title, created_at=timestamp)
def test_multithread(auth):
    N = 32
    tokens = [utils.uuid() for _ in range(N)]
    openids = [utils.uuid() for _ in range(N)]

    threads = []
    for token in tokens:
        # Bind the loop variable as a default argument; a plain closure would
        # read `token` late, so several threads could see the same value.
        th = start_thread(lambda token=token: auth.request_login(token))
        threads.append(th)

    random.shuffle(tokens)
    for token, openid in zip(tokens, openids):
        th = start_thread(
            lambda token=token, openid=openid: auth.confirm_login(token, openid))
        threads.append(th)

    for th in threads:
        th.join()

    assert len(auth.pending_tokens) == 0
    assert len(auth.confirmed_tokens) == N
    for token, openid in zip(tokens, openids):
        assert auth.get_openid(token) == openid
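start_thread is another helper the tests assume but this excerpt does not define. A minimal sketch, assuming it simply launches the callable on a daemon thread and returns the Thread so the test can join it:

import threading

def start_thread(target):
    # Run `target` on a daemon thread and hand the Thread back for join().
    th = threading.Thread(target=target, daemon=True)
    th.start()
    return th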
def openid():
    return utils.uuid()
def token():
    return utils.uuid()