def load_initialized_vectors(self):
    """Load entity/relation names and their embedding vectors from redis.

    Populates ``self.entities``, ``self.entity_vectors``, ``self.relations``
    and ``self.relation_vectors``. Vector values are stored in redis as raw
    bytes of dtype ``self.np_dtype`` under ``<name>_v`` keys.
    """
    self.entities = np.array(loads(self.redis_con.get('entities')))
    entity_vectors = iter_mget(self.redis_con,
                               [f'{entity}_v' for entity in self.entities])
    # np.frombuffer replaces the deprecated np.fromstring for decoding raw
    # bytes; np.stack copies, so the result stays writable.
    self.entity_vectors = np.stack(
        [np.frombuffer(v, dtype=self.np_dtype) for v in entity_vectors])
    self.relations = np.array(loads(self.redis_con.get('relations')))
    relation_vectors = iter_mget(
        self.redis_con, [f'{relation}_v' for relation in self.relations])
    self.relation_vectors = np.stack(
        [np.frombuffer(v, dtype=self.np_dtype) for v in relation_vectors])
async def get_video(self, data_params: dict, path: str = None):
    """Fetch one subsection's video metadata and either print its URL or download it.

    data_params: must contain "subsectionId" and "chapterId".
    path: target directory name; defaults to the subsection name from the API.
    """
    # Query the training-video endpoint for this chapter/subsection pair.
    data = loads(await post(Urls.queryStudentTrianList, headers=self.headers, data={
        "subsectionId": data_params["subsectionId"],
        "chapterId": data_params["chapterId"]
    }))["data"]
    if self.is_url:
        # URL-only mode: print the first video link instead of downloading.
        print(data["videoData"]["videoList"][0]["value"])
    else:
        # is_mkdir presumably creates the directory and returns its path — TODO confirm.
        path = is_mkdir(path or data["subsectionName"])
        data = data["videoData"]
        file_name = data["name"]
        # Bound concurrent downloads with the shared semaphore.
        async with self.semaphore:
            logger.download("开始下载:" + file_name)  # "starting download"
            try:
                await save(await get(data["videoList"][0]["value"]),
                           f"{path}/{file_name}")
            except TimeoutError:
                # Retry by recursing into this method again.
                # NOTE(review): retries are unbounded on repeated timeouts.
                logger.timeout(file_name + "链接超时,正在尝试重新下载...")  # "timed out, retrying"
                await self.get_video(data_params, path)
            else:
                logger.completed("下载完成:" + file_name)  # "download finished"
                logger.completed("所在目录:" + f"{self.save_path}\\{path}")  # "saved under"
def basic_logout(self, logout_url):
    """POST to the logout endpoint and return (status, parsed JSON body)."""
    resp = requests.post(logout_url, data=None, headers=self.headers,
                         verify=False)
    status = {'status': resp.status_code}
    parsed = loads(resp.text)
    return status, parsed
def get_task_result(self, task_id):
    """Return the stored result for *task_id* without removing it.

    Raises NoSuchIdExists when the id is not present in the queue.
    """
    if task_id not in self:
        raise NoSuchIdExists("{0}".format(task_id))
    item_key = self.queue_item_key(task_id)
    raw = self._conn.get(item_key)
    return loads(raw)
def basic_login(self):
    """POST a password-grant OAuth login and return (status, parsed body).

    Returns:
        status: dict with the HTTP status code under the 'status' key.
        content: the JSON-decoded response body.
    """
    # urllib.urlencode was removed in Python 3; urllib.parse.urlencode is the
    # drop-in replacement for form-encoding the request body.
    from urllib.parse import urlencode
    body = urlencode({"grant_type": "password",
                      "email": self.email,
                      "password": self.password,
                      "client_secret": self.client_secret,
                      "client_key": self.client_key})
    # NOTE(review): verify=False disables TLS verification — confirm intentional.
    response = requests.post(self.oauth_url, data=body,
                             headers=self.headers, verify=False)
    status = {}
    status['status'] = response.status_code
    content = loads(response.text)
    return status, content
def do_snapshot_download(self, args):
    '''Download a SNAPSHOT'''
    snapshot = args.snapshot
    body = self.client.snapshot_download(snapshot=snapshot)
    # The response decodes to a list; the last element is discarded before the
    # existence check — presumably a trailing status/sentinel entry. TODO confirm
    # against the server's snapshot_download response format.
    result = utils.loads(body)
    result.pop()
    if len(result) == 0:
        # Nothing left after dropping the sentinel: the snapshot is unknown.
        print('Snapshot %s does not exist.' % snapshot)
        return -1
    # The next element from the tail is the URI to fetch.
    uri=result.pop()
    utils.download(uri)
def load_initialized_vectors(self):
    """Load entities, relations and per-relation cluster data from redis.

    Populates ``self.entities``, ``self.entity_vectors``, ``self.relations``,
    ``self.embedding_clusters``, ``self.weights_clusters`` and
    ``self.size_clusters``. Vectors are stored as raw bytes under
    ``<name>_v`` / ``<name>_cv`` / ``<name>_wv`` / ``<name>_s`` keys.
    """
    self.entities = np.array(loads(self.redis_con.get('entities')))
    entity_vectors = iter_mget(self.redis_con,
                               [f'{entity}_v' for entity in self.entities])
    # np.frombuffer replaces the deprecated np.fromstring for decoding raw
    # bytes; np.stack copies, so the stacked arrays stay writable.
    self.entity_vectors = np.stack(
        [np.frombuffer(v, dtype=self.np_dtype) for v in entity_vectors])
    self.relations = np.array(loads(self.redis_con.get('relations')))
    embedding_clusters = iter_mget(
        self.redis_con, [f'{relation}_cv' for relation in self.relations])
    self.embedding_clusters = np.stack([
        np.frombuffer(v, dtype=self.np_dtype) for v in embedding_clusters
    ])
    weights_clusters = iter_mget(
        self.redis_con, [f'{relation}_wv' for relation in self.relations])
    self.weights_clusters = np.stack(
        [np.frombuffer(v, dtype=self.np_dtype) for v in weights_clusters])
    size_clusters = iter_mget(
        self.redis_con, [f'{relation}_s' for relation in self.relations])
    # Cluster sizes are serialized as int32, independent of self.np_dtype.
    self.size_clusters = np.stack(
        [np.frombuffer(v, dtype=np.int32) for v in size_clusters])
async def get_subsection(self, data: dict):
    """Fetch one chapter's subsection list and download every subsection's video concurrently.

    data: a chapter record; only its "id" field is read here.
    """
    params = {"chapterId": data["id"], **self.params}
    data = loads(await get(Urls.querySubsectionListByChapterId,
                           headers=self.headers,
                           params=params))["data"]["subsectionList"]
    # One download task per subsection; wait() returns when all tasks finish
    # or self.timeout elapses (pending tasks are then left running).
    await wait([
        self.loop.create_task(
            self.get_video({
                **params,
                "subsectionId": i["id"]
            })) for i in data
    ], timeout=self.timeout)
async def get_chapter(self):
    """Fetch the chapter list from queryChapterBytermId and fan out per-chapter downloads."""
    try:
        print("\n下载路径:", self.save_path)
        data = loads(await get(Urls.queryChapterBytermId,
                               headers=self.headers,
                               params=self.params))["data"]["chapterList"]
        # One task per chapter; wait() returns after all finish or self.timeout.
        await wait(
            [self.loop.create_task(self.get_subsection(i)) for i in data],
            timeout=self.timeout)
    except ServerDisconnectedError:
        # Server dropped the connection — most likely an expired login cookie.
        logger.error("Cookie值可能已失效,请重新登录!")
def get_task_result_nostore(self, task_id):
    """Return the result for *task_id*, removing both the id and its result.

    The id queue is rebuilt without *task_id*, then the stored result is
    fetched and its key deleted. Raises NoSuchIdExists for unknown ids.
    """
    if task_id not in self:
        raise NoSuchIdExists("{0}".format(task_id))
    # Rebuild the queue with every id except the one being consumed.
    remaining = [existing for existing in self.all_task_ids
                 if existing != task_id]
    self._conn.delete(self.queue_key())
    for existing in remaining:
        self._conn.rpush(self.queue_key(), existing)
    # Fetch the payload, then drop it from storage.
    key = self.queue_item_key(task_id)
    payload = self._conn.get(key)
    self._conn.delete(key)
    return loads(payload)
def execute_request(self, path, method = 'GET', **params):
    """Issue an HTTP request against the API and return (status, parsed body).

    GET/DELETE send params in the query string; other verbs send them as a
    UTF-8 form-encoded body. A bearer token header is added when available.
    """
    encoded = urlencode(params)
    if method in ('GET', 'DELETE'):
        target = "?".join([path, encoded])
        payload = None
    else:
        target = path
        payload = encoded.encode("utf8")
    url = "/".join([self.api_url, target])
    if self.token:
        self.headers['Authorization'] = "bearer:%s" % self.token
    # Dispatch to requests.get / requests.post / ... by verb name.
    http_call = getattr(requests, method.lower())
    response = http_call(url, data=payload, headers=self.headers,
                         verify=False)
    status = {'status': response.status_code}
    return status, loads(response.text)
def test(self):
    """Exercise RingBuffer: size validation, put/get ordering, clearing, and the backing task queue."""
    # Size bounds: 0 and 1000 are rejected, 999 is the largest accepted value.
    with pytest.raises(ValueError):
        utils.RingBuffer(tag="tag", size=0)
    with pytest.raises(ValueError):
        utils.RingBuffer(tag="tag", size=1000)
    utils.RingBuffer(tag="tag", size=999)
    buf = utils.RingBuffer(tag="tag")
    assert list(buf.get()) == []  # starts empty
    self.execute_tasks("default")
    yield buf.clear()
    buf.put()  # put() with no args adds nothing
    self.execute_tasks("default")
    assert list(buf.get()) == []
    self.execute_tasks("default")
    buf.put(0)
    self.execute_tasks("default")
    buf.put(1, 2)
    self.execute_tasks("default")
    # Items come back newest-first.
    assert list(buf.get()) == [2, 1, 0]
    self.execute_tasks("default")
    queue = taskqueue.Queue("ringbuffer")
    # A size-1 buffer over the same tag keeps only the newest item.
    buf = utils.RingBuffer(tag="tag", size=1)
    assert list(buf.get()) == [2]
    # All three payloads are still on the backing queue until executed.
    assert queue.fetch_statistics().tasks == 3
    assert [
        utils.loads(task.payload) for task in queue.lease_tasks(0, 1000)
    ] == [0, 1, 2]
    self.execute_tasks("default")
    assert queue.fetch_statistics().tasks == 1
    assert list(buf.get()) == [2]
    self.execute_tasks("default")
    yield buf.clear()
    assert queue.fetch_statistics().tasks == 0
    assert list(buf.get()) == []
    self.execute_tasks("default")
    # Drop the memoized get() entry so later runs see fresh state.
    cache = buf._get._cache
    key = buf._get._key
    args = (buf.queue_name, buf.tag, buf.size)
    yield utils.memoize_clear(cache, key, args, use_memcache=True)
def test(self):
    """Exercise RingBuffer: size validation, put/get ordering, clearing, and the backing task queue.

    NOTE(review): an essentially identical test body appears twice in this
    file — consider deduplicating.
    """
    # Size bounds: 0 and 1000 are rejected, 999 is the largest accepted value.
    with pytest.raises(ValueError):
        utils.RingBuffer(tag="tag", size=0)
    with pytest.raises(ValueError):
        utils.RingBuffer(tag="tag", size=1000)
    utils.RingBuffer(tag="tag", size=999)
    buf = utils.RingBuffer(tag="tag")
    assert list(buf.get()) == []  # starts empty
    self.execute_tasks("default")
    yield buf.clear()
    buf.put()  # put() with no args adds nothing
    self.execute_tasks("default")
    assert list(buf.get()) == []
    self.execute_tasks("default")
    buf.put(0)
    self.execute_tasks("default")
    buf.put(1, 2)
    self.execute_tasks("default")
    # Items come back newest-first.
    assert list(buf.get()) == [2, 1, 0]
    self.execute_tasks("default")
    queue = taskqueue.Queue("ringbuffer")
    # A size-1 buffer over the same tag keeps only the newest item.
    buf = utils.RingBuffer(tag="tag", size=1)
    assert list(buf.get()) == [2]
    # All three payloads are still on the backing queue until executed.
    assert queue.fetch_statistics().tasks == 3
    assert [utils.loads(task.payload)
            for task in queue.lease_tasks(0, 1000)] == [0, 1, 2]
    self.execute_tasks("default")
    assert queue.fetch_statistics().tasks == 1
    assert list(buf.get()) == [2]
    self.execute_tasks("default")
    yield buf.clear()
    assert queue.fetch_statistics().tasks == 0
    assert list(buf.get()) == []
    self.execute_tasks("default")
    # Drop the memoized get() entry so later runs see fresh state.
    cache = buf._get._cache
    key = buf._get._key
    args = (buf.queue_name, buf.tag, buf.size)
    yield utils.memoize_clear(cache, key, args, use_memcache=True)
def JSON(self):
    """Read the config file and return its parsed contents.

    Uses a context manager so the file handle is always closed — the
    original `open(...)` left the handle dangling until GC.
    """
    with open(self.config_file_name) as config_file:
        return utils.loads(config_file.read())
def dequeue(self):
    """Pop the oldest task id, fetch its stored result, and delete both.

    Returns a (task_id, deserialized_result) tuple.
    """
    popped_id = self._conn.lpop(self.queue_key())
    key = self.queue_item_key(popped_id)
    raw = self._conn.get(key)
    self._conn.delete(key)
    return popped_id, loads(raw)
def load(self):
    """Deserialize and return the object stored in this item's stream.

    NOTE(review): `loads` here appears to deserialize arbitrary bytes
    (pickle-like) — only use on trusted data.
    """
    # `load` was imported alongside `loads` but never used; dropped.
    from utils import loads
    with self.get_stream('rb') as stream:
        # TODO: Don't load the whole data into memory at once.
        unpickled = loads(stream.read())
        return unpickled
def vlans(self):
    """Return the VLAN ids from vlan_list() as a list of ints."""
    raw_entries = utils.cut(utils.loads(self.vlan_list()))
    return list(map(int, raw_entries))
def images(self):
    """Return the parsed image list."""
    raw = self.image_list()
    return utils.cut(utils.loads(raw))