def test_search(self):
    """Search by product name and by keyword; each query should yield 5 hits."""
    by_name = jsons.loads(self.store_manager.search("Banana"))
    self.assertEqual(len(by_name.keys()), 5)
    by_keyword = jsons.loads(self.store_manager.search(key_words=["Fruits"]))
    self.assertEqual(len(by_keyword), 5)
def get_state(self):
    """Fetch the device's current info over the secure-passthrough protocol.

    Returns the ``result`` dict decrypted from the device's inner response.
    Raises via ``__validate_response`` when either the outer or the inner
    response indicates an error.
    """
    device_info_method = GetDeviceInfoMethod(None)
    logger.debug(f"Device info method: {jsons.dumps(device_info_method)}")
    # The inner request is encrypted before being wrapped in the passthrough envelope.
    dim_encrypted = self.tp_link_cipher.encrypt(
        jsons.dumps(device_info_method))
    logger.debug(f"Device info method encrypted: {dim_encrypted}")
    secure_passthrough_method = SecurePassthroughMethod(dim_encrypted)
    logger.debug(f"Secure passthrough method: {secure_passthrough_method}")
    # dumps+loads turns the method object into a plain dict for the HTTP body.
    request_body = jsons.loads(jsons.dumps(secure_passthrough_method))
    logger.debug(f"Request body: {request_body}")
    response = Http.make_post_cookie(f"{self.url}?token={self.token}",
                                     request_body,
                                     {'TP_SESSIONID': self.cookie_token})
    resp_dict: dict = response.json()
    logger.debug(f"Device responded with: {resp_dict}")
    # Validate the outer envelope first, then the decrypted inner payload.
    self.__validate_response(resp_dict)
    decrypted_inner_response = jsons.loads(
        self.tp_link_cipher.decrypt(resp_dict['result']['response']))
    logger.debug(f"Device inner response: {decrypted_inner_response}")
    self.__validate_response(decrypted_inner_response)
    return decrypted_inner_response['result']
def test_add_product_to_store(self):
    """Exercise add_product_to_store: missing store, fixture adds, permission check.

    Fix: removed two dead debug assignments (``x = 5``) left over from
    interactive debugging.
    """
    # Store id -1 does not exist.
    res = self.store_manager.add_product_to_store(-1, "moshe", "p", 1, "s",
                                                  "e", 10)
    res = jsons.loads(res)
    # NOTE(review): asserting no error for a nonexistent store looks inverted —
    # confirm the intended semantics of the 'error' flag.
    self.assertFalse(res['error'])
    # Add each fixture product to a freshly opened store.
    for product in self.products:
        self.test_open_store()
        res = self.store_manager.add_product_to_store(
            self.idx - 1, "moshe" + str(self.idx - 1), *product)
        res = jsons.loads(res)
        self.assertTrue(res['error'])
        # Check whether the product name landed in the store inventory.
        res = self.store_manager.get_store(self.idx -
                                           1).inventory.products.keys()
        self.assertNotIn(product[0], [*res])
    # Adding without owner permission.
    product = self.products[0]
    res = self.store_manager.add_product_to_store(
        self.idx - 1, "not moshe" + str(self.idx - 1), *product)
    res = jsons.loads(res)
    self.assertTrue(res)
def change_state(self, new_state: int, terminal_uuid: str):
    """Switch the device on (``new_state == 1``) or off via secure passthrough.

    Args:
        new_state: 1 to turn the device on, any other value to turn it off.
        terminal_uuid: identifier of the requesting terminal, echoed to the device.

    Raises via ``__validate_response`` when the device reports an error.
    """
    new_state_bool = True if new_state == 1 else False
    logger.debug(
        f"Will change state to {new_state_bool}, terminal uuid: {terminal_uuid}"
    )
    device_info_params = DeviceInfoParams()
    device_info_params.set_device_on(new_state_bool)
    logger.debug(f"Device info params: {jsons.dumps(device_info_params)}")
    device_info_method = SetDeviceInfoMethod(device_info_params)
    device_info_method.set_request_time_milis(time())
    device_info_method.set_terminal_uuid(terminal_uuid)
    logger.debug(f"Device info method: {jsons.dumps(device_info_method)}")
    # The inner request is encrypted before being wrapped in the passthrough envelope.
    dim_encrypted = self.tp_link_cipher.encrypt(
        jsons.dumps(device_info_method))
    logger.debug(f"Device info method encrypted: {dim_encrypted}")
    secure_passthrough_method = SecurePassthroughMethod(dim_encrypted)
    logger.debug(f"Secure passthrough method: {secure_passthrough_method}")
    # dumps+loads turns the method object into a plain dict for the HTTP body.
    request_body = jsons.loads(jsons.dumps(secure_passthrough_method))
    logger.debug(f"Request body: {request_body}")
    response = Http.make_post_cookie(f"{self.url}?token={self.token}",
                                     request_body,
                                     {'TP_SESSIONID': self.cookie_token})
    resp_dict: dict = response.json()
    logger.debug(f"Device responded with: {resp_dict}")
    # Validate the outer envelope first, then the decrypted inner payload.
    self.__validate_response(resp_dict)
    decrypted_inner_response = jsons.loads(
        self.tp_link_cipher.decrypt(resp_dict['result']['response']))
    logger.debug(f"Device inner response: {decrypted_inner_response}")
    self.__validate_response(decrypted_inner_response)
def test_exception_wrong_json(self):
    """A malformed document raises DecodeError carrying the source and no target."""
    bad_json = '{this aint no JSON!'
    with self.assertRaises(DecodeError):
        jsons.loads(bad_json)
    try:
        jsons.loads(bad_json)
    except DecodeError as err:
        self.assertEqual(None, err.target)
        self.assertEqual(bad_json, err.source)
def loadConfig():
    """Populate the global ``entradas``/``saidas``/``regras`` lists from config.json.

    Fix: the previous ``jsons.loads(jsons.dumps(x), Cls)`` serialized each
    already-parsed dict back to a JSON string only to re-parse it;
    ``jsons.load`` deserializes a plain dict into the target class directly.
    """
    global saidas, entradas, regras
    with open("config.json") as configJson:
        dadosConfig = json.load(configJson)
    for entrada in dadosConfig['entradas']:
        entradas.append(jsons.load(entrada, Variavel))
    for saida in dadosConfig['saidas']:
        saidas.append(jsons.load(saida, Variavel))
    for regra in dadosConfig['regras']:
        regras.append(jsons.load(regra, Regra))
def test_update_product(self):
    """Per store: updating a missing product fails, an existing one succeeds."""
    for store_id in self.store_manager.stores.keys():
        owner = "test_owner" + str(store_id)
        missing = jsons.loads(
            self.store_manager.update_product(store_id, owner,
                                              "not real product", "price",
                                              20))
        self.assertFalse(missing['ans'])
        updated = jsons.loads(
            self.store_manager.update_product(store_id, owner, "Apple",
                                              "price", 20))
        self.assertTrue(updated)
def run_jsons():
    """Demo round-tripping OrientedPoint structures through the ``jsons`` library.

    Encodes/decodes a glyph (list of points), a spline (list of glyphs), a
    mixed dict, and a LigatureInfo; then registers a custom point serializer
    (global ``jsons`` state) and repeats the LigatureInfo round trip.
    """
    glyph = [
        OrientedPoint(0, 2, 3),
        OrientedPoint(3, -1, 3),
        OrientedPoint(7, 0, 3.1415),
        OrientedPoint(2, 0.54, 3),
    ]
    glyph_encoded = jsons.dumps(glyph)
    print(f"\nglyph_encoded:\n{glyph_encoded}")
    glyph_decoded = jsons.loads(glyph_encoded, Glyph)
    print(f"\nglyph_decoded:\n{glyph_decoded}")
    # A spline is a list of glyphs (lists of points).
    spline = [
        glyph,
        [
            OrientedPoint(2, -1, 3),
            OrientedPoint(0.7436723, 0, 3.1511),
            OrientedPoint(math.e, 0.21, 3),
        ],
    ]
    spline_encoded = jsons.dumps(spline)
    print(f"\nspline_encoded:\n{spline_encoded}")
    spline_decoded = jsons.loads(spline_encoded, Spline)
    print(f"spline_decoded:\n{spline_decoded}")
    # Heterogeneous dict decoded against a Union value type.
    an_spline = {
        "source_file": "splne.txt",
        "total_points": sum(map(len, spline)),
        "spline": spline,
    }
    an_spline_encoded = jsons.dumps(an_spline, indent=4)
    print(f"\nan_spline_encoded:\n{an_spline_encoded}")
    an_spline_decoded = jsons.loads(an_spline_encoded, Dict[str, Union[Spline, Any]])
    print(f"an_spline_decoded:\n{an_spline_decoded}")
    li = LigatureInfo(glyph, spline)
    print(f"li: {li}")
    li_enc = jsons.dumps(li, indent=4)
    print(f"\nli_enc:\n{li_enc}")
    li_dec = jsons.loads(li_enc, cls=LigatureInfo)
    print(f"\nli_dec:\n{li_dec}")
    # Register a custom serializer for OrientedPoint — this mutates global
    # jsons state and affects every dumps() after this point.
    jsons.set_serializer(serializer_oriented_point, OrientedPoint)
    li = LigatureInfo(glyph, spline)
    print(f"\nCustom serializer_oriented_point li: {li}")
    li_enc = jsons.dumps(li, indent=4)
    print(f"\nli_enc:\n{li_enc}")
    li_dec = jsons.loads(li_enc, cls=LigatureInfo)
    print(f"\nli_dec:\n{li_dec}")
def populate_inputs_file():
    """Fetch the action list and dump each action's inputs via ``outputInputs``.

    Best-effort: an action whose inputs cannot be fetched or parsed is skipped.
    Fix: the bare ``except:`` also swallowed KeyboardInterrupt/SystemExit;
    narrowed to ``except Exception`` while keeping the best-effort semantics.
    """
    client = Client()
    response = client.get("/cc/info/actions")
    data = jsons.loads(response.content)["actions"]
    names = [i["name"] for i in data]
    for name in names:
        try:
            payload = client.post("/cc/getInputs/{}".format(name), {}).content
            outputInputs(jsons.loads(payload))
        except Exception:
            # Deliberate: one bad action must not abort the whole run.
            pass
def convert_file_version(path: str, file_name: str,
                         version_to_convert_to: str) -> None:
    """Load a convertible metadata file, convert it to the target version, save it.

    Files not listed in CONVERTABLE_METADATA_FILES are silently ignored.
    Raises ValueError on parse/deserialization failure and VersionError when
    the file's version is too old to convert.
    """
    if file_name not in CONVERTABLE_METADATA_FILES:
        return
    try:
        with open(path, 'r') as f:
            s = f.read().replace('\n', '')
        loaded_dict = jsons.loads(s)
    except jsons.exceptions.DecodeError:
        raise ValueError(
            f'Error when trying to load file: {path}.\n File contents:\n {s}')
    try:
        converted_dict: Dict[str, Any] = convert_dict(loaded_dict, file_name,
                                                      version_to_convert_to)
    except VersionError as ve:
        raise VersionError(
            f"The version of the downloaded models "
            f"cannot be lower than the version of the library ({version_to_convert_to}). "
            f"Please contact the development team if you see this error."
        ) from ve
    # 'metrics' files deserialize to LMTrainingMetrics; everything else to LMTrainingConfig.
    tp = LMTrainingMetrics if file_name == 'metrics' else LMTrainingConfig
    try:
        converted_config: Union[LMTrainingMetrics, LMTrainingConfig] = \
            load_config_or_metrics_form_dict(converted_dict, tp)
    except jsons.exceptions.DeserializationError as ex:
        raise ValueError(
            f"Could not deserialize: {converted_dict}\n Path {path}") from ex
    dump_to_file(converted_config, path)
def south_north_fund():
    """Build a UrlLink card summarizing south/north bound fund flow scraped from Sina."""
    url = "http://money.finance.sina.com.cn/quotes_service/api/jsonp.php/var%20liveDateTableList=/HK_MoneyFlow.getDayMoneyFlowOtherInfo"
    headers = {
        'User-Agent':
        'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.80 Safari/537.36'
    }
    params = {'ts': 2342354353, '_': CommonUtil.get_timestamp()}
    page = requests.get(url, params, headers=headers).text
    # The endpoint returns JSONP; extract the JSON payload from the wrapper.
    matches = re.findall("var liveDateTableList=\((.*?)\)", page)
    live_json = jsons.loads(matches[0])
    fund_link = "http://stock.finance.sina.com.cn/hkstock/view/money_flow.php"
    title = "北上资金:%s 南下资金:%s" % (live_json["north_inFlow_total"],
                                 live_json["source_inFlow_total"])
    # "daliyInflow" spelling matches the upstream API field names.
    description = "沪股通%s 深股通%s,港股通(沪)%s 港股通(深)%s" % (
        live_json["south_hk_sh"]["daliyInflow"],
        live_json["south_hk_sz"]["daliyInflow"],
        live_json["north_sh"]["daliyInflow"],
        live_json["north_sz"]["daliyInflow"])
    log.info(title, description)
    link_payload = UrlLinkPayload(description=description,
                                  title=title,
                                  url=fund_link,
                                  thumbnailUrl="")
    return UrlLink(payload=link_payload)
def put_existing_pet(self, pet: dict) -> Union[PetDto, None]:
    """PUT the pet payload; return the parsed PetDto on success, else None.

    Fix: the previous return annotation ``Type[PetDto]`` claimed the method
    returns the *class*; it actually returns an instance (or, implicitly,
    None on a non-2xx response).
    """
    url = self.base_url + self.add_or_update
    response = requests.put(url, json=pet)
    if response.ok:
        return jsons.loads(response.content,
                           PetDto,
                           key_transformer=jsons.KEY_TRANSFORMER_SNAKECASE)
    return None  # explicit: a failed request yields no DTO
def search():
    """Flask endpoint: search stores by the product name in the JSON request body."""
    payload = request.get_json()
    print(payload)
    raw_result = stores_manager.search(search_term=payload['product_name'])
    print(raw_result)
    return jsonify({'error': False, "data": jsons.loads(raw_result)})
def load_jsonl_file_and_build_lookup(
    data_jsonl: str,
    cls: Type[_TDatum],
    primary_key_getter: Callable[[_TDatum], _TPrimaryKey],
    secondary_key_getter: Callable[[_TDatum], _TSecondaryKey],
    unit: str = " items",
    verbose: bool = True,
) -> Dict[_TPrimaryKey, Dict[_TSecondaryKey, _TDatum]]:
    """Loads a jsonl file of serialized dataclass objects and returns the lookup
    with a primary key and a secondary key.

    Fix: removed the explicit ``primary_key not in data_lookup`` guard — it
    was redundant because ``defaultdict(dict)`` already creates the inner
    mapping on first access.
    """
    desc = f"Reading {cls} from {data_jsonl}" if verbose else None
    data_lookup: Dict[_TPrimaryKey, Dict[_TSecondaryKey, _TDatum]] = defaultdict(dict)
    with open(data_jsonl) as fp:
        for line in tqdm(fp,
                         desc=desc,
                         unit=unit,
                         dynamic_ncols=True,
                         disable=not verbose):
            datum = jsons.loads(line.strip(), cls)
            data_lookup[primary_key_getter(datum)][
                secondary_key_getter(datum)] = datum
    return data_lookup
def test_stores_details(self):
    """Stores description reports success and covers all five stores."""
    self.store_manager.add_purchase_product_policy(1, "test_owner1", 3, 8)
    payload = jsons.loads(self.store_manager.get_stores_description())
    self.assertTrue(payload['ans'])
    descriptions = payload['stores_description']
    self.assertEqual(len(descriptions.keys()), 5)
def on_rpc_request(ch, method, props, body):
    """Handle one RPC message: run the handler, publish the reply, then ack."""
    try:
        decoded_body = body.decode()
        args = jsons.loads(decoded_body)
        print({
            'action': 'handle_rmq_rpc',
            'event_name': event_name,
            'args': args,
            'exchange': exchange,
            'routing_key': queue,
            'correlation_id': props.correlation_id
        })
        result = rpc_handler(*args)
        # Serialize the handler result and send it to the caller's reply queue.
        reply = jsons.dumps(result)
        ch.basic_publish(exchange='',
                         routing_key=props.reply_to,
                         properties=pika.BasicProperties(
                             correlation_id=props.correlation_id),
                         body=reply.encode())
        print({
            'action': 'send_rmq_rpc_reply',
            'event_name': event_name,
            'args': args,
            'result': result,
            'exchange': exchange,
            'routing_key': queue,
            'correlation_id': props.correlation_id
        })
    finally:
        # Ack even on handler failure so the message is not redelivered forever.
        if not auto_ack:
            ch.basic_ack(delivery_tag=method.delivery_tag)
def login(self, username: str, login_username: str, password):
    """Verify credentials and log the user in.

    Returns a dict with ``error`` plus either the enriched ``user`` payload
    (managed store ids replaced with full descriptions) or an ``error_msg``.

    Fixes: removed leftover debug ``print`` calls that dumped credentials
    context and the full user object to stdout; flattened nesting with
    guard clauses.
    """
    # NOTE(review): stdlib ``Logger.log`` expects a numeric level as its first
    # argument — confirm this logger accepts a format string here.
    logger.log("user %s called login with login_username:%s", username,
               login_username)
    if not self.security.verify_password(login_username, password):
        return {
            'error': True,
            'error_msg': 'incorrect password. Try again.'
        }
    logged_in, data = self.user_manager.login(username, login_username)
    if logged_in is True:
        user = jsons.loads(data['data'])
        # Replace raw managed-store ids with their full descriptions.
        managed_stores = []
        for store in user['managed_stores']:
            store_description = self.stores_manager.get_store_description(
                store['store_id'])
            managed_stores.append(store_description)
        user['managed_stores'] = managed_stores
        return {'error': not logged_in, 'data': user}
    return {'error': True, 'error_msg': data['error_msg']}
def get_page_data(self, wikiid: str, page: str,
                  revision: Union[int, None] = None
                  ) -> Union[None, CachedRevision]:
    """Return the cached revision of ``page`` in wiki ``wikiid``.

    When ``revision`` is given, try that exact revision file first; otherwise
    (or if it is missing) fall back to the highest-numbered revision blob.
    Returns None when nothing is cached.

    Fix: ``revision`` was annotated plain ``int`` despite defaulting to None —
    now an explicit ``Union[int, None]``.
    """
    page = self.slugify(page)
    dir_name = path.join(self.data_directory, wikiid, page)
    file_content = None
    revision_file = path.join(dir_name, f"{revision}.json")
    # it's better to ask forgiveness than permission
    try:
        file_content = self.db.get_blob(
            revision_file) if revision else None
    except NotFound:
        pass
    try:
        if not file_content:
            # Fall back to the latest revision: list every blob and pick the
            # highest numeric name. Can be expensive with many revisions.
            files = self.db.list_blobs(dir_name, delimiter=None)
            if files:
                files.sort(
                    key=lambda f: int(''.join(filter(str.isdigit, f))))
                cached_file_name = path.join(dir_name, files.pop())
                file_content = self.db.get_blob(cached_file_name)
    except NotFound:
        pass
    if file_content:
        return jsons.loads(file_content, CachedRevision)
    return None
def test_add_visiable_discount(self):
    """Smoke test for adding a visible discount on store id ``self.idx + 1``.

    Fix: removed the dead debug assignment ``x = 5``.
    TODO(review): this test makes no assertion on the result — decide the
    expected outcome for this store id and assert it.
    """
    self.test_add_product_to_store()
    res = self.store_manager.add_visible_product_discount(
        self.idx + 1, "moshe", datetime.datetime(2018, 6, 1),
        datetime.datetime(2020, 5, 17))
    res = jsons.loads(res)  # at minimum, the response must be valid JSON
def _get_transformation_dict(version: str) -> Dict[str, str]:
    """Read and parse the forward-conversion mapping for ``version``."""
    jq_path = os.path.join(project_dir, 'converters', 'forward',
                           f'{version}.jq')
    with open(jq_path, 'r') as f:
        raw = f.read()
    return jsons.loads(raw)
def find_most_similar_config(percent_prefix: str, path_to_dataset: str,
                             current_config: Union[LMTrainingConfig, ClassifierTrainingConfig]):
    """Among dataset subdirs starting with ``percent_prefix``, find the saved
    config most similar to ``current_config``.

    Returns:
        ``(dirname, {})`` on an exact match (early return);
        ``(dirname, diff)`` for the closest non-exact candidate;
        ``(None, diff-from-empty)`` when no candidate config files exist.
    """
    # Maps number-of-differing-params -> list of (dirname, diff) candidates.
    config_diff_dict = defaultdict(list)
    logger.debug(f"Finding the most similar config in {path_to_dataset}")
    # Only the immediate children of path_to_dataset are considered.
    dirpath, dirnames, _ = next(os.walk(path_to_dataset))
    for dirname in dirnames:
        if not dirname.startswith(percent_prefix):
            continue
        file_path = os.path.join(dirpath, dirname, PARAM_FILE_NAME)
        if os.path.exists(file_path):
            with open(file_path, 'r') as f:
                json_str = f.read()
                logger.debug(f'Loading config from {file_path}')
                # Deserialize into the same concrete class as current_config.
                config = jsons.loads(json_str, type(current_config))
                config_diff = deepdiff.DeepDiff(config, current_config)
                if config_diff == {}:
                    # Exact match — nothing closer can exist.
                    return dirname, {}
                else:
                    # Similarity metric: total count of added + changed + removed params.
                    n_changed_params = (len(config_diff[DEEPDIFF_ADDED]) if DEEPDIFF_ADDED in config_diff else 0) \
                                       + (len(config_diff[DEEPDIFF_CHANGED]) if DEEPDIFF_CHANGED in config_diff else 0) \
                                       + (len(config_diff[DEEPDIFF_REMOVED]) if DEEPDIFF_REMOVED in config_diff else 0)
                    config_diff_dict[n_changed_params].append((dirname, config_diff))
    if not config_diff_dict:
        # No candidates at all: report the full diff against an empty config.
        return None, deepdiff.DeepDiff({}, current_config)
    else:
        # Smallest diff wins; [-1] picks the most recently seen among ties.
        return config_diff_dict[min(config_diff_dict)][-1]
def load(cls):
    """Deserialize a Configuration from CONFIG_PATH; return None when the file is absent."""
    if not os.path.isfile(CONFIG_PATH):
        logging.warning(f'{CONFIG_PATH} not exists.')
        return None
    with open(CONFIG_PATH, 'r', encoding='utf-8') as f:
        raw = f.read()
    return jsons.loads(raw, Configuration)
def main(
    dataflow_dialogues_jsonl: str,
    num_context_turns: int,
    min_turn_index: int,
    include_program: bool,
    include_agent_utterance: bool,
    include_described_entities: bool,
    onmt_text_data_outbase: str,
) -> None:
    """Convert dataflow dialogues (jsonl) into OpenNMT text-data files.

    Each line of the input is a serialized Dialogue; every OnmtTextDatum
    produced for it is written field-by-field into the per-field output files.

    Fix: the input file was opened with a bare ``open(...)`` inside the
    ``for`` and never closed — now managed with ``with``.
    """
    fps = OnmtTextDatum.create_output_files(onmt_text_data_outbase)
    with open(dataflow_dialogues_jsonl) as dialogues_fp:
        for line in tqdm(dialogues_fp, unit=" dialogues"):
            dialogue: Dialogue = jsons.loads(line.strip(), Dialogue)
            for onmt_text_datum in create_onmt_text_data_for_dialogue(
                    dialogue=dialogue,
                    num_context_turns=num_context_turns,
                    min_turn_index=min_turn_index,
                    include_program=include_program,
                    include_agent_utterance=include_agent_utterance,
                    include_described_entities=include_described_entities,
            ):
                for field_name, field_value in dataclasses.asdict(
                        onmt_text_datum).items():
                    fp = fps[field_name]
                    fp.write(field_value)
                    fp.write("\n")
    for fp in fps.values():
        fp.close()
def fetch_convertion_json(version: str) -> Dict[str, str]:
    """Download and parse the jq conversion mapping for ``version``.

    Raises ValueError when the converter cannot be fetched.
    """
    json_url = f'{CONVERTERS_URL}/{version}.jq'
    response = requests.get(json_url)
    if response.status_code != 200:
        raise ValueError(f'Cannot get version converter from {json_url}')
    # Collapse the multi-line jq file into a single line before parsing.
    normalized = response.text.replace('\n', ' ').strip()
    return jsons.loads(normalized)
def test_appoint_manager_to_store(self):
    """Appointing a manager: wrong store fails, owner succeeds, non-owner fails."""
    self.test_open_store()
    owner = "moshe" + str(self.idx - 1)
    # Nonexistent store id -> failure.
    self.assertFalse(
        jsons.loads(
            self.store_manager.appoint_manager_to_store(
                self.idx + 1, owner, "Amit")))
    # The legitimate owner of the freshly opened store -> success.
    self.assertTrue(
        jsons.loads(
            self.store_manager.appoint_manager_to_store(
                self.idx - 1, owner, "Amit")))
    self.assertIn(
        "Amit",
        self.store_manager.get_store(self.idx - 1).store_managers.keys())
    # A non-owner may not appoint managers.
    self.assertFalse(
        jsons.loads(
            self.store_manager.appoint_manager_to_store(
                self.idx - 1, "not moshe" + str(self.idx - 1), "Amit")))
def add_visible_discount():
    """Flask endpoint: attach a visible discount to products in a store."""
    payload = request.get_json()
    print(payload)
    result = stores_manager.add_visible_discount_to_product(
        payload['store_id'], payload['username'], payload['start_date'],
        payload['end_date'], payload['percent'], payload['products'])
    return jsonify(jsons.loads(result))
def test_marshal_unmarshal() -> None:
    """Round-trip a Foo through jsons and require deep equality.

    Fixes: removed the tautological ``assert 4 == 4``; replaced the
    ``globals()[type(foo).__name__]`` indirection with ``type(foo)`` —
    equivalent here since the name was derived from the instance itself.
    """
    foo = Foo("12")
    clazz = type(foo)
    js = jsons.dumps(foo)
    again = jsons.loads(js, cls=clazz)
    assert DeepDiff(foo, again, truncate_datetime="second") == {}
def on_request(ch, method, props, body):
    """Decode one queued message, dispatch it to the handler, then ack."""
    decoded_body = body.decode()
    log_info('HANDLE EVENT', queue=queue, body=decoded_body)
    outcome = handler(jsons.loads(decoded_body))
    # Manual ack only when the consumer was not registered with auto_ack.
    if not auto_ack:
        ch.basic_ack(delivery_tag=method.delivery_tag)
    return outcome
def test_add_purchase_store_policy(self):
    """Store purchase policies: valid bounds succeed; all-None or wrong owner fail."""

    def add_policy(owner, low, high):
        # Helper: add a policy to store 1 and return the parsed response.
        return jsons.loads(
            self.store_manager.add_purchase_store_policy(1, owner, low, high))

    self.assertTrue(add_policy("test_owner1", 3, 8)['ans'])
    self.assertFalse(add_policy("test_owner1", None, None)['ans'])
    self.assertTrue(add_policy("test_owner1", 1, None)['ans'])
    self.assertFalse(add_policy("test_owner2", 1, 2)['ans'])
def run(self, commit: Commit) -> RefactoringMinerOutput:
    """Run RefactoringMiner on one commit and parse its JSON output."""
    repo_url = get_full_github_url(commit.owner, commit.repository)
    cmd = ["./RefactoringMiner", "-gc", repo_url, commit.sha, "1000"]
    # check=True raises CalledProcessError on a non-zero exit status.
    completed = subprocess.run(cmd,
                               cwd=self.path,
                               capture_output=True,
                               check=True)
    return jsons.loads(completed.stdout.decode(), cls=RefactoringMinerOutput)