def test_update_in():
    assert update_in({"a": 0}, ["a"], inc) == {"a": 1}
    assert update_in({"a": 0, "b": 1}, ["b"], str) == {"a": 0, "b": "1"}
    assert (update_in({"t": 1, "v": {"a": 0}}, ["v", "a"], inc) ==
            {"t": 1, "v": {"a": 1}})
    # Handle missing element one deep:
    assert update_in({}, ["z"], str) == {"z": "None"}
    # Same semantics as Clojure, raises an error if going deeper than
    # one level into a dict which doesn't have the initial key:
    assert raises(AttributeError, lambda: update_in({}, ["z", "q"], str))
    # Verify immutability:
    d = {'x': 1}
    oldd = d
    update_in(d, ['x'], inc)
    assert d is oldd
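# A minimal sketch of the two helpers the test above relies on but does not
# define; `update_in` itself is the function under test and is assumed to be
# imported from the module being tested.  The exact definitions of `inc` and
# `raises` below are assumptions.
def inc(x):
    # Increment a number by one; used as a simple update function.
    return x + 1


def raises(err, thunk):
    # Return True if calling the zero-argument `thunk` raises `err`.
    try:
        thunk()
        return False
    except err:
        return True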
def update_config_data(name, value):
    """
    Update a configuration value in the existing configuration.

    :param str name: A "."-separated path to a configuration value stored
        in a nested dictionary.
    :param value: The new value to set at that path.
    """
    global _config_data
    _config_data = update_in(_config_data, name.split('.'), lambda _: value)
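# A hedged usage sketch of update_config_data above (assuming update_in is
# toolz.dicttoolz.update_in): the dotted name is split into a key path, so
# only that nested entry is replaced and _config_data is rebound to the new
# dict rather than mutated in place.  The config contents here are made up.
_config_data = {'db': {'host': 'localhost', 'pool': {'size': 5}}}
update_config_data('db.pool.size', 20)
assert _config_data == {'db': {'host': 'localhost', 'pool': {'size': 20}}}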
def _predict_action(state):
    # Free area = mine polygon minus the union of all obstacle polygons.
    mine = shapely.geometry.Polygon(state['desc']['mine_shell'])
    obstacles = [shapely.geometry.Polygon(sh) for sh in state['desc']['obstacle_shells']]
    obstacle = shapely.ops.unary_union(obstacles)
    situable = mine.difference(obstacle)
    # Remaining work = free area minus everything already wrapped.
    wrappeds = [shapely.geometry.Polygon(sh) for sh in state['wrapped_shells']]
    wrapped = shapely.ops.unary_union(wrappeds)
    not_wrapped = situable.difference(wrapped)
    if not_wrapped.area < 1.0:
        return None, state
    # Prefer repeating the last move, then try the other directions; if a
    # single step lands in unwrapped area, take it and drop any cached path.
    last_move = state.get('last_move', 'W')
    for move in [last_move, 'W', 'S', 'A', 'D']:
        proj = _move_projection_center(state['worker']['pos'], move)
        if not_wrapped.contains(proj):
            return move, tzd.dissoc(state, 'path_pts_to_not_wrapped')
    # Otherwise plan a shortest path to a representative unwrapped tile,
    # caching the incidence matrix of the free area in the state.
    if not state.get('path_pts_to_not_wrapped'):
        target_tile = tzf.thread_first(not_wrapped.representative_point(),
                                       _shapely_point2pt,
                                       _snap_to_tile)
        print('Finding shortest path from tile {} to {}'.format(state['worker']['pos'], target_tile))
        if tzd.get_in(['cache', 'incidence_m'], state) is None:
            incidence_m = _incidence_matrix(situable)
            state = tzd.assoc_in(state, ['cache', 'incidence_m'], incidence_m)
        else:
            incidence_m = state['cache']['incidence_m']
        target_vertex_ind = _incidence_ind(target_tile[0], target_tile[1],
                                           x_size=math.ceil(situable.bounds[2]))
        path_dists, path_predecessors = sp.sparse.csgraph.shortest_path(
            csgraph=incidence_m, directed=False, return_predecessors=True,
            unweighted=True, indices=target_vertex_ind)
        start_vertex_ind = _incidence_ind(state['worker']['pos'][0],
                                          state['worker']['pos'][1],
                                          x_size=math.ceil(situable.bounds[2]))
        path_inds = _path_inds(path_predecessors, start_vertex_ind)
        path_pts = [_incidence_pt(ind, x_size=math.ceil(situable.bounds[2]))
                    for ind in path_inds]
        print('Found path: {}'.format(path_pts))
        state = tzd.assoc(state, 'path_pts_to_not_wrapped', path_pts)
    # Follow the cached path: emit the move towards its first point and
    # consume that point from the path (without mutating the old state).
    path_move = _projection_pt_move(state['worker']['pos'], state['path_pts_to_not_wrapped'][0])
    if path_move is not None:
        return path_move, tzd.update_in(state, ['path_pts_to_not_wrapped'], lambda p: p[1:])
    return 'Z', state
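# A small self-contained sketch (with a made-up state dict) of the toolz
# dict operations _predict_action leans on: get_in/assoc_in manage the cached
# incidence matrix, and update_in drops the head of the cached path each step
# while leaving the previous state untouched.
import toolz.dicttoolz as tzd

state = {'worker': {'pos': (0, 0)}, 'path_pts_to_not_wrapped': [(1, 0), (2, 0)]}
assert tzd.get_in(['cache', 'incidence_m'], state) is None           # nothing cached yet
state2 = tzd.assoc_in(state, ['cache', 'incidence_m'], 'M')           # cache a (dummy) matrix
next_state = tzd.update_in(state2, ['path_pts_to_not_wrapped'], lambda p: p[1:])
assert next_state['path_pts_to_not_wrapped'] == [(2, 0)]              # head of path consumed
assert state['path_pts_to_not_wrapped'] == [(1, 0), (2, 0)]           # original untouched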
def send(image_id):
    lc_type = self.group_config["launchConfiguration"]["type"]
    if image_id is not None and lc_type == "launch_server":
        self.group_config = update_in(
            self.group_config,
            ["launchConfiguration", "args", "server", "imageRef"],
            lambda _: image_id)
    d = self.treq.post("%s/groups" % str(rcs.endpoints["otter"]),
                       json.dumps(self.group_config),
                       headers=headers(str(rcs.token)),
                       pool=self.pool)
    d.addCallback(check_success, [201])
    d.addCallback(self.treq.json_content)
    return d.addCallback(record_results)
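# A hedged illustration of the imageRef rewrite in send() above, using a
# made-up minimal group_config; update_in returns a copy with only the nested
# "imageRef" replaced, which is why the whole config can be re-posted as JSON.
from toolz.dicttoolz import update_in

group_config = {
    "launchConfiguration": {
        "type": "launch_server",
        "args": {"server": {"imageRef": "old-image", "flavorRef": "2"}},
    },
}
new_config = update_in(
    group_config,
    ["launchConfiguration", "args", "server", "imageRef"],
    lambda _: "new-image-id")
assert new_config["launchConfiguration"]["args"]["server"]["imageRef"] == "new-image-id"
assert group_config["launchConfiguration"]["args"]["server"]["imageRef"] == "old-image"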
from typing import List

from toolz.dicttoolz import update_in


def _update_value_in_nested_dict_by_keylist(dictionary: dict, key_list: List[str], new_value) -> dict:
    """Update the value of a nested dictionary at the given key list.

    Wrapper around :func:`toolz.dicttoolz.update_in`.

    Note:
        Does not modify the original dict; returns a new dict with the same
        content as the original, but with the new value at the required
        location.

    Parameters:
        dictionary: the dictionary to update.
        key_list: list of subkeys where to update the dictionary.
        new_value: the new value to update to.

    Returns:
        the updated dictionary.
    """
    return update_in(dictionary, key_list, lambda x: new_value)
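# A hedged usage sketch of the wrapper above, with a made-up settings dict:
# the returned dict carries the new value at the key path while the original
# dict is left untouched.
settings = {'logging': {'level': 'INFO', 'handlers': ['stderr']}}
updated = _update_value_in_nested_dict_by_keylist(settings, ['logging', 'level'], 'DEBUG')
assert updated == {'logging': {'level': 'DEBUG', 'handlers': ['stderr']}}
assert settings['logging']['level'] == 'INFO'   # original dict unchanged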
def test_update_in(self):
    D, kw = self.D, self.kw
    assert update_in(D({"a": 0}), ["a"], inc, **kw) == D({"a": 1})
    assert update_in(D({"a": 0, "b": 1}), ["b"], str, **kw) == D({"a": 0, "b": "1"})
    assert (update_in(D({"t": 1, "v": D({"a": 0})}), ["v", "a"], inc, **kw) ==
            D({"t": 1, "v": D({"a": 1})}))
    # Handle one missing key.
    assert update_in(D({}), ["z"], str, None, **kw) == D({"z": "None"})
    assert update_in(D({}), ["z"], inc, 0, **kw) == D({"z": 1})
    assert update_in(D({}), ["z"], lambda x: x + "ar", default="b", **kw) == D({"z": "bar"})
    # Same semantics as Clojure for multiple missing keys, ie. recursively
    # create nested empty dictionaries to the depth specified by the
    # keys with the innermost value set to f(default).
    assert update_in(D({}), [0, 1], inc, default=-1, **kw) == D({0: D({1: 0})})
    assert update_in(D({}), [0, 1], str, default=100, **kw) == D({0: D({1: "100"})})
    assert (update_in(D({"foo": "bar", 1: 50}), ["d", 1, 0], str, 20, **kw) ==
            D({"foo": "bar", 1: 50, "d": D({1: D({0: "20"})})}))
    # Verify immutability:
    d = D({'x': 1})
    oldd = d
    update_in(d, ['x'], inc, **kw)
    assert d is oldd
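# The method above refers to self.D and self.kw, which suggests a test class
# parametrized over the mapping type under test.  A minimal sketch of such a
# harness; the class names and the defaultdict variant are assumptions, while
# the `factory` keyword matches toolz.dicttoolz.update_in's signature.
from collections import defaultdict


class TestDict:
    # Plain dicts: no extra keyword arguments are passed to update_in.
    D = dict
    kw = {}


class TestDefaultDict:
    # defaultdicts: a `factory` keyword tells update_in how to build the
    # nested mappings it creates along the key path.
    @staticmethod
    def D(d):
        return defaultdict(int, d)

    kw = {'factory': lambda: defaultdict(int)}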
    '--base_output_path',
    type=str,
    default='tmp',
    help='When --split is True, this is the path of the folder report parts '
         'will be written to.')
parser.add_argument('--model_evaluation_file',
                    type=argparse.FileType('r'),
                    help='Path to the Make/Model evaluation file.')

args = parser.parse_args()

# Truncate each element's predictions to the configured limit while loading
# the dataset.
dataset = list(
    map(
        lambda elem: update_in(
            elem, ['predictions'],
            compose(list, partial(take, args.predictions_limit))),
        ujson.load(args.dataset_file)))

# Group the dataset into one section per (make, model) pair.
sections = groupby(lambda x: tuple(map(x.get, ['make', 'model'])), dataset).items()

evaluation_base_url = f'https://storage.cloud.google.com/dev_visual_search/evaluations/output/by-id/{args.evaluation_id}'


def link_to_page(key):
    if key is None:
        return None
    make, model = key
    return f'{evaluation_base_url}/prediction-{make}-{model}.html'


for prev, current, next in sliding_window(
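# A hedged sketch of the per-element transform used when loading the dataset
# above: compose(list, partial(take, n)) truncates each element's
# 'predictions' list to its first n entries and leaves the other keys alone.
# The sample element contents are made up.
from functools import partial
from toolz import compose, take, update_in

elem = {'make': 'Ford', 'model': 'Focus', 'predictions': ['p1', 'p2', 'p3', 'p4']}
truncated = update_in(elem, ['predictions'], compose(list, partial(take, 2)))
assert truncated['predictions'] == ['p1', 'p2']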
def test_update_in():
    assert update_in({"a": 0}, ["a"], inc) == {"a": 1}
    assert update_in({"a": 0, "b": 1}, ["b"], str) == {"a": 0, "b": "1"}
    assert (update_in({"t": 1, "v": {"a": 0}}, ["v", "a"], inc) ==
            {"t": 1, "v": {"a": 1}})
    # Handle one missing key.
    assert update_in({}, ["z"], str, None) == {"z": "None"}
    assert update_in({}, ["z"], inc, 0) == {"z": 1}
    assert update_in({}, ["z"], lambda x: x + "ar", default="b") == {"z": "bar"}
    # Same semantics as Clojure for multiple missing keys, ie. recursively
    # create nested empty dictionaries to the depth specified by the
    # keys with the innermost value set to f(default).
    assert update_in({}, [0, 1], inc, default=-1) == {0: {1: 0}}
    assert update_in({}, [0, 1], str, default=100) == {0: {1: "100"}}
    assert (update_in({"foo": "bar", 1: 50}, ["d", 1, 0], str, 20) ==
            {"foo": "bar", 1: 50, "d": {1: {0: "20"}}})
    # Verify immutability:
    d = {'x': 1}
    oldd = d
    update_in(d, ['x'], inc)
    assert d is oldd

    # Test object support:
    c = C()
    c.a = 0
    assert update_in(c, ["a"], inc).__dict__ == {"a": 1}
    c = C()
    c.a = 0
    c.b = 1
    assert update_in(c, ["b"], str).__dict__ == {"a": 0, "b": "1"}
    v = C()
    v.a = 0
    c = C()
    c.t = 1
    c.v = v
    assert update_in(c, ["v", "a"], inc).v.__dict__ == {"a": 1}
    # Handle one missing key.
    c = C()
    assert update_in(c, ["z"], str, None).__dict__ == {"z": "None"}
    assert update_in(c, ["z"], inc, 0).__dict__ == {"z": 1}
    assert update_in(c, ["z"], lambda x: x + "ar", default="b").__dict__ == {"z": "bar"}
    # Allow AttributeError to be thrown if more than one missing key,
    # because we don't know what type of object to create for nesting.
    assert raises(AttributeError, lambda: update_in(c, ["y", "z"], inc, default=0))
    # Verify immutability:
    o = C()
    o.x = 1
    update_in(o, ['x'], inc)
    assert o.x == 1
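# The object-support assertions above construct instances of a class C that
# is not shown.  A plain empty class appears sufficient, since the test only
# sets and reads instance attributes; this definition is an assumption.
class C:
    pass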