def test_split_out_both_servers_and_lb_nodes_if_too_long(self):
    """
    Both 'lb_nodes' and 'servers' are split out if the event is too long
    to accommodate both.  The longest one is removed first.
    """
    event = {
        'hi': 'there',
        'desired': self.state,
        'steps': ['steps'],
        'lb_nodes': ['1', '2', '3', '4'],
        'servers': ['1', '2', '3']
    }
    message = "Executing convergence"
    short_event = {
        k: event[k] for k in event if k not in ('servers', 'lb_nodes')
    }
    result = split_execute_convergence(
        event.copy(),
        max_length=len(json.dumps(short_event, default=repr)) + 5)
    expected = [(short_event, message),
                (dissoc(event, 'desired', 'steps', 'servers'), message),
                (dissoc(event, 'desired', 'steps', 'lb_nodes'), message)]
    self.assertEqual(result, expected)
def test_split_out_lb_nodes_if_lb_nodes_longer(self):
    """
    If the 'lb_nodes' parameter is longer than the 'servers' parameter,
    and the event is otherwise sufficiently small, 'lb_nodes' is the
    param that gets split into another message.
    """
    event = {
        'hi': 'there',
        'desired': self.state,
        'steps': ['steps'],
        'lb_nodes': ['1', '2', '3', '4'],
        'servers': ['1', '2', '3']
    }
    message = "Executing convergence"
    # assume that removing 'servers' would make it the perfect length, but
    # since 'lb_nodes' is bigger, it's the thing that gets removed.
    length = len(
        json.dumps({k: event[k] for k in event if k != 'servers'},
                   default=repr))
    result = split_execute_convergence(event.copy(), max_length=length)
    expected = [(dissoc(event, 'lb_nodes'), message),
                (dissoc(event, 'desired', 'steps', 'servers'), message)]
    self.assertEqual(result, expected)
def test_dissoc():
    assert dissoc({"a": 1}, "a") == {}
    assert dissoc({"a": 1, "b": 2}, "a") == {"b": 2}
    assert dissoc({"a": 1, "b": 2}, "b") == {"a": 1}

    # Verify immutability:
    d = {'x': 1}
    oldd = d
    d2 = dissoc(d, 'x')
    assert d is oldd
    assert d2 is not oldd
def test_dissoc(self):
    D, kw = self.D, self.kw
    assert dissoc(D({"a": 1}), "a") == D({})
    assert dissoc(D({"a": 1, "b": 2}), "a") == D({"b": 2})
    assert dissoc(D({"a": 1, "b": 2}), "b") == D({"a": 1})

    # Verify immutability:
    d = D({'x': 1})
    oldd = d
    d2 = dissoc(d, 'x')
    assert d is oldd
    assert d2 is not oldd
async def delete(self, id_):
    # dissoc never mutates; the second lambda still sees the old
    # self.items, so comparing sizes detects whether the key existed.
    self.items, has_changed = pipe(
        id_,
        lambda key: dissoc(self.items, key),
        lambda new: (new, len(self.items) != len(new)),
    )
    return has_changed
def _dispatch(self, request: Tuple[str, ...]):
    log.debug("got a request", request=request)
    request_type, *request_arg = request
    try:
        if request_type == "submit":
            job_name, fname = request_arg
            log.debug("submitting a task", fname=fname, job_name=job_name)
            job_id = self.submit(job_name, fname)
            return job_id
        elif request_type == "cancel":
            job_id = request_arg[0]
            log.debug("got a cancel request", job_id=job_id)
            self.cancel(job_id)
            return None
        elif request_type == "queue":
            log.debug("got a queue request")
            # remove the "proc" entries because they aren't picklable
            return {
                job_id: dissoc(info, "proc")
                for job_id, info in self.queue().items()
            }
        else:
            log.debug("got unknown request")
    except Exception as e:
        return e
def main():
    desc = _read_desc(_desc_path())
    pprint(desc)
    initial_state = {'desc': tzd.dissoc(desc, 'worker_pos'),
                     'worker': {'pos': desc['worker_pos'], 'orien': 'r'},
                     'wrapped_shells': [_pt2shell(desc['worker_pos'])]}
    states = [initial_state]
    actions = []
    shutil.rmtree(_output_image_dir(_desc_path()), ignore_errors=True)
    for turn_i in range(500):
        print('- turn {}'.format(turn_i))
        prev_state = states[turn_i]
        action, intermediate_state = _predict_action(prev_state)
        if turn_i % 50 == 0:
        # if True:
            _export_state(intermediate_state, turn_i, _desc_path(),
                          draw_opts={'render_scale': 10})
        if action is None:
            break
        next_state = _update_state(intermediate_state, action)
        states.append(next_state)
        actions.append(action)
    _export_actions(actions, _output_actions_filepath(_desc_path()))
def main(db_uri, tablename):
    click.echo("Connecting to database...")
    database = SQLDatabase(db_uri, tablename)

    click.echo("Checking existing number of evaluations...")
    if len(database) < 2:  # Check if there are enough existing datapoints
        for point in initial_points:
            target = black_box_function(**point)
            database.append({**point, "target": target})

    click.echo("Creating optimizer and utility function...")
    optimizer = BayesianOptimization(f=None, pbounds=pbounds, random_state=1)
    utility_function = UtilityFunction(kind="ucb", kappa=3, xi=1)

    click.echo("Registering evaluations with optimizer...")
    for evaluation in database:
        optimizer.register(dissoc(evaluation, "target"), evaluation["target"])

    while True:
        try:
            point = optimizer.suggest(utility_function)
            click.echo(f"Evaluating black-box function for {point}")
            target = black_box_function(**point)
            click.echo(f"Got {target}")
            click.echo("Registering with database...")
            database.append({**point, "target": target})
            click.echo("Registering with optimizer...")
            optimizer.register(point, target)
        except KeyboardInterrupt:
            click.echo("Exiting...")
            break
def pop_nan(self, dct):
    """Given a dict, return a copy with keys popped where the value is
    None, blank, or NaN."""
    res = dict(dct)
    nans = valfilter(
        lambda x: (
            x is None or str(x).strip() == '' or
            (isinstance(x, (Decimal, float)) and isnan(x))),
        res)
    return dissoc(res, *nans.keys())
def remove_factor(self, factor):
    factor_name = factor if isinstance(factor, str) else factor.name
    py_assert(factor_name in self.names, ValueError,
              'unable to remove factor_name {0}, which does not exist in '
              'current container'.format(factor_name))
    self.names.remove(factor_name)
    self.data.drop(factor_name, axis=1, inplace=True)
    self.property = dissoc(self.property, factor_name)
    return
def parse(raw_form: str):
    form_data = valmap(first, parse_qs(raw_form, strict_parsing=True))
    return RsvpFormData(guest_id=form_data[RsvpFormData.GUEST_ID_FIELD],
                        party_id=form_data[RsvpFormData.PARTY_ID_FIELD],
                        attending=valmap(
                            _parse_bool,
                            dissoc(form_data,
                                   RsvpFormData.GUEST_ID_FIELD,
                                   RsvpFormData.PARTY_ID_FIELD)))
def queue(self, only_me: bool = True):
    # only_me doesn't do anything, but the argument is there
    # because it is in the other schedulers.
    # remove the "proc" entries because they aren't picklable
    return {
        job_id: dissoc(info, "proc")
        for job_id, info in self._current_queue.items()
    }
def _get_user_site_details(user_data):
    user = []
    allowed_keys = {'user_id', 'site_url', 'site_name'}
    remapped_keys = {'user_id': 'site_user_id'}
    keys_to_dissoc = set(user_data['items'][0].keys()) - allowed_keys
    for site in user_data['items']:
        filtered_data = dissoc(site, *keys_to_dissoc)
        remapped_data = apply_key_map(remapped_keys, filtered_data)
        user.append(remapped_data)
    return user
def fit(self, X, y=None, sample_weight=None, exposure=None):
    fit_args = self._process_args(X=X, y=y, sample_weight=sample_weight,
                                  exposure=exposure)

    # Create internal cross-validating estimators
    self.cross_validating_estimators_ = OrderedDict(
        (k, CrossValidatingEstimator(v, cv=self.cv, n_jobs=self.n_jobs,
                                     verbose=self.verbose,
                                     pre_dispatch=self.pre_dispatch))
        for k, v in self.ordered_regressors.items())
    # frozendict(valmap(lambda x: CrossValidatingEstimator(
    #     x, cv=self.cv, n_jobs=self.n_jobs, verbose=self.verbose,
    #     pre_dispatch=self.pre_dispatch), self.regressors).items())

    # Fit the inner regressors using cross-validation
    for est_name, est in self.cross_validating_estimators_.items():
        if self.verbose > 0:
            print('Super learner is fitting %s...' % est_name)
        est.fit(**fit_args)
        if self.verbose > 0:
            print('Super learner finished fitting %s.' % est_name)

    # Fit the outer meta-regressor.  Cross validation is not used here.
    # Instead, users of the SuperLearner are free to wrap the SuperLearner
    # in a CrossValidatingEstimator.
    meta_fit_args = assoc(
        fit_args, 'X',
        np.concatenate(tuple(map(growd(2), [
            est.cv_predictions_
            for est in self.cross_validating_estimators_.values()
        ])), axis=1))
    if self.y_transformer is not None:
        self.y_transformer_ = clone(self.y_transformer).fit(**fit_args)
        meta_fit_args = assoc(
            meta_fit_args, 'y',
            self.y_transformer_.transform(
                **dissoc(fit_args, 'sample_weight', 'y')))
    if self.verbose > 0:
        print('Super learner fitting meta-regressor...')
    self.meta_regressor_ = clone(self.meta_regressor).fit(**meta_fit_args)
    if self.verbose > 0:
        print('Super learner meta-regressor fitting complete.')

    # All scikit-learn compatible estimators must return self from fit
    return self
def test_dicttoolz():
    d1 = {'foo': 'bar'}
    d2 = {'baz': 'quux'}
    assert_that(merge(d1, d2)).is_equal_to({'foo': 'bar', 'baz': 'quux'})
    assert_that(d1).is_equal_to({'foo': 'bar'})
    assert_that(assoc(d1, 'a', 1)).is_equal_to({'foo': 'bar', 'a': 1})
    assert_that(dissoc(d2, 'baz')).is_equal_to({})
    struct = {'a': [{'c': 'hello'}]}
    assert_that(get_in(['a', 0, 'c'], struct)).is_equal_to(struct['a'][0]['c'])
    assert_that(get_in(['a', 0, 'd'], struct, 'not found')).is_equal_to('not found')
def as_effect(self):
    """Produce an :obj:`Effect` to update a stack."""
    stack_config = dissoc(thaw(self.stack_config), 'stack_name')
    eff = update_stack(stack_name=self.stack.name, stack_id=self.stack.id,
                       stack_args=stack_config)

    def report_success(result):
        retry_msg = 'Waiting for stack to update'
        return ((StepResult.RETRY, [ErrorReason.String(retry_msg)])
                if self.retry else (StepResult.SUCCESS, []))

    return eff.on(success=report_success)
def _predict_action(state):
    mine = shapely.geometry.Polygon(state['desc']['mine_shell'])
    obstacles = [shapely.geometry.Polygon(sh)
                 for sh in state['desc']['obstacle_shells']]
    obstacle = shapely.ops.unary_union(obstacles)
    situable = mine.difference(obstacle)

    wrappeds = [shapely.geometry.Polygon(sh)
                for sh in state['wrapped_shells']]
    wrapped = shapely.ops.unary_union(wrappeds)
    not_wrapped = situable.difference(wrapped)
    if not_wrapped.area < 1.0:
        return None, state

    last_move = state.get('last_move', 'W')
    for move in [last_move, 'W', 'S', 'A', 'D']:
        proj = _move_projection_center(state['worker']['pos'], move)
        if not_wrapped.contains(proj):
            return move, tzd.dissoc(state, 'path_pts_to_not_wrapped')

    if not state.get('path_pts_to_not_wrapped'):
        target_tile = tzf.thread_first(not_wrapped.representative_point(),
                                       _shapely_point2pt,
                                       _snap_to_tile)
        print('Finding shortest path from tile {} to {}'.format(
            state['worker']['pos'], target_tile))
        if tzd.get_in(['cache', 'incidence_m'], state) is None:
            incidence_m = _incidence_matrix(situable)
            state = tzd.assoc_in(state, ['cache', 'incidence_m'], incidence_m)
        else:
            incidence_m = state['cache']['incidence_m']
        target_vertex_ind = _incidence_ind(
            target_tile[0], target_tile[1],
            x_size=math.ceil(situable.bounds[2]))
        path_dists, path_predecessors = sp.sparse.csgraph.shortest_path(
            csgraph=incidence_m, directed=False, return_predecessors=True,
            unweighted=True, indices=target_vertex_ind)
        start_vertex_ind = _incidence_ind(
            state['worker']['pos'][0], state['worker']['pos'][1],
            x_size=math.ceil(situable.bounds[2]))
        path_inds = _path_inds(path_predecessors, start_vertex_ind)
        path_pts = [_incidence_pt(ind, x_size=math.ceil(situable.bounds[2]))
                    for ind in path_inds]
        print('Found path: {}'.format(path_pts))
        state = tzd.assoc(state, 'path_pts_to_not_wrapped', path_pts)

    path_move = _projection_pt_move(state['worker']['pos'],
                                    state['path_pts_to_not_wrapped'][0])
    if path_move is not None:
        return path_move, tzd.update_in(state, ['path_pts_to_not_wrapped'],
                                        lambda p: p[1:])
    return 'Z', state
def filterFeatures(match):
    usableKeys = ['players', 'radiant_win', 'hero_id', 'player_slot']
    isUsable = lambda k: k in usableKeys
    toplvlFiltered = keyfilter(isUsable, match)
    filteredPlayers = []
    for player in toplvlFiltered['players']:
        side = decideSide(player['player_slot'])
        playerData = assoc(keyfilter(isUsable, player), 'team', side)
        playerData = dissoc(playerData, 'player_slot')
        filteredPlayers.append(playerData)
    toplvlFiltered['players'] = filteredPlayers
    return toplvlFiltered
def test_insert(mock_client_class):
    mock_mongo_client = mongomock.MongoClient()
    mock_client_class.return_value = mock_mongo_client
    INPUT_DOCS = [{'name': 'A'}, {'name': 'B'}]
    (INPUT_DOCS
     | beam.transforms.Map(lambda d: pymongo.InsertOne(d))
     | beam.transforms.ParDo(
         MongoBulkWriteFn(db_uri='mongodb://localhost/db',
                          collection_name='test_beam',
                          db_name='db',
                          order_writes=True)))
    found_objs = mock_mongo_client.get_database('db')['test_beam'].find({})
    # materialize the map so it compares equal to the input list
    no_id_objs = list(map(lambda o: dissoc(o, '_id'), found_objs))
    assert no_id_objs == INPUT_DOCS
def wordcloud():
    if DEVELOPMENT_MODE:
        with open('wordcloud.json') as f:
            return jsonify(json.load(f))
    else:
        graph = facebook.GraphAPI(access_token=FACEBOOK_USER_ACCESS_TOKEN,
                                  version='2.7')
        query_string = f'fields=feed.since({SINCE})' \
                       '{comments{comments{message,created_time,like_count},' \
                       'message,created_time,like_count,reactions},' \
                       'message,created_time,updated_time,reactions}'
        endpoint_url = f'{FACEBOOK_GROUP_ID}?{query_string}'
        feed = graph.request(endpoint_url).get('feed')

        text = ''
        for each in feed.get('data'):
            message = each.get('message')
            if message:
                text += message
            comments = each.get('comments')
            if comments:
                for comment in comments.get('data'):
                    text += comment.get('message')
                    comments_in_comment = comment.get('comments')
                    if comments_in_comment:
                        for comment_in_comment in comments_in_comment.get('data'):
                            text += comment_in_comment.get('message')

        from pythainlp.rank import rank
        from pythainlp.tokenize import word_tokenize
        word_list = word_tokenize(text, engine='newmm')
        word_count = rank(word_list)

        from toolz.dicttoolz import dissoc
        new_word_count = dissoc(word_count, ' ')

        words = []
        for each in new_word_count:
            d = {'word': each, 'value': new_word_count[each]}
            words.append(d)
        return jsonify(words)
def test_delta_with_delay(self):
    schedule = {
        'start': {
            'relative_timeshift': {
                'delay': '1',
                'time_units': TimeUnits.DAYS,
            }
        },
        'periodical': {
            'repeats': PeriodicalUnits.HOURLY,
            'every': 1,
        },
        'timezone': 'Europe/Kiev',
    }
    delta, _ = schedule_delta(schedule)
    abs_delta, _ = schedule_delta(dissoc(schedule, 'start'))
    assert delta == 86400
    assert abs_delta == 3600
def __exit__(self, exc_type, exc_value, exc_traceback):
    if isinstance(exc_value, ModuleCacheValid) or \
            exc_type is ModuleCacheValid or \
            exc_value is ModuleCacheValid:
        inspect.stack()[1][0].f_globals.update(self.moduledata)
        return True
    elif exc_value is None:
        new_moduledata = valfilter(
            complement(flip(isinstance)(ModuleType)),
            dissoc(inspect.stack()[1][0].f_globals, *self.suppress))
        # Check that all objects can be cached
        for _ in starmap(self._check_cachability, new_moduledata.items()):
            pass
        new_metadata = self.invalidator.new_metadata(new_moduledata)
        self._put_in_cache(new_metadata, new_moduledata)
        return True
    else:
        return False
def __sub__(self, key: A):
    return Map(dicttoolz.dissoc(self, key))
def test_outputs():
    # each task's recorded output must equal the hash of the task
    # with its own 'output' field removed
    for _name, task in encoded_dag['tasks'].items():
        output = task['output']
        assert output == sha(bencode(dissoc(task, 'output')))
def test_is_required(self, valid_create_hero_dto):
    with pytest.raises(ValidationError) as excinfo:
        CreateHeroDto(**dissoc(valid_create_hero_dto, "location"))
    self.assert_validation_error("value_error.missing", excinfo)
def as_json(self):
    """
    :return: a JSON dictionary representing the node.
    """
    return dissoc(attr.asdict(self), "feed_events")
def test_is_optional(self, valid_update_hero_dto):
    assert UpdateHeroDto(**dissoc(valid_update_hero_dto, "location"))
def test_is_required(self, valid_hero):
    with pytest.raises(ValidationError) as excinfo:
        Hero(**dissoc(valid_hero, "power_class"))
    self.assert_validation_error("value_error.missing", excinfo)
def test_defaults(self, valid_update_hero_dto):
    assert (UpdateHeroDto(
        **dissoc(valid_update_hero_dto, "name")).name == "Unknown")
def test_defaults(self, valid_hero):
    assert Hero(**dissoc(valid_hero, "name")).name == "Unknown"
def dump(self):
    # drop SQLAlchemy's internal bookkeeping key so only row data remains
    return dissoc(self.__dict__, '_sa_instance_state')
def test_is_optional(self, valid_update_hero_dto):
    assert UpdateHeroDto(
        **dissoc(valid_update_hero_dto, "power_class"))