def resume_evaluation(model_factory, ds_manager, eval_dir=None, **kwargs):
    """Continue an evaluation from its saved config, or start a fresh one.

    If the evaluation directory has no ``config.json`` yet, a new evaluation
    is launched with ``kwargs`` unchanged.  Otherwise the stored settings win
    over any overlapping entries in ``kwargs``.
    """
    if eval_dir is None:
        eval_dir = find_eval_dir(model_factory, ds_manager)

    config_file = eval_dir / "config.json"
    if not config_file.exists():
        print(f"Starting new evaluation at {eval_dir}...")
        return evaluate(model_factory, ds_manager, **kwargs)

    print(f"Resuming evaluation at {eval_dir}...")
    with open(config_file, "r") as f:
        config = json.load(f)

    # Saved settings take precedence over caller-supplied kwargs.
    saved = {
        key: config[key]
        for key in ("outer_k", "repeat", "winner_repeat", "epochs",
                    "patience", "stopping_min_delta", "restore_best",
                    "hp_args")
    }
    return evaluate(model_factory, ds_manager, eval_dir=eval_dir,
                    **saved, **fy.omit(kwargs, config.keys()))
def test_fruit_delete(client, random_password, new_user, new_fruit):
    """Deleting a fruit soft-deletes it: GET still works but in a reduced shape."""
    password = random_password()
    author = new_user(password=password)
    fruit = new_fruit(user=author)
    url = reverse("api:fruit-detail", args=[fruit.id])
    payload = {"why_deleted": "because"}

    assert client.login(username=author.username, password=password)

    delete_response = client.delete(
        url,
        json.dumps(payload),
        content_type="application/json",
    )
    assert delete_response.status_code == status.HTTP_204_NO_CONTENT

    # We are still able to fetch the data, but in a different shape
    get_response = client.get(url)
    data = get_response.json()
    expected = {
        **fruit_to_verbose_data(fruit, get_response),
        **payload,
        "deleted": True,
        "time": data["time"],
    }
    assert data == funcy.omit(expected, ["lat", "lng", "images_count"])
def redis_client():
    """Build the Redis client configured for cacheops.

    Returns either a Sentinel-managed master or a direct connection,
    depending on which of CACHEOPS_SENTINEL / CACHEOPS_REDIS is set.
    """
    if settings.CACHEOPS_REDIS and settings.CACHEOPS_SENTINEL:
        raise ImproperlyConfigured("CACHEOPS_REDIS and CACHEOPS_SENTINEL are mutually exclusive")

    client_class = CacheopsRedis
    if settings.CACHEOPS_CLIENT_CLASS:
        client_class = import_string(settings.CACHEOPS_CLIENT_CLASS)

    if settings.CACHEOPS_SENTINEL:
        sentinel_conf = settings.CACHEOPS_SENTINEL
        if not {'locations', 'service_name'} <= set(sentinel_conf):
            raise ImproperlyConfigured("Specify locations and service_name for CACHEOPS_SENTINEL")
        # Everything except our own keys is forwarded to Sentinel itself.
        sentinel = Sentinel(
            sentinel_conf['locations'],
            **omit(sentinel_conf, ('locations', 'service_name', 'db')))
        return sentinel.master_for(
            sentinel_conf['service_name'],
            redis_class=client_class,
            db=sentinel_conf.get('db', 0)
        )

    # Allow client connection settings to be specified by a URL.
    if isinstance(settings.CACHEOPS_REDIS, str):
        return client_class.from_url(settings.CACHEOPS_REDIS)
    return client_class(**settings.CACHEOPS_REDIS)
def filter_nodes_by_guard(nodes, fields, jwt):
    """Yield each node with ``fields`` stripped; nodes that fail are dropped."""
    for node in nodes:
        try:
            yield omit(node if node else dict(), fields)
        except Exception:
            # Best effort: silently skip nodes that cannot be filtered.
            pass
def redis_client():
    """Build the Redis client configured for cacheops.

    Raises ImproperlyConfigured when both CACHEOPS_REDIS and
    CACHEOPS_SENTINEL are set, or when Sentinel settings are incomplete.
    """
    if settings.CACHEOPS_REDIS and settings.CACHEOPS_SENTINEL:
        raise ImproperlyConfigured("CACHEOPS_REDIS and CACHEOPS_SENTINEL are mutually exclusive")

    client_class = CacheopsRedis
    if settings.CACHEOPS_CLIENT_CLASS:
        client_class = import_string(settings.CACHEOPS_CLIENT_CLASS)

    if settings.CACHEOPS_SENTINEL:
        if not {'locations', 'service_name'} <= set(settings.CACHEOPS_SENTINEL):
            raise ImproperlyConfigured("Specify locations and service_name for CACHEOPS_SENTINEL")
        # Everything except our own keys is forwarded to Sentinel itself.
        sentinel = Sentinel(
            settings.CACHEOPS_SENTINEL['locations'],
            **omit(settings.CACHEOPS_SENTINEL, ('locations', 'service_name', 'db')))
        return sentinel.master_for(
            settings.CACHEOPS_SENTINEL['service_name'],
            redis_class=client_class,
            db=settings.CACHEOPS_SENTINEL.get('db', 0)
        )

    # Allow client connection settings to be specified by a URL.
    # `str` replaces the Py2-era `six.string_types` check: this file already
    # uses Python-3-only syntax (f-strings) and performs the same check with
    # plain `str` in its sibling implementation.
    if isinstance(settings.CACHEOPS_REDIS, str):
        return client_class.from_url(settings.CACHEOPS_REDIS)
    else:
        return client_class(**settings.CACHEOPS_REDIS)
def output(self):
    """Run every configured output spec through its registered format handler."""
    for spec in self.data.get('output'):
        fmt = spec.get('format')
        if fmt not in self.output_formats:
            raise Exception('Output not supported: {}'.format(fmt))
        # The handler receives every key of the spec except 'format'.
        self.output_formats.get(fmt)(self, **omit(spec, 'format'))
def quick_evaluate(model_factory, ds_manager, **kwargs):
    """Run a fast single-epoch, single-repeat evaluation labeled "quick"."""
    fixed = dict(epochs=1, repeat=1, winner_repeat=1, label="quick")
    # Caller-supplied values for the fixed keys are discarded on purpose.
    passthrough = fy.omit(kwargs, ["epochs", "repeat", "winner_repeat", "label"])
    return evaluate(model_factory, ds_manager, **fixed, **passthrough)
def decode_trc(self, actions):
    """Split a simulated trace into system actions and latch states.

    Asserts every state carries a truthy '##valid' bit, then strips it.
    """
    circ = self.aigbv
    sys_actions = [fn.project(action, self.inputs) for action in actions]
    states = fn.lpluck(0, circ.simulate(actions))
    assert all(state['##valid'] for state in states)
    # Drop the bookkeeping bit once it has been checked.
    states = [fn.omit(state, {'##valid'}) for state in states]
    return sys_actions, states
def transform(self, item):
    """Render ``item`` into SQL via the template and attach it as 'sql'."""
    ctx = {
        'fields': ','.join(item['select']),
        'from_': item['from'],
        'when': _build_clause('when', item),
        'group_by': _build_clause('group by', item),
        'order_by': _build_clause('order by', item),
        'limit': _build_clause('limit', item),
    }
    sql = self.tmpl.format(**ctx).strip()
    # Keep the remaining item keys, replacing query pieces with rendered SQL.
    return merge(
        omit(item, ['select', 'from', 'group_by', 'order_by', 'limit']),
        {'sql': sql})
async def resolve_query_user(parent, args, ctx, info):
    """GraphQL resolver: fetch a single user matching ``args['where']``.

    NOTE(review): ``pipeline`` is referenced but never defined in this
    function — presumably injected by the code generator or module scope;
    confirm before relying on this resolver.
    NOTE(review): ``headers`` is read but unused, and ``fields`` is always
    empty here, so the ``omit`` branch never fires — generator artifact?
    """
    where = strip_nones(args.get('where', {}))
    headers = ctx['req'].headers
    jwt = ctx['req'].state.jwt_payload
    fields = []
    collection = ctx['db']['users']
    x = await mongodb_streams.find_one(collection, match=where, pipeline=pipeline)
    # {{repr_disambiguations(disambiguations, ' ')  — leftover template
    # placeholder from code generation; harmless as a comment.
    if fields:
        x = omit(x or dict(), fields)
    return x
def test_fruit_create_missing_args(client, random_password, new_user, fruit_request_data, missing_arg, error_msg):
    """Posting a fruit without a required field yields 400 plus the field error."""
    password = random_password()
    user = new_user(password=password)
    assert client.login(username=user.username, password=password)

    payload = funcy.omit(fruit_request_data(), [missing_arg])
    response = client.post(
        reverse("api:fruit-list"),
        payload,
        content_type="application/json",
    )

    assert response.status_code == status.HTTP_400_BAD_REQUEST
    assert response.json() == error_msg
def get_stats(term):
    """Return delay statistics as JSON.

    TODO:
    1 - week
    0 - day
    """
    db = get_db()
    stats = db.delay_stat_week() if term else db.delay_stat_day()
    # Format every metric to two decimals, leaving Mongo's _id out.
    formatted = {key: f'{value:.2f}' for key, value in omit(stats, '_id').items()}
    return jsonify(formatted)
def filter_nodes_by_guard(nodes, fields, jwt):
    """Yield nodes stripped of guarded fields; nodes whose guards fail are dropped.

    NOTE(review): the two guards require jwt['role'] to equal 'admin' AND
    'semi' for the same node; a single role string can never satisfy both,
    so every node raises and is silently skipped — confirm this generated
    guard combination is intended.
    NOTE(review): ``fields += [...]`` mutates the caller's list and keeps
    appending on every iteration; consider a per-node copy.
    """
    for x in nodes:
        try:
            if not (jwt['role'] == 'admin'):
                raise Exception("guard `jwt['role'] == 'admin'` not satisfied")
            else:
                fields += []
            if not (jwt['role'] == 'semi'):
                raise Exception("guard `jwt['role'] == 'semi'` not satisfied")
            else:
                fields += ['passwords', 'campaign_data']
            yield omit(x or dict(), fields)
        except Exception:
            # Guard failures drop the node rather than propagate.
            pass
def get_weather_dfs(config):
    """Fetch OpenWeatherMap one-call data.

    Returns a human-readable current-conditions string and an hourly
    DataFrame with timestamps converted and a display color per row.
    """
    response = requests.get("https://api.openweathermap.org/data/2.5/onecall",
                            params=config.items())
    weather = json.loads(response.text)

    # Flatten the first `weather` entry of each hourly record into the record.
    hourly = [f.merge(f.omit(hr, 'weather'), hr['weather'][0])
              for hr in weather['hourly']]
    hourly_df = pd.DataFrame(hourly)
    hourly_df['dt_end'] = hourly_df['dt'] + 60 * 60
    hourly_df['dt'] = f.lmap(datetime.fromtimestamp, hourly_df['dt'])
    hourly_df['dt_end'] = f.lmap(datetime.fromtimestamp, hourly_df['dt_end'])
    hourly_df['color'] = f.lmap(get_weather_color, hourly_df['id'])

    now = weather['current']
    current = "Currently {temp}°F, {description}. Wind {wind_speed} MPH".format(
        temp=round(now['temp']),
        description=now['weather'][0]['description'],
        wind_speed=round(now['wind_speed']))
    return current, hourly_df
def object(self, children): required = [ get_first_key(c) for c in children if c != ELLIPSIS and c["required"] ] children = lmap(lambda o: omit(o, ["required"]), children) properties = merge(*children) if ELLIPSIS in children: children.remove(ELLIPSIS) if len(children) == 1: res = {"type": "object"} else: res = {"type": "object", "properties": properties} else: res = { "type": "object", "properties": properties, "additionalProperties": False, "required": required, } if required: res.update({"required": required}) return res
async def resolve_query_human(parent, args, ctx, info):
    """GraphQL resolver: fetch one human matching ``args['where']``, gated by role guards.

    NOTE(review): ``pipeline`` is referenced but never defined here —
    presumably injected by the code generator or module scope; confirm.
    NOTE(review): the guards require jwt['role'] to be both 'admin' and
    'semi'; for a single role value one of them always raises, so this
    resolver can never reach the ``omit`` step — confirm the generated
    guard combination is intended.
    """
    where = strip_nones(args.get('where', {}))
    headers = ctx['req'].headers
    jwt = ctx['req'].state.jwt_payload
    fields = []
    collection = ctx['db']['humans']
    x = await mongodb_streams.find_one(collection, match=where, pipeline=pipeline)
    if not (jwt['role'] == 'admin'):
        raise Exception("guard `jwt['role'] == 'admin'` not satisfied")
    else:
        fields += []
    if not (jwt['role'] == 'semi'):
        raise Exception("guard `jwt['role'] == 'semi'` not satisfied")
    else:
        fields += ['passwords', 'campaign_data']
    # {{repr_disambiguations(disambiguations, ' ')  — leftover template
    # placeholder from code generation; harmless as a comment.
    if fields:
        x = omit(x or dict(), fields)
    return x
def test_seq_compose(circ1, circ2, data): circ1, circ2 = map(fresh_io, (circ1, circ2)) # 1. Check >> same as eager | on disjoint interfaces. assert_sample_equiv(circ1 | circ2, circ1 >> circ2, data) # 2. Force common interface. circ1 = circ1['o', {fn.first(circ1.outputs): '##test'}] circ2 = circ2['i', {fn.first(circ2.inputs): '##test'}] # Compose and check sample equivilence. circ12 = circ1 >> circ2 assert (circ1.latches | circ2.latches) == circ12.latches assert circ1.inputs <= circ12.inputs assert circ2.outputs <= circ12.outputs assert '##test' not in circ12.inputs assert '##test' not in circ12.outputs # 3. Check cascading inputs work as expected. test_input1 = {f'{i}': data.draw(st.booleans()) for i in circ1.inputs} test_input2 = {f'{i}': data.draw(st.booleans()) for i in circ2.inputs} omap1, lmap1 = circ1(test_input1) test_input2['##test'] = omap1['##test'] omap2, lmap2 = circ2(test_input2) # 3a. Combine outputs/latch outs. omap12_expected = fn.merge(fn.omit(omap1, '##test'), omap2) lmap12_expected = fn.merge(lmap1, lmap2) test_input12 = fn.merge(test_input1, test_input2) omap12, lmap12 = circ12(test_input12) assert lmap12 == lmap12_expected assert omap12 == omap12_expected
def unflatten(flat):
    """Unflatten a list in a dict structure, keyed by each record's "ID"."""
    result = {}
    for record in flat:
        # Later records with a duplicate ID overwrite earlier ones.
        result[record["ID"]] = F.omit(record, ["ID"])
    return result
from funcy import omit

# Omitting the only key leaves an empty dict.
result = omit({'x': ''}, ['x'])
print(result)
def omit(mapping, keys):
    """Return a plain-dict copy of ``mapping`` without ``keys``."""
    plain = dict(mapping)
    return fn.omit(plain, keys)