def conjunct(checks):
    """Combine a list of check functions into one conjunction check."""
    # No checks: everything passes; one check: no wrapping needed.
    if not checks:
        return lambda v: True
    if len(checks) == 1:
        return checks[0]
    # Otherwise AND together every check applied to the value.
    return lambda v: r.and_(*(check(v) for check in checks))
def claim_sites(self, n=1):
    """Claim up to ``n`` ACTIVE sites for crawling and return them.

    Selects candidate sites with a majority-read subquery and claims them
    with a single conditional ``update`` so that concurrent workers cannot
    claim the same site.  A claim older than one hour is treated as stale
    and may be re-claimed; each job's ``max_claimed_sites`` is honored.

    Raises:
        brozzler.NothingToClaim: if no site could be claimed.
    """
    result = (
        self.rr.table('sites').get_all(
            r.args(
                # Candidate selection: ACTIVE sites, unclaimed first,
                # least recently disclaimed first.
                r.db(self.rr.dbname).table(
                    'sites', read_mode='majority').between(
                        ['ACTIVE', r.minval], ['ACTIVE', r.maxval],
                        index='sites_last_disclaimed').order_by(
                            r.desc('claimed'), 'last_disclaimed').fold(
                    # Accumulator: per-job count of sites emitted so far,
                    # keyed by job_id coerced to a string.
                    {}, lambda acc, site: acc.merge(
                        r.branch(
                            site.has_fields('job_id'),
                            r.object(
                                site['job_id'].coerce_to('string'),
                                acc[
                                    site['job_id'].coerce_to('string')].
                                default(0).add(1)),
                            {})),
                    # Emit a site id only if the site is claimable
                    # (unclaimed, or claim is > 1h stale) and its job is
                    # still under max_claimed_sites.
                    emit=lambda acc, site, new_acc: r.branch(
                        r.and_(
                            r.or_(
                                site['claimed'].not_(),
                                site[
                                    'last_claimed'].lt(r.now().sub(60 * 60
                                    ))),
                            r.or_(
                                site.has_fields('max_claimed_sites').not_(
                                ),
                                new_acc[site['job_id'].coerce_to(
                                    'string')].le(site['max_claimed_sites'
                                    ]))),
                        [site['id']], [])).limit(n))).update(
            # try to avoid a race condition resulting in multiple
            # brozzler-workers claiming the same site
            # see https://github.com/rethinkdb/rethinkdb/issues/3235#issuecomment-60283038
            r.branch(
                r.or_(r.row['claimed'].not_(),
                      r.row['last_claimed'].lt(r.now().sub(60 * 60))),
                {
                    'claimed': True,
                    'last_claimed': r.now()
                }, {}),
            return_changes=True)).run()
    # Anywhere from 0 to n sites may have been claimed (replaced).
    self._vet_result(
        result, replaced=list(range(n + 1)),
        unchanged=list(range(n + 1)))
    sites = []
    for i in range(result["replaced"]):
        # old_val still marked claimed => we took over a stale claim.
        if result["changes"][i]["old_val"]["claimed"]:
            self.logger.warn(
                "re-claimed site that was still marked 'claimed' "
                "because it was last claimed a long time ago "
                "at %s, and presumably some error stopped it from "
                "being disclaimed",
                result["changes"][i]["old_val"]["last_claimed"])
        site = brozzler.Site(self.rr, result["changes"][i]["new_val"])
        sites.append(site)
    if sites:
        return sites
    else:
        raise brozzler.NothingToClaim
def claim_sites(self, n=1):
    """Claim up to ``n`` ACTIVE sites for brozzling and return them.

    Candidate sites are selected with a majority-read subquery, filtered
    by claim staleness (claims older than one hour may be re-claimed) and
    per-job ``max_claimed_sites`` limits, then claimed with a single
    conditional update so concurrent workers cannot claim the same site.

    Raises:
        brozzler.NothingToClaim: if no site could be claimed.
    """
    self.logger.trace('claiming up to %s sites to brozzle', n)
    result = (
        self.rr.table('sites').get_all(r.args(
            r.db(self.rr.dbname).table('sites', read_mode='majority')
            .between(
                ['ACTIVE', r.minval], ['ACTIVE', r.maxval],
                index='sites_last_disclaimed')
            .order_by(r.desc('claimed'), 'last_disclaimed')
            .fold(
                # Accumulator: running per-job count of emitted sites,
                # keyed by job_id coerced to a string.
                {}, lambda acc, site: acc.merge(
                    r.branch(
                        site.has_fields('job_id'),
                        r.object(
                            site['job_id'].coerce_to('string'),
                            acc[site['job_id'].coerce_to('string')].default(0).add(1)),
                        {})),
                # Emit a site id only when the site is claimable and its
                # job is still under max_claimed_sites.
                emit=lambda acc, site, new_acc: r.branch(
                    r.and_(
                        r.or_(
                            site['claimed'].not_(),
                            site['last_claimed'].lt(r.now().sub(60*60))),
                        r.or_(
                            site.has_fields('max_claimed_sites').not_(),
                            new_acc[site['job_id'].coerce_to('string')].le(site['max_claimed_sites']))),
                    [site['id']], []))
            .limit(n)))
        .update(
            # try to avoid a race condition resulting in multiple
            # brozzler-workers claiming the same site
            # see https://github.com/rethinkdb/rethinkdb/issues/3235#issuecomment-60283038
            r.branch(
                r.or_(
                    r.row['claimed'].not_(),
                    r.row['last_claimed'].lt(r.now().sub(60*60))),
                {'claimed': True, 'last_claimed': r.now()}, {}),
            return_changes=True)).run()
    # Anywhere from 0 to n sites may have been claimed (replaced).
    self._vet_result(
        result, replaced=list(range(n+1)),
        unchanged=list(range(n+1)))
    sites = []
    for i in range(result["replaced"]):
        # old_val still marked claimed => we took over a stale claim.
        if result["changes"][i]["old_val"]["claimed"]:
            self.logger.warn(
                "re-claimed site that was still marked 'claimed' "
                "because it was last claimed a long time ago "
                "at %s, and presumably some error stopped it from "
                "being disclaimed",
                result["changes"][i]["old_val"]["last_claimed"])
        site = brozzler.Site(self.rr, result["changes"][i]["new_val"])
        sites.append(site)
    self.logger.debug('claimed %s sites', len(sites))
    if sites:
        return sites
    else:
        raise brozzler.NothingToClaim
def check(v):
    """Build a ReQL check for an array value ``v`` against ``items``.

    ``items`` (from the enclosing scope) is either a single schema dict
    that every element must satisfy, or a list of schemas validated
    positionally against the corresponding elements of ``v``.
    """
    # FIX: removed leftover debug print() that wrote to stdout on every
    # dict-schema validation.
    if isinstance(items, dict):
        # Single schema: valid iff no element fails validation, i.e. the
        # set of failing elements is empty.
        return v.filter(lambda x: ~validate(items)(x)).is_empty()
    elif isinstance(items, list):
        # Tuple-style schema: element i must satisfy items[i].
        return r.and_(*[
            r.do(v.nth(i), validator)
            for i, validator in enumerate(map(validate, items))
        ])
    # NOTE(review): implicitly returns None for any other `items` type --
    # presumably unreachable given upstream schema handling; confirm.
def prop_check(v):
    """AND together a per-property check for every schema entry in ``arg``.

    Each property is validated only when it is present on ``v``; a
    missing property counts as passing.
    """
    checks = [
        r.branch(
            v.has_fields(name),
            r.do(v[name], validate(schema, self.path + '/' + name)),
            True,
        )
        for name, schema in arg.items()
    ]
    return r.and_(*checks)
async def create_subscription(user_id, serial, voice):
    """Append a subscription entry for ``serial`` to the user's list.

    The update only applies to the row matching ``user_id`` and only when
    an identical subscription entry is not already present.  ``voice`` is
    accepted for interface compatibility but unused here.
    """
    subscription = {
        "id": serial["id"],
        "excluded_voices": [],
        "title": serial["title"]
    }
    # Guard against duplicate entries: skip users already subscribed.
    not_yet_subscribed = r.not_(
        r.row["serials"].default([]).contains(subscription))
    query = User.manager.table.filter(
        r.and_(r.row["id"] == user_id, not_yet_subscribed)
    ).update({
        "serials": r.row["serials"].default([]).append(subscription)
    })
    await User.manager.execute(query)
async def create_serials_message(search_query, page_number, limit=10):
    """Build a paginated Telegram message listing serials matching a search.

    Fetches one page of serials whose ``search_field`` matches the
    lower-cased ``search_query``, keeping recent (year >= 2017, defaulting
    to 2019 when missing) or unfinished ones, newest first.  Returns the
    kwargs for a bot send call: HTML text, inline pagination keyboard and
    parse mode.
    """
    page_start = page_number * limit - limit
    serials = await Serial.manager.execute(
        Serial.manager.table
        .order_by(r.desc("year"))
        .filter(
            r.and_(
                r.or_(
                    r.row["year"].default(2019) >= 2017,
                    r.row["finished"] == False),
                r.row["search_field"].match(search_query.lower())))
        # slice end is start + limit + 1: fetches one row past the page.
        .slice(page_start, page_start + limit + 1))
    log.debug(f"Create serial message page {serials}")

    # One text entry per serial; optional parts collapse to "".
    entries = []
    for serial in serials:
        origin = (f'({quote_html(serial["origin_title"])})'
                  if serial["origin_title"] else "")
        year = str(serial["year"]) if serial["year"] else ""
        finished_mark = hbold("\nЗавершён" if serial["finished"] else "")
        entries.append(text(
            hbold(serial["title"]),
            origin,
            year,
            finished_mark,
            f'\n/serial_{serial["id"]}\n'))
    msg = text(*entries, sep="\n")

    # Pagination keyboard: "next" always; "prev" only past page 1;
    # "back to start" on its own row above the nav row.
    keyboard = types.InlineKeyboardMarkup(row_width=2)
    nav_row = [
        types.InlineKeyboardButton(">", callback_data="next_search_page")
    ]
    if page_number > 1:
        nav_row.insert(
            0,
            types.InlineKeyboardButton("<", callback_data="prev_search_page"))
    keyboard.add(
        types.InlineKeyboardButton(
            "В начало", callback_data="start_search_page"))
    keyboard.add(*nav_row)

    return {
        "text": msg,
        "reply_markup": keyboard,
        "parse_mode": ParseMode.HTML
    }
def get(self, id=None, o=None, a=None, b=None):
    """
    - GET /testbeds (public) Testbed list
    - GET /testbeds/<id> (public) Testbed with <id>
    - GET /testbeds/<id>/resources (auth) Resource list of the testbed <id>
    - GET /testbeds/<id>/resources?timestamp_start=<XXX>&timestamp_end=<XXX>
      (auth) Resource list of the testbed <id> that are available within a
      time range
    - GET /testbeds/<id>/leases Leases list of the testbed with the <id>
    - GET /testbeds/<id>/leases?timestamp_start=<XXX>&timestamp_end=<XXX>
      (auth) Leases list of the testbed <id> within a time range
    :return:
    """
    response = []
    current_user = self.get_current_user()

    # Optional time-range query parameters.
    ts = self.get_argument('timestamp_start', None)
    te = self.get_argument('timestamp_end', None)

    # GET /testbeds
    if not id and not o:
        cursor = yield r.table('testbeds') \
                        .run(self.dbconnection)
        while (yield cursor.fetch_next()):
            testbed = yield cursor.next()
            response.append(testbed)
    # GET /testbeds/<id>
    elif not o and id and self.isUrn(id):
        cursor = yield r.table('testbeds') \
                        .filter({'id': id}) \
                        .run(self.dbconnection)
        while (yield cursor.fetch_next()):
            testbed = yield cursor.next()
            response.append(testbed)
    # GET /testbeds/<id>/resources (no time range)
    elif id and self.isUrn(id) and o == 'resources' and not ts and not te:
        cursor = yield r.table(o) \
                        .filter(lambda resource: resource["testbed"] == id) \
                        .run(self.dbconnection)
        while (yield cursor.fetch_next()):
            item = yield cursor.next()
            response.append(item)
    # GET /testbeds/<id>/resources?timestamp_start=<XXX>&timestamp_end=<XXX>
    elif id and self.isUrn(id) and o == 'resources':
        try:
            nb_leases = yield r.table("leases").count().run(self.dbconnection)
            if nb_leases > 0:
                # Resources not referenced by any lease at all.
                # NOTE(review): 'available' is compared to the *string*
                # 'true', not a boolean -- matches the schema presumably;
                # confirm against the writers of this table.
                cursor = yield r.table(o) \
                    .filter(lambda resource: resource["testbed"] == id) \
                    .filter({'available': 'true'}) \
                    .filter(lambda resource:
                        r.table("leases").map(
                            lambda l: l['resources'].coerce_to('array')
                        ).reduce(
                            lambda left, right: left.set_union(right)
                        ).contains(resource['id']).not_()
                    ).run(self.dbconnection)
                while (yield cursor.fetch_next()):
                    item = yield cursor.next()
                    response.append(item)
                if ts and te:
                    # Resources whose leases all fall outside [ts, te]:
                    # a lease is outside iff it starts after te or ends
                    # before ts.
                    in_leases = yield r.table("leases").filter(lambda l:
                            r.or_(l['start_time'].gt(int(te)),
                                  l['end_time'].lt(int(ts)))
                        ).map(lambda l: l['resources'].coerce_to('array')
                        ).reduce(lambda left, right: left.set_union(right)
                        ).map(lambda x: r.table('resources').get(x)
                        ).filter({'testbed': id}).run(self.dbconnection)
                    response = response + in_leases
                if ts and not te:
                    # FIX: previously filtered on int(te) although te is
                    # None on this branch (TypeError).  For the window
                    # [ts, +inf) a lease is outside it iff it ends
                    # before ts.
                    in_leases = yield r.table("leases").filter(lambda l:
                            l['end_time'].lt(int(ts))
                        ).map(lambda l: l['resources'].coerce_to('array')
                        ).reduce(lambda left, right: left.set_union(right)
                        ).map(lambda x: r.table('resources').get(x)
                        ).filter({'testbed': id}).run(self.dbconnection)
                    response = response + in_leases
                if not ts and te:
                    # FIX: previously filtered on int(ts) although ts is
                    # None on this branch (TypeError).  For the window
                    # (-inf, te] a lease is outside it iff it starts
                    # after te.
                    in_leases = yield r.table("leases").filter(lambda l:
                            l['start_time'].gt(int(te))
                        ).map(lambda l: l['resources'].coerce_to('array')
                        ).reduce(lambda left, right: left.set_union(right)
                        ).map(lambda x: r.table('resources').get(x)
                        ).filter({'testbed': id}).run(self.dbconnection)
                    response = response + in_leases
            else:
                # No leases in DB: every available resource of the testbed.
                cursor = yield r.table(o) \
                    .filter(lambda resource: resource["testbed"] == id) \
                    .filter({'available': 'true'}) \
                    .run(self.dbconnection)
                while (yield cursor.fetch_next()):
                    item = yield cursor.next()
                    response.append(item)
        except Exception as e:
            logger.exception(e)
    # GET /testbeds/<id>/leases (no time range)
    elif id and self.isUrn(id) and o == 'leases' and not ts and not te:
        cursor = yield r.table(o) \
            .filter(lambda ls: ls["testbed"] == id) \
            .run(self.dbconnection)
        while (yield cursor.fetch_next()):
            item = yield cursor.next()
            response.append(item)
    # GET /testbeds/<id>/leases?timestamp_start=<XXX>&timestamp_end=<XXX>
    # -- leases fully contained in [ts, te]
    elif id and self.isUrn(id) and o == 'leases' and ts and te:
        cursor = yield r.table(o) \
            .filter(lambda ls: ls["testbed"] == id) \
            .filter(lambda l: r.and_(l['start_time'].ge(int(ts)),
                                     l['end_time'].le(int(te)))
            ).run(self.dbconnection)
        while (yield cursor.fetch_next()):
            item = yield cursor.next()
            response.append(item)
    # GET /testbeds/<id>/leases?timestamp_start=<XXX>
    elif id and self.isUrn(id) and o == 'leases' and ts and not te:
        cursor = yield r.table(o) \
            .filter(lambda ls: ls["testbed"] == id) \
            .filter(lambda l: l['start_time'].ge(int(ts))
            ).run(self.dbconnection)
        while (yield cursor.fetch_next()):
            item = yield cursor.next()
            response.append(item)
    # GET /testbeds/<id>/leases?timestamp_end=<XXX>
    elif id and self.isUrn(id) and o == 'leases' and not ts and te:
        cursor = yield r.table(o) \
            .filter(lambda ls: ls["testbed"] == id) \
            .filter(lambda l: l['end_time'].le(int(te))
            ).run(self.dbconnection)
        while (yield cursor.fetch_next()):
            item = yield cursor.next()
            response.append(item)
    else:
        self.userError("invalid request")
        return

    self.finish(json.dumps({"result": response}, cls=myJSONEncoder))
def and_(self, a):
    """Curried version of ``r.and_`` (accepts only 2 arguments).

    Returns a QC wrapping a one-argument function that ANDs its input
    with ``a``.
    """
    def apply(b):
        return r.and_(b, a)
    return QC(apply)
def get(self, o=None):
    """
    Leases list

    - GET /leases
    - GET /leases/<id>
    - GET /leases?timestamp_start=<xxx>&timestamp_end=<xxx>
    :return:
    """
    # All lease listings require an authenticated user.
    if not self.get_current_user():
        self.userError('permission denied user not logged in')
        return

    leases = []
    # Optional time-range query parameters.
    ts = self.get_argument('timestamp_start', None)
    te = self.get_argument('timestamp_end', None)
    # GET /leases
    if not o and not ts and not te:
        cursor = yield r.table('leases').run(self.dbconnection)
        while (yield cursor.fetch_next()):
            result = yield cursor.next()
            leases.append(result)
    # GET /leases/<id>
    elif o and not ts and not te:
        cursor = yield r.table('leases') \
            .filter({'id': o}).run(self.dbconnection)
        while (yield cursor.fetch_next()):
            result = yield cursor.next()
            leases.append(result)
    # GET /leases?timestamp_start=<xxx>&timestamp_end=<xxx>
    # -- leases fully contained in [ts, te]
    elif ts and te:
        cursor = yield r.table('leases') \
            .filter(lambda l: r.and_(l['start_time'].ge(int(ts)),
                                     l['end_time'].le(int(te)))
            ).run(self.dbconnection)
        while (yield cursor.fetch_next()):
            item = yield cursor.next()
            leases.append(item)
    # GET /leases?timestamp_start=<XXX> -- leases starting at/after ts
    elif ts and not te:
        cursor = yield r.table('leases') \
            .filter(lambda l: l['start_time'].ge(int(ts))
            ).run(self.dbconnection)
        while (yield cursor.fetch_next()):
            item = yield cursor.next()
            leases.append(item)
    # GET /leases?timestamp_end=<XXX> -- leases ending at/before te
    elif not ts and te:
        cursor = yield r.table('leases') \
            .filter(lambda l: l['end_time'].le(int(te))
            ).run(self.dbconnection)
        while (yield cursor.fetch_next()):
            item = yield cursor.next()
            leases.append(item)
    else:
        self.userError("invalid request")
        return

    self.write(json.dumps({"result": leases}, cls=myJSONEncoder))