def _start(self, term, **global_opt_args):
    """Build a START query for *term* and hand it to ``_send_query``.

    Assigns the next token on this connection, packs *term* and the
    global optargs into a protobuf ``Query``, and dispatches it.
    """
    token = self.next_token
    self.next_token += 1

    # Construct the protobuf query envelope.
    query = p.Query()
    query.type = p.Query.START
    query.token = token

    # The 'db' optarg defaults to this connection's database when the
    # caller did not supply one; either way it is wrapped as a DB term.
    if 'db' in global_opt_args:
        global_opt_args['db'] = DB(global_opt_args['db'])
    elif self.db:
        global_opt_args['db'] = DB(self.db)

    for opt_name, opt_value in global_opt_args.items():
        optarg_pair = query.global_optargs.add()
        optarg_pair.key = opt_name
        expr(opt_value).build(optarg_pair.val)

    # Compile the term itself into the query's protobuf slot.
    term.build(query.query)
    return self._send_query(query, term, global_opt_args)
def _start(self, term, **global_opt_args):
    """Serialize *term* as a START query and dispatch it on this connection."""
    token = self.next_token
    self.next_token += 1

    # Protobuf envelope for the outgoing query.
    query = p.Query()
    query.type = p.Query.START
    query.token = token

    # Wrap the caller-supplied 'db' optarg as a DB term; when absent,
    # fall back to the connection-level default database (if any).
    try:
        global_opt_args['db'] = DB(global_opt_args['db'])
    except KeyError:
        if self.db:
            global_opt_args['db'] = DB(self.db)

    for opt_key in global_opt_args:
        pair = query.global_optargs.add()
        pair.key = opt_key
        expr(global_opt_args[opt_key]).build(pair.val)

    # Compile the term into the query's protobuf slot and send it off.
    term.build(query.query)
    return self._send_query(query, term, global_opt_args)
def __init__(self, *args, **optargs):
    """Coerce positional args to ReQL terms and split out special optargs."""
    # Every positional argument becomes a ReQL term up front.
    self._args = list(map(expr, args))

    # 'raw', 'data' and 'table' are consumed here and excluded from the
    # generic optarg mapping; each defaults to None when absent.
    self.raw = optargs.pop('raw', None)
    self.data = optargs.pop('data', None)
    self.table = optargs.pop('table', None)

    # Whatever remains is coerced to ReQL terms as well.
    self.optargs = {key: expr(value) for key, value in dict_items(optargs)}
def parse_rules(rules):
    """Normalize a sequence of rule documents into a ReQL expression.

    A rule whose 'value' field is the empty byte string has that field
    dropped; any other rule has 'value' replaced by its array form.
    """
    def _normalize(rule):
        # branch() evaluates server-side: empty value -> drop the field,
        # otherwise -> merge in the array representation.
        return (rule['value'] == b'').branch(
            rule.without('value'),
            rule.merge({'value': _value_to_array(rule)}),
        )

    return r.expr({'rules': rules.map(_normalize)})
def conn(self, test_connection=True):
    """Return a per-process cached connection, creating one if needed.

    Args:
        test_connection: when True, probe an existing cached connection
            with a trivial query and discard it if the probe fails.

    Returns:
        The cached (or freshly created) connection for this process.
    """
    if not hasattr(self.__local, 'connCache'):
        self.__local.connCache = {}
    # Hoist the invariants: the pid cannot change within this call, and
    # the cache dict is the same object throughout (aliasing it keeps
    # mutations visible on self.__local.connCache).
    cache = self.__local.connCache
    pid = os.getpid()

    # Check whether an existing connection for this process is still good.
    if pid in cache and test_connection:
        try:
            ast.expr(0).run(cache[pid])
        except errors.ReqlError:
            # Probe failed: drop the stale connection so it is recreated.
            del cache[pid]

    # Cache a new connection for this process if none survives.
    if pid not in cache:
        cache[pid] = net.make_connection(
            net.DefaultConnection, **self.__connectOptions)

    return cache[pid]
def serialize(self, json_encoder):
    """Return the wire-format bytes for this query.

    Layout: an 8-byte little-endian token, a 4-byte payload length,
    then the UTF-8 JSON payload of ``[type, term?, options?]``.
    """
    # Assemble the payload list, omitting absent parts.
    payload = [self.type]
    if self.term is not None:
        payload.append(self.term)
    if self.options is not None:
        payload.append(expr(self.options))

    encoded = json_encoder.encode(payload).encode('utf-8')
    return struct.pack('<QL', self.token, len(encoded)) + encoded
def serialize(self, reql_encoder=None):
    """Return the wire-format bytes for this query.

    Layout: an 8-byte little-endian token, a 4-byte payload length,
    then the UTF-8 JSON payload of ``[type, term?, global_optargs?]``.

    Args:
        reql_encoder: encoder used for the JSON payload. When omitted,
            a fresh ReQLEncoder is created per call. (The previous
            default of ``ReQLEncoder()`` was evaluated once at import
            time, so a single instance was constructed at module load
            and silently shared by every call.)
    """
    if reql_encoder is None:
        reql_encoder = ReQLEncoder()

    message = [self.type]
    if self.term is not None:
        message.append(self.term)
    if self.global_optargs is not None:
        message.append(expr(self.global_optargs))

    query_str = reql_encoder.encode(message).encode("utf-8")
    query_header = struct.pack("<QL", self.token, len(query_str))
    return query_header + query_str
# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ------------------------------------------------------------------------------ import rethinkdb.ast as r from rethinkdb.errors import ReqlNonExistenceError from api.errors import ApiInternalError VAL_TYPE_INT = r.expr( ["REQUIRE_SOURCE_QUANTITIES", "REQUIRE_TARGET_QUANTITIES"]) def fetch_latest_block_num(): try: return r.table('blocks')\ .max(index='block_num')\ .get_field('block_num') except ReqlNonExistenceError: raise ApiInternalError('No block data found in state') def fetch_holdings(holding_ids): return r.table('holdings')\ .get_all(r.args(holding_ids), index='id')\ .filter(lambda holding: (