def _get_refs_from(self, key, type_name, field_name):
    """Return every object of *type_name* whose *field_name* equals *key*."""
    collection = self.db.collection(type_name)
    matching = collection.where(field_name, u'==', key)
    objs = []
    for snapshot in self._stream_ref(matching):
        wrapper = ResultWrapper.from_couchdb_json(self.value_from_snapshot(snapshot))
        objs.append(GenericObject._from_doc(self, wrapper.key, wrapper.rev, wrapper.value))
    return objs
def _query_items_simple(self, firebase_query):
    """Run the query once and wrap every matching document as a GenericObject."""
    return [
        GenericObject.from_row(
            self,
            ResultWrapper.from_couchdb_json(self.value_from_snapshot(snapshot)))
        for snapshot in firebase_query.get()
    ]
def _query_items_simple(self, firebase_query):
    """Stream the query and wrap each matching document as a GenericObject."""
    wrappers = (
        ResultWrapper.from_couchdb_json(self.value_from_snapshot(snap))
        for snap in self._stream_ref(firebase_query)
    )
    return [GenericObject.from_row(self, wrapper) for wrapper in wrappers]
def _get_refs_from(self, key, type_name, field_name):
    """Return every object of *type_name* whose *field_name* equals *key*."""
    query_ref = self.db.collection(type_name).where(field_name, u'==', key)
    wrappers = [
        ResultWrapper.from_couchdb_json(self.value_from_snapshot(snap))
        for snap in query_ref.get()
    ]
    return [
        GenericObject._from_doc(self, w.key, w.rev, w.value)
        for w in wrappers
    ]
def _set(self, key, input_value, rev=None):
    """Serialise, validate and write a document keyed by *key*.

    Returns a ResultWrapper built from the full (metadata-bearing) value.
    Raises FamValidationError when schema validation fails and a plain
    Exception when the database is read only.
    """
    # Refuse writes up front, before serialising or touching the database
    # for the uniqueness check (the sibling _set variant guards first too).
    if self.read_only:
        raise Exception("This db is read only")
    value = self.data_adapter.serialise(input_value)
    self._check_uniqueness(key, value)
    if self.validator is not None:
        # Stamp the schema id onto the doc when one is registered for this type.
        if "namespace" in value and "schema" not in value:
            schema_id = self.validator.schema_id_for(value["namespace"], value["type"])
            if schema_id is not None:
                value["schema"] = schema_id
        try:
            self.validator.validate(value)
        except jsonschema.ValidationError as e:
            raise FamValidationError(e) from e
    type_name = value["type"]  # renamed from "type" to avoid shadowing the builtin
    value["_id"] = key
    # The stored document does not carry type/namespace; strip them off a copy
    # so the returned wrapper still sees the full value.
    sans_metadata = copy.deepcopy(value)
    del sans_metadata["type"]
    del sans_metadata["namespace"]
    self.db.collection(type_name).document(key).set(sans_metadata)
    return ResultWrapper.from_couchdb_json(value)
def _get(self, key, class_name):
    """Fetch a single document as a ResultWrapper, or None if it doesn't exist."""
    resolved_name = self._work_out_class(key, class_name)
    ref = self.db.collection(resolved_name).document(key)
    snapshot = self._get_ref(ref)
    if snapshot.exists:
        return ResultWrapper.from_couchdb_json(self.value_from_snapshot(snapshot))
    return None
def _set(self, key, value, rev=None):
    """Upsert *value* under *key*, optionally CAS-guarded by *rev*.

    Returns a ResultWrapper carrying the new CAS token.
    Raises FamWriteError when the database is read only and
    FamResourceConflict on a key/CAS conflict.
    """
    if self.read_only:
        raise FamWriteError("You can't write to this database")
    try:
        if rev is not None:
            result = self.bucket.upsert(key, value, cas=rev)
        else:
            result = self.bucket.upsert(key, value)
        return ResultWrapper(key, result.cas, value)
    except KeyExistsError as e:
        # Message typo fixed: "alreday" -> "already".
        raise FamResourceConflict("key already exists in couchbase: %s - %s" % (key, e))
def query_items_iterator(self, firebase_query, batch_size, order_by=u'_id'):
    """Lazily yield a GenericObject for every document matching the query."""
    snapshots = self.query_snapshots_iterator(
        firebase_query, batch_size=batch_size, order_by=order_by)
    for snap in snapshots:
        row = ResultWrapper.from_couchdb_json(self.value_from_snapshot(snap))
        yield GenericObject.from_row(self, row)
def get_single_type(self, namespace, type_name):
    """Return every stored object of *type_name* (namespace kept for API parity)."""
    results = []
    for snapshot in self._stream_ref(self.db.collection(type_name)):
        row = ResultWrapper.from_couchdb_json(self.value_from_snapshot(snapshot))
        results.append(GenericObject._from_doc(self, row.key, row.rev, row.value))
    return results
def _get(self, key, class_name):
    """Fetch a single document as a ResultWrapper, or None when absent.

    Both a non-existent snapshot and a NotFound error from the fetch are
    treated as "no such document".
    """
    single_class_name = self._work_out_class(key, class_name)
    doc_ref = self.db.collection(single_class_name).document(key)
    # Keep the try narrow: only the fetch may legitimately raise NotFound.
    try:
        snapshot = doc_ref.get()
    except NotFound:
        return None
    if not snapshot.exists:
        return None
    # Deserialisation happens outside the try so that an unexpected error
    # there surfaces instead of being silently swallowed as None.
    as_json = self.value_from_snapshot(snapshot)
    return ResultWrapper.from_couchdb_json(as_json)
def _n1ql_with_rev(self, query, *args, **kwargs):
    """Run a N1QL query, wrapping each row with its sync-gateway revision.

    The bucket name is discovered from the first row by removing the
    "id" and "cas" keys and taking the remaining one; the document body
    is read from that key. Returns a list of ResultWrappers.
    """
    n1ql = N1QLQuery(query, *args, **kwargs)
    results = []
    bucket_name = None
    for row in self.bucket.n1ql_query(n1ql):
        if bucket_name is None:
            # list() is required: on Python 3 dict.keys() returns a view
            # that has no .remove() method, so the original code crashed.
            keys = list(row.keys())
            keys.remove("id")
            keys.remove("cas")
            bucket_name = keys[0]
        rev = row["_sync"]["rev"]
        results.append(ResultWrapper(row["id"], rev, row[bucket_name]))
    return results
def query_wrappers_iterator(self, db, firebase_query, batch_size):
    """Yield a ResultWrapper per document, paging through the query.

    Pages are ordered by ("schema", "_id") and fetched with start_after
    cursors, batch_size documents at a time, until a page comes back empty.
    """
    page = firebase_query.order_by("schema").order_by(u"_id").limit(batch_size)
    while True:
        batch = list(page.stream())
        if not batch:
            return
        for snapshot in batch:
            yield ResultWrapper.from_couchdb_json(db.value_from_snapshot(snapshot))
        # Cursor for the next page comes from the last document we saw.
        tail = batch[-1].to_dict()
        page = (firebase_query.order_by("schema")
                .order_by(u"_id")
                .start_after({"_id": tail["_id"], "schema": tail["schema"]})
                .limit(batch_size))
def _set(self, key, input_value, rev=None):
    """Serialise and write a document, honouring unique-field constraints.

    Returns a ResultWrapper built from the full (metadata-bearing) value.
    Raises a plain Exception when the database is read only.
    """
    if self.read_only:
        raise Exception("This db is read only")
    value = self.data_adapter.serialise(input_value)
    type_name = value["type"]
    namespace = value["namespace"]
    if self.validator is not None:
        # Stamp the schema id onto the doc when one is registered for this
        # type. NOTE: this path only stamps the id — it does not run full
        # schema validation (the call was deliberately disabled here).
        if "namespace" in value and "schema" not in value:
            schema_id = self.validator.schema_id_for(namespace, type_name)
            if schema_id is not None:
                value["schema"] = schema_id
    value["_id"] = key
    # The stored document does not carry type/namespace; strip them off a
    # copy so the returned wrapper still sees the full value.
    sans_metadata = copy.deepcopy(value)
    del sans_metadata["type"]
    del sans_metadata["namespace"]
    unique_field_names = self._check_for_unique_fields(namespace, type_name, value)
    if unique_field_names:
        # Unique fields need a transaction so the reservation docs and the
        # document itself are written atomically.
        transaction = self.db.transaction()
        set_with_unique_fields(transaction, self.db, type_name, key,
                               sans_metadata, unique_field_names)
    else:
        doc_ref = self.db.collection(type_name).document(key)
        self._set_ref(doc_ref, sans_metadata)
    return ResultWrapper.from_couchdb_json(value)
def get_single_type(self, namespace, type_name):
    """Return every stored object of *type_name* (namespace kept for API parity)."""
    docs = self.db.collection(type_name).get()
    wrappers = (ResultWrapper.from_couchdb_json(doc.to_dict()) for doc in docs)
    return [
        GenericObject._from_doc(self, w.key, w.rev, w.value)
        for w in wrappers
    ]
def query_items_iterator(self, firebase_query, batch_size, order_by=u'_id'):
    """Generate a GenericObject for each document matched by the query."""
    make_wrapper = ResultWrapper.from_couchdb_json
    for snapshot in self.query_snapshots_iterator(firebase_query,
                                                  batch_size=batch_size,
                                                  order_by=order_by):
        yield GenericObject.from_row(
            self, make_wrapper(self.value_from_snapshot(snapshot)))
def _get(self, key, class_name=None):
    """Fetch a raw document by key; None when the key is missing."""
    try:
        hit = self.bucket.get(key)
    except NotFoundError:
        return None
    return ResultWrapper(key, hit.cas, hit.value)
def _n1ql(self, query, *args, **kwargs):
    """Run a N1QL query and wrap each row as a ResultWrapper.

    NOTE(review): the document body is read from row["test"] — a
    hard-coded bucket name that looks like a test leftover; confirm it
    matches the deployed bucket (sibling _n1ql_with_rev discovers the
    bucket name dynamically).
    """
    n1ql = N1QLQuery(query, *args, **kwargs)
    # Iterate the row iterator directly instead of materialising it first.
    return [
        ResultWrapper(row["$1"]["id"], row["$1"]["cas"], row["test"])
        for row in self.bucket.n1ql_query(n1ql)
    ]