def test_using_admin_party_source_and_target(self):
    """Replication doc for an admin-party client must carry no auth info.

    With admin-party access (``creds`` is None) the source/target entries
    of the ``_replicator`` document contain only the database URLs.
    """
    m_admin_party_client = self.setUpClientMocks(admin_party=True)
    m_replicator = mock.MagicMock()
    # admin party: the client exposes no credentials at all
    type(m_replicator).creds = mock.PropertyMock(return_value=None)
    m_admin_party_client.__getitem__.return_value = m_replicator
    # create source/target databases
    src = CouchDatabase(m_admin_party_client, self.source_db)
    tgt = CouchDatabase(m_admin_party_client, self.target_db)
    # trigger replication
    rep = Replicator(m_admin_party_client)
    rep.create_replication(src, tgt, repl_id=self.repl_id)
    kcall = m_replicator.create_document.call_args_list
    # Fix: assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual instead.
    self.assertEqual(len(kcall), 1)
    args, kwargs = kcall[0]
    self.assertEqual(len(args), 1)
    expected_doc = {
        '_id': self.repl_id,
        'source': {
            'url': '/'.join((self.server_url, self.source_db))
        },
        'target': {
            'url': '/'.join((self.server_url, self.target_db))
        }
    }
    self.assertDictEqual(args[0], expected_doc)
    self.assertTrue(kwargs['throw_on_exists'])
def __init__(self, uri, create=False, server=None, **params):
    """Constructor for Database

    @param uri: str, Database uri (server url + "/" + database name)
    @param create: boolean, False by default, if True try to create the
        database on the remote server.
    @param server: Server instance; anything exposing ``next_uuid`` is
        accepted.  When omitted, a new Server is built from the server
        part of ``uri`` (extra ``params`` are forwarded to it).
    """
    # Normalise the uri, then split it into server part and database name.
    self.uri = uri.rstrip('/')
    self.server_uri, self.dbname = self.uri.rsplit("/", 1)
    # The db name may be percent-encoded in the uri; keep a decoded copy
    # for the cloudant layer.
    self.cloudant_dbname = unquote(self.dbname)
    if server is not None:
        # Duck-type check: any object with next_uuid counts as a Server.
        if not hasattr(server, 'next_uuid'):
            raise TypeError('%s is not a couchdbkit.Server instance' %
                            server.__class__.__name__)
        self.server = server
    else:
        self.server = server = Server(self.server_uri, **params)
    self.cloudant_client = self.server.cloudant_client
    validate_dbname(self.dbname)
    self.cloudant_database = CouchDatabase(self.cloudant_client,
                                           self.cloudant_dbname)
    if create:
        self.cloudant_database.create()
    # Cache the raw HTTP session and database url used by low-level calls.
    self._request_session = self.server._request_session
    self.database_url = self.cloudant_database.database_url
def __init__(self, url=DB_URL):
    """Connect to CouchDB and open the application database.

    @param url: CouchDB server URL; defaults to the module's DB_URL.
    """
    # Bug fix: the original passed the constant DB_URL to CouchDB instead
    # of the `url` argument, silently ignoring any caller-supplied URL.
    client = CouchDB(user=USERNAME, auth_token=PASSWORD, url=url,
                     connect=True, auto_renew=True, use_basic_auth=True)
    self.db = CouchDatabase(client, DB_NAME, fetch_limit=100,
                            partitioned=False)
class CouchDBInstance():
    """Thin wrapper around a partitioned CouchDB database of tweets."""

    def __init__(self, url=None):
        """Connect to CouchDB and open the tweets database.

        @param url: server URL; falls back to the module's DB_URL.
            (Bug fix: the original ignored this parameter and always
            connected to DB_URL.  A None sentinel is used so DB_URL is
            resolved at call time, not at class-definition time.)
        """
        if url is None:
            url = DB_URL
        client = CouchDB(user=USERNAME, auth_token=PASSWORD, url=url,
                         connect=True, auto_renew=True, use_basic_auth=True)
        self.db = CouchDatabase(client, DB_NAME, fetch_limit=100,
                                partitioned=False)

    def insertTweet(self, tweet):
        """Insert `tweet` unless a doc with the same tweet_id already exists."""
        q = Query(self.db, use_index="_design/tweet_id",
                  selector={'tweet_id': {'$eq': tweet["tweet_id"]}})
        if q.result[:]:
            return  # duplicate tweet_id: already stored
        # Partitioned document ids have the form "<partition>:<key>".
        tweet['_id'] = ':'.join((PARTITION_KEY, str(uuid.uuid4())))
        self.db.create_document(tweet)

    def delete_all(self):
        """Delete every document in the database."""
        for document in self.db:
            document.delete()

    def loadView(self, filename):
        """Read a design-doc source file and return (map, reduce) sources.

        The file contains the map function first; the first blank line
        separates it from an optional reduce function (one map/reduce
        pair per file).  Lines are stripped and concatenated.

        @return: tuple (map_source, reduce_source), either may be "".
        """
        # Renamed locals: the original shadowed the builtins map/reduce.
        map_src = ""
        reduce_src = ""
        with open(filename) as ddoc:
            val = ""
            seen_break = False
            for line in ddoc.readlines():
                val = val + line.strip()
                if line.strip() == "" and not seen_break:
                    # First blank line: everything so far is the map fn.
                    map_src = val
                    val = reduce_src
                    seen_break = True
            # One map/reduce per file: the remainder is the reduce fn.
            if seen_break:
                reduce_src = val
        return (map_src, reduce_src)
def get_all_missing(db: CouchDatabase) -> List['PhotoEncoding']:
    """Return every PhotoEncoding document currently flagged as missing."""
    query = {
        'type': {'$eq': 'photoencoding'},
        'missing': {'$eq': True},
    }
    return [PhotoEncoding.from_dict(row) for row in db.get_query_result(query)]
def test_using_iam_auth_source_and_target(self):
    """Replication doc for an IAM-auth client embeds the IAM API key.

    Both source and target entries must carry ``auth.iam.api_key`` in
    addition to the database URL, plus the supplied ``user_ctx``.
    """
    m_iam_auth_client = self.setUpClientMocks(iam_api_key=MOCK_API_KEY)
    m_replicator = mock.MagicMock()
    m_iam_auth_client.__getitem__.return_value = m_replicator
    # create source/target databases
    src = CouchDatabase(m_iam_auth_client, self.source_db)
    tgt = CouchDatabase(m_iam_auth_client, self.target_db)
    # trigger replication
    rep = Replicator(m_iam_auth_client)
    rep.create_replication(src, tgt, repl_id=self.repl_id,
                           user_ctx=self.user_ctx)
    kcall = m_replicator.create_document.call_args_list
    # Fix: assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual instead.
    self.assertEqual(len(kcall), 1)
    args, kwargs = kcall[0]
    self.assertEqual(len(args), 1)
    expected_doc = {
        '_id': self.repl_id,
        'user_ctx': self.user_ctx,
        'source': {
            'auth': {
                'iam': {
                    'api_key': MOCK_API_KEY
                }
            },
            'url': '/'.join((self.server_url, self.source_db))
        },
        'target': {
            'auth': {
                'iam': {
                    'api_key': MOCK_API_KEY
                }
            },
            'url': '/'.join((self.server_url, self.target_db))
        }
    }
    self.assertDictEqual(args[0], expected_doc)
    self.assertTrue(kwargs['throw_on_exists'])
def post_order(db: CouchDatabase):
    """Append the posted order to the stored order list and echo it back."""
    new_order = request.json['order']
    stored = db.get(order_key)
    stored['orders'].append(new_order)
    db[order_key] = stored
    return jsonify(new_order)
def test_using_basic_auth_source_and_target(self):
    """Replication doc for a basic-auth client carries Authorization headers.

    Both source and target entries must include the client's basic-auth
    string as an ``Authorization`` header, plus the supplied ``user_ctx``.
    """
    test_basic_auth_header = 'abc'
    m_basic_auth_client = self.setUpClientMocks()
    m_replicator = mock.MagicMock()
    m_basic_auth_client.__getitem__.return_value = m_replicator
    m_basic_auth_client.basic_auth_str.return_value = test_basic_auth_header
    # create source/target databases
    src = CouchDatabase(m_basic_auth_client, self.source_db)
    tgt = CouchDatabase(m_basic_auth_client, self.target_db)
    # trigger replication
    rep = Replicator(m_basic_auth_client)
    rep.create_replication(src, tgt, repl_id=self.repl_id,
                           user_ctx=self.user_ctx)
    kcall = m_replicator.create_document.call_args_list
    # Fix: assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual instead.
    self.assertEqual(len(kcall), 1)
    args, kwargs = kcall[0]
    self.assertEqual(len(args), 1)
    expected_doc = {
        '_id': self.repl_id,
        'user_ctx': self.user_ctx,
        'source': {
            'headers': {
                'Authorization': test_basic_auth_header
            },
            'url': '/'.join((self.server_url, self.source_db))
        },
        'target': {
            'headers': {
                'Authorization': test_basic_auth_header
            },
            'url': '/'.join((self.server_url, self.target_db))
        }
    }
    self.assertDictEqual(args[0], expected_doc)
    self.assertTrue(kwargs['throw_on_exists'])
def get_all_by_ngo(db: CouchDatabase, ngo: str) -> List['PhotoEncoding']:
    """Return all non-missing PhotoEncoding documents belonging to `ngo`."""
    query = {
        '$and': [
            {'type': {'$eq': 'photoencoding'}},
            {'ngoId': {'$eq': ngo}},
            {'missing': {'$eq': False}},
        ]
    }
    return [PhotoEncoding.from_dict(row) for row in db.get_query_result(query)]
class CouchDBTest(unittest.TestCase):
    """Unit tests for CouchDatabase driven entirely through a mocked
    requests session — no network access is performed."""

    def setUp(self):
        # One mock per HTTP verb so each test can stub just what it needs.
        self.mock_session = mock.Mock()
        self.mock_session.get = mock.Mock()
        self.mock_session.post = mock.Mock()
        self.mock_session.put = mock.Mock()
        self.mock_session.delete = mock.Mock()
        self.account = mock.Mock()
        self.account.cloudant_url = "https://bob.cloudant.com"
        self.account.r_session = self.mock_session
        self.username = "******"
        self.db_name = "testdb"
        self.db_url = posixpath.join(self.account.cloudant_url, self.db_name)
        # Database under test, wired to the mocked account/session.
        self.c = CouchDatabase(self.account, self.db_name)
        # Canned GET /<db> metadata payload.
        self.db_info = {
            "update_seq": "1-g1AAAADfeJzLYWBg",
            "db_name": self.db_name,
            "sizes": {
                "file": 1528585,
                "external": 5643,
                "active": None
            },
            "purge_seq": 0,
            "other": {
                "data_size": 5643
            },
            "doc_del_count": 2,
            "doc_count": 13,
            "disk_size": 1528585,
            "disk_format_version": 6,
            "compact_running": False,
            "instance_start_time": "0"
        }
        # Canned _design_docs response with one design document.
        self.ddocs = {
            "rows": [
                {
                    "id": "_design/test",
                    "key": "_design/test",
                    "value": {
                        "rev": "1-4e6d6671b0ba9ba994a0f5e7e8de1d9d"
                    },
                    "doc": {
                        "_id": "_design/test",
                        "_rev": "1-4e6d6671b0ba9ba994a0f5e7e8de1d9d",
                        "views": {
                            "test": {
                                "map": "function (doc) {emit(doc._id, 1);}"
                            }
                        }
                    }
                }
            ]
        }
        # Canned _all_docs response with two documents.
        self.all_docs = {
            "total_rows": 13,
            "offset": 0,
            "rows": [
                {
                    "id": "snipe",
                    "key": "snipe",
                    "value": {
                        "rev": "1-4b2fb3b7d6a226b13951612d6ca15a6b"
                    }
                },
                {
                    "id": "zebra",
                    "key": "zebra",
                    "value": {
                        "rev": "1-750dac460a6cc41e6999f8943b8e603e"
                    }
                }
            ]
        }

    def test_create(self):
        """create() issues a PUT to the database url."""
        mock_resp = mock.Mock()
        mock_resp.status_code = 201
        self.mock_session.put = mock.Mock()
        self.mock_session.put.return_value = mock_resp
        self.c.create()
        self.assertTrue(self.mock_session.put.called)

    def test_delete(self):
        """delete() issues a DELETE to the database url."""
        mock_resp = mock.Mock()
        mock_resp.status_code = 200
        self.mock_session.delete = mock.Mock()
        self.mock_session.delete.return_value = mock_resp
        self.c.delete()
        self.assertTrue(self.mock_session.delete.called)

    def test_db_info(self):
        """exists()/metadata()/doc_count() each GET the db metadata."""
        mock_resp = mock.Mock()
        mock_resp.status_code = 200
        mock_resp.json = mock.Mock(return_value=self.db_info)
        self.mock_session.get = mock.Mock(return_value=mock_resp)
        exists_resp = self.c.exists()
        meta_resp = self.c.metadata()
        count_resp = self.c.doc_count()
        self.assertTrue(self.mock_session.get.called)
        # One GET per call above.
        self.assertEqual(self.mock_session.get.call_count, 3)
        self.assertEqual(exists_resp, True)
        self.assertEqual(meta_resp, self.db_info)
        self.assertEqual(count_resp, self.db_info["doc_count"])

    def test_ddocs(self):
        """design_documents()/list_design_documents() parse the ddoc rows."""
        mock_resp = mock.Mock()
        mock_resp.raise_for_status = mock.Mock(return_value=False)
        mock_resp.json = mock.Mock(return_value=self.ddocs)
        self.mock_session.get = mock.Mock(return_value=mock_resp)
        ddocs = self.c.design_documents()
        ddoc_list = self.c.list_design_documents()
        self.assertTrue(self.mock_session.get.called)
        self.assertEqual(self.mock_session.get.call_count, 2)
        self.assertEqual(ddocs[0]["id"], "_design/test")
        self.assertEqual(ddoc_list[0], "_design/test")

    def test_all_docs(self):
        """all_docs() returns the raw rows; keys(remote=True) just the ids."""
        mock_resp = mock.Mock()
        mock_resp.json = mock.Mock(return_value=self.all_docs)
        self.mock_session.get = mock.Mock(return_value=mock_resp)
        all_docs = self.c.all_docs()
        keys = self.c.keys(remote=True)
        self.assertTrue(self.mock_session.get.called)
        self.assertDictContainsSubset({"id": "snipe"}, all_docs["rows"][0])
        self.assertDictContainsSubset({"id": "zebra"}, all_docs["rows"][1])
        self.assertListEqual(keys, ["snipe", "zebra"])

    def test_bulk_docs(self):
        """bulk_docs(keys) POSTs the key list to _all_docs.

        NOTE(review): in this client version bulk_docs is a bulk *fetch*
        by key (hence the _all_docs endpoint), not a bulk write.
        """
        mock_resp = mock.Mock()
        mock_resp.raise_for_status = mock.Mock(return_value=False)
        self.mock_session.post = mock.Mock(return_value=mock_resp)
        self.c.bulk_docs(['a', 'b', 'c'])
        self.mock_session.post.assert_called_once_with(
            posixpath.join(self.db_url, '_all_docs'),
            data=json.dumps({'keys': ['a', 'b', 'c']})
        )

    def test_bulk_insert(self):
        """bulk_insert(docs) POSTs the docs to _bulk_docs as JSON."""
        mock_resp = mock.Mock()
        mock_resp.raise_for_status = mock.Mock(return_value=False)
        self.mock_session.post = mock.Mock(return_value=mock_resp)
        docs = [
            {
                '_id': 'somedoc',
                'foo': 'bar'
            },
            {
                '_id': 'anotherdoc',
                '_rev': '1-ahsdjkasdgf',
                'hello': 'world'
            }
        ]
        self.c.bulk_insert(docs)
        self.mock_session.post.assert_called_once_with(
            posixpath.join(self.db_url, '_bulk_docs'),
            data=json.dumps({'docs': docs}),
            headers={'Content-Type': 'application/json'}
        )

    def test_db_updates(self):
        """db_updates() yields one update per non-empty feed line."""
        updates_feed = """
        {"dbname": "somedb3", "type": "created", "account": "bob", "seq": "3-g1AAAABteJzLYWBgYMxgTmFQSElKzi9KdUhJMtHLTc1NzTcwMNdLzskvTUnMK9HLSy3JAapkSmTIY2H4DwRZGcyJzLlAIfa0tKQUQ2NTIkzIAgD_wSJc"}
        {"dbname": "somedb2", "type": "updated", "account": "bob", "seq": "4-g1AAAABteJzLYWBgYMxgTmFQSElKzi9KdUhJMtHLTc1NzTcwMNdLzskvTUnMK9HLSy3JAapkSmTIY2H4DwRZGcyJLLlAIfa0tKQUQ2NTIkzIAgAAASJd"}
        {"dbname": "somedb1", "type": "deleted", "account": "bob", "seq": "9-g1AAAABteJzLYWBgYMxgTmFQSElKzi9KdUhJMtHLTc1NzTcwMNdLzskvTUnMK9HLSy3JAapkSmTIY2H4DwRZGcyJnLlAIfa0tKQUQ2NTIkzIAgAA9iJi"}
        {"dbname": "somedb2", "type": "created", "account": "bob", "seq": "11-g1AAAABteJzLYWBgYMxgTmFQSElKzi9KdUhJMtHLTc1NzTcwMNdLzskvTUnMK9HLSy3JAapkSmTIY2H4DwRZGcyJ3LlAIfa0tKQUQ2NTIkzIAgABWCJk"}
        {"dbname": "somedb1", "type": "updated", "account": "bob", "seq": "12-g1AAAABteJzLYWBgYMxgTmFQSElKzi9KdUhJMtHLTc1NzTcwMNdLzskvTUnMK9HLSy3JAapkSmTIY2H4DwRZGcyJPLlAIfa0tKQUQ2NTIkzIAgABiSJl"}
        """
        with mock.patch('cloudant.database.Feed') as mock_feed:
            # Feed the stripped lines (including empty ones) to db_updates;
            # only the five JSON lines should surface as updates.
            feed = (x.strip() for x in updates_feed.split('\n'))
            mock_feed.__iter__ = mock.MagicMock()
            mock_feed.return_value = feed
            updates = [u for u in self.c.db_updates()]
            self.assertEqual(len(updates), 5)
class Database(object):
    """ Object that abstract access to a CouchDB database
    A Database object can act as a Dict object.
    """

    def __init__(self, uri, create=False, server=None, **params):
        """Constructor for Database

        @param uri: str, Database uri (server url + "/" + database name)
        @param create: boolean, False by default, if True try to create
            the database.
        @param server: Server instance; anything exposing ``next_uuid``
            is accepted.  When omitted, a new Server is built from the
            server part of ``uri``.
        """
        self.uri = uri.rstrip('/')
        self.server_uri, self.dbname = self.uri.rsplit("/", 1)
        # dbname may be percent-encoded in the uri; keep the decoded form.
        self.cloudant_dbname = unquote(self.dbname)
        if server is not None:
            # Duck-type check: any object with next_uuid counts as a Server.
            if not hasattr(server, 'next_uuid'):
                raise TypeError('%s is not a couchdbkit.Server instance' %
                                server.__class__.__name__)
            self.server = server
        else:
            self.server = server = Server(self.server_uri, **params)
        self.cloudant_client = self.server.cloudant_client
        validate_dbname(self.dbname)
        self.cloudant_database = CouchDatabase(self.cloudant_client,
                                               self.cloudant_dbname)
        if create:
            self.cloudant_database.create()
        # Cache the raw HTTP session and url used by low-level requests.
        self._request_session = self.server._request_session
        self.database_url = self.cloudant_database.database_url

    def __repr__(self):
        return "<%s %s>" % (self.__class__.__name__, self.dbname)

    def _database_path(self, path):
        # Build an absolute url for a path relative to the database root.
        return '/'.join([self.database_url, path])

    def info(self):
        """ Get database information

        @return: dict
        """
        return self.cloudant_database.metadata()

    def set_security(self, secobj):
        """ set database securrity object

        Replaces the whole security document with ``secobj``.
        """
        with SecurityDocument(self.cloudant_database) as sec_doc:
            # context manager saves on exit; clear existing entries first
            for key in sec_doc:
                del sec_doc[key]
            for k, v in secobj.items():
                sec_doc[k] = v
        return self.get_security()

    def get_security(self):
        """ get database secuirity object """
        return self.cloudant_database.get_security_document()

    def compact(self, dname=None):
        """ compact database
        @param dname: string, name of design doc. Usefull to
        compact a view.
        """
        path = "/_compact"
        if dname is not None:
            path = "%s/%s" % (path, resource.escape_docid(dname))
        path = self._database_path(path)
        res = self._request_session.post(
            path, headers={"Content-Type": "application/json"})
        res.raise_for_status()
        return res.json()

    def view_cleanup(self):
        # Delegate view index cleanup to the cloudant layer.
        return self.cloudant_database.view_cleanup()

    def flush(self):
        """ Remove all docs from a database
        except design docs."""
        # save ddocs (with their attachments inlined) before dropping the db
        all_ddocs = self.all_docs(startkey=u"_design",
                                  endkey=u"_design/\u9999",
                                  include_docs=True)
        ddocs = []
        for ddoc in all_ddocs:
            doc = ddoc['doc']
            old_atts = doc.get('_attachments', {})
            atts = {}
            for name, info in old_atts.items():
                att = {}
                att['content_type'] = info['content_type']
                att['data'] = self.fetch_attachment(ddoc['doc'], name)
                atts[name] = att
            # create a fresh doc
            doc.pop('_rev')
            doc['_attachments'] = resource.encode_attachments(atts)
            ddocs.append(doc)
        # delete db
        self.server.delete_db(self.dbname)
        # we let a chance to the system to sync
        # NOTE(review): breaking while the db is still *present* looks
        # inverted for a post-delete wait — confirm intended semantics.
        times = 0
        while times < 10:
            if self.dbname in self.server:
                break
            time.sleep(0.2)
            times += 1
        # recreate db + ddocs
        self.server.create_db(self.dbname)
        self.bulk_save(ddocs)

    def doc_exist(self, docid):
        """Test if document exists in a database

        @param docid: str, document id
        @return: boolean, True if document exist
        """
        doc = Document(self.cloudant_database, docid)
        return doc.exists()

    def open_doc(self, docid, **params):
        """Get document from database

        Args:
        @param docid: str, document id to retrieve
        @param wrapper: callable. function that takes dict as a param.
        Used to wrap an object.
        @param **params: See doc api for parameters to use:
        http://wiki.apache.org/couchdb/HTTP_Document_API

        @return: dict, representation of CouchDB document as a dict.
        """
        wrapper = None
        if "wrapper" in params:
            wrapper = params.pop("wrapper")
        elif "schema" in params:
            # A schema object with a .wrap method can stand in for wrapper.
            schema = params.pop("schema")
            if not hasattr(schema, "wrap"):
                raise TypeError("invalid schema")
            wrapper = schema.wrap
        attachments = params.get('attachments', False)
        # Normalise docid to the native str type of the interpreter.
        if six.PY2 and isinstance(docid, six.text_type):
            docid = docid.encode('utf-8')
        if six.PY3 and isinstance(docid, bytes):
            docid = docid.decode('utf-8')
        doc = Document(self.cloudant_database, docid)
        try:
            doc.fetch()
        except HTTPError as e:
            if e.response.status_code == 404:
                raise ResourceNotFound(
                    json.loads(e.response.content.decode('utf-8'))['reason'])
            raise
        doc_dict = dict(doc)
        if attachments and '_attachments' in doc_dict:
            # Inline each attachment as base64 data and strip stub metadata.
            for attachment_name in doc_dict['_attachments']:
                attachment_data = doc.get_attachment(attachment_name,
                                                     attachment_type='binary')
                doc_dict['_attachments'][attachment_name][
                    'data'] = base64.b64encode(attachment_data)
                del doc_dict['_attachments'][attachment_name]['stub']
                del doc_dict['_attachments'][attachment_name]['length']
        if wrapper is not None:
            if not callable(wrapper):
                raise TypeError("wrapper isn't a callable")
            return wrapper(doc_dict)
        return doc_dict

    # dict-style alias: db.get(docid) == db.open_doc(docid)
    get = open_doc

    def list(self, list_name, view_name, **params):
        raise NoLongerSupportedException

    def show(self, show_name, doc_id, **params):
        raise NoLongerSupportedException

    def update(self, update_name, doc_id=None, **params):
        raise NoLongerSupportedException

    def all_docs(self, by_seq=False, **params):
        """Get all documents from a database

        This method has the same behavior as a view.

        `all_docs( **params )` is the same as `view('_all_docs', **params)`
        and `all_docs( by_seq=True, **params)` is the same as
        `view('_all_docs_by_seq', **params)`

        You can use all(), one(), first() just like views

        Args:
        @param by_seq: bool, if True the "_all_docs_by_seq" is passed to
        couchdb. It will return an updated list of all documents.

        @return: list, results of the view
        """
        if by_seq:
            try:
                return self.view('_all_docs_by_seq', **params)
            except ResourceNotFound:
                # CouchDB 0.11 or sup
                raise AttributeError(
                    "_all_docs_by_seq isn't supported on Couchdb %s" %
                    self.server.info()[1])
        return self.view('_all_docs', **params)

    def get_rev(self, docid):
        """ Get last revision from docid (the '_rev' member)

        @param docid: str, undecoded document id.
        @return rev: str, the last revision of document.
        """
        response = self._request_session.head(self._database_path(docid))
        try:
            response.raise_for_status()
        except HTTPError as e:
            if e.response.status_code == 404:
                raise ResourceNotFound
            raise
        # https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/ETag
        return response.headers['ETag'].strip('"').lstrip('W/"')

    def save_doc(self, doc, encode_attachments=True, force_update=False,
                 **params):
        """ Save a document. It will use the `_id` member of the document
        or request a new uuid from CouchDB. IDs are attached to
        documents on the client side because POST has the curious property of
        being automatically retried by proxies in the event of network
        segmentation and lost responses. (Idee from `Couchrest
        <http://github.com/jchris/couchrest/>`)

        @param doc: dict.  doc is updated
        with doc '_id' and '_rev' properties returned
        by CouchDB server when you save.
        @param force_update: boolean, if there is conlict, try to update
        with latest revision
        @param params, list of optionnal params, like batch="ok"

        @return res: result of save. doc is updated in the mean time
        """
        if doc is None:
            # NOTE(review): this path leaves `schema` unbound and will
            # raise NameError further down — confirm doc=None is ever used.
            doc1 = {}
        else:
            doc1, schema = _maybe_serialize(doc)
        if '_attachments' in doc1 and encode_attachments:
            doc1['_attachments'] = resource.encode_attachments(
                doc['_attachments'])
        if '_id' in doc1:
            docid = doc1['_id'] if six.PY3 else doc1['_id'].encode('utf-8')
            couch_doc = Document(self.cloudant_database, docid)
            couch_doc.update(doc1)
            try:
                # Copied from Document.save to ensure that a deleted doc
                # cannot be saved.
                headers = {}
                headers.setdefault('Content-Type', 'application/json')
                put_resp = couch_doc.r_session.put(couch_doc.document_url,
                                                   data=couch_doc.json(),
                                                   headers=headers)
                put_resp.raise_for_status()
                data = put_resp.json()
                super(Document, couch_doc).__setitem__('_rev', data['rev'])
            except HTTPError as e:
                if e.response.status_code != 409:
                    raise
                if force_update:
                    # conflict: retry against the latest revision
                    couch_doc['_rev'] = self.get_rev(docid)
                    couch_doc.save()
                else:
                    raise ResourceConflict
            res = couch_doc
        else:
            res = self.cloudant_database.create_document(doc1)
        if 'batch' in params and ('id' in res or '_id' in res):
            # batch mode: the server returns no _rev yet
            doc1.update({'_id': res.get('_id')})
        else:
            doc1.update({'_id': res.get('_id'), '_rev': res.get('_rev')})
        if schema:
            # re-wrap through the schema class so typed fields round-trip
            for key, value in six.iteritems(doc.__class__.wrap(doc1)):
                doc[key] = value
        else:
            doc.update(doc1)
        return {
            'id': res['_id'],
            'rev': res['_rev'],
            'ok': True,
        }

    def save_docs(self, docs, use_uuids=True, new_edits=None, **params):
        """ bulk save. Modify Multiple Documents With a Single Request

        @param docs: list of docs
        @param use_uuids: add _id in doc who don't have it already set.
        @param new_edits: When False, this saves existing revisions
        instead of creating new ones. Used in the replication Algorithm.
        Each document should have a _revisions property that lists its
        revision history.

        .. seealso:: `HTTP Bulk Document API
        <http://wiki.apache.org/couchdb/HTTP_Bulk_Document_API>`
        """
        if not isinstance(docs, (list, tuple)):
            docs = tuple(docs)
        docs1 = []
        docs_schema = []
        for doc in docs:
            doc1, schema = _maybe_serialize(doc)
            docs1.append(doc1)
            docs_schema.append(schema)

        def is_id(doc):
            return '_id' in doc

        if use_uuids:
            # Collect docs without an _id and assign fresh uuids to them.
            noids = []
            for k, g in groupby(docs1, is_id):
                if not k:
                    noids = list(g)
            uuid_count = max(len(noids), self.server.uuid_batch_count)
            for doc in noids:
                nextid = self.server.next_uuid(count=uuid_count)
                if nextid:
                    doc['_id'] = nextid
        payload = {"docs": docs1}
        if new_edits is not None:
            payload["new_edits"] = new_edits
        # update docs
        res = self._request_session.post(
            self._database_path('_bulk_docs'),
            data=json.dumps(payload),
            headers={"Content-Type": "application/json"},
            **params)
        res.raise_for_status()
        results = res.json()
        errors = []
        for i, res in enumerate(results):
            if 'error' in res:
                errors.append(res)
                logging_context = dict(
                    method='save_docs',
                    params=params,
                    error=res['error'],
                )
                error_logger.error("save_docs error", extra=logging_context)
            else:
                # write the new _id/_rev back into the caller's objects
                if docs_schema[i]:
                    docs[i]._doc.update({'_id': res['id'],
                                         '_rev': res['rev']})
                else:
                    docs[i].update({'_id': res['id'], '_rev': res['rev']})
        if errors:
            raise BulkSaveError(errors, results)
        return results

    bulk_save = save_docs

    def delete_docs(self, docs, empty_on_delete=False, **params):
        """ bulk delete.
        It adds '_deleted' member to doc then uses bulk_save to save them.

        @param empty_on_delete: default is False if you want to make
        sure the doc is emptied and will not be stored as is in Apache
        CouchDB.

        .. seealso:: `HTTP Bulk Document API
        <http://wiki.apache.org/couchdb/HTTP_Bulk_Document_API>`
        """
        if empty_on_delete:
            # replace each doc with a deletion stub (id/rev/_deleted only)
            for doc in docs:
                new_doc = {
                    "_id": doc["_id"],
                    "_rev": doc["_rev"],
                    "_deleted": True
                }
                doc.clear()
                doc.update(new_doc)
        else:
            for doc in docs:
                doc['_deleted'] = True
        return self.bulk_save(docs, use_uuids=False, **params)

    bulk_delete = delete_docs

    def delete_doc(self, doc, **params):
        """ delete a document or a list of documents
        @param doc: str or dict,  document id or full doc.
        @return: dict like:

        .. code-block:: python

            {"ok":true,"rev":"2839830636"}
        """
        result = {'ok': False}
        doc1, schema = _maybe_serialize(doc)
        if isinstance(doc1, dict):
            # NOTE(review): precedence bug — `not '_id'` is always False,
            # so only the '_rev' membership is actually checked; intended
            # form is "'_id' not in doc1 or '_rev' not in doc1".
            if not '_id' or not '_rev' in doc1:
                raise KeyError('_id and _rev are required to delete a doc')
            couch_doc = Document(self.cloudant_database, doc1['_id'])
            couch_doc['_rev'] = doc1['_rev']
        elif isinstance(doc1, six.string_types):  # we get a docid
            couch_doc = Document(self.cloudant_database, doc1)
            couch_doc['_rev'] = self.get_rev(doc1)
        # manual request because cloudant library doesn't return result
        res = self._request_session.delete(
            couch_doc.document_url,
            params={"rev": couch_doc["_rev"]},
        )
        try:
            res.raise_for_status()
        except HTTPError as e:
            if e.response.status_code == 404:
                raise ResourceNotFound
            raise
        result = res.json()
        if schema:
            doc._doc.update({"_rev": result['rev'], "_deleted": True})
        elif isinstance(doc, dict):
            doc.update({"_rev": result['rev'], "_deleted": True})
        return result

    def copy_doc(self, doc, dest=None, headers=None):
        """ copy an existing document to a new id. If dest is None, a
        new uuid will be requested
        @param doc: dict or string, document or document id
        @param dest: basestring or dict. if _rev is specified in dict
        it will override the doc
        """
        if not headers:
            headers = {}
        doc1, schema = _maybe_serialize(doc)
        if isinstance(doc1, six.string_types):
            docid = doc1
        else:
            if '_id' not in doc1:
                raise KeyError('_id is required to copy a doc')
            docid = doc1['_id']
        if dest is None:
            destination = self.server.next_uuid(count=1)
        elif isinstance(dest, six.string_types):
            if dest in self:
                # target already exists: overwrite its current revision
                dest = self.get(dest)
                destination = "%s?rev=%s" % (dest['_id'], dest['_rev'])
            else:
                destination = dest
        elif isinstance(dest, dict):
            if '_id' in dest and '_rev' in dest and dest['_id'] in self:
                destination = "%s?rev=%s" % (dest['_id'], dest['_rev'])
            else:
                raise KeyError(
                    "dest doesn't exist or this not a document ('_id' or '_rev' missig)."
                )
        if destination:
            headers.update({"Destination": str(destination)})
            resp = self._request_session.request('copy',
                                                 self._database_path(docid),
                                                 headers=headers)
            try:
                resp.raise_for_status()
            except HTTPError as e:
                if e.response.status_code == 404:
                    raise ResourceNotFound
                raise
            return resp.json()
        return {'ok': False}

    def raw_view(self, view_path, params):
        # Dispatch either to _all_docs or to a named design-doc view.
        params = deepcopy(params)
        params.pop('dynamic_properties', None)
        if view_path == '_all_docs':
            return self.cloudant_database.all_docs(**params)
        else:
            # expected shape: _design/<ddoc>/_view/<view>
            view_path = view_path.split('/')
            assert len(view_path) == 4
            view = View(DesignDocument(self.cloudant_database, view_path[1]),
                        view_path[3])
            return view(**params)

    def view(self, view_name, schema=None, wrapper=None, **params):
        """ get view results from database. viewname is generally
        a string like `designname/viewname". It return an ViewResults
        object on which you could iterate, list, ... . You could wrap
        results in wrapper function, a wrapper function take a row
        as argument. Wrapping could be also done by passing an Object
        in obj arguments. This Object should have a `wrap` method
        that work like a simple wrapper function.

        @param view_name, string could be '_all_docs', '_all_docs_by_seq',
        'designname/viewname' if view_name start with a "/" it won't be parsed
        and beginning slash will be removed. Usefull with c-l for example.
        @param schema, Object with a wrapper function
        @param wrapper: function used to wrap results
        @param params: params of the view
        """
        if view_name.startswith('/'):
            view_name = view_name[1:]
        if view_name == '_all_docs':
            view_path = view_name
        elif view_name == '_all_docs_by_seq':
            view_path = view_name
        else:
            view_name = view_name.split('/')
            dname = view_name.pop(0)
            vname = '/'.join(view_name)
            view_path = '_design/%s/_view/%s' % (dname, vname)
        return ViewResults(self.raw_view, view_path, wrapper, schema, params)

    def search(self, view_name, handler='_fti/_design', wrapper=None,
               schema=None, **params):
        """ Search. Return results from search. Use couchdb-lucene
        with its default settings by default."""
        return ViewResults(self.raw_view,
                           "/%s/%s" % (handler, view_name),
                           wrapper=wrapper,
                           schema=schema,
                           params=params)

    def documents(self, schema=None, wrapper=None, **params):
        """ return a ViewResults objects containing all documents.
        This is a shorthand to view function.
        """
        return ViewResults(self.raw_view,
                           '_all_docs',
                           wrapper=wrapper,
                           schema=schema,
                           params=params)

    iterdocuments = documents

    def put_attachment(self, doc, content, name=None, content_type=None,
                       content_length=None, headers=None):
        raise NoLongerSupportedException

    def delete_attachment(self, doc, name, headers=None):
        raise NoLongerSupportedException

    def fetch_attachment(self, id_or_doc, name, stream=False, headers=None):
        raise NoLongerSupportedException

    def ensure_full_commit(self):
        """ commit all docs in memory """
        path = self._database_path('_ensure_full_commit')
        res = self._request_session.post(
            path, headers={"Content-Type": "application/json"})
        res.raise_for_status()
        return res.json()

    def __len__(self):
        return self.info()['doc_count']

    def __contains__(self, docid):
        return self.doc_exist(docid)

    def __getitem__(self, docid):
        return self.get(docid)

    def __setitem__(self, docid, doc):
        doc['_id'] = docid
        self.save_doc(doc)

    def __delitem__(self, docid):
        self.delete_doc(docid)

    def __iter__(self):
        return self.documents().iterator()

    def __nonzero__(self):
        # Python 2 truthiness: a database is truthy when it has documents.
        return (len(self) > 0)
def get_orders(db: CouchDatabase):
    """Return the stored list of orders as a JSON response."""
    stored = db.get(order_key)
    return jsonify(stored['orders'])
class Couch:
    """Convenience wrapper bundling the three handles used per database:
    the raw client database (`db`), the CouchDatabase admin handle
    (`couch_db`) and the CloudantDatabase query handle (`c_db`)."""

    db = None       # client-level database handle (create/iterate docs)
    c_db = None     # CloudantDatabase handle (query support)
    couch_db = None # CouchDatabase handle (exists/create/doc_count)

    # usage: database = Couch(db_name)
    # fields: db_name -> str
    def __init__(self, db_name):
        # Credentials and server address come from the shared config file.
        server_config = Config(CONFIG_PATH).get('couchdb')
        self.client = CouchDB(server_config['username'],
                              server_config['password'],
                              url=server_config['server_addr'],
                              connect=True,
                              auto_renew=True)
        self.select_db(db_name)

    # Get one database selected; if the database doesn't exist, create it.
    # usage: database.select_db(db_name);
    # fields: db_name -> str
    def select_db(self, db_name):
        self.couch_db = CouchDatabase(self.client, db_name)
        if not self.couch_db.exists():
            self.couch_db.create()
        self.db = self.client[db_name]
        self.c_db = CloudantDatabase(self.client, db_name)

    # usage: database.close()
    # Database should be closed when finish using
    def close(self):
        self.client.disconnect()

    # Get count of documents in current database;
    # usage database.count();
    def count(self):
        return self.couch_db.doc_count()

    # Get everything from the database;
    # usage: database.query_all();
    # note: after query_all, iterate the returned item to get every document
    def query_all(self):
        qlist = []
        for doc in self.db:
            qlist.append(doc)
        return qlist

    # Select something from the database;
    # usage: database.query(selector);
    # fields: selector -> Dictionary
    # note: after query, iterate the returned item to get every document
    def query(self, selector):
        qlist = []
        result = self.c_db.get_query_result(selector)
        for doc in result:
            qlist.append(doc)
        return qlist

    # Run several selectors and concatenate their results (may contain
    # duplicates if the selectors overlap).
    def query_multiple(self, selectors):
        qlist = []
        for selector in selectors:
            qlist += self.query(selector)
        return qlist

    # insert operation of the database;
    # usage: database.insert(doc);
    # fields: doc -> Dictionary
    # NOTE: mutates the caller's dict by stamping doc['timestamp'].
    # Returns the new _id, or None if creation could not be confirmed.
    def insert(self, doc):
        doc['timestamp'] = _timestamp()
        document = self.db.create_document(doc)
        if document.exists():
            return document['_id']

    # Insert only if no document matching `doc` (as a selector) exists;
    # returns the _id of the new or pre-existing document.
    def distinct_insert(self, doc):
        query_res = self.query(doc)
        if len(query_res) == 0:
            return self.insert(doc)
        return query_res[0]['_id']

    # update operation of the database;
    # usage: database.update(field, old_value, new_value)
    # fields: field -> str; value -> str; new_value -> str
    # Sets `field` to `new_value` (and refreshes the timestamp) on every
    # document matched by `selector`.
    def update(self, selector, field, new_value):
        q_res = self.c_db.get_query_result(selector)
        for document in q_res:
            doc_id = document['_id']
            doc = Document(self.db, doc_id)
            doc.update_field(action=doc.field_set, field=field,
                             value=new_value)
            doc.update_field(action=doc.field_set, field='timestamp',
                             value=_timestamp())

    # delete operation of the database;
    # usage: database.delete(selector)
    # fields: selector -> Dictionary
    def delete(self, selector):
        q_res = self.c_db.get_query_result(selector)
        for document in q_res:
            # NOTE(review): local name `id` shadows the builtin.
            id = document['_id']
            rev = document['_rev']
            doc = Document(self.db, id)
            doc['_rev'] = rev
            doc.delete()

    def move_doc(self, selector, target):
        """
        Move documents from current database to target database.
        :param selector: dictionary
        :param target: string, db name
        :return:
        """
        documents = self.query(selector)
        for doc in documents:
            # strip identity so the target assigns a fresh _id/_rev
            del doc['_id']
            del doc['_rev']
            Couch(target).distinct_insert(doc)
        self.delete(selector)

    def query_latest_change(self, selector):
        """
        Query latest item sorted by timestamp. Returns only timestamp in
        documents.
        :param selector: dictionary
        :return: a list that contains 1 or 0 docs
        """
        q_res = self.query(selector)
        # docs without a timestamp cannot be ordered; drop them
        q_res = list(filter(lambda x: 'timestamp' in x.keys(), q_res))
        res = sorted(q_res, key=lambda x: x['timestamp'])
        return res[-1:]
def setUp(self):
    """Build a fully mocked account/session and canned server payloads."""
    # One mock per HTTP verb so each test can stub just what it needs.
    self.mock_session = mock.Mock()
    self.mock_session.get = mock.Mock()
    self.mock_session.post = mock.Mock()
    self.mock_session.put = mock.Mock()
    self.mock_session.delete = mock.Mock()
    self.account = mock.Mock()
    self.account.cloudant_url = "https://bob.cloudant.com"
    self.account.r_session = self.mock_session
    self.username = "******"
    self.db_name = "testdb"
    self.db_url = posixpath.join(self.account.cloudant_url, self.db_name)
    # Database under test, wired to the mocked account/session.
    self.c = CouchDatabase(self.account, self.db_name)
    # Canned GET /<db> metadata payload.
    self.db_info = {
        "update_seq": "1-g1AAAADfeJzLYWBg",
        "db_name": self.db_name,
        "sizes": {
            "file": 1528585,
            "external": 5643,
            "active": None
        },
        "purge_seq": 0,
        "other": {
            "data_size": 5643
        },
        "doc_del_count": 2,
        "doc_count": 13,
        "disk_size": 1528585,
        "disk_format_version": 6,
        "compact_running": False,
        "instance_start_time": "0"
    }
    # Canned design-documents response with one design doc.
    self.ddocs = {
        "rows": [
            {
                "id": "_design/test",
                "key": "_design/test",
                "value": {
                    "rev": "1-4e6d6671b0ba9ba994a0f5e7e8de1d9d"
                },
                "doc": {
                    "_id": "_design/test",
                    "_rev": "1-4e6d6671b0ba9ba994a0f5e7e8de1d9d",
                    "views": {
                        "test": {
                            "map": "function (doc) {emit(doc._id, 1);}"
                        }
                    }
                }
            }
        ]
    }
    # Canned _all_docs response with two documents.
    self.all_docs = {
        "total_rows": 13,
        "offset": 0,
        "rows": [
            {
                "id": "snipe",
                "key": "snipe",
                "value": {
                    "rev": "1-4b2fb3b7d6a226b13951612d6ca15a6b"
                }
            },
            {
                "id": "zebra",
                "key": "zebra",
                "value": {
                    "rev": "1-750dac460a6cc41e6999f8943b8e603e"
                }
            }
        ]
    }
class CouchDBTest(unittest.TestCase):
    """Unit tests for the CouchDatabase wrapper.

    All HTTP traffic goes through a mocked requests session; no network
    access is performed.
    """

    def setUp(self):
        """Build the mocked account/session and canned JSON fixtures."""
        self.mock_session = mock.Mock()
        self.mock_session.get = mock.Mock()
        self.mock_session.post = mock.Mock()
        self.mock_session.put = mock.Mock()
        self.mock_session.delete = mock.Mock()
        self.account = mock.Mock()
        self.account.cloudant_url = "https://bob.cloudant.com"
        self.account.r_session = self.mock_session
        self.username = "******"
        self.db_name = "testdb"
        self.db_url = posixpath.join(self.account.cloudant_url, self.db_name)
        self.c = CouchDatabase(self.account, self.db_name)
        # canned GET /db response
        self.db_info = {
            "update_seq": "1-g1AAAADfeJzLYWBg",
            "db_name": self.db_name,
            "sizes": {"file": 1528585, "external": 5643, "active": None},
            "purge_seq": 0,
            "other": {"data_size": 5643},
            "doc_del_count": 2,
            "doc_count": 13,
            "disk_size": 1528585,
            "disk_format_version": 6,
            "compact_running": False,
            "instance_start_time": "0",
        }
        # canned design-document listing
        self.ddocs = {
            "rows": [
                {
                    "id": "_design/test",
                    "key": "_design/test",
                    "value": {"rev": "1-4e6d6671b0ba9ba994a0f5e7e8de1d9d"},
                    "doc": {
                        "_id": "_design/test",
                        "_rev": "1-4e6d6671b0ba9ba994a0f5e7e8de1d9d",
                        "views": {
                            "test": {"map": "function (doc) {emit(doc._id, 1);}"}
                        },
                    },
                }
            ]
        }
        # canned _all_docs response
        self.all_docs = {
            "total_rows": 13,
            "offset": 0,
            "rows": [
                {
                    "id": "snipe",
                    "key": "snipe",
                    "value": {"rev": "1-4b2fb3b7d6a226b13951612d6ca15a6b"},
                },
                {
                    "id": "zebra",
                    "key": "zebra",
                    "value": {"rev": "1-750dac460a6cc41e6999f8943b8e603e"},
                },
            ],
        }

    def test_create(self):
        """create() should PUT the database URL."""
        mock_resp = mock.Mock()
        mock_resp.status_code = 201
        self.mock_session.put = mock.Mock()
        self.mock_session.put.return_value = mock_resp
        self.c.create()
        self.assertTrue(self.mock_session.put.called)

    def test_delete(self):
        """delete() should DELETE the database URL."""
        mock_resp = mock.Mock()
        mock_resp.status_code = 200
        self.mock_session.delete = mock.Mock()
        self.mock_session.delete.return_value = mock_resp
        self.c.delete()
        self.assertTrue(self.mock_session.delete.called)

    def test_db_info(self):
        """exists()/metadata()/doc_count() each issue one GET."""
        mock_resp = mock.Mock()
        mock_resp.status_code = 200
        mock_resp.json = mock.Mock(return_value=self.db_info)
        self.mock_session.get = mock.Mock(return_value=mock_resp)
        exists_resp = self.c.exists()
        meta_resp = self.c.metadata()
        count_resp = self.c.doc_count()
        self.assertTrue(self.mock_session.get.called)
        self.assertEqual(self.mock_session.get.call_count, 3)
        self.assertEqual(exists_resp, True)
        self.assertEqual(meta_resp, self.db_info)
        self.assertEqual(count_resp, self.db_info["doc_count"])

    def test_ddocs(self):
        """design_documents()/list_design_documents() surface the ddoc rows."""
        mock_resp = mock.Mock()
        mock_resp.raise_for_status = mock.Mock(return_value=False)
        mock_resp.json = mock.Mock(return_value=self.ddocs)
        self.mock_session.get = mock.Mock(return_value=mock_resp)
        ddocs = self.c.design_documents()
        ddoc_list = self.c.list_design_documents()
        self.assertTrue(self.mock_session.get.called)
        self.assertEqual(self.mock_session.get.call_count, 2)
        self.assertEqual(ddocs[0]["id"], "_design/test")
        self.assertEqual(ddoc_list[0], "_design/test")

    def test_all_docs(self):
        """all_docs()/keys() return the canned rows and their ids."""
        mock_resp = mock.Mock()
        mock_resp.json = mock.Mock(return_value=self.all_docs)
        self.mock_session.get = mock.Mock(return_value=mock_resp)
        all_docs = self.c.all_docs()
        keys = self.c.keys(remote=True)
        self.assertTrue(self.mock_session.get.called)
        # FIX: assertDictContainsSubset was deprecated in Python 3.2 and
        # removed in 3.12; assert the same facts with assertEqual instead.
        self.assertEqual(all_docs["rows"][0]["id"], "snipe")
        self.assertEqual(all_docs["rows"][1]["id"], "zebra")
        self.assertListEqual(keys, ["snipe", "zebra"])

    def test_bulk_docs(self):
        """bulk_docs() POSTs the docs payload to _bulk_docs."""
        mock_resp = mock.Mock()
        mock_resp.raise_for_status = mock.Mock(return_value=False)
        self.mock_session.post = mock.Mock(return_value=mock_resp)
        docs = [
            {
                '_id': 'somedoc',
                'foo': 'bar'
            },
            {
                '_id': 'anotherdoc',
                '_rev': '1-ahsdjkasdgf',
                'hello': 'world'
            }
        ]
        self.c.bulk_docs(docs)
        self.mock_session.post.assert_called_once_with(
            posixpath.join(self.db_url, '_bulk_docs'),
            data=json.dumps({'docs': docs}),
            headers={'Content-Type': 'application/json'}
        )
def select_db(self, db_name):
    """Point this wrapper at *db_name*, creating the database if it does
    not exist, and refresh the cached database handles."""
    database = CouchDatabase(self.client, db_name)
    if not database.exists():
        database.create()
    self.couch_db = database
    # plain client handle and Cloudant-flavoured handle for query support
    self.db = self.client[db_name]
    self.c_db = CloudantDatabase(self.client, db_name)
class Database(object):
    """ Object that abstract access to a CouchDB database
    A Database object can act as a Dict object.
    """

    def __init__(self, uri, create=False, server=None, **params):
        """Constructor for Database

        @param uri: str, Database uri
        @param create: boolean, False by default, if True try to create the
            database.
        @param server: Server instance
        """
        self.uri = uri.rstrip('/')
        self.server_uri, self.dbname = self.uri.rsplit("/", 1)
        self.cloudant_dbname = unquote(self.dbname)
        if server is not None:
            # duck-typed check: anything exposing next_uuid is accepted
            if not hasattr(server, 'next_uuid'):
                raise TypeError('%s is not a couchdbkit.Server instance' %
                                server.__class__.__name__)
            self.server = server
        else:
            self.server = server = Server(self.server_uri, **params)
        self.cloudant_client = self.server.cloudant_client
        validate_dbname(self.dbname)
        self.cloudant_database = CouchDatabase(self.cloudant_client,
                                               self.cloudant_dbname)
        if create:
            self.cloudant_database.create()
        self._request_session = self.server._request_session
        self.database_url = self.cloudant_database.database_url

    def __repr__(self):
        return "<%s %s>" % (self.__class__.__name__, self.dbname)

    def _database_path(self, path):
        """Join *path* onto the database URL."""
        return '/'.join([self.database_url, path])

    def info(self):
        """ Get database information

        @return: dict
        """
        return self.cloudant_database.metadata()

    def set_security(self, secobj):
        """ set database security object

        Replaces the whole security document with *secobj*.
        """
        with SecurityDocument(self.cloudant_database) as sec_doc:
            # context manager saves
            # FIX: iterate over a snapshot of the keys; deleting from the
            # document while iterating it raises RuntimeError on Python 3.
            for key in list(sec_doc):
                del sec_doc[key]
            for k, v in secobj.items():
                sec_doc[k] = v
        return self.get_security()

    def get_security(self):
        """ get database security object """
        return self.cloudant_database.get_security_document()

    def compact(self, dname=None):
        """ compact database

        @param dname: string, name of design doc. Usefull to
        compact a view.
        """
        path = "/_compact"
        if dname is not None:
            path = "%s/%s" % (path, resource.escape_docid(dname))
        # NOTE(review): the leading '/' here yields a double slash after
        # _database_path joins it to the database URL — servers tolerate it,
        # but confirm before "fixing".
        path = self._database_path(path)
        res = self._request_session.post(
            path, headers={"Content-Type": "application/json"})
        res.raise_for_status()
        return res.json()

    def view_cleanup(self):
        """Trigger stale view index cleanup on the server."""
        return self.cloudant_database.view_cleanup()

    def flush(self):
        """ Remove all docs from a database except design docs."""
        # save ddocs
        all_ddocs = self.all_docs(startkey=u"_design",
                                  endkey=u"_design/\u9999",
                                  include_docs=True)
        ddocs = []
        for ddoc in all_ddocs:
            doc = ddoc['doc']
            old_atts = doc.get('_attachments', {})
            atts = {}
            for name, info in old_atts.items():
                att = {}
                att['content_type'] = info['content_type']
                # NOTE(review): fetch_attachment now raises
                # NoLongerSupportedException, so flush() fails for design
                # docs that carry attachments — confirm this is intended.
                att['data'] = self.fetch_attachment(ddoc['doc'], name)
                atts[name] = att
            # create a fresh doc
            doc.pop('_rev')
            doc['_attachments'] = resource.encode_attachments(atts)
            ddocs.append(doc)

        # delete db
        self.server.delete_db(self.dbname)

        # we let a chance to the system to sync
        # NOTE(review): this loop breaks while the name is still visible;
        # presumably the intent was to wait until the db disappears — verify.
        times = 0
        while times < 10:
            if self.dbname in self.server:
                break
            time.sleep(0.2)
            times += 1

        # recreate db + ddocs
        self.server.create_db(self.dbname)
        self.bulk_save(ddocs)

    def doc_exist(self, docid):
        """Test if document exists in a database

        @param docid: str, document id
        @return: boolean, True if document exist
        """
        doc = Document(self.cloudant_database, docid)
        return doc.exists()

    def open_doc(self, docid, **params):
        """Get document from database

        Args:
        @param docid: str, document id to retrieve
        @param wrapper: callable. function that takes dict as a param.
        Used to wrap an object.
        @param **params: See doc api for parameters to use:
        http://wiki.apache.org/couchdb/HTTP_Document_API

        @return: dict, representation of CouchDB document as a dict.
        """
        wrapper = None
        if "wrapper" in params:
            wrapper = params.pop("wrapper")
        elif "schema" in params:
            schema = params.pop("schema")
            if not hasattr(schema, "wrap"):
                raise TypeError("invalid schema")
            wrapper = schema.wrap
        attachments = params.get('attachments', False)

        # normalize docid to the native str type of the running Python
        if six.PY2 and isinstance(docid, six.text_type):
            docid = docid.encode('utf-8')
        if six.PY3 and isinstance(docid, bytes):
            docid = docid.decode('utf-8')

        doc = Document(self.cloudant_database, docid)
        try:
            doc.fetch()
        except HTTPError as e:
            if e.response.status_code == 404:
                raise ResourceNotFound(
                    json.loads(e.response.content.decode('utf-8'))['reason'])
            raise
        doc_dict = dict(doc)
        if attachments and '_attachments' in doc_dict:
            # inline attachment bodies (base64) and drop stub metadata,
            # mirroring CouchDB's attachments=true representation
            for attachment_name in doc_dict['_attachments']:
                attachment_data = doc.get_attachment(
                    attachment_name, attachment_type='binary')
                doc_dict['_attachments'][attachment_name]['data'] = \
                    base64.b64encode(attachment_data)
                del doc_dict['_attachments'][attachment_name]['stub']
                del doc_dict['_attachments'][attachment_name]['length']
        if wrapper is not None:
            if not callable(wrapper):
                raise TypeError("wrapper isn't a callable")
            return wrapper(doc_dict)
        return doc_dict

    get = open_doc

    def list(self, list_name, view_name, **params):
        raise NoLongerSupportedException

    def show(self, show_name, doc_id, **params):
        raise NoLongerSupportedException

    def update(self, update_name, doc_id=None, **params):
        raise NoLongerSupportedException

    def all_docs(self, by_seq=False, **params):
        """Get all documents from a database

        This method has the same behavior as a view.

        `all_docs( **params )` is the same as `view('_all_docs', **params)`
        and `all_docs( by_seq=True, **params)` is the same as
        `view('_all_docs_by_seq', **params)`

        You can use all(), one(), first() just like views

        Args:
        @param by_seq: bool, if True the "_all_docs_by_seq" is passed to
        couchdb. It will return an updated list of all documents.

        @return: list, results of the view
        """
        if by_seq:
            try:
                return self.view('_all_docs_by_seq', **params)
            except ResourceNotFound:
                # CouchDB 0.11 or sup
                raise AttributeError(
                    "_all_docs_by_seq isn't supported on Couchdb %s" %
                    self.server.info()[1])
        return self.view('_all_docs', **params)

    def get_rev(self, docid):
        """ Get last revision from docid (the '_rev' member)

        @param docid: str, undecoded document id.

        @return rev: str, the last revision of document.
        """
        response = self._request_session.head(self._database_path(docid))
        try:
            response.raise_for_status()
        except HTTPError as e:
            if e.response.status_code == 404:
                raise ResourceNotFound
            raise
        # https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/ETag
        # lstrip removes the weak-ETag prefix characters (W, /, ") —
        # CouchDB revs start with a digit, so this cannot over-strip.
        return response.headers['ETag'].strip('"').lstrip('W/"')

    def save_doc(self, doc, encode_attachments=True, force_update=False,
                 **params):
        """ Save a document. It will use the `_id` member of the document
        or request a new uuid from CouchDB. IDs are attached to
        documents on the client side because POST has the curious property of
        being automatically retried by proxies in the event of network
        segmentation and lost responses. (Idee from `Couchrest
        <http://github.com/jchris/couchrest/>`)

        @param doc: dict.  doc is updated
        with doc '_id' and '_rev' properties returned
        by CouchDB server when you save.
        @param force_update: boolean, if there is conlict, try to update
        with latest revision
        @param params, list of optionnal params, like batch="ok"

        @return res: result of save. doc is updated in the mean time
        """
        if doc is None:
            doc1 = {}
            # FIX: schema was previously left unbound on this path and the
            # later `if schema:` raised NameError after the document had
            # already been created.
            schema = False
        else:
            doc1, schema = _maybe_serialize(doc)

        if '_attachments' in doc1 and encode_attachments:
            doc1['_attachments'] = resource.encode_attachments(
                doc['_attachments'])

        if '_id' in doc1:
            docid = doc1['_id'] if six.PY3 else doc1['_id'].encode('utf-8')
            couch_doc = Document(self.cloudant_database, docid)
            couch_doc.update(doc1)
            try:
                # Copied from Document.save to ensure that a deleted doc
                # cannot be saved.
                headers = {}
                headers.setdefault('Content-Type', 'application/json')
                put_resp = couch_doc.r_session.put(
                    couch_doc.document_url,
                    data=couch_doc.json(),
                    headers=headers
                )
                put_resp.raise_for_status()
                data = put_resp.json()
                super(Document, couch_doc).__setitem__('_rev', data['rev'])
            except HTTPError as e:
                if e.response.status_code != 409:
                    raise
                if force_update:
                    couch_doc['_rev'] = self.get_rev(docid)
                    couch_doc.save()
                else:
                    raise ResourceConflict
            res = couch_doc
        else:
            res = self.cloudant_database.create_document(doc1)

        # batch mode doesn't return a _rev
        if 'batch' in params and ('id' in res or '_id' in res):
            doc1.update({'_id': res.get('_id')})
        else:
            doc1.update({'_id': res.get('_id'), '_rev': res.get('_rev')})

        # write _id/_rev back into the caller's object
        if schema:
            for key, value in six.iteritems(doc.__class__.wrap(doc1)):
                doc[key] = value
        elif doc is not None:
            doc.update(doc1)
        return {
            'id': res['_id'],
            'rev': res['_rev'],
            'ok': True,
        }

    def save_docs(self, docs, use_uuids=True, new_edits=None, **params):
        """ bulk save. Modify Multiple Documents With a Single Request

        @param docs: list of docs
        @param use_uuids: add _id in doc who don't have it already set.
        @param new_edits: When False, this saves existing revisions
        instead of creating new ones. Used in the replication Algorithm.
        Each document should have a _revisions property that lists its
        revision history.

        .. seealso:: `HTTP Bulk Document API
        <http://wiki.apache.org/couchdb/HTTP_Bulk_Document_API>`
        """
        if not isinstance(docs, (list, tuple)):
            docs = tuple(docs)
        docs1 = []
        docs_schema = []
        for doc in docs:
            doc1, schema = _maybe_serialize(doc)
            docs1.append(doc1)
            docs_schema.append(schema)

        def is_id(doc):
            return '_id' in doc

        if use_uuids:
            # NOTE(review): groupby without sorting keeps only the LAST run
            # of id-less docs; this mirrors upstream couchdbkit behavior so
            # it is deliberately left unchanged.
            noids = []
            for k, g in groupby(docs1, is_id):
                if not k:
                    noids = list(g)

            uuid_count = max(len(noids), self.server.uuid_batch_count)
            for doc in noids:
                nextid = self.server.next_uuid(count=uuid_count)
                if nextid:
                    doc['_id'] = nextid

        payload = {"docs": docs1}
        if new_edits is not None:
            payload["new_edits"] = new_edits

        # update docs
        res = self._request_session.post(
            self._database_path('_bulk_docs'),
            data=json.dumps(payload),
            headers={"Content-Type": "application/json"},
            **params)
        res.raise_for_status()
        results = res.json()

        errors = []
        for i, res in enumerate(results):
            if 'error' in res:
                errors.append(res)
                logging_context = dict(
                    method='save_docs',
                    params=params,
                    error=res['error'],
                )
                error_logger.error("save_docs error", extra=logging_context)
            else:
                # propagate server-assigned _id/_rev back to the inputs
                if docs_schema[i]:
                    docs[i]._doc.update({
                        '_id': res['id'],
                        '_rev': res['rev']
                    })
                else:
                    docs[i].update({
                        '_id': res['id'],
                        '_rev': res['rev']
                    })
        if errors:
            raise BulkSaveError(errors, results)
        return results

    bulk_save = save_docs

    def delete_docs(self, docs, empty_on_delete=False, **params):
        """ bulk delete.
        It adds '_deleted' member to doc then uses bulk_save to save them.

        @param empty_on_delete: default is False if you want to make
        sure the doc is emptied and will not be stored as is in Apache
        CouchDB.

        .. seealso:: `HTTP Bulk Document API
        <http://wiki.apache.org/couchdb/HTTP_Bulk_Document_API>`
        """
        if empty_on_delete:
            for doc in docs:
                # keep only the tombstone fields
                new_doc = {"_id": doc["_id"],
                           "_rev": doc["_rev"],
                           "_deleted": True}
                doc.clear()
                doc.update(new_doc)
        else:
            for doc in docs:
                doc['_deleted'] = True
        return self.bulk_save(docs, use_uuids=False, **params)

    bulk_delete = delete_docs

    def delete_doc(self, doc, **params):
        """ delete a document or a list of documents
        @param doc: str or dict,  document id or full doc.
        @return: dict like:

        .. code-block:: python

            {"ok":true,"rev":"2839830636"}
        """
        result = {'ok': False}

        doc1, schema = _maybe_serialize(doc)
        if isinstance(doc1, dict):
            # FIX: was `if not '_id' or not '_rev' in doc1:` — the first
            # clause is a constant falsy check on the string '_id', so only
            # _rev was ever validated.
            if '_id' not in doc1 or '_rev' not in doc1:
                raise KeyError('_id and _rev are required to delete a doc')
            couch_doc = Document(self.cloudant_database, doc1['_id'])
            couch_doc['_rev'] = doc1['_rev']
        elif isinstance(doc1, six.string_types):  # we get a docid
            couch_doc = Document(self.cloudant_database, doc1)
            couch_doc['_rev'] = self.get_rev(doc1)

        # manual request because cloudant library doesn't return result
        res = self._request_session.delete(
            couch_doc.document_url,
            params={"rev": couch_doc["_rev"]},
        )
        try:
            res.raise_for_status()
        except HTTPError as e:
            if e.response.status_code == 404:
                raise ResourceNotFound
            raise
        result = res.json()
        if schema:
            doc._doc.update({
                "_rev": result['rev'],
                "_deleted": True
            })
        elif isinstance(doc, dict):
            doc.update({
                "_rev": result['rev'],
                "_deleted": True
            })
        return result

    def copy_doc(self, doc, dest=None, headers=None):
        """ copy an existing document to a new id. If dest is None, a new
        uuid will be requested
        @param doc: dict or string, document or document id
        @param dest: basestring or dict. if _rev is specified in dict it will
        override the doc
        """
        if not headers:
            headers = {}

        doc1, schema = _maybe_serialize(doc)
        if isinstance(doc1, six.string_types):
            docid = doc1
        else:
            if '_id' not in doc1:
                raise KeyError('_id is required to copy a doc')
            docid = doc1['_id']

        if dest is None:
            destination = self.server.next_uuid(count=1)
        elif isinstance(dest, six.string_types):
            if dest in self:
                # overwrite existing target doc: COPY needs its current rev
                dest = self.get(dest)
                destination = "%s?rev=%s" % (dest['_id'], dest['_rev'])
            else:
                destination = dest
        elif isinstance(dest, dict):
            if '_id' in dest and '_rev' in dest and dest['_id'] in self:
                destination = "%s?rev=%s" % (dest['_id'], dest['_rev'])
            else:
                # FIX: typo in error message ("missig" -> "missing")
                raise KeyError("dest doesn't exist or this not a document"
                               " ('_id' or '_rev' missing).")
        if destination:
            headers.update({"Destination": str(destination)})
            resp = self._request_session.request(
                'copy', self._database_path(docid), headers=headers)
            try:
                resp.raise_for_status()
            except HTTPError as e:
                if e.response.status_code == 404:
                    raise ResourceNotFound
                raise
            return resp.json()

        return {'ok': False}

    def raw_view(self, view_path, params):
        """Execute a view request and return the cloudant-level result."""
        params = deepcopy(params)
        params.pop('dynamic_properties', None)
        if view_path == '_all_docs':
            return self.cloudant_database.all_docs(**params)
        else:
            # expected shape: _design/<ddoc>/_view/<view>
            view_path = view_path.split('/')
            assert len(view_path) == 4
            ddoc = DesignDocument(self.cloudant_database, view_path[1])
            ddoc.fetch()
            view = ddoc.get_view(view_path[3])
            return view(**params)

    def view(self, view_name, schema=None, wrapper=None, **params):
        """ get view results from database. viewname is generally
        a string like `designname/viewname". It return an ViewResults
        object on which you could iterate, list, ... . You could wrap
        results in wrapper function, a wrapper function take a row
        as argument. Wrapping could be also done by passing an Object
        in obj arguments. This Object should have a `wrap` method
        that work like a simple wrapper function.

        @param view_name, string could be '_all_docs', '_all_docs_by_seq',
        'designname/viewname' if view_name start with a "/" it won't be parsed
        and beginning slash will be removed. Usefull with c-l for example.
        @param schema, Object with a wrapper function
        @param wrapper: function used to wrap results
        @param params: params of the view
        """
        if view_name.startswith('/'):
            view_name = view_name[1:]
        if view_name == '_all_docs':
            view_path = view_name
        elif view_name == '_all_docs_by_seq':
            view_path = view_name
        else:
            view_name = view_name.split('/')
            dname = view_name.pop(0)
            vname = '/'.join(view_name)
            view_path = '_design/%s/_view/%s' % (dname, vname)

        return ViewResults(self.raw_view, view_path, wrapper, schema, params)

    def search(self, view_name, handler='_fti/_design', wrapper=None,
               schema=None, **params):
        """ Search. Return results from search. Use couchdb-lucene
        with its default settings by default."""
        return ViewResults(self.raw_view,
                           "/%s/%s" % (handler, view_name),
                           wrapper=wrapper, schema=schema, params=params)

    def documents(self, schema=None, wrapper=None, **params):
        """ return a ViewResults objects containing all documents.
        This is a shorthand to view function.
        """
        return ViewResults(self.raw_view, '_all_docs',
                           wrapper=wrapper, schema=schema, params=params)

    iterdocuments = documents

    def put_attachment(self, doc, content, name=None, content_type=None,
                       content_length=None, headers=None):
        raise NoLongerSupportedException

    def delete_attachment(self, doc, name, headers=None):
        raise NoLongerSupportedException

    def fetch_attachment(self, id_or_doc, name, stream=False, headers=None):
        raise NoLongerSupportedException

    def ensure_full_commit(self):
        """ commit all docs in memory """
        path = self._database_path('_ensure_full_commit')
        res = self._request_session.post(
            path, headers={"Content-Type": "application/json"})
        res.raise_for_status()
        return res.json()

    def __len__(self):
        return self.info()['doc_count']

    def __contains__(self, docid):
        return self.doc_exist(docid)

    def __getitem__(self, docid):
        return self.get(docid)

    def __setitem__(self, docid, doc):
        doc['_id'] = docid
        self.save_doc(doc)

    def __delitem__(self, docid):
        self.delete_doc(docid)

    def __iter__(self):
        return self.documents().iterator()

    def __nonzero__(self):
        # Python 2 truthiness hook (file still supports six/PY2)
        return (len(self) > 0)
def create(self, db: CouchDatabase) -> None:
    """Persist this object's instance attributes as a new document in *db*."""
    document_fields = vars(self)
    db.create_document(data=document_fields)