def test_update_field_maxretries(self, m_save): """ Test that conflict retries work for updating a single field. """ # Create a doc doc = Document(self.db, 'julia006') doc['name'] = 'julia' doc['age'] = 6 doc.create() self.assertTrue(doc['_rev'].startswith('1-')) self.assertEqual(doc['age'], 6) # Mock conflicts when saving updates m_save.side_effect = requests.HTTPError( response=mock.Mock(status_code=409, reason='conflict')) # Tests that failing on retry eventually throws with self.assertRaises(requests.HTTPError) as cm: doc.update_field(doc.field_set, 'age', 7, max_tries=2) # There is an off-by-one error for "max_tries" # It really means max_retries i.e. 1 attempt # followed by a max of 2 retries self.assertEqual(m_save.call_count, 3) self.assertEqual(cm.exception.response.status_code, 409) self.assertEqual(cm.exception.response.reason, 'conflict') # Fetch again before asserting, otherwise we assert against # the locally updated age field doc.fetch() self.assertFalse(doc['_rev'].startswith('2-')) self.assertNotEqual(doc['age'], 7)
def test_timeout_in_create_replication(self): """ Test that a read timeout exception is thrown when creating a replicator with a timeout value of 500 ms. """ # Setup client with a timeout self.set_up_client(auto_connect=True, timeout=.5) self.db = self.client[self.test_target_dbname] self.target_db = self.client[self.test_dbname] # Construct a replicator with the updated client self.replicator = Replicator(self.client) repl_id = 'test-repl-{}'.format(unicode_(uuid.uuid4())) repl_doc = self.replicator.create_replication(self.db, self.target_db, repl_id) self.replication_ids.append(repl_id) # Test that the replication document was created expected_keys = ['_id', '_rev', 'source', 'target', 'user_ctx'] # If Admin Party mode then user_ctx will not be in the key list if self.client.admin_party: expected_keys.pop() self.assertTrue(all(x in list(repl_doc.keys()) for x in expected_keys)) self.assertEqual(repl_doc['_id'], repl_id) self.assertTrue(repl_doc['_rev'].startswith('1-')) # Now that we know that the replication document was created, # check that the replication timed out. repl_doc = Document(self.replicator.database, repl_id) repl_doc.fetch() if repl_doc.get('_replication_state') not in ('completed', 'error'): # assert that a connection error is thrown because the read timed out with self.assertRaises(ConnectionError) as cm: changes = self.replicator.database.changes(feed='continuous') for change in changes: continue self.assertTrue(str(cm.exception).endswith('Read timed out.'))
def test_fetch_non_existing_document(self):
    """
    Verify that fetching a document that is absent from the remote
    database raises an HTTP 404 error.
    """
    missing = Document(self.db, 'julia006')
    try:
        missing.fetch()
        self.fail('Above statement should raise an Exception')
    except requests.HTTPError as http_err:
        self.assertEqual(http_err.response.status_code, 404)
def test_fetch_existing_document_with_docid(self):
    """
    Verify that a freshly created document can be fetched back by its
    document id and matches the local copy.
    """
    original = Document(self.db, 'julia006')
    original['name'] = 'julia'
    original['age'] = 6
    original.create()
    # Fetch through a brand new handle and compare against the original
    fetched = Document(self.db, 'julia006')
    fetched.fetch()
    self.assertEqual(fetched, original)
def test_document_update_field(self): """ _test_document_update_field_ Tests for the field update functions. """ # Setup a routine for testing conflict handing. errors = {'conflicts': 0} def raise_conflict(conflicts=3): if errors['conflicts'] < conflicts: errors['conflicts'] += 1 err = requests.HTTPError() err.response = mock.Mock() err.response.status_code = 409 raise err # Mock our our doc doc = Document(self.database, "HOWARD") mock_put_resp = mock.Mock() mock_put_resp.side_effect = mock.Mock() mock_put_resp.status_code = 200 mock_put_resp.raise_for_status = raise_conflict mock_put_resp.json.side_effect = lambda: {'id': "ID", "rev": "updated"} self.mock_session.put.return_value = mock_put_resp mock_get_resp = mock.Mock() mock_get_resp.status_code = 200 mock_get_resp.json.side_effect = lambda: {"foo": "baz"} self.mock_session.get.return_value = mock_get_resp # Verify that our mock doc has the old value doc.fetch() self.assertEqual(doc["foo"], "baz") # And that we replace it with an updated value doc.update_field(doc.field_set, "foo", "bar") self.assertEqual(doc["foo"], "bar") # And verify that we called mock_session.put self.assertTrue(self.mock_session.put.called) # Try again, verifing that excessive conflicts get raised errors['conflicts'] = 0 mock_put_resp.raise_for_status = lambda: raise_conflict(conflicts=11) self.assertRaises( requests.HTTPError, doc.update_field, doc.field_set, "foo", "bar" )
def read(poll_id):
    """Handle a poll endpoint: GET returns the poll document as JSON,
    POST stores a new response document tagged with the poll id and
    basic request metadata, returning the new document id with 201."""
    if request.method == 'GET':
        poll_doc = Document(couch, poll_id)
        poll_doc.fetch()
        return poll_doc.json()
    if request.method == 'POST':
        payload = request.get_json()
        payload['pollID'] = poll_id
        payload['remote_addr'] = request.remote_addr
        payload['user_agent'] = request.headers.get('User-Agent')
        created = couch.create_document(payload)
        return created['_id'], 201
def test_fetch_existing_document_with_docid_encoded_url(self):
    """
    Verify that a document whose id requires URL encoding (it contains
    ':' and '/' characters) can be created and then fetched back.
    """
    original = Document(self.db, 'http://example.com')
    original['name'] = 'julia'
    original['age'] = 6
    original.create()
    # Fetch through a separate handle using the same awkward id
    fetched = Document(self.db, 'http://example.com')
    fetched.fetch()
    self.assertEqual(fetched, original)
def open_doc(self, docid, **params):
    """Get document from database

    Args:
    @param docid: str, document id to retrieve
    @param wrapper: callable. function that takes dict as a param.
    Used to wrap an object.
    @param **params: See doc api for parameters to use:
    http://wiki.apache.org/couchdb/HTTP_Document_API

    @return: dict, representation of CouchDB document as a dict.
    """
    # 'wrapper' wins over 'schema'; a schema must expose a .wrap callable
    wrapper = None
    if "wrapper" in params:
        wrapper = params.pop("wrapper")
    elif "schema" in params:
        schema = params.pop("schema")
        if not hasattr(schema, "wrap"):
            raise TypeError("invalid schema")
        wrapper = schema.wrap
    attachments = params.get('attachments', False)
    # Normalize the doc id to the native string type of this Python major
    # version before it is used to build the request URL.
    if six.PY2 and isinstance(docid, six.text_type):
        docid = docid.encode('utf-8')
    if six.PY3 and isinstance(docid, bytes):
        docid = docid.decode('utf-8')
    doc = Document(self.cloudant_database, docid)
    try:
        doc.fetch()
    except HTTPError as e:
        # Map a 404 onto the couchdbkit-style ResourceNotFound; anything
        # else propagates untouched.
        if e.response.status_code == 404:
            raise ResourceNotFound(
                json.loads(e.response.content.decode('utf-8'))['reason'])
        raise
    doc_dict = dict(doc)
    if attachments and '_attachments' in doc_dict:
        # Inline each attachment as base64 data and drop the stub
        # bookkeeping fields, mimicking ?attachments=true responses.
        for attachment_name in doc_dict['_attachments']:
            attachment_data = doc.get_attachment(attachment_name,
                                                 attachment_type='binary')
            doc_dict['_attachments'][attachment_name][
                'data'] = base64.b64encode(attachment_data)
            del doc_dict['_attachments'][attachment_name]['stub']
            del doc_dict['_attachments'][attachment_name]['length']
    if wrapper is not None:
        if not callable(wrapper):
            raise TypeError("wrapper isn't a callable")
        return wrapper(doc_dict)
    return doc_dict
def test_create_replication(self):
    """
    Test that the replication document gets created and that the
    replication is successful.
    """
    self.populate_db_with_documents(3)
    repl_id = 'test-repl-{}'.format(unicode_(uuid.uuid4()))
    repl_doc = self.replicator.create_replication(
        self.db, self.target_db, repl_id
    )
    self.replication_ids.append(repl_id)
    # Test that the replication document was created
    expected_keys = ['_id', '_rev', 'source', 'target', 'user_ctx']
    # If Admin Party mode then user_ctx will not be in the key list
    if self.client.admin_party:
        expected_keys.pop()
    self.assertTrue(all(x in list(repl_doc.keys()) for x in expected_keys))
    self.assertEqual(repl_doc['_id'], repl_id)
    self.assertTrue(repl_doc['_rev'].startswith('1-'))
    # Now that we know that the replication document was created,
    # check that the replication occurred.
    repl_doc = Document(self.replicator.database, repl_id)
    repl_doc.fetch()
    if repl_doc.get('_replication_state') not in ('completed', 'error'):
        # Follow the continuous changes feed; `beats` counts consecutive
        # empty heartbeats so the loop gives up after 300 of them.
        changes = self.replicator.database.changes(
            feed='continuous', heartbeat=1000)
        beats = 0
        for change in changes:
            if beats == 300:
                changes.stop()
            if not change:
                beats += 1
                continue
            elif change.get('id') == repl_id:
                # Re-fetch the replication document to inspect its state
                beats = 0
                repl_doc = Document(self.replicator.database, repl_id)
                repl_doc.fetch()
                if repl_doc.get('_replication_state') in ('completed', 'error'):
                    changes.stop()
    self.assertEqual(repl_doc.get('_replication_state'), 'completed')
    # Source and target must now hold the same documents
    self.assertEqual(self.db.all_docs(), self.target_db.all_docs())
    self.assertTrue(
        all(x in self.target_db.keys(True) for x in [
            'julia000', 'julia001', 'julia002'
        ])
    )
def test_create_replication(self):
    """
    Test that the replication document gets created and that the
    replication is successful.
    """
    self.populate_db_with_documents(3)
    repl_id = 'test-repl-{}'.format(unicode_(uuid.uuid4()))
    repl_doc = self.replicator.create_replication(
        self.db, self.target_db, repl_id
    )
    self.replication_ids.append(repl_id)
    # Test that the replication document was created
    expected_keys = ['_id', '_rev', 'source', 'target', 'user_ctx']
    # If Admin Party mode or IAM authentication is in use then user_ctx
    # will not be in the key list
    if self.client.admin_party or self.client.is_iam_authenticated:
        expected_keys.pop()
    self.assertTrue(all(x in list(repl_doc.keys()) for x in expected_keys))
    self.assertEqual(repl_doc['_id'], repl_id)
    self.assertTrue(repl_doc['_rev'].startswith('1-'))
    # Now that we know that the replication document was created,
    # check that the replication occurred.
    repl_doc = Document(self.replicator.database, repl_id)
    repl_doc.fetch()
    if repl_doc.get('_replication_state') not in ('completed', 'error'):
        # Follow the continuous changes feed; `beats` counts consecutive
        # empty heartbeats so the loop gives up after 300 of them.
        changes = self.replicator.database.changes(
            feed='continuous', heartbeat=1000)
        beats = 0
        for change in changes:
            if beats == 300:
                changes.stop()
            if not change:
                beats += 1
                continue
            elif change.get('id') == repl_id:
                # Re-fetch the replication document to inspect its state
                beats = 0
                repl_doc = Document(self.replicator.database, repl_id)
                repl_doc.fetch()
                if repl_doc.get('_replication_state') in ('completed', 'error'):
                    changes.stop()
    self.assertEqual(repl_doc.get('_replication_state'), 'completed')
    # Source and target must now hold the same documents
    self.assertEqual(self.db.all_docs(), self.target_db.all_docs())
    self.assertTrue(
        all(x in self.target_db.keys(True) for x in [
            'julia000', 'julia001', 'julia002'
        ])
    )
def test_fetch_document_without_docid(self):
    """
    Verify that fetching a document that has no document id raises a
    CloudantDocumentException carrying a helpful message.
    """
    no_id_doc = Document(self.db)
    try:
        no_id_doc.fetch()
        self.fail('Above statement should raise an Exception')
    except CloudantDocumentException as doc_err:
        self.assertEqual(
            str(doc_err),
            'A document id is required to fetch document contents. '
            'Add an _id key and value to the document and re-try.')
def open_doc(self, docid, **params):
    """Get document from database

    Args:
    @param docid: str, document id to retrieve
    @param wrapper: callable. function that takes dict as a param.
    Used to wrap an object.
    @param **params: See doc api for parameters to use:
    http://wiki.apache.org/couchdb/HTTP_Document_API

    @return: dict, representation of CouchDB document as a dict.
    """
    # 'wrapper' takes precedence over 'schema'; a schema must expose a
    # callable .wrap attribute.
    wrapper = None
    if "wrapper" in params:
        wrapper = params.pop("wrapper")
    elif "schema" in params:
        schema = params.pop("schema")
        if not hasattr(schema, "wrap"):
            raise TypeError("invalid schema")
        wrapper = schema.wrap
    attachments = params.get('attachments', False)
    # Coerce the doc id to the native string type for this Python major
    # version before it is embedded in the request URL.
    if six.PY2 and isinstance(docid, six.text_type):
        docid = docid.encode('utf-8')
    if six.PY3 and isinstance(docid, bytes):
        docid = docid.decode('utf-8')
    doc = Document(self.cloudant_database, docid)
    try:
        doc.fetch()
    except HTTPError as e:
        # Translate a 404 into the couchdbkit-style ResourceNotFound;
        # re-raise every other HTTP failure unchanged.
        if e.response.status_code == 404:
            raise ResourceNotFound(
                json.loads(e.response.content.decode('utf-8'))['reason'])
        raise
    doc_dict = dict(doc)
    if attachments and '_attachments' in doc_dict:
        # Inline each attachment body as base64 and remove the stub
        # bookkeeping fields, mimicking an ?attachments=true response.
        for attachment_name in doc_dict['_attachments']:
            attachment_data = doc.get_attachment(attachment_name,
                                                 attachment_type='binary')
            doc_dict['_attachments'][attachment_name][
                'data'] = base64.b64encode(attachment_data)
            del doc_dict['_attachments'][attachment_name]['stub']
            del doc_dict['_attachments'][attachment_name]['length']
    if wrapper is not None:
        if not callable(wrapper):
            raise TypeError("wrapper isn't a callable")
        return wrapper(doc_dict)
    return doc_dict
def test_fetch_document_without_docid(self):
    """
    Verify that fetching a document that has no document id raises a
    CloudantException carrying a helpful message.
    """
    no_id_doc = Document(self.db)
    try:
        no_id_doc.fetch()
        self.fail('Above statement should raise an Exception')
    except CloudantException as cloudant_err:
        self.assertEqual(
            str(cloudant_err),
            'A document id is required to fetch document contents. '
            'Add an _id key and value to the document and re-try.'
        )
def test_fetch_doc_by_id(db, _id):
    """Test document fetch by Id."""
    doc = Document(db, _id)
    # Guard clause: bail out early when the document is not present
    if not doc.exists():
        raise ValueError(f'Document with id {_id} not found')
    try:
        doc.fetch()
        print(f'SUCCESS FETCH: Document with id {_id}: {doc}')
    except Exception as e:
        print(f'FAILED FETCH: {e}')
        raise
    return doc
def test_create_document_using_save(self):
    """
    Verify that save() creates the document remotely when it does not
    yet exist on the server.
    """
    local = Document(self.db, 'julia006')
    local['name'] = 'julia'
    local['age'] = 6
    # No revision exists before the first save
    self.assertIsNone(local.get('_rev'))
    local.save()
    self.assertTrue(local.exists())
    self.assertTrue(local['_rev'].startswith('1-'))
    # The stored copy must match the local dictionary exactly
    stored = Document(self.db, 'julia006')
    stored.fetch()
    self.assertEqual(stored, local)
def test_document_request_fails_after_client_disconnects(self):
    """
    Test that after disconnecting from a client any objects created based
    on that client are not able to make requests.
    """
    self.client.connect()
    doc = Document(self.db, 'julia001')
    doc.save()
    self.client.disconnect()
    try:
        # Once the client disconnects the document's session is gone, so
        # any request attempt must fail with an AttributeError.
        with self.assertRaises(AttributeError):
            doc.fetch()
        self.assertIsNone(doc.r_session)
    finally:
        # Reconnect so subsequent tests get a usable client.
        self.client.connect()
def test_update_document_with_encoded_url(self):
    """
    Verify that updating a document whose id requires URL encoding
    succeeds and advances the revision.
    """
    # Create the document first
    local = Document(self.db, 'http://example.com')
    local['name'] = 'julia'
    local['age'] = 6
    local.save()
    # Modify and save again - this update is what is under test
    local['name'] = 'jules'
    local.save()
    self.assertTrue(local['_rev'].startswith('2-'))
    # The server copy must reflect the update
    stored = Document(self.db, 'http://example.com')
    stored.fetch()
    self.assertEqual(stored, local)
    self.assertEqual(stored['name'], 'jules')
def test_document_context_manager_creation_failure_on_error(self):
    """
    Test that the document context manager skips document creation if
    there is an error.
    """
    # Raising inside the context manager body must prevent the document
    # from being saved on __exit__.
    with self.assertRaises(ZeroDivisionError), Document(
            self.db, 'julia006') as doc:
        doc['name'] = 'julia'
        doc['age'] = 6
        raise ZeroDivisionError()

    # The document must never have reached the server, so fetching it
    # should produce a 404.
    doc = Document(self.db, 'julia006')
    try:
        doc.fetch()
    except requests.HTTPError as err:
        self.assertEqual(err.response.status_code, 404)
    else:
        self.fail('Above statement should raise a HTTPError.')
def test_update_document_using_save(self):
    """
    Verify that save() updates the document remotely when it already
    exists on the server.
    """
    # Create the document first
    local = Document(self.db, 'julia006')
    local['name'] = 'julia'
    local['age'] = 6
    local.save()
    # Modify and save again - this update is what is under test
    local['name'] = 'jules'
    local.save()
    self.assertTrue(local['_rev'].startswith('2-'))
    # The server copy must reflect the update
    stored = Document(self.db, 'julia006')
    stored.fetch()
    self.assertEqual(stored, local)
    self.assertEqual(stored['name'], 'jules')
def test_create_replication(self):
    """
    Test that the replication document gets created and that the
    replication is successful.

    NOTE(review): uses the Python 2 ``unicode`` builtin - this snippet
    only runs under Python 2.
    """
    self.populate_db_with_documents(3)
    repl_id = 'test-repl-{}'.format(unicode(uuid.uuid4()))
    repl_doc = self.replicator.create_replication(
        self.db, self.target_db, repl_id
    )
    self.replication_ids.append(repl_id)
    # Test that the replication document was created
    expected_keys = ['_id', '_rev', 'source', 'target', 'user_ctx']
    self.assertTrue(all(x in repl_doc.keys() for x in expected_keys))
    self.assertEqual(repl_doc['_id'], repl_id)
    self.assertTrue(repl_doc['_rev'].startswith('1-'))
    # Now that we know that the replication document was created,
    # check that the replication occurred.
    repl_doc = Document(self.replicator.database, repl_id)
    repl_doc.fetch()
    if not (repl_doc.get('_replication_state') in ('completed', 'error')):
        # Follow the changes feed until the replication document reaches
        # a terminal state.
        for change in self.replicator.database.changes():
            if change.get('id') == repl_id:
                repl_doc = Document(self.replicator.database, repl_id)
                repl_doc.fetch()
                if (repl_doc.get('_replication_state') in ('completed', 'error')):
                    break
    self.assertEqual(repl_doc['_replication_state'], 'completed')
    # Source and target must now hold the same documents
    self.assertEqual(self.db.all_docs(), self.target_db.all_docs())
    self.assertTrue(
        all(x in self.target_db.keys(True) for x in [
            'julia000', 'julia001', 'julia002'
        ])
    )
def test_document_custom_json_encoder_and_decoder(self):
    # Round-trip a datetime value through a custom JSON encoder/decoder
    # pair attached to a Document.
    dt_format = '%Y-%m-%dT%H:%M:%S'

    class DTEncoder(json.JSONEncoder):
        # Serialize datetimes as {'_type': 'datetime', 'value': <str>}
        def default(self, obj):
            if isinstance(obj, datetime):
                return {
                    '_type': 'datetime',
                    'value': obj.strftime(dt_format)
                }
            return super(DTEncoder, self).default(obj)

    class DTDecoder(json.JSONDecoder):
        def __init__(self, *args, **kwargs):
            json.JSONDecoder.__init__(self,
                                      object_hook=self.object_hook,
                                      *args,
                                      **kwargs)

        # Rebuild datetimes from the tagged dicts produced by DTEncoder
        def object_hook(self, obj):
            if '_type' not in obj:
                return obj
            if obj['_type'] == 'datetime':
                return datetime.strptime(obj['value'], dt_format)
            return obj

    doc = Document(self.db, encoder=DTEncoder)
    doc['name'] = 'julia'
    doc['dt'] = datetime(2018, 7, 9, 15, 11, 10, 0)
    doc.save()

    # The raw stored document carries the encoded (tagged dict) form
    raw_doc = self.db.all_docs(include_docs=True)['rows'][0]['doc']

    self.assertEqual(raw_doc['name'], 'julia')
    self.assertEqual(raw_doc['dt']['_type'], 'datetime')
    self.assertEqual(raw_doc['dt']['value'], '2018-07-09T15:11:10')

    # Fetching with the decoder restores a real datetime object
    doc2 = Document(self.db, doc['_id'], decoder=DTDecoder)
    doc2.fetch()

    self.assertEqual(doc2['dt'], doc['dt'])
def test_timeout_in_create_replication(self): """ Test that a read timeout exception is thrown when creating a replicator with a timeout value of 500 ms. """ # Setup client with a timeout self.set_up_client(auto_connect=True, timeout=.5) self.db = self.client[self.test_target_dbname] self.target_db = self.client[self.test_dbname] # Construct a replicator with the updated client self.replicator = Replicator(self.client) repl_id = 'test-repl-{}'.format(unicode_(uuid.uuid4())) repl_doc = self.replicator.create_replication( self.db, self.target_db, repl_id ) self.replication_ids.append(repl_id) # Test that the replication document was created expected_keys = ['_id', '_rev', 'source', 'target', 'user_ctx'] # If Admin Party mode then user_ctx will not be in the key list if self.client.admin_party: expected_keys.pop() self.assertTrue(all(x in list(repl_doc.keys()) for x in expected_keys)) self.assertEqual(repl_doc['_id'], repl_id) self.assertTrue(repl_doc['_rev'].startswith('1-')) # Now that we know that the replication document was created, # check that the replication timed out. repl_doc = Document(self.replicator.database, repl_id) repl_doc.fetch() if repl_doc.get('_replication_state') not in ('completed', 'error'): # assert that a connection error is thrown because the read timed out with self.assertRaises(ConnectionError) as cm: changes = self.replicator.database.changes( feed='continuous') for change in changes: continue self.assertTrue(str(cm.exception).endswith('Read timed out.'))
class PortfolioUser:
    """An authenticated portfolio user backed by Cloudant.

    Credentials live as one document per user in the shared ``investauth``
    database; each user also owns a personal database named
    ``investfolio-<username>`` that is created on first login and seeded
    with the design documents described by ``view_definitions``.
    """

    def __init__(self, client):
        self.client = client
        self.username = None
        self.db_name = None
        self.portfolios = []
        self.email = None
        self.admin = False
        self.auth_db = self.client['investauth']
        self.logged_in = False
        # Design documents (and their views) seeded into every new
        # per-user database by initialize_views().
        # BUGFIX: every view entry must use the 'name' key --
        # initialize_views() reads view_definition['name'], so the former
        # 'view=' keys in the "stocks" ddoc raised KeyError when seeding.
        self.view_definitions = [
            dict(
                ddoc="stocks",
                views=[
                    dict(
                        name="allowned",
                        map_func=
                        "function (doc) {var amount; if (doc.type === 'transaction') {if (doc.action === 'buy' || doc.action === 'deposit') {amount = doc.quantity;} else if (doc.action === 'sell' || doc.action === 'withdrawl') {amount = doc.quantity * -1;}emit(doc.symbol, amount);}}",
                        reduce_func="_sum"),
                    dict(
                        name="bycategory",
                        map_func=
                        "function (doc) {if (doc.type === 'stock' && doc.active === true) {emit([doc.category,doc.symbol,doc.name,doc.buybelow,doc.comments], 1);}}",
                        reduce_func="_count"),
                    dict(
                        name="manage",
                        map_func=
                        "function (doc) {if (doc.type === 'stock') {emit(doc.symbol, null);}}",
                        reduce_func="_count"),
                    dict(
                        name="owned",
                        map_func=
                        "function (doc) {var amount;if (doc.type === 'transaction') {if (doc.action === 'buy') {amount = doc.quantity;} else if (doc.action === 'sell') {amount = doc.quantity * -1;}emit([doc.symbol,doc.date,doc.action,doc.price], amount);}}",
                        reduce_func="_sum"),
                    dict(
                        name="manualquotes",
                        map_func=
                        "function (doc) {if (doc.type === 'manualquote') {emit([doc.symbol,doc.date], doc.price);}}",
                        reduce_func="_stats")
                ]),
            dict(
                ddoc="activefolios",
                views=[
                    dict(
                        name="currentfolio",
                        map_func=
                        "function (doc) {if (doc.type === 'foliocache') {emit(doc._id, null);}}",
                        reduce_func="_count")
                ]),
            dict(
                ddoc="transactions",
                views=[
                    dict(
                        name="history",
                        map_func=
                        "function (doc) {if (doc.type === 'transaction') {var total = (doc.price * doc.quantity) + doc.fee;emit([doc.date,doc.symbol,doc.action,doc.quantity,doc.price,doc.fee], total);}}",
                        reduce_func="_stats")
                ])
        ]

    def login(self, username, password):
        """Authenticate against the auth database and load the user's
        personal database.  Returns True on success, False otherwise."""
        try:
            self.userdoc = Document(self.auth_db, username)
            self.userdoc.fetch()
            if self.check_credentials(password):
                self.username = username
                self.db_name = "investfolio-{0}".format(self.username)
                self.email = self.userdoc['email']
                self.admin = self.userdoc['admin']
                self.portfolios = self.userdoc['portfolios']
                # one portfolio is hard-coded right now
                self.selected_portfolio = self.portfolios[0]
                self.load_db()
                self.logged_in = True
                return True
            else:
                return False
        except Exception as e:
            # Missing user document, fetch failure, or bad credentials all
            # surface here as a failed login.
            print(e)
            return False

    def logout(self):
        """Drop all per-session user state."""
        self.db_name = None
        self.username = None
        self.email = None
        self.admin = False
        self.portfolios = None
        self.logged_in = False

    def check_credentials(self, password):
        """Verify a cleartext password against the stored pbkdf2 hash."""
        # password created with pbkdf2_sha256.hash("<password>")
        if pbkdf2_sha256.verify(password, self.userdoc['password']):
            return True
        else:
            return False

    # Data for user & db management page
    def load_admin_console(self):
        """Assemble data for the user & database management page.

        Still a stub: the lookups below are placeholders so the method
        returns an empty table instead of raising NameError.
        """
        if self.admin:
            # TODO: get all docs from authdb for user list
            userlist = []
            # TODO: get list of databases
            databases = {}
            # TODO: assemble into a combined list of users and their
            # associated metadata and the databases
            admin_page = []
            # return the data for a user list table that can be rendered
            # by the jinja template
            for username in userlist:
                admin_page.append([username, databases[username]])
            return admin_page

    # Create new user
    def create_user(self, username, password, admin, portfolios):
        """Create a user document plus the user's personal database."""
        if self.admin:
            try:
                # Create user document in auth_db
                with Document(self.auth_db, username) as doc:
                    doc['password'] = pbkdf2_sha256.hash(password)
                    doc['admin'] = admin
                    doc['portfolios'] = portfolios
                # Create user's custom database and populate views.
                # BUGFIX: use the same "investfolio-" prefix that login()/
                # load_db() expect, otherwise the new user's first login
                # would silently create a second, empty database.
                newdb_name = "investfolio-{0}".format(username)
                newdb = self.client.create_database(newdb_name)
                self.initialize_views(newdb)
                return "User created"
            except Exception as e:
                return "Cannot create user: {0}".format(e)

    # Delete user
    def delete_user(self, confirmation):
        """Delete a user (stub - not yet implemented)."""
        if self.admin:
            try:
                # TODO: Match confirmation phrase and delete user ID
                # document, and user's associated database
                pass
            except Exception as e:
                return "Unable to delete user: {0}".format(e)

    def load_db(self):
        """Open the user's personal database, creating and seeding it on
        first use."""
        # check to see if user's database exists
        self.db = self.client[self.db_name]
        # If it does, we're good. If it doesn't, create it and initialize indexes
        if not self.db.exists():
            self.db = self.client.create_database(self.db_name)
            self.initialize_views(self.db)

    def initialize_views(self, database):
        """Write every design document in view_definitions into *database*."""
        for ddoc_definition in self.view_definitions:
            this_ddoc = DesignDocument(database, ddoc_definition['ddoc'])
            for view_definition in ddoc_definition['views']:
                this_ddoc.add_view(view_definition['name'],
                                   view_definition['map_func'],
                                   reduce_func=view_definition['reduce_func'])
            this_ddoc.save()
class Portfolio:
    # Holds the in-memory model of one user's portfolio: stock metadata,
    # category allocations, and cached prices, all backed by Cloudant
    # views and external quote APIs.
    # NOTE(review): this class mixes Python-2-only syntax (`print x`
    # statements, the `<>` operator) with print() calls, so it can only
    # run under Python 2.

    def __init__(self, stockAPIkey):
        print("Portfolio.init({0})".format(stockAPIkey))
        self.stocks = dict(
        )  # Contains all stocks, both owned and tracked, with all their metadata
        # { symbol: {metadata} }
        self.categories = dict(
        )  # Contains major categories with arrays of (sub)categories and their metadata
        # { category: { target: %, actual: %, type: 'stock/fixed income' }}
        self.prices_last_updated = None  # Might use this to update prices on a schedule
        self.foliodoc = None  # placeholder for Cloudant document that stores portfolio metadata
        self.stockAPIkey = stockAPIkey
        self.barchartAPIkey = getenv('BARCHART')
        self.total_portfolio_value = 0
        self.status = ""

    def load(self, db, portfolioname):
        # Bind to a database, resolve all views, then populate the
        # in-memory caches and compute totals.
        print("Portfolio.load()")
        # Initialize database and variables
        self.name = portfolioname
        self.db = db
        # Initialize Cloudant database views
        self.stockddoc = DesignDocument(self.db, 'stocks')
        self.stockddoc.fetch()
        self.bycategory_view = self.stockddoc.get_view('bycategory')
        self.owned_view = self.stockddoc.get_view('owned')
        self.allowned_view = self.stockddoc.get_view('allowned')
        self.manualquote_view = self.stockddoc.get_view('manualquotes')
        self.folio_ddoc = DesignDocument(self.db, 'activefolios')
        self.folio_ddoc.fetch()
        self.active_folios_view = self.folio_ddoc.get_view('currentfolio')
        self.populate_categories()
        self.populate_stocks()
        self.refresh_total_value()

    # Load available category metadata into memory from DB
    def populate_categories(self):
        pass
        print("Portfolio.populate_categories()")
        # Load portfolio specification document from DB
        self.foliodoc = Document(self.db, self.name)
        self.foliodoc.fetch()
        for category in self.foliodoc['categories']:
            # print category
            for subcategory in self.foliodoc['categories'][category].keys():
                # print "Subcategory name: {0} Target: {1}".format(subcategory, self.foliodoc['categories'][category][subcategory])
                self.categories[subcategory] = dict(
                    target=self.foliodoc['categories'][category][subcategory],
                    actual=0,
                    type=category)

    # Populate stock metadata in memory from DB. (tracked and owned)
    def populate_stocks(self):
        print("Portfolio.populate_stocks()")
        # get metadata on stocks we're tracking in the portfolio
        with self.bycategory_view.custom_result(include_docs=True,
                                                reduce=False) as rslt:
            for line in rslt:
                doc = line['doc']
                # Cash is always worth 1; real quotes start at the -1
                # sentinel until refreshed from an API
                if doc['symbol'] == 'Cash':
                    temp_price = 1
                else:
                    temp_price = -1
                self.stocks[doc['symbol']] = dict(symbol=doc['symbol'],
                                                  name=doc['name'],
                                                  comments=doc['comments'],
                                                  active=doc['active'],
                                                  buybelow=doc['buybelow'],
                                                  lastprice=temp_price,
                                                  category=doc['category'],
                                                  qty=0)

    # Refresh current total value of portfolio for percentage calcuations and update subcategory totals
    def refresh_total_value(self):
        print("Portfolio.refresh_total_value()")
        self.total_portfolio_value = 0
        # Make sure prices are current
        quote_success = self.refresh_all_stock_prices()
        if not quote_success:
            # Alert user that stock prices are potentially out of date
            self.status = 'WARNING: Stock price quote error. Data may be out of date.'
        # Update quantities of stocks we own
        with self.allowned_view.custom_result(
                reduce=True, include_docs=False,
                group_level=1) as resultcollection:
            for stock in resultcollection:
                self.stocks[stock['key']]['qty'] = stock['value']
        # total up the account's value
        for stock in self.stocks.values():
            self.total_portfolio_value = self.total_portfolio_value + (
                stock['qty'] * stock['lastprice'])
        # Update each subcategory's percentage by summing the stocks within it
        # Right now this is a nested for loop, but through data in memory.
        category_value = 0
        for category_name in self.categories.keys():
            for stock in self.stocks.values():
                if stock['category'] == category_name:
                    category_value = category_value + stock[
                        'lastprice'] * stock['qty']
            self.categories[category_name]['actual'] = (
                category_value / self.total_portfolio_value) * 100
            category_value = 0

    def get_subcategory_list(self):
        print("Portfolio.get_subcategory_list()")
        return self.categories.keys()

    def barchart_quote(self, symbols_string):
        # Fetch quotes for a comma-separated symbol list from BarChart;
        # returns the parsed JSON payload or None on any failure.
        # execute stock API call (BarChart)
        start_time = time()
        myurl = "https://marketdata.websol.barchart.com/getQuote.json"
        payload = {
            'apikey': self.barchartAPIkey,
            'only': 'symbol,name,lastPrice',
            'symbols': symbols_string,
            'mode': 'R'
        }
        try:
            r = requests.get(url=myurl, params=payload)
            print r.text
            data = r.json()
            end_time = time()
            print("Barchart API query time: {0} seconds".format(
                float(end_time - start_time)))
            return data
        except Exception as e:
            print "Cannot get quotes from BarChart: {0}".format(e)
            return None

    def alphavantage_quote(self, symbols_string):
        # Fetch a batch quote from AlphaVantage; returns the parsed JSON
        # payload or None on any failure.
        start_time = time()
        myurl = "https://www.alphavantage.co/query"
        payload = {
            'function': 'BATCH_STOCK_QUOTES',
            'symbols': symbols_string,
            'apikey': self.stockAPIkey
        }
        try:
            r = requests.get(url=myurl, params=payload)
            data = r.json()
            end_time = time()
            print("AlphaVantage API query time: {0} seconds".format(
                float(end_time - start_time)))
            return data
        except Exception as e:
            print "Cannot get quotes from AlphaVantage: {0}".format(e)
            return None

    def refresh_all_stock_prices(self):
        # Refresh every cached lastprice: manual quotes from the DB first,
        # then the quote APIs.  Returns False when any price is still at
        # the -1 sentinel afterwards.
        print("Portfolio.refresh_all_stock_prices()")
        # Iterate through manual stock quotes in DB first (to cover missing symbols in stock APIs)
        with self.manualquote_view.custom_result(reduce=False) as rslt:
            manual_quotes = rslt[:]
            for row in manual_quotes:
                symbol = row['key'][0]
                date = row['key'][1]
                self.stocks[symbol]['lastprice'] = row['value']
        # construct string for API call
        symbols_string = ''
        for symbol in self.stocks.keys():
            if symbol <> "Cash":
                symbols_string = symbols_string + "{0},".format(symbol)
        # trim last comma
        symbols_string = symbols_string[:-1]
        ## execute stock API call (BarChart)
        barchartdata = self.barchart_quote(symbols_string)
        if barchartdata <> None:
            pass
        # Execute stock API call (AlphaVantage)
        alphavantagedata = self.alphavantage_quote(symbols_string)
        if alphavantagedata <> None:
            for stock_data in alphavantagedata['Stock Quotes']:
                # set the price of the holding in question
                if float(stock_data['2. price']) <> 0.0:
                    self.stocks[stock_data['1. symbol']]['lastprice'] = float(
                        stock_data['2. price'])
        # Update last quoted time
        self.prices_last_updated = int(time())
        # Check for any missing stock prices (any that are not set will be -1)
        for stock in self.stocks.values():
            if stock['lastprice'] == -1.0:
                return False
        return True

    # Create a new doc to track this stock and add it to the dictionary of stock data
    # custom ID is OK, since it prevents duplicate stock trackers
    # We should keep all prices in memory exclusively, or create "quote" docs
    # This DOES NOT store information about how much we own, because that's event sourced by transactions
    def new_stock(self, category, symbol, name, buybelow, comments, active):
        print("Portfolio.new_stock()")
        with Document(self.db, symbol) as stockdoc:
            stockdoc['type'] = 'stock'
            stockdoc['updated'] = strftime("%Y-%m-%d %H:%M:%S")
            stockdoc['category'] = category
            stockdoc['symbol'] = symbol
            stockdoc['active'] = active
            stockdoc['name'] = name
            stockdoc['comments'] = comments
            stockdoc['buybelow'] = buybelow
            self.stocks[symbol] = json.loads(stockdoc.json())
        # Get a quote for the new stock
        self.stocks[symbol]['lastprice'] = -1
        self.stocks[symbol]['qty'] = 0
        # update all holdings and totals
        self.refresh_total_value()

    def new_transaction_doc(self, symbol, quantity, price, fee, action):
        # Persist one event-sourced transaction document; cash movements
        # are normalized to the 'Cash' pseudo-symbol at price 1.
        xactiondoc = Document(self.db)
        xactiondoc['type'] = 'transaction'
        xactiondoc['action'] = action
        xactiondoc['quantity'] = quantity
        xactiondoc['date'] = strftime("%Y-%m-%d %H:%M:%S")
        xactiondoc['fee'] = fee
        xactiondoc['price'] = price
        if action == 'deposit' or action == 'withdrawl':
            xactiondoc['symbol'] = 'Cash'
            xactiondoc['price'] = 1
        else:
            # otherwise use symbol passed and check to see if updating cash is needed
            xactiondoc['symbol'] = symbol
        xactiondoc.save()

    # Execute a transaction document and update cash balance (if appropriate)
    def trade(self, symbol, quantity, price, fee, action, usebalance):
        print("Portfolio.trade()")
        self.new_transaction_doc(symbol, quantity, price, fee, action)
        if (usebalance == True and symbol <> "Cash"):
            # Mirror the trade with an offsetting cash movement
            cashqty = (quantity * price) + fee
            if action == 'buy':
                cashaction = 'withdrawl'
            else:
                cashaction = 'deposit'
            self.new_transaction_doc('Cash', cashqty, 1, 0, cashaction)
        self.refresh_total_value()

    # Return currently cached metadata for this stock
    def get_stock(self, symbol):
        return self.stocks[symbol]

    # Get an individual stock symbol's quote via the API
    def get_quote(self, symbol):
        print("Portflio.get_quote({0})".format(symbol))
        myurl = "https://www.alphavantage.co/query?function=BATCH_STOCK_QUOTES&symbols={0}&apikey={1}".format(
            symbol, self.stockAPIkey)
        try:
            r = requests.get(url=myurl)
            data = r.json()
            return float(data['Stock Quotes'][0]['2. price'])
        except Exception as e:
            # -1 is the "no price available" sentinel used throughout
            print_local("Unable to get stock price: {0}".format(e))
            return -1

    # Update a tracked stock's metadata
    def update_stock(self, symbol, name, buybelow, comments, active):
        with Document(self.db, symbol) as doc:
            doc['updated'] = strftime("%Y-%m-%d %H:%M:%S")
            doc['name'] = str(name)
            if active == 'true':
                doc['active'] = True
            else:
                doc['active'] = False
            doc['buybelow'] = float(buybelow)
            doc['comments'] = str(comments)
            # Keep the in-memory cache in step with the saved document
            for x in ('updated', 'name', 'active', 'buybelow', 'comments'):
                self.stocks[symbol][x] = doc[x]

    # Neutralize content upon logout
    def clear(self):
        print("Portfolio.clear()")
        self.name = None
        self.db = None
        self.categories = dict()
        self.stocks = dict()
        self.prices_last_updated = None
        self.total_portfolio_value = 0
        self.foliodoc = None

    # Return list of historical trasactions from DB
    def get_transactions(self, page, pagesize):
        print("Portfolio.get_transactions()")
        skip = page * pagesize
        ddoc = DesignDocument(self.db, 'transactions')
        ddoc.fetch()
        view = View(ddoc, 'history')
        return view(include_docs=False,
                    limit=pagesize,
                    skip=skip,
                    reduce=False)['rows']

    # Return a full state of the portfolio with metadata formatted for the Jinja template's rendering
    def get_template_data(self):
        print "Portfolio.get_template_data()"
        template_data = dict()
        # Iterate through the sub-categories
        for subcategory in self.categories.keys():
            # print "Processing {0}:\nData: {1}".format(subcategory,self.categories[subcategory])
            # local dictionary for this subcategory's data to go into the array above. Insert what we have so far
            subcategory_data = dict(
                type=subcategory,
                target_percentage=self.categories[subcategory]['target'],
                value=0,  # Tracks total value of all invested holdings in this particular subcategory (not used right now)
                actual_percentage="{0:,.1f}".format(
                    self.categories[subcategory]['actual']),
                holdings=[]  # array for all stocks in this subcat
            )
            template_data[subcategory] = subcategory_data
        # print template_data
        # Iterate through all tracked stocks in this subcategory
        for stock in self.stocks.keys():
            # local dictionary for this stock's data
            stock_data = dict(
                symbol=self.stocks[stock]['symbol'],
                name=self.stocks[stock]['name'],
                qty=self.stocks[stock]['qty'],
                price="$ {0:,.2f}".format(self.stocks[stock]['lastprice']),
                buy_below="$ {0:,.2f}".format(self.stocks[stock]['buybelow']),
                comments=self.stocks[stock]['comments'],
                value="$ {0:,.2f}".format(
                    float(self.stocks[stock]['qty'] *
                          self.stocks[stock]['lastprice'])
                )  # value of this security owned
            )
            template_data[self.stocks[stock]['category']]['holdings'].append(
                stock_data)
        return template_data
def test_attachment_management(self): """ Test the adding, retrieving, updating, and deleting of attachments """ doc = self.db.create_document( {'_id': 'julia006', 'name': 'julia', 'age': 6} ) attachment = StringIO() try: attachment.write('This is line one of the attachment.\n') attachment.write('This is line two of the attachment.\n') self.assertTrue(doc['_rev'].startswith('1-')) # Test adding an attachment resp = doc.put_attachment( 'attachment.txt', 'text/plain', attachment.getvalue() ) self.assertTrue(resp['ok']) self.assertTrue(resp['rev'].startswith('2-')) self.assertEqual(doc['_rev'], resp['rev']) self.assertTrue( all(x in list(doc.keys()) for x in [ '_id', '_rev', 'name', 'age', '_attachments' ]) ) self.assertTrue( all(x in list(doc['_attachments'].keys()) for x in [ 'attachment.txt' ]) ) orig_size = doc['_attachments']['attachment.txt']['length'] self.assertEqual(orig_size, len(attachment.getvalue())) # Confirm that the local document dictionary matches # the document on the database. 
expected = Document(self.db, 'julia006') expected.fetch() # Test retrieving an attachment self.assertEqual( doc.get_attachment('attachment.txt', attachment_type='text'), attachment.getvalue() ) # Test update an attachment attachment.write('This is line three of the attachment.\n') resp = doc.put_attachment( 'attachment.txt', 'text/plain', attachment.getvalue() ) self.assertTrue(resp['ok']) self.assertTrue(resp['rev'].startswith('3-')) self.assertEqual(doc['_rev'], resp['rev']) self.assertTrue( all(x in list(doc.keys()) for x in [ '_id', '_rev', 'name', 'age', '_attachments' ]) ) self.assertTrue( all(x in list(doc['_attachments'].keys()) for x in [ 'attachment.txt' ]) ) updated_size = doc['_attachments']['attachment.txt']['length'] self.assertTrue(updated_size > orig_size) self.assertEqual(updated_size, len(attachment.getvalue())) self.assertEqual( doc.get_attachment('attachment.txt', attachment_type='text'), attachment.getvalue() ) # Confirm that the local document dictionary matches # the document on the database. expected = Document(self.db, 'julia006') expected.fetch() # Test delete attachments # Add a second attachment so we can fully test # delete functionality. resp = doc.put_attachment( 'attachment2.txt', 'text/plain', attachment.getvalue() ) # Test deleting an attachment from a document # with multiple atatchments. resp = doc.delete_attachment('attachment.txt') self.assertTrue(resp['ok']) self.assertTrue(resp['rev'].startswith('5-')) self.assertEqual(doc['_rev'], resp['rev']) self.assertTrue( all(x in list(doc.keys()) for x in [ '_id', '_rev', 'name', 'age', '_attachments' ]) ) # Confirm that the local document dictionary matches # the document on the database. expected = Document(self.db, 'julia006') expected.fetch() self.assertEqual(doc, expected) # Test deleting an attachment from a document # with a single attachment. 
resp = doc.delete_attachment('attachment2.txt') self.assertTrue(resp['ok']) self.assertTrue(resp['rev'].startswith('6-')) self.assertEqual(doc['_rev'], resp['rev']) self.assertTrue( all(x in list(doc.keys()) for x in [ '_id', '_rev', 'name', 'age' ]) ) # Confirm that the local document dictionary matches # the document on the database. expected = Document(self.db, 'julia006') expected.fetch() self.assertEqual(doc, expected) finally: attachment.close()
def test_document_crud(self): """test basic crud operations with mocked backend""" doc = Document(self.database, "DUCKUMENT") # exists mock_resp = mock.Mock() mock_resp.status_code = 200 self.mock_session.get.return_value = mock_resp self.assertTrue(doc.exists()) self.assertTrue(self.mock_session.get.called) self.mock_session.get.assert_has_calls( [ mock.call('https://bob.cloudant.com/unittest/DUCKUMENT') ] ) self.mock_session.get.reset_mock() # create mock_resp = mock.Mock() mock_resp.raise_for_status = mock.Mock() mock_resp.status_code = 200 mock_resp.json = mock.Mock() mock_resp.json.return_value = {'id': 'DUCKUMENT', 'rev': 'DUCK2'} self.mock_session.post.return_value = mock_resp doc.create() self.assertEqual(doc['_rev'], 'DUCK2') self.assertEqual(doc['_id'], 'DUCKUMENT') self.assertTrue(self.mock_session.post.called) self.mock_session.post.reset_mock() # fetch mock_resp = mock.Mock() mock_resp.status_code = 200 mock_resp.raise_for_status = mock.Mock() mock_resp.json = mock.Mock() mock_resp.json.return_value = { '_id': 'DUCKUMENT', '_rev': 'DUCK2', 'herp': 'HERP', 'derp': 'DERP' } self.mock_session.get.return_value = mock_resp doc.fetch() self.assertTrue('herp' in doc) self.assertTrue('derp' in doc) self.assertEqual(doc['herp'], 'HERP') self.assertEqual(doc['derp'], 'DERP') self.assertTrue(self.mock_session.get.called) self.mock_session.get.assert_has_calls( [ mock.call('https://bob.cloudant.com/unittest/DUCKUMENT') ] ) self.mock_session.get.reset_mock() # save mock_put_resp = mock.Mock() mock_put_resp.status_code = 200 mock_put_resp.raise_for_status = mock.Mock() mock_put_resp.json = mock.Mock() mock_put_resp.json.return_value = {'id': 'DUCKUMENT', 'rev': 'DUCK3'} self.mock_session.put.return_value = mock_put_resp mock_get_resp = mock.Mock() mock_get_resp.status_code = 200 self.mock_session.get.return_value = mock_get_resp doc.save() self.assertEqual(doc['_rev'], 'DUCK3') self.assertEqual(doc['_id'], 'DUCKUMENT') self.assertTrue(self.mock_session.get.called) 
self.assertTrue(self.mock_session.put.called) self.mock_session.get.assert_has_calls( [ mock.call('https://bob.cloudant.com/unittest/DUCKUMENT') ] ) self.mock_session.put.assert_has_calls( [ mock.call( 'https://bob.cloudant.com/unittest/DUCKUMENT', headers={'Content-Type': 'application/json'}, data=mock.ANY ) ] ) self.mock_session.get.reset_mock() self.mock_session.put.reset_mock() # delete mock_resp = mock.Mock() mock_resp.status_code = 200 mock_resp.raise_for_status = mock.Mock() self.mock_session.delete.return_value = mock_resp doc.delete() self.assertTrue(self.mock_session.delete.called) self.mock_session.delete.assert_has_calls( [ mock.call( 'https://bob.cloudant.com/unittest/DUCKUMENT', params={'rev': 'DUCK3'} ) ] ) self.mock_session.delete.reset_mock() # test delete with no rev explodes as expected self.assertRaises(CloudantException, doc.delete)
def save_doc(self, doc: Document, key, value): doc.fetch() doc['key'] = key doc['value'] = value doc.save()