def testFunctions(self):
    datastore.set_options(dataset='foo')

    def caml(s):
        return ''.join(p[0].upper() + p[1:] for p in s.split('_'))

    rpcs = ['lookup', 'run_query', 'begin_transaction',
            'commit', 'rollback', 'allocate_ids']
    methods = [(r, getattr(datastore, caml(r) + 'Request'),
                getattr(datastore, caml(r) + 'Response'))
               for r in rpcs]
    conn = datastore.get_default_connection()
    for m, req_class, resp_class in methods:
        self.mox.StubOutWithMock(conn, m)
        method = getattr(conn, m)
        method(mox.IsA(req_class)).AndReturn(resp_class())
    self.mox.ReplayAll()
    for m, req_class, resp_class in methods:
        method = getattr(datastore, m)
        result = method(req_class())
        self.assertEqual(resp_class, type(result))
    self.mox.VerifyAll()
def get_data(dataset_id):
    # Set the dataset for all subsequent datastore requests.
    datastore.set_options(dataset=dataset_id)
    try:
        req = datastore.RunQueryRequest()
        query = req.query
        query.kind.add().name = 'Hackathlon'
        results = datastore.run_query(req).batch.entity_result
        # Each result exposes values as e.g.
        # results[0].entity.property[0].value.string_value
        useful_res = []
        for res in results:
            ent = res.entity
            dict_res = {}
            for prop in ent.property:
                dict_res[prop.name] = prop.value.string_value
            useful_res.append(dict_res)
        return json.dumps(useful_res)
    except datastore.RPCError as e:
        logging.error('Error while doing datastore operation')
        logging.error('RPCError: %(method)s %(reason)s',
                      {'method': e.method, 'reason': e.reason})
        logging.error('HTTPError: %(status)s %(reason)s',
                      {'status': e.response.status,
                       'reason': e.response.reason})
        return
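# A hedged extension sketch, not from the source: the same query restricted
# to entities whose 'name' property equals a given string, using the
# v1beta2-style filter fields. The filtered property name and value here are
# placeholders.
req = datastore.RunQueryRequest()
query = req.query
query.kind.add().name = 'Hackathlon'
f = query.filter.property_filter
f.property.name = 'name'                      # hypothetical property name
f.operator = datastore.PropertyFilter.EQUAL
f.value.string_value = 'some value'           # hypothetical value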
def clouddatastore_client():
    try:
        logFormatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        root = logging.getLogger()
        root.setLevel(logging.INFO)
        ch = logging.StreamHandler(sys.stdout)
        ch.setLevel(logging.INFO)
        ch.setFormatter(logFormatter)
        root.addHandler(ch)
        logging.getLogger('oauth2service.client').setLevel(logging.INFO)
        logging.getLogger('apiclient.discovery').setLevel(logging.INFO)

        credentials = GoogleCredentials.get_application_default()
        http = httplib2.Http()
        http = credentials.authorize(http)
        credentials.refresh(http)
        # NOTE: this overwrites the freshly refreshed token; presumably
        # leftover debug code.
        credentials.access_token = 'foo'
        print credentials.access_token

        datastore.set_options(project_id='p0', credentials=credentials)

        req = datastore.LookupRequest()
        key = datastore.Key()
        path = key.path.add()
        path.kind = 'Employee'
        path.name = 'aguadypoogznoqofmgmy'
        req.keys.extend([key])
        resp = datastore.lookup(req)
        if resp.found:
            entity = resp.found[0].entity
            print str(entity)
            for prop in entity.properties:
                print 'Lookup: ' + str(prop)
        else:
            print 'entity not found; initialize entity and insert..'
            req = datastore.CommitRequest()
            req.mode = datastore.CommitRequest.NON_TRANSACTIONAL
            employee = req.mutations.add().insert
            path = employee.key.path.add()
            path.kind = 'Employee'
            path.name = 'aguadypoogznoqofmgmy'
            res = datastore.commit(req)
            print res
    except HttpError as err:
        print 'Error:', pprint.pprint(err.content)
    except AccessTokenRefreshError:
        print ('Credentials have been revoked or expired, please re-run '
               'the application to re-authorize')
def testSetUrlOverride(self):
    self.mox.StubOutWithMock(os, 'getenv')
    os.getenv('DATASTORE_URL_INTERNAL_OVERRIDE').AndReturn(
        'http://prom-qa/datastore/v1beta42')
    self.mox.ReplayAll()
    datastore.set_options(host='http://example.com', dataset='bar')
    conn = datastore.get_default_connection()
    self.assertEqual('http://prom-qa/datastore/v1beta42/datasets/bar/',
                     conn._url)
    self.mox.VerifyAll()
def testSetOptions(self):
    datastore._conn = None
    self.mox.StubOutWithMock(os, 'getenv')
    self.mox.StubOutWithMock(helper, 'get_credentials_from_env')
    os.getenv('DATASTORE_HOST').AndReturn('http://localhost:8080')
    helper.get_credentials_from_env().AndReturn(FakeCredentialsFromEnv())
    self.mox.ReplayAll()
    datastore.set_options(dataset='bar')
    conn = datastore.get_default_connection()
    self.assertEqual('http://localhost:8080/datastore/v1beta1/datasets/bar/',
                     conn._url)
    self.assertEqual(FakeCredentialsFromEnv, type(conn._credentials))
    self.mox.VerifyAll()
def testFunctions(self):
    datastore.set_options(
        credentials=FakeCredentialsFromEnv(),
        project_endpoint="http://localhost:8080/datastore/v1/projects/foo")

    def caml(s):
        return "".join(p[0].upper() + p[1:] for p in s.split("_"))

    rpcs = ["lookup", "run_query", "begin_transaction",
            "commit", "rollback", "allocate_ids"]
    methods = [(r, getattr(datastore, caml(r) + "Request"),
                getattr(datastore, caml(r) + "Response"))
               for r in rpcs]
    conn = datastore.get_default_connection()
    for m, req_class, resp_class in methods:
        self.mox.StubOutWithMock(conn, m)
        method = getattr(conn, m)
        method(mox.IsA(req_class)).AndReturn(resp_class())
    self.mox.ReplayAll()
    for m, req_class, resp_class in methods:
        method = getattr(datastore, m)
        result = method(req_class())
        self.assertEqual(resp_class, type(result))
    self.mox.VerifyAll()
import googledatastore
import logging
import calendar
import sha

from googledatastore.helper import *
from datetime import datetime
from config import ConfigSection

_config = ConfigSection('datastore')
_log = logging.getLogger('sound.be.datastore')

googledatastore.set_options(dataset=_config.get_option('dataset'))


def _date_to_timestamp(date):
    return long(calendar.timegm(date.utctimetuple()) * 1000000L) + \
        date.microsecond


class _BaseEntity(object):
    """Base class for all entities.

    Handles the conversion between the simple model objects defined in this
    module and google cloud datastore entity objects.
    """

    def _sub_entities(self):
        """Get a dictionary of sub entities of the current entity.

        The keys of the dictionary are the field names in the current entity.
        The values are the model class used by that field. By default this is
        an empty dictionary unless overridden in a subclass.
        """
        return dict()
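# A minimal sketch (hypothetical; these model names are not from the source)
# of how a concrete model could override _sub_entities so that _BaseEntity's
# conversion logic recurses into a nested field:
class Track(_BaseEntity):
    pass


class Album(_BaseEntity):
    def _sub_entities(self):
        # Declare that the 'tracks' field holds nested Track entities.
        return {'tracks': Track}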
# XXX purpose of this test program is to lookup a key without a
# transaction if it is possible to do that with datastore for
# simplicity, performance and understanding
import logging
import sys
import time

import httplib2

# Will log to console explicit detail of the request and response.
httplib2.debuglevel = 2

import googledatastore as datastore

datastore.set_options(host='https://www.googleapis.com',
                      dataset='glowing-thunder-842')


def measureTime(a):
    start = time.time()
    a()
    elapsed = time.time() - start
    print "Time spent in (function name) is: ", elapsed
    # print ("%.5f" % elapsed)


def main():
    # See the hypothetical reconstruction of measureit below.
    measureit()
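# A hypothetical reconstruction of measureit (its body is not in the
# source): given the XXX note above, it presumably times a single lookup
# performed without a transaction. The kind and key name are placeholders.
def measureit():
    def lookup_once():
        req = datastore.LookupRequest()
        key = datastore.Key()
        path = key.path_element.add()
        path.kind = 'kindlooptest'   # placeholder kind
        path.name = 'sample-key'     # placeholder key name
        req.key.extend([key])
        datastore.lookup(req)        # no read_options.transaction is set
    measureTime(lookup_once)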
import sys, re, json, time
from hashlib import md5

from twisted.python.filepath import FilePath
from PacketAES import AESCipher
from twisted.internet import threads, defer
from twisted.web.server import Session, NOT_DONE_YET
from zope.interface import implements
from twisted.cred import portal, checkers, credentials, error as credError
from twisted.web.resource import IResource, Resource, NoResource
from twisted.web.guard import HTTPAuthSessionWrapper
from twisted.web.guard import DigestCredentialFactory

import googledatastore as datastore

datastore.set_options(dataset='packetservo')


class Verify(object):
    def cbquery(self, camid, field):
        camkey = datastore.Key()
        path = camkey.path_element.add()
        path.kind = 'PSCams'
        path.name = camid
        req = datastore.LookupRequest()
        req.key.extend([camkey])
        resp = datastore.lookup(req)
        if resp.found:
            pscam = resp.found[0].entity
            password_property = datastore.Property()
import googledatastore as datastore

datastore.set_options(dataset='glowing-thunder-842')
req = datastore.BeginTransactionRequest()
datastore.begin_transaction(req)
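# A hedged follow-up sketch, not from the source: the call above discards
# the response, so the transaction handle is lost. Keeping it allows the
# transaction to be released with a rollback RPC:
resp = datastore.begin_transaction(datastore.BeginTransactionRequest())
rollback_req = datastore.RollbackRequest()
rollback_req.transaction = resp.transaction
datastore.rollback(rollback_req)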
def main():
    # Set project id from command line argument.
    if len(sys.argv) < 2:
        print 'Usage: adams.py <PROJECT_ID>'
        sys.exit(1)
    # Set the project from the command line parameters.
    datastore.set_options(project_id=sys.argv[1])
    try:
        # Create a RPC request to begin a new transaction.
        req = datastore.BeginTransactionRequest()
        # Execute the RPC synchronously.
        resp = datastore.begin_transaction(req)
        # Get the transaction handle from the response.
        tx = resp.transaction
        # Create a RPC request to get entities by key.
        req = datastore.LookupRequest()
        # Create a new entity key.
        key = datastore.Key()
        # Set the entity key with only one `path` element: no parent.
        elem = key.path.add()
        elem.kind = 'Trivia'
        elem.name = 'hgtg'
        # Add one key to the lookup request.
        req.keys.extend([key])
        # Set the transaction, so we get a consistent snapshot of the
        # entity at the time the transaction started.
        req.read_options.transaction = tx
        # Execute the RPC and get the response.
        resp = datastore.lookup(req)
        # Create a RPC request to commit the transaction.
        req = datastore.CommitRequest()
        # Set the transaction to commit.
        req.transaction = tx
        if resp.found:
            # Get the entity from the response if found.
            entity = resp.found[0].entity
        else:
            # If no entity was found, insert a new one in the commit request
            # mutation.
            entity = req.mutations.add().insert
            # Copy the entity key.
            entity.key.CopyFrom(key)
            # Add two entity properties:
            # - a utf-8 string: `question`
            entity.properties['question'].string_value = 'Meaning of life?'
            # - a 64bit integer: `answer`
            entity.properties['answer'].integer_value = 42
        # Execute the Commit RPC synchronously and ignore the response:
        # apply the insert mutation if the entity was not found and close
        # the transaction.
        datastore.commit(req)
        # Get question property value.
        question = entity.properties['question'].string_value
        # Get answer property value.
        answer = entity.properties['answer'].integer_value
        # Print the question and read one line from stdin.
        print question
        result = raw_input('> ')
        if result == str(answer):
            print ('fascinating, extraordinary and, '
                   'when you think hard about it, completely obvious.')
        else:
            print "Don't Panic!"
    except datastore.RPCError as e:
        # RPCError is raised if any error happened during a RPC.
        # It includes the `method` called, the canonical error code `code`,
        # and the `message` of the failure.
        logging.error('Error while doing datastore operation')
        logging.error('RPCError: %(method)s %(code)s %(reason)s',
                      {'method': e.method,
                       'code': e.code,
                       'reason': e.message})
        return
# Rank each stock based on its model score (for the close price).
rank = {}
rankScore = []
for i in np.arange(len(symbols)):
    rankScore.append(np.array(savedScores[symbols[i]])[:, CLOSE][0])
rankIndex = np.array(rankScore).argsort()[::-1]
counter = 1
for i in rankIndex:
    rank[symbols[i]] = counter
    counter += 1
    # print rankIndex[i]+1, symbols[i], \
    #     np.array(savedScores[symbols[i]])[:,CLOSE][0]

# if platform.system() != 'Windows' and platform.system() != 'Darwin':
# Set the dataset from the command line parameters.
datastore.set_options(dataset="daily-stock-forecast")

# Save each symbol into the datastore.
for i in np.arange(len(symbols)):
    if rank[symbols[i]] <= 100000:
        try:
            req = datastore.CommitRequest()
            req.mode = datastore.CommitRequest.NON_TRANSACTIONAL
            entity = req.mutation.insert_auto_id.add()
            # Create a new entity key.
            key = datastore.Key()
            # Set the entity key with only one `path_element`: no parent.
            path = key.path_element.add()
            path.kind = 'Forecast'
def main():
    datastore.set_options(dataset='glowing-thunder-842')
    try:
        # Create a RPC request to begin a new transaction.
        req = datastore.BeginTransactionRequest()
        # Execute the RPC synchronously.
        resp = datastore.begin_transaction(req)
        # Get the transaction handle from the response.
        tx = resp.transaction
        # Create a RPC request to get entities by key.
        req = datastore.LookupRequest()
        # Create a new entity key.
        key = datastore.Key()
        # Set the entity key with only one `path_element`: no parent.
        path = key.path_element.add()
        path.kind = 'kindlooptest'
        path.name = randstr()
        # Add one key to the lookup request.
        req.key.extend([key])
        # Set the transaction, so we get a consistent snapshot of the
        # entity at the time the transaction started.
        req.read_options.transaction = tx
        # Execute the RPC and get the response.
        resp = datastore.lookup(req)
        # Create a RPC request to commit the transaction.
        req = datastore.CommitRequest()
        # Set the transaction to commit.
        req.transaction = tx
        if resp.found:
            # Get the entity from the response if found.
            entity = resp.found[0].entity
        else:
            # If no entity was found, insert a new one in the commit request
            # mutation.
            entity = req.mutation.insert.add()
            # Copy the entity key.
            entity.key.CopyFrom(key)
            # Add two entity properties:
            # - a utf-8 string property.
            prop = entity.property.add()
            prop.name = 'prop1test'
            prop.value.string_value = randstr()
            # - a 64bit integer property.
            prop = entity.property.add()
            prop.name = 'prop2test'
            prop.value.integer_value = 77
        # Execute the Commit RPC synchronously and ignore the response:
        # apply the insert mutation if the entity was not found and close
        # the transaction.
        datastore.commit(req)
        # Get the string property value.
        question = entity.property[0].value.string_value
        # Get the integer property value.
        answer = entity.property[1].value.integer_value
        # Print the integer value.
        print answer
    except datastore.RPCError as e:
        # RPCError is raised if any error happened during a RPC.
        # It includes the `method` called and the `reason` of the
        # failure as well as the original `HTTPResponse` object.
        logging.error('Error while doing datastore operation')
        logging.error('RPCError: %(method)s %(reason)s',
                      {'method': e.method, 'reason': e.reason})
        logging.error('HTTPError: %(status)s %(reason)s',
                      {'status': e.response.status,
                       'reason': e.response.reason})
        return
        self.id = keys[0].path_element[-1].id
        return self


app = Flask(__name__)
default_todo_list = TodoList('default').save()


@app.route('/todos', methods=['GET', 'POST', 'DELETE'])
def TodoService():
    try:
        if request.method == 'GET':
            return json.dumps(Todo.get_all(), cls=Todo.Encoder)
        elif request.method == 'POST':
            todo = Todo(json.loads(request.data))
            return json.dumps(todo.save(), cls=Todo.Encoder)
        elif request.method == 'DELETE':
            return Todo.archive()
        abort(405)
    except datastore.RPCError as e:
        app.logger.error(str(e))
        abort(e.response.status)


if __name__ == '__main__':
    # Set dataset from command line argument.
    if len(sys.argv) < 2:
        print 'Usage: todos.py <DATASET_ID>'
        sys.exit(1)
    datastore.set_options(dataset=sys.argv[1])
    app.run(host='0.0.0.0', debug=True)
def main():
    # Set dataset id from command line argument.
    if len(sys.argv) < 2:
        print 'Usage: adams.py <DATASET_ID>'
        sys.exit(1)
    # Set the dataset from the command line parameters.
    datastore.set_options(dataset=sys.argv[1])
    try:
        # Create a RPC request to write mutations outside of a transaction.
        req = datastore.BlindWriteRequest()
        # Add a mutation that updates or inserts one entity.
        entity = req.mutation.upsert.add()
        # Set the entity key with only one `path_element`: no parent.
        path = entity.key.path_element.add()
        path.kind = 'Trivia'
        path.name = 'hgtg'
        # Add two entity properties:
        # - a utf-8 string: `question`
        property = entity.property.add()
        property.name = 'question'
        value = property.value.add()
        value.string_value = 'Meaning of life?'
        # - a 64bit integer: `answer`
        property = entity.property.add()
        property.name = 'answer'
        value = property.value.add()
        value.integer_value = 42
        # Execute the RPC synchronously and ignore the response.
        datastore.blind_write(req)
        # Create a RPC request to get entities by key.
        req = datastore.LookupRequest()
        # Add one key to lookup w/ the same entity key.
        req.key.extend([entity.key])
        # Execute the RPC and get the response.
        resp = datastore.lookup(req)
        # Found one entity result.
        entity = resp.found[0].entity
        # Get question property value.
        question = entity.property[0].value[0].string_value
        # Get answer property value.
        answer = entity.property[1].value[0].integer_value
        # Print the question and read one line from stdin.
        print question
        result = raw_input('> ')
        if result == str(answer):
            print ('fascinating, extraordinary and, '
                   'when you think hard about it, completely obvious.')
        else:
            print "Don't Panic!"
    except datastore.RPCError as e:
        # RPCError is raised if any error happened during a RPC.
        # It includes the `method` called and the `reason` of the
        # failure as well as the original `HTTPResponse` object.
        logging.error('Error while doing datastore operation')
        logging.error('RPCError: %(method)s %(reason)s',
                      {'method': e.method, 'reason': e.reason})
        logging.error('HTTPError: %(status)s %(reason)s',
                      {'status': e.response.status,
                       'reason': e.response.reason})
        return
def testSetOptions(self):
    other_thread_conn = []
    lock1 = threading.Lock()
    lock2 = threading.Lock()
    lock1.acquire()
    lock2.acquire()

    def target():
        # Grab two connections.
        other_thread_conn.append(datastore.get_default_connection())
        other_thread_conn.append(datastore.get_default_connection())
        lock1.release()  # Notify that we have grabbed the first 2 connections.
        lock2.acquire()  # Wait for the signal to grab the 3rd.
        other_thread_conn.append(datastore.get_default_connection())

    other_thread = threading.Thread(target=target)
    # Reset options and state.
    datastore._options = {}
    datastore.set_options(project_id="foo")
    self.mox.StubOutWithMock(helper, "get_credentials_from_env")
    self.mox.StubOutWithMock(helper, "get_project_endpoint_from_env")
    endpoint = "http://localhost:8080/datastore/v1/projects/%s"
    helper.get_project_endpoint_from_env(project_id="foo").AndReturn(
        endpoint % "foo")
    helper.get_project_endpoint_from_env(project_id="foo").AndReturn(
        endpoint % "foo")
    helper.get_project_endpoint_from_env(project_id="bar").AndReturn(
        endpoint % "bar")
    helper.get_project_endpoint_from_env(project_id="bar").AndReturn(
        endpoint % "bar")
    helper.get_credentials_from_env().AndReturn(FakeCredentialsFromEnv())
    self.mox.ReplayAll()
    # Start the thread and wait for the first lock.
    other_thread.start()
    lock1.acquire()
    t1_conn1 = datastore.get_default_connection()
    t2_conn1, t2_conn1b = other_thread_conn
    other_thread_conn = []
    # The two threads get different connections.
    self.assertIsNot(t1_conn1, t2_conn1)
    # Multiple calls on the same thread get the same connection.
    self.assertIs(t1_conn1, datastore.get_default_connection())
    self.assertIs(t2_conn1, t2_conn1b)
    # Change the global options and grab the connections again.
    datastore.set_options(project_id="bar")
    lock2.release()
    other_thread.join()
    t1_conn2 = datastore.get_default_connection()
    t2_conn2 = other_thread_conn[0]
    # Changing the options causes all threads to create new connections.
    self.assertIsNot(t1_conn1, t1_conn2)
    self.assertIsNot(t2_conn1, t2_conn2)
    # The new connections are still different for each thread.
    self.assertIsNot(t1_conn2, t2_conn2)
    # The old connections have the old settings.
    self.assertEqual("http://localhost:8080/datastore/v1/projects/foo",
                     t1_conn1._url)
    self.assertEqual("http://localhost:8080/datastore/v1/projects/foo",
                     t2_conn1._url)
    # The new connections have the new settings.
    self.assertEqual("http://localhost:8080/datastore/v1/projects/bar",
                     t1_conn2._url)
    self.assertEqual("http://localhost:8080/datastore/v1/projects/bar",
                     t2_conn2._url)
    self.assertEqual(FakeCredentialsFromEnv, type(t1_conn2._credentials))
    self.assertEqual(FakeCredentialsFromEnv, type(t2_conn2._credentials))
    self.mox.VerifyAll()
def main():
    # Set dataset id from command line argument.
    if len(sys.argv) < 2:
        print 'Usage: adams.py <DATASET_ID>'
        sys.exit(1)
    # Set the dataset from the command line parameters.
    datastore.set_options(dataset=sys.argv[1])
    try:
        # Create a RPC request to begin a new transaction.
        req = datastore.BeginTransactionRequest()
        # Execute the RPC synchronously.
        resp = datastore.begin_transaction(req)
        # Get the transaction handle from the response.
        tx = resp.transaction
        # Create a RPC request to get entities by key.
        req = datastore.LookupRequest()
        # Create a new entity key.
        key = datastore.Key()
        # Set the entity key with only one `path_element`: no parent.
        path = key.path_element.add()
        path.kind = 'Trivia'
        path.name = 'hgtg'
        # Add one key to the lookup request.
        req.key.extend([key])
        # Set the transaction, so we get a consistent snapshot of the
        # entity at the time the transaction started.
        req.read_options.transaction = tx
        # Execute the RPC and get the response.
        resp = datastore.lookup(req)
        # Create a RPC request to commit the transaction.
        req = datastore.CommitRequest()
        # Set the transaction to commit.
        req.transaction = tx
        if resp.found:
            # Get the entity from the response if found.
            entity = resp.found[0].entity
        else:
            # If no entity was found, insert a new one in the commit request
            # mutation.
            entity = req.mutation.insert.add()
            # Copy the entity key.
            entity.key.CopyFrom(key)
            # Add two entity properties:
            # - a utf-8 string: `question`
            prop = entity.property.add()
            prop.name = 'question'
            prop.value.string_value = 'Meaning of life?'
            # - a 64bit integer: `answer`
            prop = entity.property.add()
            prop.name = 'answer'
            prop.value.integer_value = 42
        # Execute the Commit RPC synchronously and ignore the response:
        # apply the insert mutation if the entity was not found and close
        # the transaction.
        datastore.commit(req)
        props = get_property_dict(entity)
        # Get question property value.
        question = props['question'].string_value
        # Get answer property value.
        answer = props['answer'].integer_value
        # Print the question and read one line from stdin.
        print question
        result = raw_input('> ')
        if result == str(answer):
            print ('fascinating, extraordinary and, '
                   'when you think hard about it, completely obvious.')
        else:
            print "Don't Panic!"
    except datastore.RPCError as e:
        # RPCError is raised if any error happened during a RPC.
        # It includes the `method` called and the `reason` of the
        # failure as well as the original `HTTPResponse` object.
        logging.error('Error while doing datastore operation')
        logging.error('RPCError: %(method)s %(reason)s',
                      {'method': e.method, 'reason': e.reason})
        logging.error('HTTPError: %(status)s %(reason)s',
                      {'status': e.response.status,
                       'reason': e.response.reason})
        return
    # Tail of the scraping helper (presumably process_page): attach the
    # remaining properties and commit the entity.
    prop = entity.property.add()
    prop.name = 'model'
    prop.value.string_value = model
    prop = entity.property.add()
    prop.name = 'country'
    prop.value.string_value = country
    prop = entity.property.add()
    prop.name = 'year'
    prop.value.integer_value = int(year)
    datastore.commit(request)


r = requests.get('http://planecheck.com/aspsel2.asp?parmstr=&page=0')
soup = bs4.BeautifulSoup(r.text, 'html.parser')
datastore.set_options(dataset='bluecmd0')
pages = set()
for link in soup.find_all('a'):
    href = link.get('href')
    if href.startswith('aspsel2.asp?'):
        pages.add(href)
process_page(soup)
for page in pages:
    r = requests.get('http://planecheck.com/' + page)
    soup = bs4.BeautifulSoup(r.text, 'html.parser')
    process_page(soup)
def PushData(data, original_data={}):
    '''Pushes a bunch of data into the datastore. The data should be a dict.
    Each key is treated as a namespace, and each value is also a dict. A new
    datastore entry is upserted for every inner key, with the value pickled
    into the |pickled_value| field.

    For example, if given the dictionary:

    {
      'fruit': {
        'apple': 1234,
        'banana': 'yellow',
        'trolling carrot': {
          'arbitrarily complex': ['value', 'goes', 'here']
        }
      },
      'animal': {
        'sheep': 'baaah',
        'dog': 'woof',
        'trolling cat': 'moo'
      }
    }

    this would result in a push of 6 keys in total, with the following IDs:

    Key('PersistentObjectStoreItem', 'fruit/apple')
    Key('PersistentObjectStoreItem', 'fruit/banana')
    Key('PersistentObjectStoreItem', 'fruit/trolling carrot')
    Key('PersistentObjectStoreItem', 'animal/sheep')
    Key('PersistentObjectStoreItem', 'animal/dog')
    Key('PersistentObjectStoreItem', 'animal/trolling cat')

    If given |original_data|, this will only push key-value pairs for entries
    that are either new or have changed from their original (pickled) value.

    Caveat: Pickling and unpickling a dictionary can (but does not always)
    change its key order. This means that objects will often be seen as
    changed even when they haven't changed.
    '''
    datastore.set_options(dataset=_DATASET_NAME)

    def flatten(dataset):
        flat = {}
        for namespace, items in dataset.iteritems():
            for k, v in items.iteritems():
                flat['%s/%s' % (namespace, k)] = cPickle.dumps(v)
        return flat

    logging.info('Flattening data sets...')
    data = flatten(data)
    original_data = flatten(original_data)

    logging.info('Culling new data...')
    for k in data.keys():
        if ((k in original_data and original_data[k] == data[k]) or
            (len(data[k]) > _MAX_ENTITY_SIZE)):
            del data[k]

    for batch, n, total in _CreateBatches(data):
        commit_request = datastore.CommitRequest()
        commit_request.mode = datastore.CommitRequest.NON_TRANSACTIONAL
        commit_request.mutation.upsert.extend(list(batch))
        logging.info('Committing %s/%s entities...' % (n, total))
        datastore.commit(commit_request)
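# A hypothetical sketch of the _CreateBatches helper used above; its real
# body is not in the source. It is assumed to turn the flattened
# {namespace/key: pickled_value} dict into PersistentObjectStoreItem
# entities and yield them in fixed-size batches together with progress
# counters. The 500-entity batch size is an assumption.
_BATCH_SIZE = 500

def _CreateBatches(data):
    entities = []
    for item_id, pickled_value in data.iteritems():
        entity = datastore.Entity()
        path = entity.key.path_element.add()
        path.kind = 'PersistentObjectStoreItem'
        path.name = item_id
        prop = entity.property.add()
        prop.name = 'pickled_value'
        prop.value.blob_value = pickled_value
        prop.value.indexed = False
        entities.append(entity)
    total = len(entities)
    for start in xrange(0, total, _BATCH_SIZE):
        batch = entities[start:start + _BATCH_SIZE]
        yield batch, min(start + _BATCH_SIZE, total), total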
def testSetOptions(self):
    other_thread_conn = []
    lock1 = threading.Lock()
    lock2 = threading.Lock()
    lock1.acquire()
    lock2.acquire()

    def target():
        # Grab two connections.
        other_thread_conn.append(datastore.get_default_connection())
        other_thread_conn.append(datastore.get_default_connection())
        lock1.release()  # Notify that we have grabbed the first 2 connections.
        lock2.acquire()  # Wait for the signal to grab the 3rd.
        other_thread_conn.append(datastore.get_default_connection())

    other_thread = threading.Thread(target=target)
    # Reset options and state.
    datastore._options = {}
    datastore.set_options(dataset='foo')
    self.mox.StubOutWithMock(os, 'getenv')
    self.mox.StubOutWithMock(helper, 'get_credentials_from_env')
    os.getenv('DATASTORE_HOST').AndReturn('http://localhost:8080')
    os.getenv('DATASTORE_URL_INTERNAL_OVERRIDE').AndReturn(None)
    os.getenv('DATASTORE_URL_INTERNAL_OVERRIDE').AndReturn(None)
    os.getenv('DATASTORE_URL_INTERNAL_OVERRIDE').AndReturn(None)
    os.getenv('DATASTORE_URL_INTERNAL_OVERRIDE').AndReturn(None)
    helper.get_credentials_from_env().AndReturn(FakeCredentialsFromEnv())
    self.mox.ReplayAll()
    # Start the thread and wait for the first lock.
    other_thread.start()
    lock1.acquire()
    t1_conn1 = datastore.get_default_connection()
    t2_conn1, t2_conn1b = other_thread_conn
    other_thread_conn = []
    # The two threads get different connections.
    self.assertIsNot(t1_conn1, t2_conn1)
    # Multiple calls on the same thread get the same connection.
    self.assertIs(t1_conn1, datastore.get_default_connection())
    self.assertIs(t2_conn1, t2_conn1b)
    # Change the global options and grab the connections again.
    datastore.set_options(dataset='bar')
    lock2.release()
    other_thread.join()
    t1_conn2 = datastore.get_default_connection()
    t2_conn2 = other_thread_conn[0]
    # Changing the options causes all threads to create new connections.
    self.assertIsNot(t1_conn1, t1_conn2)
    self.assertIsNot(t2_conn1, t2_conn2)
    # The new connections are still different for each thread.
    self.assertIsNot(t1_conn2, t2_conn2)
    # The old connections have the old settings.
    self.assertEqual('http://localhost:8080/datastore/v1beta2/datasets/foo/',
                     t1_conn1._url)
    self.assertEqual('http://localhost:8080/datastore/v1beta2/datasets/foo/',
                     t2_conn1._url)
    # The new connections have the new settings.
    self.assertEqual('http://localhost:8080/datastore/v1beta2/datasets/bar/',
                     t1_conn2._url)
    self.assertEqual('http://localhost:8080/datastore/v1beta2/datasets/bar/',
                     t2_conn2._url)
    self.assertEqual(FakeCredentialsFromEnv, type(t1_conn2._credentials))
    self.assertEqual(FakeCredentialsFromEnv, type(t2_conn2._credentials))
    self.mox.VerifyAll()
rank = {}
counter = 1
for i in sortedRankIndexOriginal:
    rank[symbols[i]] = counter
    counter += 1

"""rank = {}
counter = 1
for i in rankIndex:
    rank[symbols[i]] = counter
    #if counter <= 10:
    #    print symbols[i], counter, abs((np.array(savedPrediction[symbols[i]])[:,CLOSE][-1] - closePrice[i][-1])/abs(closePrice[i][-1])*100.0)
    counter += 1"""

if platform.system() != 'Windows':
    # Set the dataset from the command line parameters.
    datastore.set_options(dataset="daily-stock-forecast")

    # Save each symbol into the datastore.
    for i in np.arange(len(symbols)):
        if rank[symbols[i]] <= 100000:
            try:
                req = datastore.CommitRequest()
                req.mode = datastore.CommitRequest.NON_TRANSACTIONAL
                entity = req.mutation.insert_auto_id.add()
                # Create a new entity key.
                key = datastore.Key()
                # Set the entity key with only one `path_element`: no parent.
                path = key.path_element.add()
                path.kind = 'Forecast'
    return self


app = Flask(__name__)


@app.route('/todos', methods=['GET', 'POST', 'DELETE'])
def TodoService():
    try:
        if request.method == 'GET':
            return json.dumps(Todo.get_all(), cls=Todo.Encoder)
        elif request.method == 'POST':
            todo = Todo(json.loads(request.data))
            return json.dumps(todo.save(), cls=Todo.Encoder)
        elif request.method == 'DELETE':
            return Todo.archive()
        abort(405)
    except datastore.RPCError as e:
        app.logger.error(str(e))
        # Surface datastore failures as a generic server error.
        abort(500)


if __name__ == '__main__':
    # Set project from command line argument.
    if len(sys.argv) < 2:
        print 'Usage: todos.py <PROJECT_ID>'
        sys.exit(1)
    datastore.set_options(project_id=sys.argv[1])
    default_todo_list = TodoList('default').save()
    print 'Application running, visit localhost:5000/static/index.html'
    app.run(host='0.0.0.0', debug=True)