def Read(self):
  """ Reads the datastore file into memory.

  The entities in the file are merged into the entities already in memory;
  the in-memory datastore is *not* cleared first.  Call Clear() before
  calling Read() if the file contents should replace what is in memory.
  An entity in the file with the same app name, kind, and key as an entity
  already in the datastore overwrites the in-memory entity.

  Also sets __next_id to one greater than the highest id allocated so far.
  """
  if self.__datastore_file and self.__datastore_file != '/dev/null':
    for encoded_entity in self.__ReadPickled(self.__datastore_file):
      try:
        entity = entity_pb.EntityProto(encoded_entity)
      except self.READ_PB_EXCEPTIONS, e:
        raise datastore_errors.InternalError(self.READ_ERROR_MSG %
                                             (self.__datastore_file, e))
      except struct.error, e:
        # Python 2.5.0 specifically fails to unpack pickled float values
        # (e.g. FloatProperty/GeoPtProperty); give that case a more
        # helpful message, re-raise anything else unchanged.
        if (sys.version_info[0:3] == (2, 5, 0) and
            e.message.startswith('unpack requires a string argument')):
          raise datastore_errors.InternalError(self.READ_PY250_MSG +
                                               self.READ_ERROR_MSG %
                                               (self.__datastore_file, e))
        else:
          raise

      self._StoreEntity(entity)

      # Keep id allocation ahead of every numeric id already present in
      # the file so future allocations cannot collide.
      last_path = entity.key().path().element_list()[-1]
      if last_path.has_id() and last_path.id() >= self.__next_id:
        self.__next_id = last_path.id() + 1
def Read(self):
  """ Reads the datastore and history files into memory.

  The in-memory query history is cleared, but the datastore is *not*
  cleared; the entities in the files are merged into the entities in
  memory.  If you want them to overwrite the in-memory datastore, call
  Clear() before calling Read().

  If the datastore file contains an entity with the same app name, kind,
  and key as an entity already in the datastore, the entity from the file
  overwrites the entity in the datastore.

  Also sets __next_id to one greater than the highest id allocated so far.
  """
  # Exceptions that indicate a corrupt or incompatible pickled protocol
  # buffer.
  pb_exceptions = (ProtocolBuffer.ProtocolBufferDecodeError, LookupError,
                   TypeError, ValueError)
  error_msg = ('Data in %s is corrupt or a different version. '
               'Try running with the --clear_datastore flag.\n%r')

  if self.__datastore_file and self.__datastore_file != '/dev/null':
    for encoded_entity in self.__ReadPickled(self.__datastore_file):
      try:
        entity = entity_pb.EntityProto(encoded_entity)
      except pb_exceptions, e:
        raise datastore_errors.InternalError(
            error_msg % (self.__datastore_file, e))

      # Merge the entity into the in-memory map, overwriting any existing
      # entity stored under the same (app, kind) and key.
      last_path = entity.key().path().element_list()[-1]
      app_kind = (entity.key().app(), last_path.type())
      kind_dict = self.__entities.setdefault(app_kind, {})
      kind_dict[entity.key()] = entity

      # Keep id allocation ahead of every numeric id found in the file.
      if last_path.has_id() and last_path.id() >= self.__next_id:
        self.__next_id = last_path.id() + 1

  self.__query_history = {}
  for encoded_query, count in self.__ReadPickled(self.__history_file):
    try:
      query_pb = datastore_pb.Query(encoded_query)
    except pb_exceptions, e:
      raise datastore_errors.InternalError(
          error_msg % (self.__history_file, e))
    # Accumulate counts for queries seen across multiple history entries.
    if query_pb in self.__query_history:
      self.__query_history[query_pb] += count
    else:
      self.__query_history[query_pb] = count
def _RemoteSend(self, request, response, method):
  """Sends a request to the remote datastore server and fills in response.

  Args:
    request: the request protocol buffer to forward (must support Encode()).
    response: the response protocol buffer to populate (via
        ParseFromString).
    method: string, the datastore_v3 method name being invoked.

  Raises:
    datastore_errors.InternalError: if no response arrives from the server.
    apiproxy_errors.ApplicationError: if the server reports an application
        error.
  """
  tag = self.__app_id
  user = users.GetCurrentUser()
  # PEP 8: compare against None with an identity check, not `!= None`.
  if user is not None:
    tag += ":" + user.email()
    tag += ":" + user.nickname()
    tag += ":" + user.auth_domain()

  api_request = remote_api_pb.Request()
  api_request.set_method(method)
  api_request.set_service_name("datastore_v3")
  api_request.set_request(request.Encode())

  api_response = remote_api_pb.Response()
  api_response = api_request.sendCommand(self.__datastore_location, tag,
                                         api_response, 1,
                                         self.__is_encrypted,
                                         KEY_LOCATION, CERT_LOCATION)

  if not api_response or not api_response.has_response():
    raise datastore_errors.InternalError(
        'No response from db server on %s requests.' % method)

  if api_response.has_application_error():
    error_pb = api_response.application_error()
    logging.error(error_pb.detail())
    raise apiproxy_errors.ApplicationError(error_pb.code(),
                                           error_pb.detail())

  if api_response.has_exception():
    raise api_response.exception()

  response.ParseFromString(api_response.response())
def _RemoteSend(self, request, response, method, request_id=None):
  """Sends a request to the remote datastore server and fills in response.

  Args:
    request: the request protocol buffer to forward (must support Encode()).
    response: the response protocol buffer to populate (via
        ParseFromString).
    method: string, the datastore_v3 method name being invoked.
    request_id: optional request id to attach to the remote API request.

  Raises:
    apiproxy_errors.ApplicationError: on connection timeout or when the
        server reports an application error.
    datastore_errors.InternalError: if no response arrives or the remote
        call fails with ProtocolBufferReturnError.
  """
  tag = self.project_id
  self._maybeSetDefaultAuthDomain()
  user = users.GetCurrentUser()
  # PEP 8: compare against None with an identity check, not `!= None`.
  if user is not None:
    tag += ":" + user.email()
    tag += ":" + user.nickname()
    tag += ":" + user.auth_domain()

  api_request = remote_api_pb.Request()
  api_request.set_method(method)
  api_request.set_service_name("datastore_v3")
  api_request.set_request(request.Encode())
  if request_id is not None:
    api_request.set_request_id(request_id)

  api_response = remote_api_pb.Response()
  try:
    api_response = api_request.sendCommand(self.__datastore_location, tag,
                                           api_response, 1,
                                           self.__is_encrypted,
                                           KEY_LOCATION, CERT_LOCATION)
  except socket.error as socket_error:
    if socket_error.errno == errno.ETIMEDOUT:
      raise apiproxy_errors.ApplicationError(
          datastore_pb.Error.TIMEOUT,
          'Connection timed out when making datastore request')
    raise
  # AppScale: Interpret ProtocolBuffer.ProtocolBufferReturnError as
  # datastore_errors.InternalError
  except ProtocolBuffer.ProtocolBufferReturnError as e:
    raise datastore_errors.InternalError(e)

  if not api_response or not api_response.has_response():
    raise datastore_errors.InternalError(
        'No response from db server on %s requests.' % method)

  if api_response.has_application_error():
    error_pb = api_response.application_error()
    logging.error(error_pb.detail())
    raise apiproxy_errors.ApplicationError(error_pb.code(),
                                           error_pb.detail())

  if api_response.has_exception():
    raise api_response.exception()

  response.ParseFromString(api_response.response())
def MakeSyncCall(self, service, call, request, response):
  """Routes a synchronous call to the Cloud Datastore backend.

  The incoming UpperCamelCase method name is converted to the
  lowerCamelCase form the backend expects, the call is executed, and the
  result is copied into `response`.
  """
  assert service == 'cloud_datastore_v1'
  method_name = call[:1].lower() + call[1:]
  try:
    result = self._datastore._call_method(
        method_name, request, response.__class__)
    response.CopyFrom(result)
  except datastore_pbs.googledatastore.RPCError as rpc_error:
    raise datastore_rpc._DatastoreExceptionFromCanonicalErrorCodeAndDetail(
        rpc_error.code, rpc_error.message)
  except Exception as error:
    raise datastore_errors.InternalError(error)
def __ReadPickled(self, filename): """Reads a pickled object from the given file and returns it. """ self.__file_lock.acquire() try: try: if filename and filename != '/dev/null' and os.path.isfile(filename): return pickle.load(open(filename, 'rb')) else: logging.warning('Could not read datastore data from %s', filename) except (AttributeError, LookupError, NameError, TypeError, ValueError, struct.error, pickle.PickleError), e: raise datastore_errors.InternalError( 'Could not read data from %s. Try running with the ' '--clear_datastore flag. Cause:\n%r' % (filename, e)) finally: self.__file_lock.release() return []
class CloudDatastoreV1RemoteStub(object):
  """A stub for calling Cloud Datastore via the Cloud Datastore API."""

  def __init__(self, datastore):
    """Constructs a new Cloud Datastore stub.

    Args:
      datastore: A googledatastore.Datastore object.
    """
    self._datastore = datastore

  def MakeSyncCall(self, service, call, request, response):
    """Dispatches a synchronous datastore call to the Cloud Datastore API.

    Args:
      service: string, must be 'cloud_datastore_v1'.
      call: string, the UpperCamelCase API method name to invoke.
      request: the request protocol buffer for the call.
      response: the response protocol buffer to populate.

    Raises:
      datastore_errors.InternalError: for any non-RPC failure.
    """
    assert service == 'cloud_datastore_v1'
    # The remote API expects lowerCamelCase method names.
    call = call[0:1].lower() + call[1:]
    try:
      response.CopyFrom(
          self._datastore._call_method(call, request, response.__class__))
    # Use the Python 2.6+/3 `except ... as` form for consistency with the
    # other stubs in this file; the comma form is Python-2-only syntax.
    except datastore_pbs.googledatastore.RPCError as e:
      raise datastore_rpc._DatastoreExceptionFromCanonicalErrorCodeAndDetail(
          e.code, e.message)
    except Exception as e:
      raise datastore_errors.InternalError(e)
class DatastoreFileStub(apiproxy_stub.APIProxyStub):
  """ Persistent stub for the Python datastore API.

  Stores all entities in memory, and persists them to a file as pickled
  protocol buffers. A DatastoreFileStub instance handles a single app's data
  and is backed by files on disk.
  """

  # Maps Python value types to the protocol-buffer tag used to store a
  # property of that type; type(None) maps to 0 (no value set).
  _PROPERTY_TYPE_TAGS = {
    datastore_types.Blob: entity_pb.PropertyValue.kstringValue,
    bool: entity_pb.PropertyValue.kbooleanValue,
    datastore_types.Category: entity_pb.PropertyValue.kstringValue,
    datetime.datetime: entity_pb.PropertyValue.kint64Value,
    datastore_types.Email: entity_pb.PropertyValue.kstringValue,
    float: entity_pb.PropertyValue.kdoubleValue,
    datastore_types.GeoPt: entity_pb.PropertyValue.kPointValueGroup,
    datastore_types.IM: entity_pb.PropertyValue.kstringValue,
    int: entity_pb.PropertyValue.kint64Value,
    datastore_types.Key: entity_pb.PropertyValue.kReferenceValueGroup,
    datastore_types.Link: entity_pb.PropertyValue.kstringValue,
    long: entity_pb.PropertyValue.kint64Value,
    datastore_types.PhoneNumber: entity_pb.PropertyValue.kstringValue,
    datastore_types.PostalAddress: entity_pb.PropertyValue.kstringValue,
    datastore_types.Rating: entity_pb.PropertyValue.kint64Value,
    str: entity_pb.PropertyValue.kstringValue,
    datastore_types.Text: entity_pb.PropertyValue.kstringValue,
    type(None): 0,
    unicode: entity_pb.PropertyValue.kstringValue,
    users.User: entity_pb.PropertyValue.kUserValueGroup,
  }

  # Composite-index lifecycle states, mirrored from entity_pb.CompositeIndex.
  WRITE_ONLY = entity_pb.CompositeIndex.WRITE_ONLY
  READ_WRITE = entity_pb.CompositeIndex.READ_WRITE
  DELETED = entity_pb.CompositeIndex.DELETED
  ERROR = entity_pb.CompositeIndex.ERROR

  # Allowed composite-index state transitions: current state -> frozenset of
  # states it may move to.
  _INDEX_STATE_TRANSITIONS = {
    WRITE_ONLY: frozenset((READ_WRITE, DELETED, ERROR)),
    READ_WRITE: frozenset((DELETED,)),
    ERROR: frozenset((DELETED,)),
    DELETED: frozenset((ERROR,)),
  }

  def __init__(self,
               app_id,
               datastore_file,
               history_file,
               require_indexes=False,
               service_name='datastore_v3'):
    """Constructor.

    Initializes and loads the datastore from the backing files, if they
    exist.

    Args:
      app_id: string
      datastore_file: string, stores all entities across sessions.  Use
          None not to use a file.
      history_file: string, stores query history.  Use None as with
          datastore_file.
      require_indexes: bool, default False.  If True, composite indexes
          must exist in index.yaml for queries that need them.
      service_name: Service name expected for all calls.
    """
    super(DatastoreFileStub, self).__init__(service_name)

    assert isinstance(app_id, basestring) and app_id != ''
    self.__app_id = app_id
    self.__datastore_file = datastore_file
    self.__history_file = history_file

    # Maps (app id, kind) tuples to dicts of stored entities keyed by
    # entity key (see _AppKindForKey/_StoreEntity).
    self.__entities = {}

    # Cached schema information keyed by (app id, kind); invalidated by
    # _StoreEntity whenever an entity of that kind changes.
    self.__schema_cache = {}

    self.__tx_snapshot = {}
    self.__queries = {}
    self.__transactions = {}
    self.__indexes = {}
    self.__require_indexes = require_indexes
    self.__query_history = {}

    # Monotonic counters; Read() bumps __next_id past the highest id found
    # in the datastore file.
    self.__next_id = 1
    self.__next_cursor = 1
    self.__next_tx_handle = 1
    self.__next_index_id = 1

    # Separate locks per piece of mutable state so independent operations
    # do not serialize on one global lock.
    self.__id_lock = threading.Lock()
    self.__cursor_lock = threading.Lock()
    self.__tx_handle_lock = threading.Lock()
    self.__index_id_lock = threading.Lock()
    self.__tx_lock = threading.Lock()
    self.__entities_lock = threading.Lock()
    self.__file_lock = threading.Lock()
    self.__indexes_lock = threading.Lock()

    # Load any previously persisted entities and query history.
    self.Read()

  def Clear(self):
    """ Clears the datastore by deleting all currently stored entities and
    queries. """
    self.__entities = {}
    self.__queries = {}
    self.__transactions = {}
    self.__query_history = {}
    self.__schema_cache = {}

  def _AppKindForKey(self, key):
    """ Get (app, kind) tuple from given key.

    The (app, kind) tuple is used as an index into several internal
    dictionaries, e.g. __entities.

    Args:
      key: entity_pb.Reference

    Returns:
      Tuple (app, kind), both are unicode strings.
    """
    last_path = key.path().element_list()[-1]
    return key.app(), last_path.type()

  def _StoreEntity(self, entity):
    """ Store the given entity.

    Args:
      entity: entity_pb.EntityProto
    """
    key = entity.key()
    app_kind = self._AppKindForKey(key)
    if app_kind not in self.__entities:
      self.__entities[app_kind] = {}
    self.__entities[app_kind][key] = _StoredEntity(entity)

    # Any cached schema for this kind is now stale.
    if app_kind in self.__schema_cache:
      del self.__schema_cache[app_kind]

  # Exceptions that indicate a corrupt or incompatible datastore file when
  # raised while decoding a pickled protocol buffer.
  READ_PB_EXCEPTIONS = (ProtocolBuffer.ProtocolBufferDecodeError, LookupError,
                        TypeError, ValueError)
  READ_ERROR_MSG = ('Data in %s is corrupt or a different version. '
                    'Try running with the --clear_datastore flag.\n%r')
  READ_PY250_MSG = ('Are you using FloatProperty and/or GeoPtProperty? '
                    'Unfortunately loading float values from the datastore '
                    'file does not work with Python 2.5.0. '
                    'Please upgrade to a newer Python 2.5 release or use '
                    'the --clear_datastore flag.\n')

  def Read(self):
    """ Reads the datastore and history files into memory.

    The in-memory query history is cleared, but the datastore is *not*
    cleared; the entities in the files are merged into the entities in
    memory.  If you want them to overwrite the in-memory datastore, call
    Clear() before calling Read().

    If the datastore file contains an entity with the same app name, kind,
    and key as an entity already in the datastore, the entity from the file
    overwrites the entity in the datastore.

    Also sets __next_id to one greater than the highest id allocated so
    far.
    """
    if self.__datastore_file and self.__datastore_file != '/dev/null':
      for encoded_entity in self.__ReadPickled(self.__datastore_file):
        try:
          entity = entity_pb.EntityProto(encoded_entity)
        except self.READ_PB_EXCEPTIONS, e:
          raise datastore_errors.InternalError(self.READ_ERROR_MSG %
                                               (self.__datastore_file, e))
        except struct.error, e:
          # Python 2.5.0 specifically fails to unpack pickled float values;
          # surface a more helpful message for that case, re-raise others.
          if (sys.version_info[0:3] == (2, 5, 0) and
              e.message.startswith('unpack requires a string argument')):
            raise datastore_errors.InternalError(self.READ_PY250_MSG +
                                                 self.READ_ERROR_MSG %
                                                 (self.__datastore_file, e))
          else:
            raise

        self._StoreEntity(entity)

        # Keep id allocation ahead of every numeric id found in the file.
        last_path = entity.key().path().element_list()[-1]
        if last_path.has_id() and last_path.id() >= self.__next_id:
          self.__next_id = last_path.id() + 1

    self.__query_history = {}
    for encoded_query, count in self.__ReadPickled(self.__history_file):
      try:
        query_pb = datastore_pb.Query(encoded_query)
      except self.READ_PB_EXCEPTIONS, e:
        raise datastore_errors.InternalError(self.READ_ERROR_MSG %
                                             (self.__history_file, e))
      # Accumulate counts for queries seen across multiple history entries.
      if query_pb in self.__query_history:
        self.__query_history[query_pb] += count
      else:
        self.__query_history[query_pb] = count
def throw():
  """Always fails by raising datastore_errors.InternalError."""
  error = datastore_errors.InternalError('Yeah it happens')
  raise error
def _RemoteSend(self, request, response, method, request_id=None):
  """Sends a request to the remote datastore server, retrying on failures.

  Connection-refused and host-unreachable errors are retried up to three
  times with exponential backoff against a freshly chosen load balancer;
  timeouts and other socket errors are surfaced immediately.

  Args:
    request: the request protocol buffer to forward (must support Encode()).
    response: the response protocol buffer to populate (via
        ParseFromString).
    method: string, the datastore_v3 method name being invoked.
    request_id: optional request id to attach to the remote API request.

  Raises:
    apiproxy_errors.ApplicationError: on connection timeout or when the
        server reports an application error.
    datastore_errors.InternalError: if no response arrives or the remote
        call fails with ProtocolBufferReturnError.
  """
  tag = self.project_id
  self._maybeSetDefaultAuthDomain()
  user = users.GetCurrentUser()
  # PEP 8: compare against None with an identity check, not `!= None`.
  if user is not None:
    tag += ":" + user.email()
    tag += ":" + user.nickname()
    tag += ":" + user.auth_domain()

  api_request = remote_api_pb.Request()
  api_request.set_method(method)
  api_request.set_service_name("datastore_v3")
  api_request.set_request(request.Encode())
  if request_id is not None:
    api_request.set_request_id(request_id)

  api_response = remote_api_pb.Response()

  retry_count = 0
  max_retries = 3
  location = self.__datastore_location
  while True:
    try:
      # NOTE(review): the return value of sendCommand is ignored here and
      # api_response is assumed to be filled in place; the non-retrying
      # variants of this method use the returned object instead — confirm
      # which contract sendCommand actually honors.
      api_request.sendCommand(location, tag, api_response, 1,
                              self.__is_encrypted, KEY_LOCATION,
                              CERT_LOCATION)
      break
    except socket.error as socket_error:
      if socket_error.errno in (errno.ECONNREFUSED, errno.EHOSTUNREACH):
        backoff_ms = 500 * 3**retry_count  # 0.5s, 1.5s, 4.5s
        retry_count += 1
        if retry_count > max_retries:
          raise

        logging.warning(
            'Failed to call {} method of Datastore ({}). Retry #{} in {}ms.'
            .format(method, socket_error, retry_count, backoff_ms))
        time.sleep(float(backoff_ms) / 1000)
        # Pick a different load balancer and start from a clean response.
        location = get_random_lb()
        api_response = remote_api_pb.Response()
        continue

      if socket_error.errno == errno.ETIMEDOUT:
        raise apiproxy_errors.ApplicationError(
            datastore_pb.Error.TIMEOUT,
            'Connection timed out when making datastore request')
      raise
    # AppScale: Interpret ProtocolBuffer.ProtocolBufferReturnError as
    # datastore_errors.InternalError
    except ProtocolBuffer.ProtocolBufferReturnError as e:
      raise datastore_errors.InternalError(e)

  if not api_response or not api_response.has_response():
    raise datastore_errors.InternalError(
        'No response from db server on %s requests.' % method)

  if api_response.has_application_error():
    error_pb = api_response.application_error()
    logging.error(error_pb.detail())
    raise apiproxy_errors.ApplicationError(error_pb.code(),
                                           error_pb.detail())

  if api_response.has_exception():
    raise api_response.exception()

  response.ParseFromString(api_response.response())