def handlerUserData(urlarr, token, data, method):
    """Route an authenticated user-data request to the read or write path.

    GET requests require the "readonly" scope; all other methods require
    "readwrite". Unauthorized requests return 404 (not 403) so resource
    existence is not revealed.

    BUG FIX: the "readwrite" check previously ran unconditionally — even
    for GET — contradicting the original "for all other methods" comment
    and rejecting readonly-only tokens on reads. It now applies only to
    non-GET methods.
    """
    if not urlarr:
        return abort(400)

    # Reads need "readonly"; every mutating method needs "readwrite".
    if method == "GET":
        if not isAuthorized(token, "readonly"):
            return abort(404)
    elif not isAuthorized(token, "readwrite"):
        return abort(404)

    # "searches" and "favorites" use the REST-style handler instead of
    # the single-property get/set model below.
    if urlarr[0] in ("searches", "favorites"):
        return handleRest(urlarr, token, data, method)

    client = Client()
    kind = GROUPNAME
    propname = urlarr[0]
    # One entity per user, keyed by e-mail address.
    key = client.key(kind, token["email"])

    if method == "GET":
        return getData(client, key, propname)
    # All other methods are write methods.
    return setData(client, key, propname, data, method)
def updatepost():
    """Apply edits from the post form, optionally replacing the stored image.

    Reads the post id stashed in the session by the edit view, uploads a
    replacement image to Cloud Storage when one was submitted, then saves
    the updated subject/text and redirects to the index page.
    """
    client = Client()
    form = PostForm()
    post = client.get(
        Key('post',
            int(session.get('update_post_id', None)),
            project=config.Config.GOOGLE_PROJECT))

    # A replacement file means: upload it, delete the old image, and
    # point the post at the new blob.
    uploaded_file = request.files.get('file')
    if uploaded_file:
        imgname = secure_filename(uploaded_file.filename)
        # Prefix with the user id to avoid collisions between users.
        imgname = current_user.id + '_' + imgname

        gcs = storage.Client()
        bucket = gcs.get_bucket(config.Config.CLOUD_STORAGE_BUCKET)
        blob = bucket.blob(imgname.lower())
        blob.upload_from_string(
            uploaded_file.read(),
            content_type=uploaded_file.content_type
        )

        # Remove the previous image before swapping in the new name.
        deletebucketimg(post['img_name'])
        post['img_name'] = imgname.lower()

    post['subject'] = form.subject.data
    post['text'] = form.text.data
    client.put(post)
    return redirect(url_for('index'))
def store_article_issues_unless_ignored(
        client: datastore.Client,
        article_id: str,
        issues: Iterator[ArticleIssue]) -> Iterator[str]:
    """Replace an article's stored issues, preserving ones marked ignored.

    Existing non-ignored issues are deleted and rewritten from ``issues``;
    any issue whose key carries ``ignored=True`` is left untouched.

    Args:
        client: Datastore client.
        article_id: id of the parent Article entity.
        issues: issue objects to persist (keyed by their class name).

    Returns:
        List of the key names of the issues that were written.

    Minor fixes: dropped a pointless ``[key for key in …]`` list copy
    passed to ``delete_multi`` and the non-idiomatic ``len(x) > 0`` test.
    """
    existing = select(client, "ArticleIssue", parent=["Article", article_id])
    not_ignored = [e.key for e in existing if not e.get("ignored", False)]
    ignored = [e.key for e in existing if e.get("ignored", False)]

    # Delete all existing non-ignored issues; they are rewritten below.
    if not_ignored:
        client.delete_multi(not_ignored)

    updated_ids = []
    for issue in issues:
        key = client.key(
            "Article", article_id, "ArticleIssue", issue.__class__.__name__)
        # An issue already marked as ignored must not be updated.
        if key in ignored:
            continue
        entity = datastore.Entity(key=key)
        entity.update(issue.to_json())
        client.put(entity)
        updated_ids.append(key.id_or_name)
    return updated_ids
def store_job(data: Dict[str, str], client: datastore.Client) -> datastore.Key:
    """Store a job entity in Datastore.

    Args:
        data: dict of submitted HTML form fields.
        client: Datastore client to save to.

    Returns:
        The (now complete) key the job was stored under.

    BUG FIX: the return annotation claimed ``int`` but the function
    returns the entity's key object; the annotation now matches.
    """
    # Partial key: Datastore assigns the numeric id on put().
    key = client.key('job')
    entity = datastore.Entity(
        key=key, exclude_from_indexes=['note', 'circuit', 'repetitions'])

    # Copy each known field, falling back to its declared default when the
    # submitted value is falsy (missing, empty string, None, ...).
    # NOTE: `fields` is a module-level mapping defined elsewhere in the file.
    for field, default in fields.items():
        value = data.get(field)
        entity[field] = value if value else default

    # Bookkeeping added on every submission.
    entity['submission_timestamp'] = datetime.datetime.utcnow()
    entity['submission_version'] = os.environ.get('GAE_VERSION')
    entity['verified'] = False
    entity['done'] = False
    entity['sent'] = False

    client.put(entity)
    return entity.key
class DatastoreClient:
    """Thin convenience wrapper around ``google.cloud.datastore.Client``.

    Supports transparent batching of writes via the ``batch_update``
    context manager: while it is active, ``set_key`` buffers entities
    instead of writing them immediately, and the buffer is flushed in
    chunks of 500 (the Datastore write-batch limit) on exit.
    """

    def __init__(self, namespace: str = None, **kwargs) -> None:
        self.client = Client(namespace=namespace, **kwargs)
        # None => writes go straight through; a list => writes are buffered.
        self._batched_update_entities = None

    @contextmanager
    def batch_update(self):
        """Buffer ``set_key`` writes and flush them in 500-entity batches.

        BUG FIX: previously an exception inside the ``with`` body left
        ``_batched_update_entities`` set, so every later ``set_key`` call
        silently buffered forever and was never written. The buffer is now
        always cleared; the flush still happens only on success.
        """
        self._batched_update_entities = []
        try:
            yield
            # Max batch size for writes is 500.
            for entity_chunk in chunk_iterable(
                    self._batched_update_entities, chunk_size=500):
                self.client.put_multi(entity_chunk)
        finally:
            self._batched_update_entities = None

    def set_key(self, entity_name: str, key_name: str, **properties: Any) -> None:
        """Merge ``properties`` into the entity at (entity_name, key_name).

        The read-modify-write runs in a transaction; inside a
        ``batch_update`` block the result is buffered instead of put.
        """
        key = self.client.key(entity_name, key_name)
        with self.client.transaction():
            entity = self.client.get(key)
            if entity is None:
                entity = Entity(key=key)
            entity.update(properties)
            if self._batched_update_entities is not None:
                self._batched_update_entities.append(entity)
            else:
                self.client.put(entity)

    def get_key(self, entity_name: str, key_name: str) -> Optional[Entity]:
        """Fetch a single entity by kind and key name (None if absent)."""
        key = self.client.key(entity_name, key_name)
        return self.client.get(key)

    def query_entity(
        self,
        entity_name: str,
        *query_filters: Tuple[str, str, Any],
        projection: List[str] = None,
        limit: Optional[int] = None,
    ) -> Iterator:
        """Run a query with the given (property, op, value) filters.

        Optionally projects to the named properties and limits the result
        count; returns the fetch iterator.
        """
        query = self.client.query(kind=entity_name)
        for query_filter in query_filters:
            query.add_filter(*query_filter)
        if projection is not None:
            query.projection = projection
        return query.fetch(limit=limit)
def test_datastore_put_and_get(client: datastore.Client):
    """A stored entity should round-trip unchanged through put()/get()."""
    key = client.key("Task", "sampletask1")
    entity = datastore.Entity(key=key)
    entity["description"] = "Buy milk"
    client.put(entity)
    fetched = client.get(key)
    assert fetched == entity
def select(
    client: datastore.Client,
    kind: str,
    projection: Iterator[str] = (),
    parent: Optional[Iterator[str]] = None,
) -> Iterator[datastore.key.Key]:
    """Fetch all entities of ``kind``, optionally scoped to an ancestor.

    Args:
        client: Datastore client.
        kind: entity kind to query.
        projection: property names to project (empty = full entities).
        parent: flat key path of the ancestor (e.g. ["Article", id]),
            or None for no ancestor filter.

    BUG FIX: ``parent`` defaults to None, but the ancestor key was built
    unconditionally via ``client.key(*parent)``, so any call relying on
    the default raised TypeError. The ancestor is now applied only when
    ``parent`` is provided.
    """
    if parent is not None:
        query = client.query(
            kind=kind, projection=projection, ancestor=client.key(*parent))
    else:
        query = client.query(kind=kind, projection=projection)
    return list(query.fetch())
def put_dog(id: hug.types.number, name: hug.types.text, age: hug.types.number):
    """Create or overwrite the Dog entity with the given id."""
    client = Client()
    dog = Entity(client.key("Dog", id))
    dog["name"] = name
    dog["age"] = age
    client.put(dog)
    return {"id": dog.key.id}
def editpost(postkey):
    """Show the edit form for a post, restricted to its author.

    Loads the post, rejects users other than the author, pre-fills the
    form, and stashes the post id in the session for the update request.

    BUG FIX: the flash message was written as ``'…user''s post'`` —
    adjacent string literals that concatenate to "other users post",
    silently dropping the apostrophe. Double quotes fix it.
    """
    form = PostForm()
    client = Client()
    post = client.get(
        Key('post', int(postkey), project=config.Config.GOOGLE_PROJECT))

    # Only the author may edit their own post.
    if post['user_id'] != current_user.id:
        flash("Can not edit other user's post")
        return redirect(url_for('index'))

    form.subject.data = post['subject']
    form.text.data = post['text']
    # Remember which post the subsequent update applies to.
    session['update_post_id'] = postkey
    return render_template('editpost.html', title='Home', form=form)
def create_datastore_client():
    """Create a Datastore client: mocked credentials under TESTING, real otherwise."""
    from main import app

    project_id = os.getenv("DATASTORE_PROJECT_ID")
    if not app.config["TESTING"]:
        return Client(project=project_id)

    # Test mode: supply fake credentials so no real GCP auth happens.
    from mock import mock
    import google.auth.credentials

    credentials = mock.Mock(spec=google.auth.credentials.Credentials)
    return Client(project=project_id, credentials=credentials)
def get_car_locations(db_client: datastore.Client,
                      assigned_to_engineer=True, offset=None):
    """Retrieve CarLocation entities, optionally filtered.

    :param db_client: The Datastore client.
    :param assigned_to_engineer: When True, keep only locations whose key
        is assigned to an engineer token; when False, return all matches.
    :param offset: Maximum number of hours since the location was updated.
    :rtype: list of CarLocation entities
    """
    query = db_client.query(kind='CarLocation')
    if offset is not None:
        # Keep only locations updated within the last `offset` hours;
        # 'when' is stored as an ISO-8601 string, so compare strings.
        cutoff = datetime.datetime.utcnow() - datetime.timedelta(hours=offset)
        query.add_filter('when', '>=', cutoff.isoformat())

    results = query.fetch()
    if assigned_to_engineer:
        tokens = get_engineers_tokens(db_client)
        return [
            location for location in results
            if is_assigned(location.key.id_or_name, tokens, True)
        ]
    return list(results)
def main(is_uid, users):
    """Delete the given users from both Datastore and Firebase Auth.

    ``users`` holds uids when ``is_uid`` is true, otherwise e-mail
    addresses that are resolved to uids first. Each deletion is best
    effort: failures are logged and the loop continues.
    """
    # client = Client(project=config.project_id)
    creds_path = os.path.join(
        os.environ.get('BIKEBUDS_ENV'),
        'service_keys/firebase-adminsdk.json',
    )
    client = Client.from_service_account_json(creds_path)

    for user in users:
        logging.info('Deleting: %s', user)
        if is_uid:
            uid = user
        else:
            logging.info('Looking up: %s', user)
            uid = auth.get_user_by_email(user).uid

        # Best effort: wipe the user's Datastore subtree (keys only, so
        # we never materialize the child entities).
        try:
            key = client.key('User', uid)
            if client.get(key) is None:
                logging.warning('Could not cleanup datastore for: %s', user)
            else:
                children_query = client.query(ancestor=key)
                children_query.keys_only()
                client.delete_multi(
                    child.key for child in children_query.fetch())
        except Exception:
            logging.warning('Could not delete datastore user: %s', user)

        # Best effort: remove the Firebase account.
        try:
            auth.delete_user(uid)
        except auth.UserNotFoundError:
            logging.warning('Could not delete firebase user: %s', user)
def add_task(client: datastore.Client, description: str):
    """Create and persist a new Task entity; returns its key.

    The key is left incomplete so Datastore assigns a numeric id
    automatically, and the ``description`` property is excluded from
    indexes.
    """
    key = client.key("Task")
    task = datastore.Entity(key, exclude_from_indexes=["description"])
    task["created"] = datetime.datetime.utcnow()
    task["description"] = description
    task["done"] = False
    client.put(task)
    return task.key
def mark_done(client: datastore.Client, task_id: Union[str, int]):
    """Flag the Task identified by ``task_id`` as completed.

    The read-modify-write runs inside a transaction so it is atomic.

    Raises:
        ValueError: if no task with ``task_id`` exists.
    """
    with client.transaction():
        task_key = client.key("Task", task_id)
        task = client.get(task_key)
        # Falsy result (missing entity) means there is nothing to update.
        if not task:
            raise ValueError(f"Task {task_id} does not exist.")
        task["done"] = True
        client.put(task)
def get_all_dogs():
    """Return all Dog entities wrapped in a ``{"dogs": ...}`` payload.

    Minor fix: removed a dead ``dogs = []`` assignment that was
    immediately overwritten by the query result.
    """
    client = Client()
    query = Query(client, kind="Dog")
    return {"dogs": query.fetch()}
def index():
    """Render the home page with up to ten most recent posts."""
    form = PostForm()
    client = Client()
    query = client.query(kind="post")
    # Newest first.
    query.order = ["-date_created"]

    posts = [
        {
            'img_name': post["img_name"],
            'subject': post["subject"],
            'text': post["text"],
            'date_created': post["date_created"],
            'keyid': int(post.key.id),
            'user_id': post["user_id"],
            'user_name': getusername(post["user_id"]),
        }
        for post in query.fetch(10)
    ]

    if posts:
        return render_template('index.html', title='Home', form=form,
                               posts=posts)
    return render_template('index.html', title='Home', form=form)
def user(user_name):
    """Render the profile page for ``user_name`` with their latest posts."""
    # Note: avoid shadowing the view function name with the local record.
    profile = User().get_obj('user_name', user_name)
    client = Client()
    query = client.query(kind="post")
    query = query.add_filter("user_id", "=", profile.id)
    # Newest first.
    query.order = ["-date_created"]

    posts = [
        {
            'img_name': post["img_name"],
            'subject': post["subject"],
            'text': post["text"],
            'date_created': post["date_created"],
            'keyid': int(post.key.id),
            'user_id': post["user_id"],
            'user_name': user_name,
        }
        for post in query.fetch(10)
    ]

    if posts:
        return render_template('user.html', user=profile, posts=posts)
    return render_template('user.html', user=profile)
def handleRest(urlarr, token, data, method):
    """REST-style handler for per-user collections (searches, favorites).

    URL shape: /<propname>[/<id>]. Collection-level PUT/DELETE and
    item-level POST are rejected; GET lists or fetches, POST/PUT write,
    DELETE removes.

    BUG FIX: the method checks used ``re.match(r"^DELETE|PUT$", method)``
    and ``r"^POST|PUT$"`` — alternation binds looser than the anchors, so
    each anchor applied to only one alternative (e.g. "DELETEX" matched
    "^DELETE"). Replaced with direct membership tests.
    """
    print('handled by REST')

    # Unpack the property name and an optional numeric item id.
    propname, *prop_id = urlarr
    prop_id = int(prop_id[0]) if prop_id else None

    # We don't allow PUT or DELETE on the whole collection.
    if not prop_id and method in ("DELETE", "PUT"):
        return abort(405)
    # Don't allow POST to an individual item.
    if prop_id and method == "POST":
        return abort(409)

    kind = GROUPNAME
    client = Client()

    if prop_id:
        # Full key addresses a single item.
        key = client.key(kind, token["email"], propname, prop_id)
    else:
        # Partial key addresses the collection (id assigned on put).
        key = client.key(kind, token["email"], propname)

    # GET: list the collection (partial key) or fetch one item.
    if method == "GET":
        if key.is_partial:
            ancestor = client.key(kind, token['email'])
            return get_rest_query(client, ancestor, propname)
        return get_rest(client, key)

    # POST creates a new entry, PUT overwrites an existing one.
    if method in ("POST", "PUT"):
        entity = Entity(key=key)
        entity.update(data)
        client.put(entity)
        # return 200 OK with entity id in the payload
        return json.dumps({})

    # DELETE removes the entry.
    if method == "DELETE":
        client.delete(key)
        return json.dumps({})

    return abort(400)
def fetch_by_job(job_ids: Iterable[int],
                 client: datastore.Client) -> Dict[int, Dict]:
    """Fetch job entities from Datastore, indexed by job id.

    Args:
        job_ids: iterable of job identifiers.
        client: Datastore client to read from.

    Returns:
        Mapping of job id -> job entity.
    """
    # Build all keys up front so one batched get_multi call suffices.
    keys = [client.key('job', int(job_id)) for job_id in job_ids]
    entities = client.get_multi(keys)
    return {entity.id: entity for entity in entities}
def find(client: datastore.Client, kind: str,
         projection: Iterator[str] = (), **params) -> str:
    """Return the first entity of ``kind`` matching all equality filters.

    Each keyword argument becomes a ``field == value`` filter.

    Raises:
        NotFoundError: when no entity matches.
    """
    query = client.query(kind=kind, projection=projection)
    for field, value in params.items():
        query.add_filter(field, "=", value)
    matches = list(query.fetch(limit=1))
    if not matches:
        raise NotFoundError(kind=kind, params=params)
    return matches[0]
def node_req_to_entity(client: datastore.Client, o: NodeReq) -> datastore.Entity:
    """Serialize a NodeReq into a Datastore entity keyed by its operation id."""
    assert o.operation_id is not None
    entity = datastore.Entity(key=client.key("NodeReq", o.operation_id))
    # Copy the NodeReq fields one-for-one onto entity properties.
    for field in ("cluster_id", "job_id", "status", "node_class",
                  "sequence", "instance_name"):
        entity[field] = getattr(o, field)
    return entity
def store(
    client: datastore.Client,
    data: dict,
    parent=(),
    kind: Optional[str] = None,
    id: Optional[str] = None,
) -> str:
    """Persist ``data`` as a Datastore entity and return its identifier.

    Kind resolution: an explicit ``kind`` wins; otherwise the entity's
    "$type" field is used. Without an ``id``, a fresh numeric id is
    allocated; with one, the key is fully specified.

    Raises:
        ValueError: when no kind can be determined.

    BUG FIXES: ``parent`` used a mutable default argument (``[]``), now an
    immutable empty tuple (same behavior for ``client.key(*parent)``).
    The function also returned ``key.id``, which is None for named keys
    despite the ``-> str`` annotation; it now returns ``key.id_or_name``
    (unchanged for allocated numeric ids).
    """
    kind = data.get("$type", None) if kind is None else kind
    if kind is None:
        raise ValueError("No kind specified")

    if id is None:
        # Ask Datastore to allocate one id under the partial key.
        key = client.allocate_ids(client.key(*parent, kind), 1)[0]
    else:
        key = client.key(*parent, kind, id)

    entity = datastore.Entity(key=key)
    entity.update(data)
    client.put(entity)
    return key.id_or_name
def handlerAppData(urlarr, token, data, method):
    """Read or write app-level data stored under the key named by the URL.

    Reads require the "readonly" scope; all writes require "admin".
    Unauthorized requests return 404 so resource existence is not leaked.
    """
    if not urlarr:
        return abort(400)

    client = Client()
    kind = GROUPNAME
    # App data lives in a dedicated "data" property.
    propname = "data"
    # Entity is keyed by the first URL segment.
    key = client.key(kind, urlarr[0])

    if method == "GET":
        if not isAuthorized(token, "readonly"):
            return abort(404)
        return getData(client, key, propname)

    # Every non-GET method mutates app data and needs admin scope.
    if not isAuthorized(token, "admin"):
        return abort(404)
    return setData(client, key, propname, data, method)
def get_car_info_tokens(db_client: datastore.Client):
    """Retrieve the list of non-empty tokens.

    NOTE(review): the function name and original docstring say CarInfo,
    but the query reads kind='Engineer' — confirm which kind is intended.

    :param db_client: The Datastore client.
    :rtype: list of strings
    """
    query = db_client.query(kind='Engineer')
    return [
        record['token']
        for record in query.fetch()
        if 'token' in record
        and record['token'] is not None
        and len(record['token']) > 0
    ]
def to_datastore(ds: datastore.Client, kind: TipoEntidade, entidade: dict):
    """Convert a dict (or plain object) into a Datastore entity of ``kind``.

    ``date`` and ``datetime`` attribute values are serialized to ISO-8601
    strings. If the input carries an 'id' field it becomes the key
    name/id; otherwise a partial key is used so Datastore assigns one.

    Returns:
        The populated ``datastore.Entity``, or None for an empty input.

    BUG FIX: ``key`` was only assigned inside the loop when an 'id'
    attribute existed, so inputs without 'id' raised UnboundLocalError at
    ``if not key``. It is now initialized to None up front.
    """
    if not entidade:
        return None
    elif type(entidade) != dict:
        # Plain object: work on its attribute dict.
        entidade = entidade.__dict__

    key = None
    # Walk the attributes, normalizing values to storable representations.
    for atributo in entidade.keys():
        if atributo == 'id':
            key = ds.key(kind.value, entidade['id'])
        elif type(entidade[atributo]).__name__ == 'date':
            # Serialize date fields to ISO-8601 strings.
            entidade[atributo] = entidade[atributo].isoformat()
        elif type(entidade[atributo]) == datetime:
            # Serialize datetime fields to ISO-8601 strings.
            entidade[atributo] = entidade[atributo].isoformat()

    # No 'id' field: use a partial key so Datastore allocates one.
    if not key:
        key = ds.key(kind.value)

    # Build the entity and copy all attributes onto it.
    entity = datastore.Entity(key=key)
    entity.update(entidade)
    return entity
def fetch_by_student(student_ids: Iterable[int],
                     client: datastore.Client) -> Dict[int, Dict]:
    """Fetch job entities grouped by student id.

    Args:
        student_ids: iterable of student identifiers.
        client: Datastore client to read from.

    Returns:
        Mapping of student id -> {job id -> job entity}.
    """
    results = {}
    for student_id in student_ids:
        # One equality-filtered query per student.
        query = client.query(kind="job")
        query.add_filter("student_id", "=", int(student_id))
        results[student_id] = {
            entity.id: entity for entity in query.fetch()
        }
    return results
def zap(client: datastore.Client, kind: str):
    """Delete every entity of ``kind``; refuses to run outside the test env."""
    assert_test_environment(client)
    client.delete_multi(list(select_keys(client, kind)))
def select(
    client: datastore.Client,
    kind: str,
    projection: Iterator[str] = ()) -> Iterator[datastore.key.Key]:
    """Fetch every entity of ``kind``, optionally projecting properties."""
    return list(client.query(kind=kind, projection=projection).fetch())
def CLIENT():
    # Lazily construct the module-wide Datastore client on first use and
    # memoize it in the module-level _CLIENT global; later calls return
    # the same instance.
    global _CLIENT
    if _CLIENT is None:
        # CONFIG is a module-level mapping; 'project' names the GCP project.
        _CLIENT = Client(project=CONFIG['project'])
    return _CLIENT
import functools
import time

from google.cloud.datastore import Client

__all__ = ("client", "transaction")

client = Client()


def transaction(read_only: bool = False, retry: bool = False,
                retry_timeout: float = 0.005):
    """Decorator factory running the wrapped function in a Datastore transaction.

    The transaction object is passed to the wrapped function as the
    ``batch`` keyword argument. When ``retry`` is true, any exception
    rolls back, sleeps ``retry_timeout`` seconds, and retries
    indefinitely; otherwise the exception propagates to the caller.

    BUG FIXES: the factory never returned its inner decorator, so
    ``@transaction()`` decorated functions with None; and the non-retry
    failure path returned an unbound ``result`` (UnboundLocalError)
    instead of letting the error propagate — it now re-raises.
    """
    def wrap_wrap(function):
        @functools.wraps(function)
        def wrap(*args, **kwargs):
            while True:
                try:
                    with client.transaction(read_only=read_only) as batch:
                        result = function(*args, **kwargs, batch=batch)
                except Exception:  # TODO: narrow this clause
                    if retry:
                        time.sleep(retry_timeout)
                        continue
                    raise
                else:
                    return result
        return wrap
    return wrap_wrap