def test_key_only(self):
    """An Entity holding only a key converts to a pb with that key and no properties."""
    from google.cloud.proto.datastore.v1 import entity_pb2
    from google.cloud.datastore.entity import Entity
    from google.cloud.datastore.key import Key

    kind = 'PATH'
    name = 'NAME'
    project = 'PROJECT'

    # Build the expected protobuf first: key only, no properties.
    expected_pb = entity_pb2.Entity()
    expected_pb.key.partition_id.project_id = project
    element = expected_pb.key.path.add()
    element.kind = kind
    element.name = name

    # Convert an entity that carries only a key.
    entity = Entity(key=Key(kind, name, project=project))
    self._compareEntityProto(self._call_fut(entity), expected_pb)
def test_entity_w_key(self):
    """A keyed entity's key and single property are copied into the value pb."""
    from google.cloud.datastore.entity import Entity
    from google.cloud.datastore.key import Key

    prop_name = "foo"
    prop_value = u"Foo"
    pb = self._makePB()
    key = Key("KIND", 123, project="PROJECT")
    entity = Entity(key=key)
    entity[prop_name] = prop_value

    self._call_fut(pb, entity)

    entity_pb = pb.entity_value
    self.assertEqual(entity_pb.key, key.to_protobuf()._pb)
    props = dict(entity_pb.properties.items())
    self.assertEqual(len(props), 1)
    self.assertEqual(list(props.keys()), [prop_name])
    self.assertEqual(props[prop_name].string_value, prop_value)
def test_key_only(self):
    """An Entity holding only a key converts to a protobuf with that key."""
    from google.cloud.datastore_v1.types import entity as entity_pb2
    from google.cloud.datastore.entity import Entity
    from google.cloud.datastore.key import Key

    kind = "PATH"
    name = "NAME"
    project = "PROJECT"

    entity = Entity(key=Key(kind, name, project=project))
    entity_pb = self._call_fut(entity)

    # Expected pb: key only.  The path element is appended through the raw
    # ``_pb`` because the proto-plus wrapper does not expose ``add()``.
    expected_pb = entity_pb2.Entity()
    expected_pb.key.partition_id.project_id = project
    element = expected_pb._pb.key.path.add()
    element.kind = kind
    element.name = name

    self._compare_entity_proto(entity_pb, expected_pb)
def test_simple_fields(self):
    """Plain integer and text properties survive conversion to protobuf."""
    from google.cloud.datastore_v1.types import entity as entity_pb2
    from google.cloud.datastore.entity import Entity
    from google.cloud.datastore.helpers import _new_value_pb

    entity = Entity()
    name1, value1 = "foo", 42
    entity[name1] = value1
    name2, value2 = "bar", u"some-string"
    entity[name2] = value2

    expected_pb = entity_pb2.Entity()
    _new_value_pb(expected_pb, name1).integer_value = value1
    _new_value_pb(expected_pb, name2).string_value = value2

    self._compare_entity_proto(self._call_fut(entity), expected_pb)
def route_for_test():
    """Build a canned Strava 'Route' Entity (with nested athlete/map) for tests."""
    route = Entity(ds_util.client.key('Route', 10285651))
    route['id'] = 10285651
    route['timestamp'] = datetime.datetime.fromtimestamp(1503517240)
    route['description'] = 'East Peak, Reverse Alpine, Gestalt Haus'
    route['distance'] = 98353.19420993332
    route['elevation_gain'] = 1829.1980834963906
    route['name'] = (
        'The ̶N̶i̶g̶h̶t̶ Day is ̶D̶a̶r̶k̶ Mostly Cloudy, Probably and Full of ̶T̶e̶r̶r̶o̶r̶ Pickles'
    )
    route['athlete'] = {
        'id': 1021133,
        'firstname': 'Rayco, A Shopping Cart 🛒',
        'lastname': 'of All the Feelings',
    }
    route['map'] = {'id': 10285651, 'summary_polyline': SUMMARY_POLYLINE}
    return route
def _output(df, _kind):
    """Write every row of *df* to Datastore as an entity of kind *_kind*.

    Each row becomes an Entity keyed by "<user_id>_<topic_id>", written in
    batches of 500 (the Datastore per-commit entity limit).

    :param df: DataFrame whose rows (must include 'user_id' and 'topic_id'
        columns) are persisted as entity properties.
    :param _kind: Datastore kind for the generated entities.
    """
    # Fixed: removed the dead counter ``ind`` — it was incremented per batch
    # but never read anywhere.
    all_entities = []
    for row in df.T.to_dict().values():
        _key = "{}_{}".format(row['user_id'], row['topic_id'])
        entity = Entity(key=client.key(_kind, _key))
        entity.update(row)
        all_entities.append(entity)
    for entities in _get_batch(all_entities, n=500):
        batch = client.batch()
        batch.begin()
        for entity in entities:
            batch.put(entity)
        batch.commit()
def test_simple_fields(self):
    """Plain integer and text properties survive conversion to protobuf."""
    from google.cloud.proto.datastore.v1 import entity_pb2
    from google.cloud.datastore.entity import Entity
    from google.cloud.datastore.helpers import _new_value_pb

    entity = Entity()
    name1, value1 = 'foo', 42
    entity[name1] = value1
    name2, value2 = 'bar', u'some-string'
    entity[name2] = value2

    expected_pb = entity_pb2.Entity()
    _new_value_pb(expected_pb, name1).integer_value = value1
    _new_value_pb(expected_pb, name2).string_value = value2

    self._compareEntityProto(self._call_fut(entity), expected_pb)
def test_entity_w_key(self):
    """A keyed entity's key and single property land in the value pb."""
    from google.cloud.datastore.entity import Entity
    from google.cloud.datastore.helpers import _property_tuples
    from google.cloud.datastore.key import Key

    prop_name = 'foo'
    prop_value = u'Foo'
    pb = self._makePB()
    key = Key('KIND', 123, project='PROJECT')
    entity = Entity(key=key)
    entity[prop_name] = prop_value

    self._call_fut(pb, entity)

    entity_pb = pb.entity_value
    self.assertEqual(entity_pb.key, key.to_protobuf())
    props = dict(_property_tuples(entity_pb))
    self.assertEqual(len(props), 1)
    self.assertEqual(list(props.keys()), [prop_name])
    self.assertEqual(props[prop_name].string_value, prop_value)
def add_to_group(request):
    """HTTP handler: record a new goal for a user inside an existing group.

    Accepts plain-text, JSON, or form-encoded POST bodies.  Validates
    required fields, rejects groups whose goal period has ended, rejects a
    second goal for the same email, then stores the goal as a child entity
    of the group and redirects to the group view.
    """
    if request.method == "POST":
        if request.content_type == 'text/plain':
            data = request.get_data()
        elif request.content_type == 'application/json':
            data = request.get_json()
        else:
            data = dict((k, v) for k, v in request.form.items())
    for key in REQUIRED_KEYS:
        if key not in data:
            # Fixed message: was "'{} is a required'" (missing the noun).
            return '{} is a required field'.format(key)
    client = datastore.Client()
    group = client.get(client.key('group', int(data['group_id'])))
    if not group:
        return 'group not found'
    # end_date is stored as an ISO-8601 string, so a lexicographic compare
    # against now().isoformat() is a valid chronological compare.
    if datetime.datetime.now(
            pytz.timezone('US/Pacific')).isoformat() >= group['end_date']:
        # Fixed message: was "has has already ended" (doubled word).
        return 'that goal period has already ended!'
    query = client.query(kind='goal2', ancestor=group.key,
                         filters=[('email', '=', data['email'])])
    if list(query.fetch()):
        return 'you already have recorded a goal in this group!'
    # Random 16-digit id keeps goal keys unique under the group ancestor.
    goal = Entity(
        client.key('group', group.id, 'goal2', int(uuid.uuid1().int % 1e16)))
    goal['name'] = data['name']
    goal['email'] = data['email']
    goal['description'] = data['description']
    goal['motivation'] = data.get('motivation', None)
    goal['this_week'] = data.get('this_week', None)
    goal['completions'] = []
    goal['opportunities'] = []
    client.put(goal)
    return redirect((BASE_URL + "/view_group?group_id={}").format(group.id),
                    code=302)
def test_dict_to_entity(self):
    """A dict-valued property becomes a nested entity_value in the pb."""
    from google.cloud.proto.datastore.v1 import entity_pb2
    from google.cloud.datastore.entity import Entity

    entity = Entity()
    entity['a'] = {'b': u'c'}
    entity_pb = self._call_fut(entity)

    # Build the expected pb inside-out instead of as one nested literal.
    inner_pb = entity_pb2.Entity(
        properties={'b': entity_pb2.Value(string_value='c')})
    expected_pb = entity_pb2.Entity(
        properties={'a': entity_pb2.Value(entity_value=inner_pb)})
    self.assertEqual(entity_pb, expected_pb)
def create_group(request):
    """HTTP handler: create a new goal-tracking group.

    Accepts query-string (GET), plain-text, JSON, or form-encoded (POST)
    input.  Validates required fields, rejects duplicate group names, then
    stores the group and redirects to its view page.
    """
    data = None
    if request.method == "GET":
        data = dict((k, v) for k, v in request.args.items())
    if request.method == "POST":
        if request.content_type == 'text/plain':
            data = request.get_data()
        elif request.content_type == 'application/json':
            data = request.get_json()
        else:
            data = dict((k, v) for k, v in request.form.items())
    print(request)
    print(data)
    for key in REQUIRED_KEYS:
        if key not in data:
            # Fixed message: was "'{} is a required'" (missing the noun).
            return '{} is a required field'.format(key)
    client = datastore.Client()
    query = client.query(kind='group')
    query.add_filter('name', '=', data['name'])
    if list(query.fetch()):
        # Fixed message: this query checks for an existing *group*, not a goal.
        return 'group already exists with this name'
    # Random 16-digit id for the new group key.
    group = Entity(client.key('group', int(uuid.uuid1().int % 1e16)))
    group['name'] = data['name']
    group['start_date'] = data['start_date']
    group['end_date'] = data['end_date']
    group['tracking_cadence'] = data['tracking_cadence']
    client.put(group)
    return redirect(
        (BASE_URL + "/view_group?group_id={}").format(group.id), code=302
    )
def test_dict_to_entity_recursive(self):
    """Nested dicts become correspondingly nested entity_value pbs."""
    from google.cloud.datastore_v1.proto import entity_pb2
    from google.cloud.datastore.entity import Entity

    entity = Entity()
    entity['a'] = {
        'b': {
            'c': {
                'd': 1.25,
            },
            'e': True,
        },
        'f': 10,
    }
    entity_pb = self._call_fut(entity)

    # Assemble the expected pb inside-out, one level at a time.
    c_pb = entity_pb2.Entity(
        properties={'d': entity_pb2.Value(double_value=1.25)})
    b_pb = entity_pb2.Entity(properties={
        'c': entity_pb2.Value(entity_value=c_pb),
        'e': entity_pb2.Value(boolean_value=True),
    })
    a_pb = entity_pb2.Entity(properties={
        'b': entity_pb2.Value(entity_value=b_pb),
        'f': entity_pb2.Value(integer_value=10),
    })
    expected_pb = entity_pb2.Entity(
        properties={'a': entity_pb2.Value(entity_value=a_pb)})
    self.assertEqual(entity_pb, expected_pb)
def prep_value_for_database(self, value, index, model, column, connection):
    """Build the descendent index Entity holding permutations of *value*.

    Raises IgnoreForIndexing when there is nothing to index (a None input,
    or no permutations were generated).
    """
    if value is None:
        raise IgnoreForIndexing([])

    # Dates/datetimes (anything exposing isoformat) are indexed as ISO strings.
    if hasattr(value, "isoformat"):
        value = value.isoformat()

    if _is_iterable(value):
        permutations = list(
            chain.from_iterable(self._generate_permutations(v) for v in value)
        )
    else:
        permutations = self._generate_permutations(value)

    if not permutations:
        raise IgnoreForIndexing([])

    deduped = list(set(permutations))  # De-duplicate

    key = transaction._rpc(using=connection.alias).key(
        self._generate_kind_name(model, column), self.OPERATOR)
    entity = Entity(key)
    entity[self.INDEXED_COLUMN_NAME] = deduped
    return [entity]
def to_entity(cls, measure_group, parent=None):
    """Convert a Withings measure group into a 'Measure' Entity.

    Example input:
    MeasureGetMeasGroup(
        attrib=<MeasureGetMeasGroupAttrib.MANUAL_USER_ENTRY: 2>,
        category=<MeasureGetMeasGroupCategory.REAL: 1>,
        created=<Arrow [2019-03-03T17:20:12-08:00]>,
        date=<Arrow [2018-07-19T16:20:00-07:00]>,
        deviceid=None,
        grpid=1385164716,
        measures=(
            MeasureGetMeasMeasure(type=<MeasureType.WEIGHT: 1>, unit=-2, value=7529),)
    )
    """
    # Scale each raw value by its unit exponent (e.g. 7529 * 10**-2 = 75.29).
    attributes = {
        m.type.name.lower(): m.value * (10 ** m.unit)
        for m in measure_group.measures
        if m.value is not None
    }
    date = measure_group.date.datetime.replace(tzinfo=datetime.timezone.utc)
    entity = Entity(
        ds_util.client.key('Measure', int(date.timestamp()), parent=parent))
    entity.update(attributes)
    entity['date'] = date
    return entity
def test_entity(self):
    """An Entity value maps to the 'entity_value' attribute name."""
    from google.cloud.datastore.entity import Entity

    entity = Entity()
    name, value = self._callFUT(entity)
    self.assertEqual(name, 'entity_value')
    # Fixed: assertIs replaces assertTrue(value is entity) — same check,
    # clearer failure message, and consistent with the sibling test.
    self.assertIs(value, entity)
def to_entity(self) -> Entity:
    """Return a bare datastore Entity keyed by this object's kind and id."""
    return Entity(key=db.ds_client.key(self.key_type, self.id))
def test_one_week_ago(
    self,
    create_client_mock,
    ds_util_client_get_mock,
    ds_util_client_put_mock,
    get_now_mock,
    fcm_util_best_clients_mock,
    fcm_util_send_mock,
):
    """A measure exactly one week old supplies the 'from a week ago' delta."""
    now = datetime.datetime(2020, 9, 25, 7, 13, tzinfo=datetime.timezone.utc)
    get_now_mock.return_value = now

    utc = datetime.timezone.utc
    # (date, weight) samples in the order originally inserted; each
    # insert(0, ...) below reverses them, so the final list is oldest-first.
    samples = [
        (now, 30),
        (datetime.datetime(2020, 9, 24, 7, 13, tzinfo=utc), 35),
        (datetime.datetime(2020, 9, 23, 7, 13, tzinfo=utc), 40),
        (datetime.datetime(2020, 9, 22, 7, 13, tzinfo=utc), 45),
        (datetime.datetime(2020, 9, 19, 7, 13, tzinfo=utc), 50),
        (datetime.datetime(2020, 9, 18, 7, 12, tzinfo=utc), 54),
        (datetime.datetime(2020, 9, 18, 7, 13, tzinfo=utc), 55),
        (datetime.datetime(2020, 9, 18, 7, 14, tzinfo=utc), 56),
        (datetime.datetime(2020, 9, 17, 7, 13, tzinfo=utc), 60),
        (datetime.datetime(2020, 5, 18, 8, 12, tzinfo=utc), 60),
    ]
    measures = []
    for date, weight in samples:
        measure = Entity()
        measure.update({'date': date, 'weight': weight})
        measures.insert(0, measure)

    worker = self._setup_side_effects(
        create_client_mock,
        ds_util_client_get_mock,
        ds_util_client_put_mock,
        fcm_util_best_clients_mock,
        fcm_util_send_mock,
        measures,
    )
    worker.sync()
    self._assert_send(
        fcm_util_send_mock,
        'Down 24.0 kg from a week ago',
        'You were 54.0 kg on Sep 18, 2020',
    )
def _pb_attr_value(val):
    """Given a value, return the protobuf attribute name and proper value.

    The Protobuf API uses different attribute names based on value types
    rather than inferring the type.  This function simply determines the
    proper attribute name based on the type of the value provided and
    returns the attribute name as well as a properly formatted value.

    Certain value types need to be coerced into a different type (such as a
    `datetime.datetime` into an integer timestamp, or a
    `google.cloud.datastore.key.Key` into a Protobuf representation.  This
    function handles that for you.

    .. note::
        Values which are "text" ('unicode' in Python2, 'str' in Python3)
        map to 'string_value' in the datastore; values which are "bytes"
        ('str' in Python2, 'bytes' in Python3) map to 'blob_value'.

    For example:

    >>> _pb_attr_value(1234)
    ('integer_value', 1234)
    >>> _pb_attr_value('my_string')
    ('string_value', 'my_string')

    :type val: :class:`datetime.datetime`,
               :class:`google.cloud.datastore.key.Key`,
               bool, float, integer, bytes, str, unicode,
               :class:`google.cloud.datastore.entity.Entity`, dict, list,
               :class:`google.cloud.datastore.helpers.GeoPoint`, NoneType
    :param val: The value to be scrutinized.

    :rtype: tuple
    :returns: A tuple of the attribute name and proper value type.
    """
    if isinstance(val, datetime.datetime):
        name = 'timestamp'
        value = _datetime_to_pb_timestamp(val)
    elif isinstance(val, Key):
        name, value = 'key', val.to_protobuf()
    elif isinstance(val, bool):
        # NOTE: bool must be tested before the integer branch below,
        # because ``bool`` is a subclass of ``int`` in Python.
        name, value = 'boolean', val
    elif isinstance(val, float):
        name, value = 'double', val
    elif isinstance(val, six.integer_types):
        name, value = 'integer', val
    elif isinstance(val, six.text_type):
        name, value = 'string', val
    elif isinstance(val, six.binary_type):
        name, value = 'blob', val
    elif isinstance(val, Entity):
        # NOTE: Entity is itself dict-like, so it must be tested before
        # the plain-dict branch below.
        name, value = 'entity', val
    elif isinstance(val, dict):
        # Plain dicts are wrapped in a key-less Entity before conversion.
        entity_val = Entity(key=None)
        entity_val.update(val)
        name, value = 'entity', entity_val
    elif isinstance(val, list):
        name, value = 'array', val
    elif isinstance(val, GeoPoint):
        name, value = 'geo_point', val.to_protobuf()
    elif val is None:
        name, value = 'null', struct_pb2.NULL_VALUE
    else:
        raise ValueError('Unknown protobuf attr type', type(val))
    return name + '_value', value
def entity_from_protobuf(pb):
    """Factory method for creating an entity based on a protobuf.

    The protobuf should be one returned from the Cloud Datastore
    Protobuf API.

    :type pb: :class:`.entity_pb2.Entity`
    :param pb: The Protobuf representing the entity.

    :rtype: :class:`google.cloud.datastore.entity.Entity`
    :returns: The entity derived from the protobuf.
    """
    # Fixed: removed the dead reassignments of ``pb`` (a no-op ``pb = pb``
    # and an unused ``pb = pb._pb``); only ``proto_pb`` is read below.
    if not getattr(pb, "_pb", False):
        # Coerce raw pb type into proto-plus pythonic type.
        proto_pb = entity_pb2.Entity(pb)
    else:
        proto_pb = pb

    key = None
    if "key" in proto_pb:  # Message field (Key)
        key = key_from_protobuf(proto_pb.key)

    entity_props = {}
    entity_meanings = {}
    exclude_from_indexes = []

    for prop_name, value_pb in _property_tuples(proto_pb):
        value = _get_value_from_value_pb(value_pb)
        entity_props[prop_name] = value

        # Check if the property has an associated meaning.
        is_list = isinstance(value, list)
        meaning = _get_meaning(value_pb, is_list=is_list)
        if meaning is not None:
            entity_meanings[prop_name] = (meaning, value)

        # Check if ``value_pb`` was excluded from index. Lists need to be
        # special-cased and we require all ``exclude_from_indexes`` values
        # in a list agree.
        if is_list and len(value) > 0:
            exclude_values = set(
                value_pb.exclude_from_indexes
                for value_pb in value_pb.array_value.values
            )
            if len(exclude_values) != 1:
                raise ValueError(
                    "For an array_value, subvalues must either "
                    "all be indexed or all excluded from "
                    "indexes."
                )
            if exclude_values.pop():
                exclude_from_indexes.append(prop_name)
        else:
            if value_pb.exclude_from_indexes:
                exclude_from_indexes.append(prop_name)

    entity = Entity(key=key, exclude_from_indexes=exclude_from_indexes)
    entity.update(entity_props)
    entity._meanings.update(entity_meanings)
    return entity
def test_unfurl_from_datastore(self):
    """Unfurling a Strava activity URL builds the expected Slack blocks."""
    # Mock the datastore client: return a 'Service' entity with credentials,
    # and a query whose fetch yields one canned activity.
    client_mock = mock.Mock()
    service = Entity(ds_util.client.key('Service', 'strava'))
    service['credentials'] = {'access_token': 'validrefreshtoken'}
    client_mock.return_value = service
    query_mock = mock.Mock()
    query_mock.add_filter.return_value = None
    query_mock.fetch.return_value = [activity_entity_for_test(3046711547)]
    client_mock.query.return_value = query_mock

    url = 'https://www.strava.com/activities/3123195350'
    activity_unfurl = _unfurl_activity_from_datastore(client_mock, url)

    # This strips the API key, so we have to strip it from the expected
    # output, too; for reasonable comparisons.
    if activity_unfurl.get('blocks', [{}])[0].get('accessory', {}).get('image_url'):
        activity_unfurl['blocks'][0]['accessory']['image_url'] = 'XYZ_URL'
    # This strips the API key, so we have to strip it from the actual
    # output, too; for reasonable comparisons.
    expected = {
        'blocks': [
            {
                'accessory': {
                    'alt_text': 'route map',
                    'image_url': 'XYZ_URL',
                    'type': 'image',
                },
                'text': {
                    'text': '<https://www.strava.com/activities/3123195350|*Activity '
                    '3046711547*> by '
                    '<https://www.strava.com/athletes/111|ActivityFirstName '
                    'ActivityLastName>, August 23, 2017\n'
                    'Description: 3046711547',
                    'type': 'mrkdwn',
                },
                'type': 'section',
            },
            {
                'type': 'divider'
            },
            {
                'fields': [
                    {
                        'text': '*Distance:* 0.01mi',
                        'type': 'mrkdwn'
                    },
                    {
                        'text': '*Elevation:* 984.0ft',
                        'type': 'mrkdwn'
                    },
                ],
                'type': 'section',
            },
        ]
    }
    self.assertDictEqual(dict(activity_unfurl), expected)
def from_key(cls, key: Key):
    """Fetch the Service stored at *key*, or create a defaulted one.

    NOTE(review): a fetched-but-empty entity is falsy (Entity subclasses
    dict), so it is also replaced by a fresh defaulted Service — confirm
    that is intended.
    """
    existing = ds_util.client.get(key)
    if existing:
        return existing
    service = Entity(key)
    Service._set_defaults(service)
    return service
def to_entity(cls, measure, parent=None):
    """Convert a Fitbit body-weight measure dict into a 'Measure' Entity.

    The key name is the measure date's unix timestamp (as a string), so
    repeated syncs of the same day overwrite rather than duplicate.

    :param measure: dict with 'dateTime' ('%Y-%m-%d') and 'value' keys.
    :param parent: optional parent datastore key.
    """
    date = datetime.datetime.strptime(measure['dateTime'], '%Y-%m-%d')
    # Fixed: str(int(date.timestamp())) replaces strftime('%s'), which is a
    # non-standard glibc-only directive (fails on Windows).  For a naive
    # datetime both interpret the value in local time, so the key name is
    # unchanged on Linux.
    entity = Entity(
        ds_util.client.key(
            'Measure', str(int(date.timestamp())), parent=parent))
    entity.update(dict(date=date, weight=float(measure['value'])))
    return entity
def test_installation_to_entity_to_installation(self):
    """An Installation survives a round trip through an Entity's dict."""
    original = Installation(app_id='app_id', user_id='user_id')
    entity = Entity()
    entity.update(original.__dict__)
    restored = Installation(**entity)
    self.assertEqual(original.__dict__, restored.__dict__)
def test_entity(self):
    """An Entity value maps to the 'entity_value' attribute name."""
    from google.cloud.datastore.entity import Entity

    entity = Entity()
    result_name, result_value = self._call_fut(entity)
    self.assertEqual(result_name, 'entity_value')
    self.assertIs(result_value, entity)
def django_instance_to_entities(connection, fields, raw, instance, check_null=True, model=None):
    """Converts a Django Model instance to an App Engine `Entity`

    Arguments:
        connection: Djangae appengine connection object
        fields: A list of fields to populate in the Entity
        raw: raw flag to pass to get_prepared_db_value
        instance: The Django model instance to convert
        check_null: Whether or not we should enforce NULL during conversion
        (throws an error if None is set on a non-nullable field)
        model: Model class to use instead of the instance one

    Returns:
        entity, [entity, entity, ...]

    Where the first result in the tuple is the primary entity, and the
    remaining entities are optionally descendents of the primary entity. This
    is useful for special indexes (e.g. contains)
    """
    from gcloudc.db.backends.datastore.indexing import special_indexes_for_column, get_indexer, IgnoreForIndexing
    from gcloudc.db.backends.datastore import POLYMODEL_CLASS_ATTRIBUTE

    model = model or type(instance)
    inheritance_root = get_top_concrete_parent(model)
    db_table = get_datastore_kind(inheritance_root)

    def value_from_instance(_instance, _field):
        # Returns (db-ready value, is_primary_key) for one field.
        value = get_prepared_db_value(connection, _instance, _field, raw)

        # If value is None, but there is a default, and the field is not
        # nullable then we should populate it.  Otherwise thing get hairy
        # when you add new fields to models
        if value is None and _field.has_default() and not _field.null:
            # We need to pass the default through get_db_prep_save to
            # properly do the conversion this is how
            value = _field.get_db_prep_save(_field.get_default(), connection)

        if check_null and (not _field.null and not _field.primary_key) and value is None:
            raise IntegrityError(
                "You can't set %s (a non-nullable field) to None!"
                % _field.name)

        is_primary_key = False
        # Only the primary key of the concrete inheritance root becomes
        # the entity key; child-model PKs are stored as plain properties.
        if _field.primary_key and _field.model == inheritance_root:
            is_primary_key = True

        return value, is_primary_key

    field_values = {}
    primary_key = None
    descendents = []
    fields_to_unindex = set()

    for field in fields:
        value, is_primary_key = value_from_instance(instance, field)
        if is_primary_key:
            primary_key = value
        else:
            field_values[field.column] = value

            # Add special indexed fields
            for index in special_indexes_for_column(model, field.column):
                indexer = get_indexer(field, index)

                unindex = False
                try:
                    values = indexer.prep_value_for_database(
                        value, index, model=model, column=field.column,
                        connection=connection)
                except IgnoreForIndexing as e:
                    # We mark this value as being wiped out for indexing
                    unindex = True
                    values = e.processed_value

                if not hasattr(values, "__iter__") or isinstance(
                        values, (bytes, str)):
                    values = [values]

                # If the indexer returns additional entities (instead of
                # indexing a special column) then just store those entities
                if indexer.PREP_VALUE_RETURNS_ENTITIES:
                    descendents.extend(values)
                else:
                    for i, v in enumerate(values):
                        column = indexer.indexed_column_name(
                            field.column, v, index)

                        if unindex:
                            fields_to_unindex.add(column)
                            continue

                        # If the column already exists in the values, then we
                        # convert it to a list and append the new value
                        if column in field_values:
                            if not isinstance(field_values[column], list):
                                field_values[column] = [field_values[column], v]
                            else:
                                field_values[column].append(v)
                        else:
                            # Otherwise we just set the column to the value
                            field_values[column] = v

    args = [db_table]
    if primary_key is not None:
        args.append(primary_key)

    key = Key(*args, namespace=connection.namespace,
              project=connection.gcloud_project)
    entity = Entity(key)
    entity.update(field_values)

    if fields_to_unindex:
        entity._properties_to_remove = fields_to_unindex

    # Polymodel instances record every concrete table in their hierarchy.
    classes = get_concrete_db_tables(model)
    if len(classes) > 1:
        entity[POLYMODEL_CLASS_ATTRIBUTE] = list(set(classes))

    return entity, descendents
def _params_entity(**kwargs):
    """Wrap *kwargs* in a 'TaskParams' Entity (incomplete key, no id)."""
    entity = Entity(ds_util.client.key('TaskParams'))
    entity.update(**kwargs)
    return entity