def test_find_installation(self, mock_client_get):
    """find_installation returns an Installation built from the stored entity."""
    workspace_key = ds_util.client.key('SlackWorkspace', 'enterprise_id-team_id')
    installer_key = ds_util.client.key(
        'SlackInstaller', 'installer-latest', parent=workspace_key)
    store = DatastoreInstallationStore(ds_util.client)

    stored = Entity(installer_key)
    stored.update(
        Installation(
            app_id='app_id',
            enterprise_id='enterprise_id',
            team_id='team_id',
            user_id='user_id',
        ).__dict__)
    mock_client_get.return_value = stored

    found = store.find_installation(
        enterprise_id='enterprise_id', team_id='team_id')

    self.assertIsNotNone(found)
    self.assertIsInstance(found, Installation)
    # Make sure we searched for the right key.
    self.assertEqual(mock_client_get.call_args[0][0], installer_key)
def save_to_db(self) -> None:
    """Append the current start time to the stored history and persist it."""
    entity = Entity(self.datastore_key)
    history = self._get_prior_start_times() + [self.started_at]
    entity.update({"prior_start_times": history})
    datastore_client.put(entity)
def test_find_bot(self, mock_client_get):
    """find_bot returns a Bot built from the stored entity."""
    workspace_key = ds_util.client.key('SlackWorkspace', 'enterprise_id-team_id')
    bot_key = ds_util.client.key('SlackBot', 'bot-latest', parent=workspace_key)
    store = DatastoreInstallationStore(ds_util.client)

    stored = Entity(bot_key)
    stored.update(
        Bot(
            app_id='app_id',
            bot_id='bot_id',
            bot_token='bot_token',
            bot_user_id='bot_user_id',
            installed_at=55.00,
        ).__dict__)
    mock_client_get.return_value = stored

    found = store.find_bot(enterprise_id='enterprise_id', team_id='team_id')

    self.assertIsNotNone(found)
    self.assertIsInstance(found, Bot)
    # Make sure we searched for the right key.
    self.assertEqual(mock_client_get.call_args[0][0], bot_key)
def test_mock_datastore_get(self, mock_datastore_service):
    """POST /note/get returns the notes the (mocked) datastore yields."""
    entity = Entity()
    entity.update({
        'title': 'Example Title',
        'note_text': 'Example text',
        'user': '******',
        'last_modified_date': '',
        'created_date': '',
    })
    mock_datastore_service.return_value.get.return_value = [entity]
    # instantiate a new DatastoreService so it is replaced with mock
    main.datastore = datastore.DatastoreService()

    r = self.app.post(
        '/note/get',
        data=json.dumps({'user': '******'}),
        headers={'Content-Type': 'application/json'})

    assert r.status_code == 200
    assert r.json == {
        'matches': [{
            'title': 'Example Title',
            'note_text': 'Example text',
            'last_modified_date': '',
        }]
    }
def test_installation_bot_to_entity_to_bot(self):
    """A Bot derived from an Installation survives an Entity round trip."""
    installation = Installation(app_id='app_id', user_id='user_id')
    original = installation.to_bot()

    entity = Entity()
    entity.update(original.__dict__)
    restored = Bot(**entity)

    self.assertEqual(original.__dict__, restored.__dict__)
def to_entity(cls, track, parent=None):
    """Convert a track mapping into a 'Track' Entity keyed by its URL."""
    key = ds_util.client.key('Track', track['url'], parent=parent)
    entity = Entity(key, exclude_from_indexes=cls.EXCLUDE_FROM_INDEXES)
    # Deep-copy so later mutations of the source mapping don't leak in.
    entity.update(copy.deepcopy(track))
    return entity
def to_entity(cls, item):
    """Build a 'Measure' Entity from a Garmin body-composition item."""
    entity = Entity(ds_util.client.key('Measure'))
    entity.update({
        # timestampGMT is in milliseconds.
        'date': datetime.datetime.utcfromtimestamp(item['timestampGMT'] / 1000),
        'fat_ratio': item['bodyFat'],
        # weight arrives in grams; store kilograms rounded to 4 places.
        'weight': round(item['weight'] / 1000, 4),
    })
    return entity
def test_measures(self):
    """Measure.to_entity converts the first body-composition sample correctly."""
    entity = GarminConverters.Measure.to_entity(BODY_COMP['dateWeightList'][0])

    expected = Entity(ds_util.client.key('Measure'))
    expected.update({
        'date': datetime.datetime(2020, 3, 22, 1, 49),
        'fat_ratio': None,
        'weight': 59.4206,
    })
    self.assertEqual(entity.items(), expected.items())
def entity_from_protobuf(pb):
    """Factory method for creating an entity based on a protobuf.

    The protobuf should be one returned from the Cloud Datastore
    Protobuf API.

    :type pb: :class:`.entity_pb2.Entity`
    :param pb: The Protobuf representing the entity.

    :rtype: :class:`google.cloud.datastore.entity.Entity`
    :returns: The entity derived from the protobuf.
    """
    # Unwrap a proto-plus wrapper down to the raw protobuf message.
    if isinstance(pb, entity_pb2.Entity):
        pb = pb._pb

    key = None
    if pb.HasField("key"):  # Message field (Key)
        key = key_from_protobuf(pb.key)

    entity_props = {}
    entity_meanings = {}
    exclude_from_indexes = []

    for prop_name, value_pb in pb.properties.items():
        value = _get_value_from_value_pb(value_pb)
        entity_props[prop_name] = value

        # Check if the property has an associated meaning.
        is_list = isinstance(value, list)
        meaning = _get_meaning(value_pb, is_list=is_list)
        if meaning is not None:
            entity_meanings[prop_name] = (meaning, value)

        # Check if ``value_pb`` was excluded from index. Lists need to be
        # special-cased and we require all ``exclude_from_indexes`` values
        # in a list agree.
        if is_list and len(value) > 0:
            exclude_values = set(value_pb.exclude_from_indexes
                                 for value_pb in value_pb.array_value.values)
            if len(exclude_values) != 1:
                raise ValueError("For an array_value, subvalues must either "
                                 "all be indexed or all excluded from "
                                 "indexes.")

            if exclude_values.pop():
                exclude_from_indexes.append(prop_name)
        else:
            if value_pb.exclude_from_indexes:
                exclude_from_indexes.append(prop_name)

    entity = Entity(key=key, exclude_from_indexes=exclude_from_indexes)
    entity.update(entity_props)
    # Preserve per-property meanings so a round trip back to protobuf keeps them.
    entity._meanings.update(entity_meanings)
    return entity
def test_bot_to_entity_to_bot(self):
    """A Bot survives a round trip through a datastore Entity."""
    original = Bot(
        app_id='app_id',
        bot_id='bot_id',
        bot_token='bot_token',
        bot_user_id='bot_user_id',
        installed_at=55.00,
    )

    entity = Entity()
    entity.update(original.__dict__)
    restored = Bot(**entity)

    self.assertEqual(original.__dict__, restored.__dict__)
def to_entity(kind, properties, name=None, parent=None, include_in_indexes=tuple()):
    """Create an Entity of `kind`, indexing only the named properties."""
    key = (
        ds_util.client.key(kind, name, parent=parent)
        if name
        else ds_util.client.key(kind, parent=parent)
    )
    entity = Entity(key)
    entity.update(properties)
    # Exclude every property that was not explicitly requested for indexing.
    entity.exclude_from_indexes = entity.keys() - include_in_indexes
    return entity
def to_entity(cls, measure, parent=None):
    """Convert a weight-log measure dict into a 'Measure' Entity.

    The entity is keyed by the measurement's Unix timestamp (seconds,
    local time for the naive parsed datetime) so re-syncing the same
    measurement overwrites rather than duplicates.
    """
    date = datetime.datetime.strptime(
        measure['date'] + ' ' + measure['time'], '%Y-%m-%d %H:%M:%S')
    # str(int(date.timestamp())) replaces strftime('%s'): '%s' is a
    # non-standard glibc extension that fails on other platforms, while
    # producing the same value where it does work.
    entity = Entity(
        ds_util.client.key('Measure', str(int(date.timestamp())), parent=parent))
    entity.update(
        dict(
            id=measure['logId'],
            date=date,
            weight=measure['weight'],
            fat_ratio=measure['fat'],
        ))
    return entity
def entity_from_protobuf(pb):
    """Factory method for creating an entity based on a protobuf.

    The protobuf should be one returned from the Cloud Datastore
    Protobuf API.

    :type pb: :class:`google.cloud.datastore._generated.entity_pb2.Entity`
    :param pb: The Protobuf representing the entity.

    :rtype: :class:`google.cloud.datastore.entity.Entity`
    :returns: The entity derived from the protobuf.
    """
    key = None
    if pb.HasField('key'):  # Message field (Key)
        key = key_from_protobuf(pb.key)

    entity_props = {}
    entity_meanings = {}
    exclude_from_indexes = []

    for prop_name, value_pb in _property_tuples(pb):
        value = _get_value_from_value_pb(value_pb)
        entity_props[prop_name] = value

        # Check if the property has an associated meaning.
        is_list = isinstance(value, list)
        meaning = _get_meaning(value_pb, is_list=is_list)
        if meaning is not None:
            entity_meanings[prop_name] = (meaning, value)

        # Check if ``value_pb`` was excluded from index. Lists need to be
        # special-cased and we require all ``exclude_from_indexes`` values
        # in a list agree.
        #
        # BUGFIX: only apply the agreement check to non-empty lists. An
        # empty array_value has no subvalues, so ``exclude_values`` was
        # empty and the ``!= 1`` check raised ValueError spuriously.
        if is_list and len(value) > 0:
            exclude_values = set(value_pb.exclude_from_indexes
                                 for value_pb in value_pb.array_value.values)
            if len(exclude_values) != 1:
                raise ValueError('For an array_value, subvalues must either '
                                 'all be indexed or all excluded from '
                                 'indexes.')

            if exclude_values.pop():
                exclude_from_indexes.append(prop_name)
        else:
            if value_pb.exclude_from_indexes:
                exclude_from_indexes.append(prop_name)

    entity = Entity(key=key, exclude_from_indexes=exclude_from_indexes)
    entity.update(entity_props)
    # Preserve per-property meanings so a round trip back to protobuf keeps them.
    entity._meanings.update(entity_meanings)
    return entity
def test_process_link_shared_called(self, slack_process_link_shared_mock):
    """POST /tasks/event with a link_shared event invokes the handler."""
    event_entity = Entity(
        ds_util.client.key('SubscriptionEvent', 'slack-E232eq2ee'))
    event_entity.update(LINK_SHARED_EVENT)

    self.client.post(
        '/tasks/event',
        data=task_util.task_body_for_test(event=event_entity),
    )

    # It doesn't matter what code gets returned, since the method returns
    # whatever _process_link_shared returns, which is a mock. Only test
    # that _process_link_shared is called.
    slack_process_link_shared_mock.assert_called_once()
def test_process_link_shared(self, mock_create_unfurls, mock_slack_client):
    """A link_shared event with unfurls uses the slack client and returns OK."""
    mock_create_unfurls.return_value = {'http://example.com': 'unfurl'}

    event_entity = Entity(
        ds_util.client.key('SubscriptionEvent', 'slack-E232eq2ee'))
    event_entity.update(LINK_SHARED_EVENT)

    r = self.client.post(
        '/tasks/event',
        data=task_util.task_body_for_test(event=event_entity),
    )

    mock_slack_client.assert_called_once()
    responses.assertResponse(self, responses.OK, r)
def track_finished_for_test() -> Entity:
    """Return a fully-populated 'Track' entity fixture in the FINISHED state.

    Mirrors the shape of a Garmin LiveTrack session payload (see the nested
    'info.session' mapping) plus the derived top-level fields the app adds
    ('start'/'end' as aware datetimes, numeric 'status', parsed 'url_info').
    """
    track = Entity(ds_util.client.key('Track', 10285651))
    track.update(
        {
            "info": {
                "gcAvatar": "https://s3.amazonaws.com/garmin-connect-prod/profile_images/avatar.png",
                "session": {
                    "end": "2021-04-11T20:28:36.000Z",
                    "position": {
                        "lat": 37.77,
                        "locationName": "San Francisco",
                        "lon": -122.44,
                    },
                    "publisher": {
                        "connectUserProfileId": 123456,
                        "identifier": "PUBLISHERIDPUBLISHERID",
                        "nickname": "Joe LaPenna",
                        "trackerId": "UA69B7XL",
                        "type": "WEARABLE",
                    },
                    "publisherState": "ACTIVE",
                    "sessionId": "session-session",
                    "sessionName": "04/11/21",
                    "start": "2021-04-11T15:00:09.000Z",
                    "subscriber": {
                        "identifier": "*****@*****.**",
                        "type": "EMAIL",
                    },
                    "subscriberState": "ACTIVE",
                    "token": "1618153228",
                    "url": "https://livetrack.garmin.com/session/session-session/token/TOKENTOKEN",
                    "userDisplayName": "Joe LaPenna",
                    "viewable": "2021-04-11T20:28:36.000Z",
                },
                "shortened": False,
                "unitId": 3996815102,
                "viewable": False,
            },
            # Session window, UTC-aware (unlike the ISO strings above).
            "start": datetime.datetime(
                2021, 4, 11, 15, 00, tzinfo=datetime.timezone.utc
            ),
            "end": datetime.datetime(2021, 4, 11, 20, 28, tzinfo=datetime.timezone.utc),
            "status": 4,  # FINISHED
            "url": "https://livetrack.garmin.com/session/session-session/token/TOKENTOKEN",
            "url_info": {"session": "session-session", "token": "TOKENTOKEN"},
        }
    )
    return track
def route_for_test():
    """Return a representative 'Route' entity fixture for tests.

    The name/athlete values intentionally contain combining-strikethrough
    and emoji characters to exercise unicode handling downstream.
    """
    route = Entity(ds_util.client.key('Route', 10285651))
    route.update(
        {
            'id': 10285651,
            'timestamp': datetime.datetime.fromtimestamp(1503517240),
            'description': 'East Peak, Reverse Alpine, Gestalt Haus',
            'distance': 98353.19420993332,
            'elevation_gain': 1829.1980834963906,
            'name': 'The ̶N̶i̶g̶h̶t̶ Day is ̶D̶a̶r̶k̶ Mostly Cloudy, Probably and Full of ̶T̶e̶r̶r̶o̶r̶ Pickles',
            'athlete': {
                'id': 1021133,
                'firstname': 'Rayco, A Shopping Cart 🛒',
                'lastname': 'of All the Feelings',
            },
            'map': {'id': 10285651, 'summary_polyline': SUMMARY_POLYLINE},
        }
    )
    return route
def _output(df, _kind):
    """Write each row of `df` to datastore as an entity of kind `_kind`.

    Each entity is keyed by "<user_id>_<topic_id>" so re-runs overwrite
    rather than duplicate. Rows are committed in batches of 500 (the
    datastore per-commit mutation limit).

    Fix: removed the `ind` counter, which was incremented but never read.
    """
    all_entities = []
    for row in df.T.to_dict().values():
        key_name = "{}_{}".format(row['user_id'], row['topic_id'])
        entity = Entity(key=client.key(_kind, key_name))
        entity.update(row)
        all_entities.append(entity)

    for entities in _get_batch(all_entities, n=500):
        batch = client.batch()
        batch.begin()
        for entity in entities:
            batch.put(entity)
        batch.commit()
def test_process_event_task_no_duplicate(
    self,
    ds_util_client_get_mock,
    ds_util_client_put_mock,
    ds_util_client_query_mock,
    ds_util_client_delete_multi_mock,
    withings_create_client_mock,
    withings_tasks_weight_trend_mock,
):
    """A not-yet-seen withings event is stored and triggers the weight-trend task."""
    # A user who has opted in to daily weight notifications.
    user = Entity(ds_util.client.key('User', 'someuser'))
    user['preferences'] = {'daily_weight_notif': True}

    # The withings service record, child of the user, with valid credentials.
    service = Entity(ds_util.client.key('Service', 'withings', parent=user.key))
    service['credentials'] = {'refresh_token': 'validrefreshtoken'}

    event_entity = Entity(
        ds_util.client.key('SubscriptionEvent', 'Event', parent=service.key)
    )
    event_entity.update(
        {
            'event_data': {
                'startdate': '1',
                'enddate': '1',
            }
        }
    )

    # There are three gets we need to account for.
    # Returning None for 'Event' means no stored duplicate exists, so the
    # worker should treat this event as new.
    def get_side_effect(key):
        if key.name == 'Event':
            return None
        elif key.name == 'withings':
            return service
        elif key.name == 'someuser':
            return user

    ds_util_client_get_mock.side_effect = get_side_effect

    worker = EventsWorker(service, event_entity)
    worker.sync()

    # The event must be persisted (so a later identical event counts as a
    # duplicate) and the downstream task must fire exactly once.
    ds_util_client_put_mock.assert_any_call(event_entity)
    withings_tasks_weight_trend_mock.assert_called_once()
def save(self, installation: Installation):
    """Persist an installation: workspace parent, bot, and installer records.

    Writes four entities:
      * the SlackWorkspace parent (ensures the ancestor exists),
      * the latest Bot for the workspace,
      * the latest Installation for the workspace,
      * the latest Installation for the specific installing user.

    Fix: the workspace put previously used the module-level
    ``ds_util.client`` while every other write used the injected
    ``self.client``; all writes now go through ``self.client``.
    """
    workspace_key = SlackWorkspace.key(
        installation.enterprise_id, installation.team_id, parent=self.parent)
    self.client.put(Entity(workspace_key))

    entity = Entity(SlackBot.key('bot-latest', parent=workspace_key))
    entity.update(installation.to_bot().__dict__)
    response = self.client.put(entity)
    self.logger.debug(f"DS put response: {response}")

    # per workspace
    entity = Entity(
        SlackInstaller.key('installer-latest', parent=workspace_key))
    entity.update(installation.__dict__)
    response = self.client.put(entity)
    self.logger.debug(f"DS put response: {response}")

    # per workspace per user
    u_id = installation.user_id or "NONE"
    entity = Entity(
        SlackInstaller.key(f'installer-{u_id}-latest', parent=workspace_key))
    entity.update(installation.__dict__)
    response = self.client.put(entity)
    self.logger.debug(f"DS put response: {response}")
def to_entity(cls, measure_group, parent=None):
    """Convert a withings MeasureGetMeasGroup into a 'Measure' Entity.

    Each measure value is scaled by 10**unit (the API returns a mantissa
    plus an exponent, e.g. value=7529, unit=-2 -> 75.29). The entity is
    keyed by the group's UTC timestamp and carries the date as a property.
    """
    date = measure_group.date.datetime.replace(tzinfo=datetime.timezone.utc)
    attributes = {
        m.type.name.lower(): m.value * (10**m.unit)
        for m in measure_group.measures
        if m.value is not None
    }
    entity = Entity(
        ds_util.client.key('Measure', int(date.timestamp()), parent=parent))
    entity.update(attributes)
    entity['date'] = date
    return entity
def _params_entity(**kwargs):
    """Wrap the given keyword arguments in a 'TaskParams' Entity."""
    entity = Entity(ds_util.client.key('TaskParams'))
    entity.update(kwargs)
    return entity
def _pb_attr_value(val):
    """Given a value, return the protobuf attribute name and proper value.

    The Protobuf API uses different attribute names based on value types
    rather than inferring the type. This function simply determines the
    proper attribute name based on the type of the value provided and
    returns the attribute name as well as a properly formatted value.

    Certain value types need to be coerced into a different type (such as a
    `datetime.datetime` into an integer timestamp, or a
    `google.cloud.datastore.key.Key` into a Protobuf representation. This
    function handles that for you.

    .. note::
       Values which are "text" ('unicode' in Python2, 'str' in Python3) map
       to 'string_value' in the datastore; values which are "bytes"
       ('str' in Python2, 'bytes' in Python3) map to 'blob_value'.

    For example:

    >>> _pb_attr_value(1234)
    ('integer_value', 1234)
    >>> _pb_attr_value('my_string')
    ('string_value', 'my_string')

    :type val: :class:`datetime.datetime`,
               :class:`google.cloud.datastore.key.Key`,
               bool, float, integer, bytes, str, unicode,
               :class:`google.cloud.datastore.entity.Entity`, dict, list,
               :class:`google.cloud.datastore.helpers.GeoPoint`, NoneType
    :param val: The value to be scrutinized.

    :rtype: tuple
    :returns: A tuple of the attribute name and proper value type.
    """
    if isinstance(val, datetime.datetime):
        name = 'timestamp'
        value = _datetime_to_pb_timestamp(val)
    elif isinstance(val, Key):
        name, value = 'key', val.to_protobuf()
    # NOTE: bool must be tested before the integer types -- bool is a
    # subclass of int, so reversing the order would mis-classify booleans.
    elif isinstance(val, bool):
        name, value = 'boolean', val
    elif isinstance(val, float):
        name, value = 'double', val
    elif isinstance(val, six.integer_types):
        name, value = 'integer', val
    elif isinstance(val, six.text_type):
        name, value = 'string', val
    elif isinstance(val, six.binary_type):
        name, value = 'blob', val
    # An Entity (a dict subclass) must be tested before plain dict.
    elif isinstance(val, Entity):
        name, value = 'entity', val
    elif isinstance(val, dict):
        # A plain dict becomes a key-less Entity.
        entity_val = Entity(key=None)
        entity_val.update(val)
        name, value = 'entity', entity_val
    elif isinstance(val, list):
        name, value = 'array', val
    elif isinstance(val, GeoPoint):
        name, value = 'geo_point', val.to_protobuf()
    elif val is None:
        name, value = 'null', struct_pb2.NULL_VALUE
    else:
        raise ValueError('Unknown protobuf attr type', type(val))

    return name + '_value', value
def test_installation_to_entity_to_installation(self):
    """An Installation survives a round trip through a datastore Entity."""
    original = Installation(app_id='app_id', user_id='user_id')

    entity = Entity()
    entity.update(original.__dict__)
    restored = Installation(**entity)

    self.assertEqual(original.__dict__, restored.__dict__)
def django_instance_to_entities(connection, fields, raw, instance, check_null=True, model=None):
    """
    Converts a Django Model instance to an App Engine `Entity`

    Arguments:
        connection: Djangae appengine connection object
        fields: A list of fields to populate in the Entity
        raw: raw flag to pass to get_prepared_db_value
        instance: The Django model instance to convert
        check_null: Whether or not we should enforce NULL during conversion
        (throws an error if None is set on a non-nullable field)
        model: Model class to use instead of the instance one

    Returns:
        entity, [entity, entity, ...]

    Where the first result in the tuple is the primary entity, and the
    remaining entities are optionally descendents of the primary entity. This
    is useful for special indexes (e.g. contains)
    """
    # Imported locally to avoid circular imports at module load time.
    from gcloudc.db.backends.datastore.indexing import special_indexes_for_column, get_indexer, IgnoreForIndexing
    from gcloudc.db.backends.datastore import POLYMODEL_CLASS_ATTRIBUTE

    model = model or type(instance)
    inheritance_root = get_top_concrete_parent(model)
    db_table = get_datastore_kind(inheritance_root)

    def value_from_instance(_instance, _field):
        # Returns (prepared value, is_primary_key) for a single field.
        value = get_prepared_db_value(connection, _instance, _field, raw)

        # If value is None, but there is a default, and the field is not nullable then we should populate it
        # Otherwise thing get hairy when you add new fields to models
        if value is None and _field.has_default() and not _field.null:
            # We need to pass the default through get_db_prep_save to properly do the conversion
            # this is how
            value = _field.get_db_prep_save(_field.get_default(), connection)

        if check_null and (not _field.null and not _field.primary_key) and value is None:
            raise IntegrityError(
                "You can't set %s (a non-nullable field) to None!" % _field.name)

        is_primary_key = False
        if _field.primary_key and _field.model == inheritance_root:
            is_primary_key = True

        return value, is_primary_key

    field_values = {}
    primary_key = None

    descendents = []
    fields_to_unindex = set()

    for field in fields:
        value, is_primary_key = value_from_instance(instance, field)
        if is_primary_key:
            primary_key = value
        else:
            field_values[field.column] = value

        # Add special indexed fields
        for index in special_indexes_for_column(model, field.column):
            indexer = get_indexer(field, index)

            unindex = False
            try:
                values = indexer.prep_value_for_database(
                    value, index, model=model, column=field.column, connection=connection)
            except IgnoreForIndexing as e:
                # We mark this value as being wiped out for indexing
                unindex = True
                values = e.processed_value

            # Normalize scalars (and strings/bytes, which are iterable but
            # represent single values) to a one-element list.
            if not hasattr(values, "__iter__") or isinstance(values, (bytes, str)):
                values = [values]

            # If the indexer returns additional entities (instead of indexing a special column)
            # then just store those entities
            if indexer.PREP_VALUE_RETURNS_ENTITIES:
                descendents.extend(values)
            else:
                for i, v in enumerate(values):
                    column = indexer.indexed_column_name(field.column, v, index)

                    if unindex:
                        fields_to_unindex.add(column)
                        continue

                    # If the column already exists in the values, then we convert it to a
                    # list and append the new value
                    if column in field_values:
                        if not isinstance(field_values[column], list):
                            field_values[column] = [field_values[column], v]
                        else:
                            field_values[column].append(v)
                    else:
                        # Otherwise we just set the column to the value
                        field_values[column] = v

    # Build the key from the kind plus, when present, the primary key value.
    args = [db_table]
    if primary_key is not None:
        args.append(primary_key)

    key = Key(*args, namespace=connection.namespace, project=connection.gcloud_project)
    entity = Entity(key)
    entity.update(field_values)

    if fields_to_unindex:
        # Tell the put path which special-index columns must be removed.
        entity._properties_to_remove = fields_to_unindex

    classes = get_concrete_db_tables(model)
    if len(classes) > 1:
        # Polymodel: record every concrete table this instance belongs to.
        entity[POLYMODEL_CLASS_ATTRIBUTE] = list(set(classes))

    return entity, descendents
def to_entity(cls, measure, parent=None):
    """Convert a Fitbit time-series point into a 'Measure' Entity.

    The entity is keyed by the date's Unix timestamp so re-syncing the
    same day overwrites rather than duplicates.
    """
    date = datetime.datetime.strptime(measure['dateTime'], '%Y-%m-%d')
    # str(int(date.timestamp())) replaces strftime('%s'): '%s' is a
    # non-standard glibc extension that fails on other platforms, while
    # producing the same value where it does work.
    entity = Entity(
        ds_util.client.key('Measure', str(int(date.timestamp())), parent=parent))
    entity.update(dict(date=date, weight=float(measure['value'])))
    return entity