Example No. 1
    def testMultiple(self):
        """Tests that multiple allocations from the same sequence do
    not overlap.
    """
        allocs = [IdAllocator('type'), IdAllocator('type')]
        num_ids = 3000

        def _OnAllocated(id_lists):
            assert len(id_lists) == 2
            id_set1 = set(id_lists[0])
            id_set2 = set(id_lists[1])
            assert len(id_set1) == 3000
            assert len(id_set2) == 3000
            assert id_set1.isdisjoint(id_set2)
            self.stop()

        with util.ArrayBarrier(_OnAllocated) as b:
            with util.ArrayBarrier(b.Callback()) as b1:
                [
                    allocs[0].NextId(self._client, b1.Callback())
                    for i in xrange(num_ids)
                ]
            with util.ArrayBarrier(b.Callback()) as b2:
                [
                    allocs[1].NextId(self._client, b2.Callback())
                    for i in xrange(num_ids)
                ]
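
All of these examples rely on the same contract: each call to b.Callback() hands out a one-shot callback, and once every handed-out callback has fired, the completion function receives the collected results as a single list in registration order (Example No. 8 depends on that ordering to unpack the four MD5 values positionally, and Example No. 9 shows that an empty barrier completes immediately with []). The snippet below is only a minimal sketch of that contract for illustration; it is not the Viewfinder util.ArrayBarrier implementation, which also supports the compact= and on_exception= arguments used in the later examples.

class MiniArrayBarrier(object):
    """Toy stand-in for util.ArrayBarrier: one result slot per handed-out callback."""

    def __init__(self, on_done):
        self._on_done = on_done
        self._results = []
        self._pending = 0
        self._armed = False

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc, tb):
        # Leaving the "with" block arms the barrier; if no callbacks were
        # handed out, the completion callback fires immediately with [].
        self._armed = True
        self._MaybeFinish()
        return False

    def Callback(self):
        index = len(self._results)
        self._results.append(None)
        self._pending += 1

        def _callback(result=None):
            self._results[index] = result
            self._pending -= 1
            self._MaybeFinish()

        return _callback

    def _MaybeFinish(self):
        if self._armed and self._pending == 0:
            self._on_done(self._results)


def _show(results):
    print(results)

# Results are reported in the order the callbacks were handed out, not the
# order in which they fired.
with MiniArrayBarrier(_show) as b:
    cb1, cb2 = b.Callback(), b.Callback()
    cb2('second')
    cb1('first')
# prints: ['first', 'second']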
Example No. 2
 def _OnQueryPosts(posts):
     with util.ArrayBarrier(partial(_OnQueryMetadata, posts)) as b:
         for post in posts:
             with util.ArrayBarrier(b.Callback()) as metadata_b:
                 post_id = Post.ConstructPostId(post.episode_id,
                                                post.photo_id)
                 Photo.Query(client,
                             hash_key=post.photo_id,
                             col_names=None,
                             callback=metadata_b.Callback())
                 UserPost.Query(client,
                                hash_key=user_id,
                                range_key=post_id,
                                col_names=None,
                                callback=metadata_b.Callback(),
                                must_exist=False)
Example No. 3
    def testCompletedAfterException(self):
        """Raise exception and then make the barrier callback."""
        val = [0]

        def _Exception(type_, value_, traceback):
            logging.info("Exception")
            val[0] += 1
            self.io_loop.add_callback(self.stop)

        def _Completed():
            logging.info("Completed")
            val[0] += 1
            self.io_loop.add_callback(self.stop)

        def _RaiseException(completed_cb):
            self.io_loop.add_callback(partial(completed_cb, 1))
            raise KeyError('key')

        with util.ArrayBarrier(_Completed, on_exception=_Exception) as b:
            self.io_loop.add_callback(partial(_RaiseException, b.Callback()))
            self.io_loop.add_callback(partial(_RaiseException, b.Callback()))

        self.wait()
        self.assertEqual(val[0], 1,
                         'Both _Completed and _Exception were called.')
Example No. 4
    def testIndexing(self):
        """Tests indexing of multiple objects with overlapping field values.
    Creates 100 users, then queries for specific items.
    """
        given_names = ['Spencer', 'Peter', 'Brian', 'Chris']
        family_names = ['Kimball', 'Mattis', 'McGinnis', 'Schoenbohm']
        emails = [
            '*****@*****.**', '*****@*****.**',
            '*****@*****.**', '*****@*****.**',
            '*****@*****.**', '*****@*****.**',
            '*****@*****.**', '*****@*****.**',
            '*****@*****.**'
        ]

        num_users = 100

        def _QueryAndVerify(users, barrier_cb, col, value):
            def _Verify(q_users):
                logging.debug('querying for %s=%s yielded %d matches' %
                              (col, value, len(q_users)))
                for u in q_users:
                    # Exclude users created by base class.
                    if u.user_id not in [
                            self._user.user_id, self._user2.user_id
                    ]:
                        self.assertEqual(getattr(users[u.user_id], col), value)
                barrier_cb()

            User.IndexQuery(self._client, ('user.%s={v}' % col, {
                'v': value
            }),
                            col_names=None,
                            callback=_Verify)

        def _OnCreateUsers(user_list):
            users = dict([(u.user_id, u) for u in user_list])
            with util.Barrier(self.stop) as b:
                [
                    _QueryAndVerify(users, b.Callback(), 'given_name', value)
                    for value in given_names
                ]
                [
                    _QueryAndVerify(users, b.Callback(), 'family_name', value)
                    for value in family_names
                ]
                [
                    _QueryAndVerify(users, b.Callback(), 'email', value)
                    for value in emails
                ]

        with util.ArrayBarrier(_OnCreateUsers) as b:
            for i in xrange(num_users):
                kwargs = {
                    'user_id': i + 10,
                    'given_name': random.choice(given_names),
                    'family_name': random.choice(family_names),
                    'email': random.choice(emails),
                }
                user = User.CreateFromKeywords(**kwargs)
                user.Update(self._client, partial(b.Callback(), user))
Example No. 5
  def ListClientLogs(cls, user_id, start_timestamp, end_timestamp, filter, callback):
    """Queries S3 based on specified "user_id", and the specified
    array of ISO date strings. The results are filtered according to
    the regular expression "filter". Returns an array of {filename,
    URL} objects for each date in "iso_dates".
    """
    obj_store = ObjectStore.GetInstance(ObjectStore.USER_LOG)

    def _OnListDates(date_listings):
      """Assemble {filename, url} objects for each date listing."""
      filter_re = re.compile(filter or '.*')
      callback([{'filename': key, 'url': obj_store.GenerateUrl(key)}
                for logs in date_listings for key in logs if filter_re.search(key)])

    with util.ArrayBarrier(_OnListDates) as b:
      iso_dates = set()
      t = start_timestamp
      while t < end_timestamp:
        iso_dates.add(ClientLog._IsoDate(t))
        t += constants.SECONDS_PER_DAY
      iso_dates.add(ClientLog._IsoDate(end_timestamp))
      iso_dates = sorted(iso_dates)

      for iso_date in iso_dates:
        ClientLog._ListAllKeys(obj_store, ClientLog._LogKeyPrefix(user_id, iso_date), b.Callback())
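
The date-range loop above walks from start_timestamp to end_timestamp in whole-day steps and then adds the end day itself, so a range touching parts of three calendar days produces three key prefixes to list. A self-contained rework of just that step (a sketch: _iso_date stands in for ClientLog._IsoDate, which is assumed here to format a POSIX timestamp as YYYY-MM-DD):

import datetime

SECONDS_PER_DAY = 24 * 60 * 60

def _iso_date(ts):
    # Stand-in for ClientLog._IsoDate (assumed YYYY-MM-DD, UTC).
    return datetime.datetime.utcfromtimestamp(ts).strftime('%Y-%m-%d')

start, end = 1357000000, 1357000000 + 2 * SECONDS_PER_DAY  # spans three calendar days
iso_dates, t = set(), start
while t < end:
    iso_dates.add(_iso_date(t))
    t += SECONDS_PER_DAY
iso_dates.add(_iso_date(end))
print(sorted(iso_dates))  # ['2013-01-01', '2013-01-02', '2013-01-03']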
Example No. 6
    def Alert(self, scenario, message):
        """Send an SMS Alert if one has not been sent too recently."""
        logging.info('Alert was called from scenario %s with message %s',
                     scenario.name, message)

        if self._alert_hook is not None:
            self._alert_hook(scenario, message)
            return

        now = time.time()
        if now < self._last_alert + MINIMUM_ALERT_SPACING:
            # Another alert was already sent within the minimum spacing interval; skip this one.
            logging.info(
                'Alert not sent because another alert had been sent within the past %d seconds: %s',
                MINIMUM_ALERT_SPACING, message)
            return

        def _OnSendError(t, v, tb):
            logging.error('Error sending alert SMS message.',
                          exc_info=(t, v, tb))

        def _OnSendSuccess(messages):
            self._last_alert = now
            for m in messages:
                logging.info(m)

        with util.ArrayBarrier(_OnSendSuccess, _OnSendError) as b:
            truncated_message = message[:MAX_TEXT_SIZE]
            for number in ALERT_PHONE_NUMBERS:
                SMSManager.Instance().SendSMS(b.Callback(),
                                              number,
                                              truncated_message,
                                              description=truncated_message)
Example No. 7
 def Evaluate(self, client, callback, start_key, consistent_read, param_dict):
   """Recursively evaluates the query tree via a depth- first
   traversal. Returns the result set, defined by the data delivered
   via the IndexTermNodes and then operated on by the OpNodes.
   """
   with util.ArrayBarrier(partial(self._SetOperation, callback)) as b:
     self._left.Evaluate(client, b.Callback(), start_key, consistent_read, param_dict)
     self._right.Evaluate(client, b.Callback(), start_key, consistent_read, param_dict)
Example No. 8
    def Transform(self, client, photo, callback):
        from viewfinder.backend.storage.s3_object_store import S3ObjectStore

        def _SetPhotoMD5Values(md5_values):
            tn_md5, med_md5, full_md5, orig_md5 = md5_values

            assert photo.tn_md5 == tn_md5 or photo.tn_md5 is None, photo
            photo.tn_md5 = tn_md5

            assert photo.med_md5 == med_md5 or photo.med_md5 is None, photo
            photo.med_md5 = med_md5

            assert photo.full_md5 == full_md5 or photo.full_md5 is None, photo
            photo.full_md5 = full_md5

            assert photo.orig_md5 == orig_md5 or photo.orig_md5 is None, photo
            photo.orig_md5 = orig_md5

            # Self-assignment flags the placemark attribute as modified so
            # that it is included when the photo is updated below.
            photo.placemark = photo.placemark

            self._LogUpdate(photo)

            if Version._mutate_items:
                photo.Update(client, partial(callback, photo))
            else:
                callback(photo)

        def _OnFetchHead(head_callback, response):
            # The Etag is the hex-encoded MD5 hash of the photo, wrapped in quotes.
            if response.code != 404:
                etag = response.headers['Etag'][1:-1]
            else:
                etag = None
            head_callback(etag)

        def _SendHeadRequest(photo_id, suffix, head_callback):
            object_store = S3ObjectStore('photos-viewfinder-co')
            url = object_store.GenerateUrl(photo_id + suffix, method='HEAD')
            http_client = httpclient.AsyncHTTPClient()
            http_client.fetch(url,
                              method='HEAD',
                              callback=partial(_OnFetchHead, head_callback))

        client_data = photo.client_data
        if client_data is None or 'tn_md5' not in client_data:
            if not Version._allow_s3_queries:
                callback(photo)
                return
            # Get MD5 values by issuing HEAD against S3.
            with util.ArrayBarrier(_SetPhotoMD5Values) as b:
                _SendHeadRequest(photo.photo_id, '.t', b.Callback())
                _SendHeadRequest(photo.photo_id, '.m', b.Callback())
                _SendHeadRequest(photo.photo_id, '.f', b.Callback())
                _SendHeadRequest(photo.photo_id, '.o', b.Callback())
        else:
            # Get MD5 values by extracting from client_data.
            _SetPhotoMD5Values((client_data['tn_md5'], client_data['med_md5'],
                                photo.full_md5, photo.orig_md5))
Example No. 9
    def testEmptyBarrier(self):
        val = [False]

        def _Callback(exp_results, results):
            self.stop()
            self.assertEqual(results, exp_results)
            val[0] = True

        with util.ArrayBarrier(partial(_Callback, [])):
            pass
        self.wait()
        self.assertTrue(val[0])
Example No. 10
        def _QueryAndVerify(cls, barrier_cb, query_expr, start_key, end_key,
                            limit):
            def _FindIndex(list, db_key):
                for i, item in enumerate(list):
                    if item.GetKey() == db_key:
                        return i
                return -1

            def _Verify(results):
                all_items, some_items, some_item_keys = results

                # Ensure that IndexQuery and IndexQueryKeys return consistent results.
                assert len(some_items) == len(some_item_keys)
                assert [u.GetKey() for u in some_items] == some_item_keys

                # Ensure that the right subset was returned.
                start_index = _FindIndex(
                    all_items, start_key) + 1 if start_key is not None else 0
                end_index = _FindIndex(
                    all_items,
                    end_key) if end_key is not None else len(all_items)
                if limit is not None and start_index + limit < end_index:
                    end_index = start_index + limit

                assert len(some_items) == end_index - start_index, (
                    len(some_items), start_index, end_index)
                for expected_item, actual_item in zip(
                        all_items[start_index:end_index], some_items):
                    expected_dict = expected_item._asdict()
                    actual_dict = actual_item._asdict()
                    self.assertEqual(expected_dict, actual_dict)

                barrier_cb()

            with util.ArrayBarrier(_Verify) as b:
                cls.IndexQuery(self._client,
                               query_expr,
                               None,
                               b.Callback(),
                               limit=None)
                cls.IndexQuery(self._client,
                               query_expr,
                               None,
                               b.Callback(),
                               start_index_key=start_key,
                               end_index_key=end_key,
                               limit=limit)
                cls.IndexQueryKeys(self._client,
                                   query_expr,
                                   b.Callback(),
                                   start_index_key=start_key,
                                   end_index_key=end_key,
                                   limit=limit)
Example No. 11
    def testCreate(self):
        alloc = IdAllocator('type', 13)
        num_ids = 3000

        def _OnAllocated(ids):
            id_set = set(ids)
            assert len(id_set) == num_ids
            self.stop()

        with util.ArrayBarrier(_OnAllocated) as b:
            [
                alloc.NextId(self._client, callback=b.Callback())
                for i in xrange(num_ids)
            ]
Example No. 12
    def testCompact(self):
        val = [False]

        def _Callback(exp_results, results):
            self.stop()
            self.assertEqual(exp_results, results)
            val[0] = True

        with util.ArrayBarrier(partial(_Callback, [2]), compact=True) as b:
            b.Callback()(None)
            b.Callback()(2)
            b.Callback()(None)
        self.wait()
        self.assertTrue(val[0])
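
Judging from testCompact above, the compact=True flag drops None results before the completion callback runs, which is why three callbacks produce the single-element result [2]. A standalone illustration of that filtering step (hypothetical, not the library code):

raw_results = [None, 2, None]  # the values passed to the three callbacks above
compact_results = [r for r in raw_results if r is not None]
assert compact_results == [2]  # matches exp_results in testCompact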
Example No. 13
    def testResultsBarrier(self):
        val = [False]

        def _Callback(exp_results, results):
            self.stop()
            self.assertEqual(results, exp_results)
            val[0] = True

        with util.ArrayBarrier(partial(_Callback, [1, 2, 3])) as b:
            b.Callback()(1)
            b.Callback()(2)
            b.Callback()(3)
        self.wait()
        self.assertTrue(val[0])
Example No. 14
    def testArrayBarrier(self):
        val = [False]

        def _Callback(exp_results, results):
            self.stop()
            self.assertEqual(exp_results, results)
            val[0] = True

        with util.ArrayBarrier(partial(_Callback,
                                       ['cb1', 'cb2', 'cb3', 'cb4'])) as b:
            b.Callback()('cb1')
            b.Callback()('cb2')
            b.Callback()('cb3')
            b.Callback()('cb4')
        self.wait()
        self.assertTrue(val[0])
Example No. 15
    def Transform(self, client, viewpoint, callback):
        from activity import Activity
        from viewpoint import Viewpoint

        def _OnQuery(followers_activities):
            (follower_ids, last_key), activities = followers_activities

            activities = [
                activity for activity in activities if activity.name == 'share'
            ]

            if len(activities) > 0:
                # Find the share activity with the lowest timestamp.
                oldest_activity = None
                for activity in activities:
                    if oldest_activity is None or activity.timestamp < oldest_activity.timestamp:
                        oldest_activity = activity
                    activity.name = 'share_existing'

                # Override oldest activity as share_new and add followers.
                oldest_activity.name = 'share_new'
                act_dict = json.loads(activities[-1].json)
                act_dict['follower_ids'] = [
                    f_id for f_id in follower_ids if f_id != viewpoint.user_id
                ]
                oldest_activity.json = json.dumps(act_dict)

                # Update all activities.
                with util.Barrier(partial(callback, viewpoint)) as b:
                    for activity in activities:
                        self._LogUpdate(activity)

                        if Version._mutate_items:
                            activity.Update(client, b.Callback())
                        else:
                            b.Callback()()
            else:
                callback(viewpoint)

        with util.ArrayBarrier(_OnQuery) as b:
            Viewpoint.QueryFollowerIds(client, viewpoint.viewpoint_id,
                                       b.Callback())
            Activity.RangeQuery(client, viewpoint.viewpoint_id, None, None,
                                None, b.Callback())
Example No. 16
  def testNotificationRaces(self):
    """Concurrently create many notifications to force races."""
    op = Operation(1, 'o123')
    with util.ArrayBarrier(self.stop) as b:
      for i in xrange(10):
        Notification.CreateForUser(self._client,
                                   op,
                                   1,
                                   'test',
                                   callback=b.Callback(),
                                   invalidate={'invalid': True},
                                   activity_id='a123',
                                   viewpoint_id='v%d' % i,
                                   inc_badge=True)
    notifications = self.wait()

    for i, notification in enumerate(notifications):
      self.assertEqual(notification.user_id, 1)
      self.assertEqual(notification.name, 'test')
      self.assertEqual(notification.activity_id, 'a123')
      self.assertEqual(notification.viewpoint_id, 'v%d' % i)
      self.assertEqual(notification.badge, i + 1)
Example No. 17
    def _OnListTables(result):
      """First callback with results of a list-tables command.
      Creates a results barrier which will collect all table schemas
      and return 'callback' on successful verification of all tables.
      """
      # Creates and/or verifies all tables in the schema.
      with util.ArrayBarrier(callback) as b:
        read_capacity = 0
        write_capacity = 0
        for table in self._tables.values():
          read_capacity += table.read_units
          write_capacity += table.write_units
          if table.name not in result.tables:
            if verify_only:
              b.Callback()((table.name, None))
            else:
              logging.debug('creating table %s...' % table.name)
              client.CreateTable(table=table.name, hash_key_schema=table.hash_key_schema,
                                 range_key_schema=table.range_key_schema,
                                 read_units=table.read_units, write_units=table.write_units,
                                 callback=partial(_OnCreateTable, table, b.Callback()))
          else:
            _VerifyTable(table, b.Callback())

      # Warn of vestigial tables.
      for table in result.tables:
        if table.lower() not in self._tables:
          logging.warning('vestigial table %s exists in DB, not in schema' % table)
          if options.options.delete_vestigial and options.options.localdb:
            logging.warning('deleting vestigial table %s' % table)
            client.DeleteTable(table=table, callback=util.NoCallback)

      # Cost metric.
      def _CostPerMonth(units, read=True):
        return 30 * 24 * 0.01 * (units / (50 if read else 10))

      logging.debug('total tables: %d' % len(self._tables))
      logging.debug('total read capacity: %d, $%.2f/month' % (read_capacity, _CostPerMonth(read_capacity, True)))
      logging.debug('total write capacity: %d, $%.2f/month' % (write_capacity, _CostPerMonth(write_capacity, False)))
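
As a quick sanity check of the _CostPerMonth helper above (the $0.01 hourly rate and the 50/10 unit divisors come from this snippet, not from current AWS pricing), here is the same formula evaluated for a hypothetical table with 100 provisioned units:

def cost_per_month(units, read=True):
    # 30 days * 24 hours * $0.01 per hour, per block of 50 read (or 10 write) units.
    return 30 * 24 * 0.01 * (units / (50 if read else 10))

print('$%.2f' % cost_per_month(100, read=True))   # $14.40/month for 100 read units
print('$%.2f' % cost_per_month(100, read=False))  # $72.00/month for 100 write units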
Example No. 18
    def Transform(self, client, viewpoint, callback):
        from activity import Activity
        from followed import Followed
        from viewpoint import Viewpoint

        def _OnQuery(activities_followers):
            activities, (follower_ids, last_key) = activities_followers

            with util.Barrier(partial(callback, viewpoint)) as b:
                old_timestamp = viewpoint.last_updated

                if len(activities) > 0:
                    new_timestamp = max(a.timestamp for a in activities)
                else:
                    # Viewpoint has no activities.
                    new_timestamp = 0

                viewpoint.last_updated = new_timestamp
                self._LogUpdate(viewpoint)

                for follower_id in follower_ids:
                    logging.info(
                        'Followed (user_id=%s, viewpoint_id=%s): %s => %s' %
                        (follower_id, viewpoint.viewpoint_id,
                         Followed._TruncateToDay(old_timestamp),
                         Followed._TruncateToDay(new_timestamp)))

                    if Version._mutate_items:
                        Followed.UpdateDateUpdated(client, follower_id,
                                                   viewpoint.viewpoint_id,
                                                   old_timestamp,
                                                   new_timestamp, b.Callback())

        with util.ArrayBarrier(_OnQuery) as b:
            Activity.RangeQuery(client, viewpoint.viewpoint_id, None, None,
                                None, b.Callback())
            Viewpoint.QueryFollowerIds(client, viewpoint.viewpoint_id,
                                       b.Callback())
Example No. 19
    def disabled_t_estPlacemarkQueries(self):
        """Tests placemark queries."""
        def _QueryAndVerify(episode_ids, barrier_cb, search, matches):
            def _Verify(keys):
                ids = [key.hash_key for key in keys]
                self.assertEqual(len(ids), len(matches))
                [self.assertTrue(episode_ids[m] in ids) for m in matches]
                barrier_cb()

            Episode.IndexQueryKeys(self._client, ('episode.placemark={s}', {
                's': search
            }),
                                   callback=_Verify)

        def _OnCreate(locations, episodes):
            with util.Barrier(self.stop) as b:
                episode_ids = dict([(v.title, v.episode_id) for v in episodes])
                _QueryAndVerify(episode_ids, b.Callback(), 'Broadway',
                                ['kimball ph'])
                _QueryAndVerify(episode_ids, b.Callback(), '682 Broadway',
                                ['kimball ph'])
                _QueryAndVerify(episode_ids, b.Callback(), 'Broadway 682', [])
                _QueryAndVerify(episode_ids, b.Callback(),
                                'new york, ny, united states', [
                                    'kimball ph', 'bond st sushi',
                                    'viewfinder', 'soho house', 'google'
                                ])
                _QueryAndVerify(episode_ids, b.Callback(), 'new york, ny', [
                    'kimball ph', 'bond st sushi', 'viewfinder', 'soho house',
                    'google'
                ])
                _QueryAndVerify(
                    episode_ids, b.Callback(), 'NY, United States', [
                        'kimball ph', 'bond st sushi', 'viewfinder',
                        'soho house', 'google', 'kimball east', 'surf lodge'
                    ])
                _QueryAndVerify(episode_ids, b.Callback(), 'United States', [
                    'kimball ph', 'bond st sushi', 'viewfinder', 'soho house',
                    'google', 'kimball east', 'surf lodge'
                ])
                _QueryAndVerify(episode_ids, b.Callback(), 'Bahamas',
                                ['atlantis'])
                _QueryAndVerify(episode_ids, b.Callback(), 'Dominican',
                                ['casa kimball'])
                _QueryAndVerify(episode_ids, b.Callback(),
                                'Dominican Republic', ['casa kimball'])
                _QueryAndVerify(episode_ids, b.Callback(), 'Cabrera',
                                ['casa kimball'])
                _QueryAndVerify(episode_ids, b.Callback(), 'DR',
                                ['casa kimball'])

        locations = {
            'kimball ph':
            Placemark('US', 'United States', 'NY', 'New York', 'NoHo',
                      'Broadway', '682'),
            'bond st sushi':
            Placemark('US', 'United States', 'NY', 'New York', 'NoHo',
                      'Bond St', '6'),
            'viewfinder':
            Placemark('US', 'United States', 'NY', 'New York', 'SoHo',
                      'Grand St', '154'),
            'soho house':
            Placemark('US', 'United States', 'NY', 'New York',
                      'Meatpacking District', '9th Avenue', '29-35'),
            'google':
            Placemark('US', 'United States', 'NY', 'New York', 'Chelsea',
                      '8th Avenue', '111'),
            'kimball east':
            Placemark('US', 'United States', 'NY', 'East Hampton',
                      'Northwest Harbor', 'Milina', '35'),
            'surf lodge':
            Placemark('US', 'United States', 'NY', 'Montauk', '',
                      'Edgemere St', '183'),
            'casa kimball':
            Placemark('DR', 'Dominican Republic', 'Maria Trinidad Sanchez',
                      'Cabrera', 'Orchid Bay Estates', '', '5-6'),
            'atlantis':
            Placemark('BS', 'Bahamas', '', 'Paradise Island', '', '', '')
        }
        with util.ArrayBarrier(partial(_OnCreate, locations)) as b:
            device_episode_id = 0
            for place, placemark in locations.items():
                device_episode_id += 1
                timestamp = time.time()
                episode_id = Episode.ConstructEpisodeId(
                    timestamp, 1, device_episode_id)
                episode = Episode.CreateFromKeywords(
                    timestamp=timestamp,
                    episode_id=episode_id,
                    user_id=self._user.user_id,
                    viewpoint_id=self._user.private_vp_id,
                    publish_timestamp=timestamp,
                    title=place,
                    placemark=placemark)
                episode.Update(self._client, b.Callback())
Example No. 20
    def disabled_t_estLocationQueries(self):
        """Tests location queries."""
        def _QueryAndVerify(episode_ids, barrier_cb, loc_search, matches):
            def _Verify(keys):
                ids = [key.hash_key for key in keys]
                self.assertEqual(len(ids), len(matches))
                [self.assertTrue(episode_ids[m] in ids) for m in matches]
                barrier_cb()

            Episode.IndexQueryKeys(self._client, 'episode.location="%f,%f,%f"' % \
                                   (loc_search[0], loc_search[1], loc_search[2]), callback=_Verify)

        def _OnCreate(locations, episodes):
            with util.Barrier(self.stop) as b:
                episode_ids = dict([(v.title, v.episode_id) for v in episodes])
                # Exact search.
                _QueryAndVerify(episode_ids, b.Callback(),
                                Location(40.727657, -73.994583, 30),
                                ['kimball ph'])
                _QueryAndVerify(episode_ids, b.Callback(),
                                Location(41.044048, -71.950622, 100),
                                ['surf lodge'])
                # A super-small search area, centered in middle of Great Jones Alley.
                _QueryAndVerify(episode_ids, b.Callback(),
                                Location(40.727267, -73.994443, 10), [])
                # Widen the search area to 50m, centered in middle of Great Jones Alley.
                _QueryAndVerify(episode_ids, b.Callback(),
                                Location(40.727267, -73.994443, 50),
                                ['kimball ph', 'bond st sushi'])
                # Union square with a 2km radius.
                _QueryAndVerify(episode_ids, b.Callback(),
                                Location(40.736462, -73.990517, 2000), [
                                    'kimball ph', 'bond st sushi',
                                    'viewfinder', 'soho house', 'google'
                                ])
                # The Dominican Republic.
                _QueryAndVerify(episode_ids, b.Callback(),
                                Location(19.041349, -70.427856, 75000),
                                ['casa kimball'])
                # The Caribbean.
                _QueryAndVerify(episode_ids, b.Callback(),
                                Location(22.593726, -76.662598, 800000),
                                ['casa kimball', 'atlantis'])
                # Long Island.
                _QueryAndVerify(episode_ids, b.Callback(),
                                Location(40.989228, -72.144470, 40000),
                                ['kimball east', 'surf lodge'])

        locations = {
            'kimball ph': Location(40.727657, -73.994583, 50.0),
            'bond st sushi': Location(40.726901, -73.994358, 50.0),
            'viewfinder': Location(40.720169, -73.998756, 200.0),
            'soho house': Location(40.740616, -74.005880, 200.0),
            'google': Location(40.740974, -74.002115, 500.0),
            'kimball east': Location(41.034184, -72.210603, 50.0),
            'surf lodge': Location(41.044048, -71.950622, 100.0),
            'casa kimball': Location(19.636848, -69.896602, 100.0),
            'atlantis': Location(25.086104, -77.323065, 1000.0)
        }
        with util.ArrayBarrier(partial(_OnCreate, locations)) as b:
            device_episode_id = 0
            for place, location in locations.items():
                device_episode_id += 1
                timestamp = time.time()
                episode_id = Episode.ConstructEpisodeId(
                    timestamp, 1, device_episode_id)
                episode = Episode.CreateFromKeywords(
                    timestamp=timestamp,
                    episode_id=episode_id,
                    user_id=self._user.user_id,
                    viewpoint_id=self._user.private_vp_id,
                    publish_timestamp=timestamp,
                    title=place,
                    location=location)
                episode.Update(self._client, b.Callback())