def test_dump_log(self):
     """dump_log writes one timestamped JSON log file per call, with no item overlap between calls."""
     log_dir = 'log_dir/'
     ndb.put_multi(self.fixtures[:2])
     datastore_to_gcs.dump_log(TestModel, self.bucket, log_dir)
     log_files = gcs.list_objects(self.bucket, log_dir)
     self.assertEqual(len(log_files), 1)
     # Raw string: '\d' and '\.' are regex escapes, not string escapes.
     isoformat_regex = r'\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\d\.json'
     self.assertRegexpMatches(log_files[0], isoformat_regex)
     pprint(log_files)
     log_items = gcs.download_object(self.bucket, log_dir + log_files[0])
     pprint(log_items)
     self.assertEqual(len(log_items), 2)
     # Add more items, expect no overlap
     sleep(1)  # ensure the second dump gets a distinct timestamped filename
     self.fixtures[2].put()
     self.fixtures[3].put()
     datastore_to_gcs.dump_log(TestModel, self.bucket, log_dir)
     log_files = gcs.list_objects(self.bucket, log_dir)
     self.assertEqual(len(log_files), 2)
     # Concatenate the contents of both log files; chain.from_iterable
     # avoids building an intermediate list just to unpack it.
     log_items = list(
         itertools.chain.from_iterable(
             gcs.download_object(self.bucket, log_dir + lf)
             for lf in log_files))
     pprint(log_items)
     self.assertEqual(len(log_items), 4)
 def test_nothing_new_to_update(self):
     """Running the update with no Datastore changes leaves the GCS object untouched."""
     self.initial_dump()
     initial_result = gcs.download_object(self.bucket, self.object_name)
     self.assertIn(self.object_name, gcs.list_objects(self.bucket))
     # Don't change anything but run update -- should leave file as is
     self.run_datastore_to_gcs_update()
     updated_result = gcs.download_object(self.bucket, self.object_name)
     # assertEqual produces a diff on failure, unlike assertTrue(a == b);
     # the original message was also missing a newline before the second value.
     self.assertEqual(initial_result, updated_result,
                      msg='{}\ndoes not match\n{}'.format(
                          pformat(updated_result), pformat(initial_result)))
 def test_initial_dump(self):
     """The initial dump creates the GCS object with all items and the expected fields."""
     self.initial_dump()
     # Assert that the transferred object exists
     self.assertIn(self.object_name, gcs.list_objects(self.bucket))
     transferred_items = gcs.download_object(self.bucket, self.object_name)
     # Assert that it has the right number of items
     self.assertEqual(len(transferred_items), 2)
     # Assert that field names are right; assertIn (consistent with the
     # existence check above) names the missing key on failure.
     self.assertIn('email', transferred_items[0])
     self.assertIn('id', transferred_items[0])
     self.assertIn('last_modified', transferred_items[0])
 # Example #4
 def test_download_object(self):
     filename = self.upload_json()
     downloaded_object = gcs.download_object(self.bucket, filename)
     # Check that it's a DataDict
     print "DOWNLOADED"
     pprint(downloaded_object)
     self.assertIsInstance(downloaded_object[0], util.DataDict)
     print "\nFIXTURE"
     data_dict_fixture = [
         util.DataDict(fixture) for fixture in self.dict_fixtures
     ]
     pprint(data_dict_fixture)
     self.assertTrue(downloaded_object == data_dict_fixture)
 # Example #5
 def test_download_object_filter_fields(self):
     """Downloading with a field whitelist keeps only the listed (possibly nested) fields."""
     filename = self.upload_json()
     wanted_fields = ['id', 'nested.inner']
     result = gcs.download_object(self.bucket, filename, wanted_fields)
     # Filtering fields must not drop any items.
     self.assertEqual(len(result), len(self.dict_fixtures))
     first_item = result[0]
     self.assertIsInstance(first_item['id'], unicode)
     self.assertIsInstance(first_item['nested']['inner'], int)
     # Build the expected first item from the fixture, keeping only the
     # whitelisted fields.
     source = self.dict_fixtures[0]
     expected = util.DataDict({
         'id': source['id'],
         'nested': {'inner': source['nested']['inner']},
     })
     self.assertDictEqual(first_item, expected)
 # Example #6
 def test_download_object_model(self):
     filename = self.upload_model_json()
     downloaded_object = gcs.download_object(self.bucket,
                                             filename,
                                             object_class=FixtureModel)
     # Check that it's a FixtureModel
     self.assertIsInstance(downloaded_object[0], FixtureModel)
     # Fudge the dates
     for d in downloaded_object:
         d.dt = dateutil.parser.parse(d.dt).date()
     print "DOWNLOADED"
     pprint([d.__dict__ for d in downloaded_object])
     print "\nFIXTURE"
     pprint([f.__dict__ for f in self.model_fixtures])
     self.assertTrue(
         all([
             d.__dict__ == f.__dict__ for (d, f) in itertools.izip(
                 downloaded_object, self.model_fixtures)
         ]))
 def test_modify_existing(self):
     self.initial_dump()
     new_email = '*****@*****.**'
     orig_results = TestModel.query(
         TestModel.email == self.fixtures[6].email).fetch()
     self.assertEqual(len(orig_results), 1)
     orig = orig_results[0]
     print 'ORIG:'
     print orig
     orig.email = new_email
     orig.put()
     self.run_datastore_to_gcs_update()
     transferred_items = gcs.download_object(self.bucket, self.object_name)
     new_results = [
         e for e in transferred_items if e['id'] == orig.key.id()
     ]
     self.assertEqual(1, len(new_results))
     modified = new_results[0]
     print 'MODIFIED:'
     print modified
     self.assertEqual(modified['email'], new_email)
 def test_add_new_items(self):
     """Items added after the initial dump are included on the next transfer."""
     self.initial_dump()  # Adds 2 items
     self.add_new_items()  # Adds 3 items
     transferred = gcs.download_object(self.bucket, self.object_name)
     pprint(transferred)
     # 2 initial + 3 new = 5 total items in the transferred object.
     self.assertEqual(len(transferred), 5)