def _assertUploadResult(self, result, filename, size):
    """Assert that a file-upload result matches the uploaded file.

    Checks the result payload (success flag, name, size, uuid) and then
    stats the blob in GCS to confirm the stored metadata and size agree
    with what the result reported.

    Args:
        result: dict-like upload result with 'successful', 'name',
            'size' and 'uuid' keys.
        filename: expected original file name.
        size: expected file size in bytes.
    """
    # assertEqual replaces the deprecated assertEquals alias
    # (removed in Python 3.12).
    self.assertEqual(True, result['successful'])
    # The reported file name must round-trip unchanged.
    self.assertEqual(filename, result['name'])
    # The reported size must round-trip unchanged.
    self.assertEqual(size, result['size'])
    # A non-empty uuid must have been assigned.
    self.assertTrue(len(result['uuid']) > 0)

    # Cross-check against the object actually stored in GCS.
    file_info = gcs.stat(gae_gcs.get_gcs_filename(result['uuid']))
    # The filename is mirrored into the object's custom metadata.
    self.assertEqual(file_info.metadata['x-goog-meta-filename'], filename)
    # Stored object size must match what the upload reported.
    self.assertEqual(file_info.st_size, size)
 def test_blobstore_sanity_check(self):
    """Sanity-check writing and stat-ing a blob directly through GCS.

    Writes a small object with the original filename carried in custom
    metadata, then verifies the metadata survives a gcs.stat() round
    trip.
    """
    test_uuid = str(uuid.uuid4())
    bucket_filename = gae_gcs.get_gcs_filename(test_uuid)
    # Store the original filename in x-goog-meta-* custom metadata,
    # mirroring what the upload path does.
    gcs_file = gcs.open(bucket_filename,
                        'w',
                        content_type='application/octet-stream',
                        options={
                            'x-goog-meta-filename': 'test_filename'
                        })
    # assertIsNotNone replaces the deprecated
    # assertNotEquals(None, ...) idiom.
    self.assertIsNotNone(gcs_file)
    gcs_file.write('test blob data..')
    gcs_file.close()
    stats = gcs.stat(bucket_filename)
    # assertEqual replaces the deprecated assertEquals alias.
    self.assertEqual(
        'test_filename', stats.metadata['x-goog-meta-filename']
    )
def test_upload(uploads):
  """Persist upload records to the datastore; roll back blobs on failure.

  For each upload, builds a TestModel entity keyed by the upload's uuid
  and writes all entities in one batch. If the datastore write fails,
  the corresponding GCS blobs are deleted so they are not orphaned.

  Args:
      uploads: iterable of upload objects exposing .uuid and .file_info;
          also expected to provide .to_dict() for the JSON response.

  Returns:
      JSON string built from uploads.to_dict().

  Raises:
      Exception: when saving the upload info to the datastore fails;
          the original error is attached as __cause__.
  """
  entities = []
  try:
    for upload in uploads:
      entity = TestModel(
        test_uuid=upload.uuid)
      entities.append(entity)
      file_info = upload.file_info
      logging.info('upload.file_info: %s', file_info)
    ndb.put_multi(entities)
  # A bare `except:` would also swallow SystemExit/KeyboardInterrupt;
  # catch Exception and chain the cause so the traceback survives.
  except Exception as err:
    # rollback the operation and delete the blobs,
    # so they are not orphaned..
    for upload in uploads:
      gcs.delete(gae_gcs.get_gcs_filename(upload.uuid))
    raise Exception('Saving file upload info to datastore failed..') from err
  # NOTE(review): uploads is iterated above yet to_dict() is called on
  # the collection itself — presumably a custom container type; verify.
  return json.dumps(uploads.to_dict())