Example #1
 def test_upload_blobstore_retry_500(self):
   content = 'blob_content'
   s = hashlib.sha1(content).hexdigest()
   path = 'http://example.com:80/'
   data = [('token', 'foo bar')]
   content_type, body = isolateserver_archive.encode_multipart_formdata(
       data[:], [('content', s, 'blob_content')])
   self._requests = [
     (
       path + 'gen_url?foo#bar',
       {'data': data[:]},
       StringIO.StringIO('an_url/'),
     ),
     (
       'an_url/',
       {'data': body, 'content_type': content_type, 'retry_50x': False},
       # Let's say an HTTP 500 was returned.
       None,
     ),
     # In that case, a new url must be generated since the last one may have
     # been "consumed".
     (
       path + 'gen_url?foo#bar',
       {'data': data[:]},
       StringIO.StringIO('an_url/'),
     ),
     (
       'an_url/',
       {'data': body, 'content_type': content_type, 'retry_50x': False},
       StringIO.StringIO('ok42'),
     ),
   ]
   result = isolateserver_archive.upload_hash_content_to_blobstore(
       path + 'gen_url?foo#bar', data[:], s, content)
   self.assertEqual('ok42', result)
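Taken together, the four canned requests above pin down the retry contract: when the upload to the blobstore URL fails (a None response stands in for an HTTP 500), a fresh upload URL has to be generated before retrying, because the previous one may already have been consumed. Below is a rough sketch of that contract, not the module's real implementation; it assumes a hypothetical url_open(url, **kwargs) helper that returns a file-like object on success and None on a 50x failure, and upload_to_blobstore_sketch and max_attempts are illustrative names only.

def upload_to_blobstore_sketch(generate_url, data, content_type, body,
                               url_open, max_attempts=2):
  for _ in xrange(max_attempts):
    # A fresh one-shot upload URL is requested on every attempt, since a
    # failed upload may already have consumed the previously generated URL.
    handle = url_open(generate_url, data=data)
    if handle is None:
      return None
    upload_url = handle.read().strip()
    # 50x retries are disabled on this request because this loop regenerates
    # the URL itself instead of re-POSTing to a possibly stale one.
    result = url_open(
        upload_url, data=body, content_type=content_type, retry_50x=False)
    if result is not None:
      return result.read()
  return None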
Example #2
 def test_upload_blobstore_simple(self):
     content = 'blob_content'
     s = hashlib.sha1(content).hexdigest()
     path = 'http://example.com:80/'
     data = [('token', 'foo bar')]
     content_type, body = isolateserver_archive.encode_multipart_formdata(
         data[:], [('content', s, 'blob_content')])
     self._requests = [
         (
             path + 'gen_url?foo#bar',
             {
                 'data': data[:]
             },
             StringIO.StringIO('an_url/'),
         ),
         (
             'an_url/',
             {
                 'data': body,
                 'content_type': content_type,
                 'retry_50x': False
             },
             StringIO.StringIO('ok42'),
         ),
     ]
     result = isolateserver_archive.upload_hash_content_to_blobstore(
         path + 'gen_url?foo#bar', data[:], s, content)
     self.assertEqual('ok42', result)
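Both blobstore tests assume that URL opening is stubbed out elsewhere in the test case, so that every call made by the code under test is matched against the next entry in self._requests. The original harness is not shown here; the following is a minimal sketch of what such a stub could look like, where UrlOpenStubTest and _url_open_stub are made-up names and the real fixture may differ.

import StringIO
import unittest


class UrlOpenStubTest(unittest.TestCase):
  """Sketch of a test case whose URL opener consumes self._requests."""

  def setUp(self):
    super(UrlOpenStubTest, self).setUp()
    self._requests = []

  def _url_open_stub(self, url, **kwargs):
    # Each call made by the code under test must match the next expected
    # (url, kwargs, response) triple, in order.
    self.assertTrue(self._requests, 'Unexpected request to %s' % url)
    expected_url, expected_kwargs, response = self._requests.pop(0)
    self.assertEqual(expected_url, url)
    self.assertEqual(expected_kwargs, kwargs)
    # None stands in for a request that ultimately failed (e.g. HTTP 500).
    return response

  def test_example(self):
    self._requests = [
        ('http://example.com/gen_url', {'data': []},
         StringIO.StringIO('an_url/')),
    ]
    result = self._url_open_stub('http://example.com/gen_url', data=[])
    self.assertEqual('an_url/', result.read())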
Example #3
  def test_large(self):
    content = ''
    compressed = ''
    while (
        len(compressed) <= isolateserver_archive.MIN_SIZE_FOR_DIRECT_BLOBSTORE):
      # The goal here is to generate a file that, once compressed, is larger
      # than MIN_SIZE_FOR_DIRECT_BLOBSTORE.
      content += ''.join(chr(random.randint(0, 255)) for _ in xrange(20*1024))
      compressed = zlib.compress(
          content, isolateserver_archive.compression_level('foo.txt'))

    s = hashlib.sha1(content).hexdigest()
    infiles = {
      'foo.txt': {
        's': len(content),
        'h': s,
      },
    }
    path = 'http://random/'
    sha1encoded = binascii.unhexlify(s)
    content_type, body = isolateserver_archive.encode_multipart_formdata(
                [('token', 'foo bar')], [('content', s, compressed)])

    self._requests = [
      (path + 'content/get_token', {}, StringIO.StringIO('foo bar')),
      (
        path + 'content/contains/default-gzip?token=foo%20bar',
        {'data': sha1encoded, 'content_type': 'application/octet-stream'},
        StringIO.StringIO('\0'),
      ),
      (
        path + 'content/generate_blobstore_url/default-gzip/%s' % s,
        {'data': [('token', 'foo bar')]},
        StringIO.StringIO('an_url/'),
      ),
      (
        'an_url/',
        {'data': body, 'content_type': content_type, 'retry_50x': False},
        StringIO.StringIO('ok'),
      ),
    ]

    old_read_and_compress = isolateserver_archive.read_and_compress
    try:
      isolateserver_archive.read_and_compress = lambda x, y: compressed
      result = isolateserver_archive.upload_sha1_tree(
            base_url=path,
            indir=os.getcwd(),
            infiles=infiles,
            namespace='default-gzip')
    finally:
      isolateserver_archive.read_and_compress = old_read_and_compress

    self.assertEqual(0, result)
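This version patches isolateserver_archive.read_and_compress by hand and restores it in a finally block. Example #4 below performs the same substitution through a self.mock helper that undoes the patch automatically during teardown. A minimal sketch of such a helper follows, under the assumption that it simply records the original attribute and restores it in tearDown; AutoMockTestCase is an illustrative name, not the project's actual base class.

import unittest


class AutoMockTestCase(unittest.TestCase):
  """Sketch of a base class providing a self-restoring mock() helper."""

  def setUp(self):
    super(AutoMockTestCase, self).setUp()
    self._saved_attrs = []

  def mock(self, obj, name, new_value):
    # Remember the original attribute so tearDown can put it back.
    self._saved_attrs.append((obj, name, getattr(obj, name)))
    setattr(obj, name, new_value)

  def tearDown(self):
    # Restore in reverse order so overlapping mocks unwind cleanly.
    while self._saved_attrs:
      obj, name, old_value = self._saved_attrs.pop()
      setattr(obj, name, old_value)
    super(AutoMockTestCase, self).tearDown()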
Example #4
  def test_large(self):
    content = ''
    compressed = ''
    while (
        len(compressed) <= isolateserver_archive.MIN_SIZE_FOR_DIRECT_BLOBSTORE):
      # The goal here is to generate a file that, once compressed, is larger
      # than MIN_SIZE_FOR_DIRECT_BLOBSTORE.
      content += ''.join(chr(random.randint(0, 255)) for _ in xrange(20*1024))
      compressed = zlib.compress(
          content, isolateserver_archive.compression_level('foo.txt'))

    s = hashlib.sha1(content).hexdigest()
    infiles = {
      'foo.txt': {
        's': len(content),
        'h': s,
      },
    }
    path = 'http://random/'
    sha1encoded = binascii.unhexlify(s)
    content_type, body = isolateserver_archive.encode_multipart_formdata(
                [('token', 'foo bar')], [('content', s, compressed)])

    self._requests = [
      (path + 'content/get_token', {}, StringIO.StringIO('foo bar')),
      (
        path + 'content/contains/default-gzip?token=foo%20bar',
        {'data': sha1encoded, 'content_type': 'application/octet-stream'},
        StringIO.StringIO('\0'),
      ),
      (
        path + 'content/generate_blobstore_url/default-gzip/%s' % s,
        {'data': [('token', 'foo bar')]},
        StringIO.StringIO('an_url/'),
      ),
      (
        'an_url/',
        {'data': body, 'content_type': content_type, 'retry_50x': False},
        StringIO.StringIO('ok'),
      ),
    ]

    self.mock(isolateserver_archive, 'read_and_compress',
              lambda x, y: compressed)
    result = isolateserver_archive.upload_sha1_tree(
          base_url=path,
          indir=os.getcwd(),
          infiles=infiles,
          namespace='default-gzip')

    self.assertEqual(0, result)
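A side note on the contains check in the test_large examples: the request body is binascii.unhexlify(s), i.e. the raw 20-byte SHA-1 digest rather than its hex form, and the single '\0' byte in the canned reply is treated as "missing, upload it". The snippet below illustrates that encoding for a batch of digests; the one-byte-per-digest reply and the '\1' value for "already present" are assumptions inferred from the test, not documented server behaviour.

import binascii

# Two hex SHA-1 digests; the request body is presumably the concatenation of
# their raw 20-byte forms, which is what binascii.unhexlify produces.
digests = ['0' * 40, 'f' * 40]
payload = ''.join(binascii.unhexlify(d) for d in digests)
assert len(payload) == 40  # 20 raw bytes per digest

# Hypothetical reply: one byte per digest, '\0' for missing, '\1' for present.
reply = '\0\1'
missing = [d for d, flag in zip(digests, reply) if flag == '\0']
print missing  # only the first digest would need to be uploaded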