Exemplo n.º 1
0
    def testGetFileCopy(self):
        """Create a download cache and exercise GetFileCopy on it."""

        def read_contents(path):
            # Slurp a local file's contents as text.
            with open(path, 'r') as fh:
                return fh.read()

        copy_one = os.path.join(self.tempdir, 'foo')
        copy_two = os.path.join(self.tempdir, 'bar')

        cache = download_cache.DownloadCache(self.cache_dir)

        # Copy into two destinations that don't exist yet; both copies
        # must end up with identical contents.
        cache.GetFileCopy(self.uri_a, copy_one)
        cache.GetFileCopy(self.uri_a, copy_two)
        self.assertEqual(read_contents(copy_one), read_contents(copy_two))

        # Copy a different URI over the now-existing files; both must be
        # overwritten and again match each other.
        cache.GetFileCopy(self.uri_b, copy_one)
        cache.GetFileCopy(self.uri_b, copy_two)
        self.assertEqual(read_contents(copy_one), read_contents(copy_two))
Exemplo n.º 2
0
    def testPurgeLogic(self):
        """Exercise Purge() in default, size-limited, and age-limited modes."""
        cache = download_cache.DownloadCache(self.cache_dir)

        for uri in (self.uri_a, self.uri_b):
            cache.GetFileObject(uri).close()

        # A default Purge() keeps files less than a day old, so both
        # freshly fetched entries survive.
        cache.Purge()
        self._validateCacheContents(cache, (self.hash_a, self.hash_b))

        # Forcing the cache size to zero empties it completely.
        cache.Purge(cache_size=0)
        self._validateCacheContents(cache, ())

        # Refetch both files.
        for uri in (self.uri_a, self.uri_b):
            cache.GetFileObject(uri).close()

        # Back-date uri_a's cache entry so it looks untouched for ages.
        os.utime(os.path.join(self.cache_dir, 'cache', self.hash_a), (2, 2))

        # An age-limited purge drops only the stale entry.
        cache.Purge(max_age=1000)
        self._validateCacheContents(cache, (self.hash_b, ))
Exemplo n.º 3
0
    def testThreadedGetFileMultiplePurge(self):
        """Do fetches and purges in a multiprocess environment.

        Ensure all processes complete, and return the right local file.
        """
        # Interleave fetches of two URIs with purge requests (None uri).
        requests = [(self.uri_a, self.cache_dir), (self.uri_b, self.cache_dir),
                    (None, self.cache_dir)] * 10

        # Use map semantics as a convenient way to run in parallel.
        # Close and join the pool afterwards so the 30 worker processes
        # are reaped even if map() raises (the original leaked the pool).
        pool = multiprocessing.Pool(processes=30)
        try:
            results = pool.map(_inProcessGetFile, requests)
        finally:
            pool.close()
            pool.join()

        # Fetch it ourselves and verify the results.
        cache = download_cache.DownloadCache(self.cache_dir)

        with cache.GetFileObject(self.uri_a) as f:
            contents_a = f.read()

        with cache.GetFileObject(self.uri_b) as f:
            contents_b = f.read()

        self._verifyFileContents(cache, self.uri_a)
        self._verifyFileContents(cache, self.uri_b)

        # Ensure that every process gave back the expected result.
        expected = [contents_a, contents_b, None] * 10
        self.assertEqual(results, expected)
Exemplo n.º 4
0
    def testFetchFunc(self):
        """Test getting files with a custom fetch function."""

        calls = [0]

        def countingFetch(uri, cache_file):
            """Count the invocation and write the uri as verifiable content."""
            calls[0] += 1
            osutils.WriteFile(cache_file, uri)

        cache = download_cache.DownloadCache(self.tempdir)
        self.assertEqual(calls[0], 0)

        # The first access of each URI invokes the fetch function once.
        cache.GetFileObject(self.dummy_uri, countingFetch)
        self.assertEqual(calls[0], 1)
        with cache.GetFileObject(self.dummy_uri, countingFetch) as f:
            self.assertEqual(f.read(), self.dummy_uri)
        self.assertEqual(calls[0], 1)

        cache.GetFileObject(self.dummy_uri2, countingFetch)
        self.assertEqual(calls[0], 2)
        with cache.GetFileObject(self.dummy_uri2, countingFetch) as f:
            self.assertEqual(f.read(), self.dummy_uri2)
        self.assertEqual(calls[0], 2)

        # Re-reading cached entries must never re-fetch.
        with cache.GetFileObject(self.dummy_uri, countingFetch) as f:
            self.assertEqual(f.read(), self.dummy_uri)
        with cache.GetFileObject(self.dummy_uri2, countingFetch) as f:
            self.assertEqual(f.read(), self.dummy_uri2)
        self.assertEqual(calls[0], 2)
Exemplo n.º 5
0
    def testGetFileObject(self):
        """Just create a download cache, and GetFile on it."""

        cache = download_cache.DownloadCache(self.cache_dir)

        # Fetch a file. The Python 2-only `file` builtin no longer exists
        # in Python 3; use the .read() member as a proxy for a file-like
        # object instead (matching the sibling version of this test).
        with cache.GetFileObject(self.uri_a) as f:
            self.assertTrue(hasattr(f, 'read'))
        self._verifyFileContents(cache, self.uri_a)
        self._validateCacheContents(cache, (self.hash_a, ))

        # Fetch a different file
        with cache.GetFileObject(self.uri_b) as f:
            self.assertTrue(hasattr(f, 'read'))
        self._verifyFileContents(cache, self.uri_b)
        self._validateCacheContents(cache, (self.hash_a, self.hash_b))

        # Fetch the first file a second time.
        cache.GetFileObject(self.uri_a).close()
        self._verifyFileContents(cache, self.uri_a)

        # There should be only 2 files in the cache.
        self._validateCacheContents(cache, (self.hash_a, self.hash_b))

        # Fetch a larger file
        cache.GetFileObject(self.uri_large).close()
        self._verifyFileContents(cache, self.uri_large)

        # There should be 3 files in the cache.
        self._validateCacheContents(
            cache, (self.hash_a, self.hash_b, self.hash_large))
Exemplo n.º 6
0
  def testGetFileObject(self):
    """Create a download cache and exercise GetFileObject on it."""

    cache = download_cache.DownloadCache(self.cache_dir)

    def assert_file_like(obj):
      # Having a .read() member is our proxy for "is a file-like object".
      self.assertTrue(hasattr(obj, 'read'))

    # The first fetch populates the cache with a single entry.
    with cache.GetFileObject(self.uri_a) as f:
      assert_file_like(f)
    self._verifyFileContents(cache, self.uri_a)
    self._validateCacheContents(cache, (self.hash_a,))

    # Fetching a second URI adds a second entry.
    with cache.GetFileObject(self.uri_b) as f:
      assert_file_like(f)
    self._verifyFileContents(cache, self.uri_b)
    self._validateCacheContents(cache, (self.hash_a, self.hash_b))

    # Refetching the first URI reuses its entry; still only two files.
    cache.GetFileObject(self.uri_a).close()
    self._verifyFileContents(cache, self.uri_a)
    self._validateCacheContents(cache, (self.hash_a, self.hash_b))

    # A third, larger file brings the cache to three entries.
    cache.GetFileObject(self.uri_large).close()
    self._verifyFileContents(cache, self.uri_large)
    self._validateCacheContents(cache,
                                (self.hash_a, self.hash_b, self.hash_large))
Exemplo n.º 7
0
    def __init__(self,
                 payload,
                 work_dir,
                 sign=False,
                 verify=False,
                 private_key=None):
        """Init for PaygenPayload.

        Args:
          payload: An instance of gspaths.Payload describing the payload to
              generate.
          work_dir: A working directory inside the chroot to put temporary
              files. This can NOT be shared among different runs of
              PaygenPayload, otherwise there would be file collisions. Among
              the things that may go into this directory are intermediate
              image files, extracted partitions, different logs and metadata
              files, payload and metadata hashes along with their signatures,
              the payload itself, postinstall config file, intermediate files
              that are generated by the signer, etc.
          sign: Boolean saying if the payload should be signed (normally, you
              do).
          verify: Whether the payload should be verified after being
              generated.
          private_key: If passed, the payload will be signed with that private
              key. If also verify is True, the public key is extracted from
              the private key and is used for verification.
        """
        self.payload = payload
        self.work_dir = work_dir
        self._verify = verify
        self._private_key = private_key
        # Starts unset; per the docstring, a public key is derived from the
        # private key when verification needs it.
        self._public_key = None

        # Local working copies of the source and target images.
        self.src_image_file = os.path.join(work_dir, 'src_image.bin')
        self.tgt_image_file = os.path.join(work_dir, 'tgt_image.bin')

        # Initialized to None here; populated elsewhere in the class.
        self.partition_names = None
        self.tgt_partitions = None
        self.src_partitions = None

        self._appid = ''

        # Paths for the generated payload and its associated artifacts,
        # all kept under the (non-shared) work_dir.
        self.payload_file = os.path.join(work_dir, 'delta.bin')
        self.log_file = os.path.join(work_dir, 'delta.log')
        self.description_file = os.path.join(work_dir, 'delta.json')
        self.metadata_size = 0
        self.metadata_hash_file = os.path.join(work_dir, 'metadata_hash')
        self.payload_hash_file = os.path.join(work_dir, 'payload_hash')

        self._postinst_config_file = os.path.join(work_dir, 'postinst_config')

        # Only set up a signer when signing was requested.
        self.signer = None
        if sign:
            self._SetupSigner(payload.build)

        # This cache dir will be shared with other processes, but we need our
        # own instance of the cache manager to properly coordinate.
        self._cache = download_cache.DownloadCache(
            self._FindCacheDir(), cache_size=PaygenPayload.CACHE_SIZE)
Exemplo n.º 8
0
  def testPickleUnpickle(self):
    """A DownloadCache must survive a pickle round trip."""
    cache = download_cache.DownloadCache(self.tempdir)
    pickle_path = os.path.join(self.tempdir, 'cache.pickle')

    # Serialize the cache to disk ...
    with open(pickle_path, 'wb') as fh:
      pickle.dump(cache, fh)

    # ... then make sure it can be deserialized again.
    with open(pickle_path, 'rb') as fh:
      pickle.load(fh)
Exemplo n.º 9
0
def _inProcessFetchIntoCache(uri_tempdir):
    """In a sub-process, fetch a URI straight into a DownloadCache.

    Args:
      uri_tempdir: A (uri, tempdir) tuple; tempdir hosts the cache.
    """
    try:
        uri, tempdir = uri_tempdir
        local_cache = download_cache.DownloadCache(tempdir)
        cache_file = local_cache._UriToCacheFile(uri)
        # Hold the purge lock (shared) so a concurrent purge can't remove
        # the file out from under the fetch.
        with local_cache._PurgeLock(shared=True, blocking=True):
            return local_cache._FetchIntoCache(uri, cache_file)
    except Exception:
        # Exceptions don't propagate usefully across process boundaries;
        # print the traceback here before re-raising.
        traceback.print_exc()
        raise
Exemplo n.º 10
0
    def testSetupCacheDirty(self):
        """Test _SetupCache with a dirty directory."""
        # Create some unexpected directories.
        for make_dir in ['foo/bar/stuff', 'bar']:
            os.makedirs(os.path.join(self.cache_dir, make_dir))

        # Touch some unexpected files. Use open() instead of the
        # Python 2-only file() builtin, which was removed in Python 3.
        for touch_file in ['bogus', 'foo/bogus']:
            open(os.path.join(self.cache_dir, touch_file), 'w').close()

        # Create a cache, and see that the dirty contents are gone.
        cache = download_cache.DownloadCache(self.cache_dir)
        self._validateCacheContents(cache, ())
Exemplo n.º 11
0
    def testGetFileInTempFile(self):
        """Create a download cache and exercise GetFileInTempFile on it."""

        cache = download_cache.DownloadCache(self.cache_dir)

        # Fetch the URI into a temp file, then read the same URI back two
        # other ways and confirm all three agree.
        temp_file = cache.GetFileInTempFile(self.uri_a)

        with cache.GetFileObject(self.uri_a) as f:
            cached_contents = f.read()

        with temp_file as f:
            temp_contents = f.read()

        self.assertEqual(temp_contents, cached_contents)
        # gslib.Cat reads the URI directly, bypassing the cache.
        self.assertEqual(temp_contents, gslib.Cat(self.uri_a))
Exemplo n.º 12
0
def _inProcessGetFile(uri_tempdir):
    """In a sub-process, call DownloadCache.GetFile.

    Args:
      uri_tempdir: A (uri, tempdir) tuple; a falsy uri means "purge".

    Returns:
      The fetched file's contents, or None for a purge request.
    """
    try:
        uri, tempdir = uri_tempdir
        local_cache = download_cache.DownloadCache(tempdir, cache_size=0)

        # No URI means this request is a purge rather than a fetch.
        if not uri:
            local_cache.Purge()
            return None

        with local_cache.GetFileObject(uri) as f:
            return f.read()
    except Exception:
        # Tracebacks don't cross process boundaries; print before re-raising.
        traceback.print_exc()
        raise
Exemplo n.º 13
0
    def testContextMgr(self):
        """Make sure we behave properly with 'with'."""

        # A zero-size cache should purge itself when the context exits.
        precache = download_cache.DownloadCache(self.cache_dir, cache_size=0)

        with precache as cache:
            # The context manager must yield the very same instance.
            self.assertIs(precache, cache)

            # Download a file; it is present while inside the with.
            cache.GetFileObject(self.uri_a).close()
            self._validateCacheContents(cache, (self.hash_a, ))

        # Exiting the with should have purged everything.
        self._validateCacheContents(cache, ())
Exemplo n.º 14
0
def _GenerateSinglePayload(payload, work_dir, sign, dry_run):
    """Generate a single payload.

    This is intended to be safe to call inside a new process.

    Args:
      payload: gspath.Payload object defining the payloads to generate.
      work_dir: Working directory for payload generation.
      sign: boolean to decide if payload should be signed.
      dry_run: boolean saying if this is a dry run.
    """
    # The cache dir is shared with other processes, but each process needs
    # its own cache manager instance to properly coordinate; the with block
    # manages that instance's lifetime.
    with download_cache.DownloadCache(
            paygen_payload_lib.FindCacheDir(),
            cache_size=PaygenBuild.CACHE_SIZE) as cache:
        # Actually generate the payload.
        paygen_payload_lib.CreateAndUploadPayload(
            payload, cache, work_dir=work_dir, sign=sign, dry_run=dry_run)
Exemplo n.º 15
0
    def testThreadedGetFile(self):
        """Spin off multiple processes and call GetFile.

        Ensure all processes complete, and return the same local file.
        """
        # Run 20 parallel fetches of the same URI, using map semantics as
        # a convenient way to run in parallel. Close and join the pool so
        # the worker processes are reaped even if map() raises (the
        # original leaked the pool).
        pool = multiprocessing.Pool(processes=10)
        try:
            results = pool.map(_inProcessGetFile,
                               [(self.uri_a, self.cache_dir)] * 20)
        finally:
            pool.close()
            pool.join()

        # Fetch it ourselves and verify the results.
        cache = download_cache.DownloadCache(self.cache_dir)
        self._verifyFileContents(cache, self.uri_a)

        with cache.GetFileObject(self.uri_a) as f:
            contents_a = f.read()

        # Ensure that every process gave back the expected result.
        expected = [contents_a] * 20
        self.assertEqual(results, expected)
Exemplo n.º 16
0
    def testCacheFileNames(self):
        """Make sure that some of the files we create have the expected names."""
        cache = download_cache.DownloadCache(self.cache_dir)

        # Expected paths derived from the URI 'gs://bucket/of/awesome'.
        expected_cache_lock = os.path.join(self.cache_dir, 'cache.lock')
        expected_cache = os.path.join(
            self.cache_dir, 'cache/3ba505fc7774455169af6f50b7964dff')
        expected_lock = os.path.join(self.cache_dir,
                                     'lock/3ba505fc7774455169af6f50b7964dff')

        # A cache content file is named after its URI's hash.
        self.assertEqual(cache._UriToCacheFile('gs://bucket/of/awesome'),
                         expected_cache)

        # The purge lock sits at the top of the cache directory.
        with cache._PurgeLock() as purge_lock:
            self.assertEqual(purge_lock.path, expected_cache_lock)

        # A content file's lock mirrors its name under lock/; acquiring it
        # twice yields the same path both times.
        for _ in range(2):
            with cache._CacheFileLock(expected_cache) as file_lock:
                self.assertEqual(file_lock.path, expected_lock)
Exemplo n.º 17
0
 def setUpClass(cls):
     """Create one shared temp cache dir and DownloadCache for the class."""
     # NOTE(review): takes `cls`, so this looks like a @classmethod — the
     # decorator would be above this chunk; confirm it is present.
     cls.cache_dir = tempfile.mkdtemp(prefix='crostools-unittest-cache')
     cls.cache = download_cache.DownloadCache(cls.cache_dir)
Exemplo n.º 18
0
 def testSetupCacheClean(self):
     """Test _SetupCache with a clean directory."""
     # A freshly constructed cache over an empty directory must report
     # no cached entries at all.
     new_cache = download_cache.DownloadCache(self.cache_dir)
     self._validateCacheContents(new_cache, ())