def test_split(self):
        self._execute('archive',
                      'split.isolate',
                      ['-V', 'DEPTH', '.', '-V', 'PRODUCT_DIR', 'files1'],
                      False,
                      cwd=os.path.join(ROOT_DIR, 'tests', 'isolate'))
        # Reimplement _expected_hash_tree():
        tree = self._gen_expected_tree(None)
        isolated_base = self.isolated[:-len('.isolated')]
        isolated_hashes = [
            unicode(
                isolateserver.hash_file(isolated_base + '.0.isolated', ALGO)),
            unicode(
                isolateserver.hash_file(isolated_base + '.1.isolated', ALGO)),
        ]
        tree.extend(isolated_hashes)
        self.assertEqual(sorted(tree), map(unicode, self._result_tree()))

        # Reimplement _expected_isolated():
        files = self._gen_files(None, None, False)
        expected = {
            u'algo': u'sha-1',
            u'command': [u'python', u'split.py'],
            u'files': {
                u'split.py': files['split.py']
            },
            u'includes': isolated_hashes,
            u'os': unicode(isolate.get_flavor()),
            u'relative_cwd': unicode(RELATIVE_CWD[self.case()]),
            u'version': u'1.0',
        }
        self.assertEqual(expected, json.load(open(self.isolated, 'r')))
  def test_split(self):
    self._execute(
        'hashtable',
        'split.isolate',
        [
          '--path-variable', 'DEPTH', '.',
          '--path-variable', 'PRODUCT_DIR', 'files1',
        ],
        False,
        cwd=os.path.join(ROOT_DIR, 'tests', 'isolate'))
    # Reimplement _expected_hash_tree():
    tree = self._gen_expected_tree(None)
    isolated_base = self.isolated[:-len('.isolated')]
    isolated_hashes = [
      unicode(isolateserver.hash_file(isolated_base + '.0.isolated', ALGO)),
      unicode(isolateserver.hash_file(isolated_base + '.1.isolated', ALGO)),
    ]
    tree.extend(isolated_hashes)
    self.assertEqual(sorted(tree), map(unicode, self._result_tree()))

    # Reimplement _expected_isolated():
    files = self._gen_files(None, None, False)
    expected = {
      u'algo': u'sha-1',
      u'command': [u'python', u'split.py'],
      u'files': {u'split.py': files['split.py']},
      u'includes': isolated_hashes,
      u'os': unicode(isolate.get_flavor()),
      u'relative_cwd': unicode(RELATIVE_CWD[self.case()]),
      u'version': unicode(isolate.isolateserver.ISOLATED_FILE_VERSION),
    }
    self.assertEqual(expected, json.load(open(self.isolated, 'r')))

    key = os.path.join(u'test', 'data', 'foo.txt')
    expected = {
      u'algo': u'sha-1',
      u'files': {key: files[key]},
      u'os': unicode(isolate.get_flavor()),
      u'version': unicode(isolate.isolateserver.ISOLATED_FILE_VERSION),
    }
    self.assertEqual(
        expected, json.load(open(isolated_base + '.0.isolated', 'r')))

    key = os.path.join(u'files1', 'subdir', '42.txt')
    expected = {
      u'algo': u'sha-1',
      u'files': {key: files[key]},
      u'os': unicode(isolate.get_flavor()),
      u'version': unicode(isolate.isolateserver.ISOLATED_FILE_VERSION),
    }
    self.assertEqual(
        expected, json.load(open(isolated_base + '.1.isolated', 'r')))
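The assertions above lean entirely on isolateserver.hash_file(path, ALGO). A minimal sketch of such a helper, assuming it simply streams the file through the given hashlib constructor and returns the hex digest (the actual implementation may buffer differently):

import hashlib

def hash_file(filepath, algo=hashlib.sha1):
    """Hashes a file's content in fixed-size chunks and returns the hex digest.

    Sketch only; the real isolateserver.hash_file may differ in details.
    """
    digest = algo()
    with open(filepath, 'rb') as f:
        while True:
            chunk = f.read(1024 * 1024)  # 1 MiB at a time to bound memory use.
            if not chunk:
                break
            digest.update(chunk)
    return digest.hexdigest()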
 def _download_given_files(self, files):
   """Tries to download the files from the server."""
   args = ['download', '--target', self.rootdir]
   file_hashes = [isolateserver.hash_file(f, hashlib.sha1) for f in files]
   for f in file_hashes:
     args.extend(['--file', f, f])
   self._run(args)
   # Assert the files are present.
   actual = [
       isolateserver.hash_file(os.path.join(self.rootdir, f), hashlib.sha1)
       for f in os.listdir(self.rootdir)
   ]
   self.assertEqual(sorted(file_hashes), sorted(actual))
 def _download_given_files(self, files):
     """Tries to download the files from the server."""
     args = ['download', '--target', self.rootdir]
     file_hashes = [isolateserver.hash_file(f, hashlib.sha1) for f in files]
     for f in file_hashes:
         args.extend(['--file', f, f])
     self._run(args)
     # Assert the files are present.
     actual = [
         isolateserver.hash_file(os.path.join(self.rootdir, f),
                                 hashlib.sha1)
         for f in os.listdir(self.rootdir)
     ]
     self.assertEqual(sorted(file_hashes), sorted(actual))
 def _gen_expected_tree(self, empty_file):
     expected = [
         unicode(v['h'])
         for v in self._gen_files(False, empty_file, False).itervalues()
     ]
     expected.append(unicode(isolateserver.hash_file(self.isolated, ALGO)))
     return expected
 def test_all_items_invalid(self):
   out = self._test_all_items_invalid('archive')
   expected = (
       '%s  isolate_smoke_test.isolated\n' %
       isolateserver.hash_file(self.isolated, ALGO))
   self.assertEqual(expected, out)
   self._expected_hash_tree(None)
def archive(isolate_server, namespace, isolated, algo, verbose):
  """Archives a .isolated and all the dependencies on the CAC."""
  logging.info('archive(%s, %s, %s)', isolate_server, namespace, isolated)
  tempdir = None
  if file_path.is_url(isolate_server):
    command = 'archive'
    flag = '--isolate-server'
  else:
    command = 'hashtable'
    flag = '--outdir'

  print('Archiving: %s' % isolated)
  try:
    cmd = [
      sys.executable,
      os.path.join(ROOT_DIR, 'isolate.py'),
      command,
      flag, isolate_server,
      '--namespace', namespace,
      '--isolated', isolated,
    ]
    cmd.extend(['--verbose'] * verbose)
    logging.info(' '.join(cmd))
    if subprocess.call(cmd, verbose):
      return
    return isolateserver.hash_file(isolated, algo)
  finally:
    if tempdir:
      shutil.rmtree(tempdir)
def archive(isolate_server, namespace, isolated, algo, verbose):
    """Archives a .isolated and all the dependencies on the CAC."""
    logging.info("archive(%s, %s, %s)", isolate_server, namespace, isolated)
    tempdir = None
    if file_path.is_url(isolate_server):
        command = "archive"
        flag = "--isolate-server"
    else:
        command = "hashtable"
        flag = "--outdir"

    print ("Archiving: %s" % isolated)
    try:
        cmd = [
            sys.executable,
            os.path.join(ROOT_DIR, "isolate.py"),
            command,
            flag,
            isolate_server,
            "--namespace",
            namespace,
            "--isolated",
            isolated,
        ]
        cmd.extend(["--verbose"] * verbose)
        logging.info(" ".join(cmd))
        if subprocess.call(cmd, verbose):
            return
        return isolateserver.hash_file(isolated, algo)
    finally:
        if tempdir:
            shutil.rmtree(tempdir)
def archive(isolate_server, namespace, isolated, algo, verbose):
    """Archives a .isolated and all the dependencies on the CAC."""
    logging.info('archive(%s, %s, %s)', isolate_server, namespace, isolated)
    tempdir = None
    if file_path.is_url(isolate_server):
        command = 'archive'
        flag = '--isolate-server'
    else:
        command = 'hashtable'
        flag = '--outdir'

    print('Archiving: %s' % isolated)
    try:
        cmd = [
            sys.executable,
            os.path.join(ROOT_DIR, 'isolate.py'),
            command,
            flag,
            isolate_server,
            '--namespace',
            namespace,
            '--isolated',
            isolated,
        ]
        cmd.extend(['--verbose'] * verbose)
        logging.info(' '.join(cmd))
        if subprocess.call(cmd, verbose):
            return
        return isolateserver.hash_file(isolated, algo)
    finally:
        if tempdir:
            shutil.rmtree(tempdir)
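archive() picks between the 'archive' and 'hashtable' sub-commands based on file_path.is_url(). A minimal sketch of such a check, assuming it only looks at the URL scheme (the real helper may be stricter):

import urlparse

def is_url(path_or_url):
    """Returns True for http(s) isolate server URLs, False for local --outdir
    paths. Sketch only; file_path.is_url() may perform additional validation.
    """
    return urlparse.urlparse(path_or_url).scheme in ('http', 'https')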
class IsolateTempdir(unittest.TestCase):
    def setUp(self):
        super(IsolateTempdir, self).setUp()
        self.tempdir = tempfile.mkdtemp(prefix='isolate_smoke_')
        self.isolated = os.path.join(self.tempdir,
                                     'isolate_smoke_test.isolated')

    def tearDown(self):
        try:
            logging.debug(self.tempdir)
            shutil.rmtree(self.tempdir)
        finally:
            super(IsolateTempdir, self).tearDown()

    def _gen_files(self, read_only, empty_file, with_time):
        """Returns a dict of files like calling isolate.process_input() on each
    file.

    Arguments:
    - read_only: Mark all the 'm' modes without the writeable bit.
    - empty_file: Add a specific empty file (size 0).
    - with_time: Include 't' timestamps. For saved state .state files.
    """
        root_dir = ROOT_DIR
        if RELATIVE_CWD[self.case()] == '.':
            root_dir = os.path.join(root_dir, 'tests', 'isolate')

        files = dict((unicode(f), {}) for f in DEPENDENCIES[self.case()])

        for relfile, v in files.iteritems():
            filepath = os.path.join(root_dir, relfile)
            filestats = os.lstat(filepath)
            is_link = stat.S_ISLNK(filestats.st_mode)
            if not is_link:
                v[u's'] = filestats.st_size
                if sys.platform != 'win32':
                    v[u'm'] = _fix_file_mode(relfile, read_only)
            if with_time:
                # Used to skip recalculating the hash. Use the most recent update
                # time.
                v[u't'] = int(round(filestats.st_mtime))
            if is_link:
                v[u'l'] = os.readlink(filepath)  # pylint: disable=E1101
            else:
                # Upgrade the value to unicode so diffing the structure in case of
                # test failure is easier, since the basestring type must match,
                # str!=unicode.
                v[u'h'] = unicode(isolateserver.hash_file(filepath, ALGO))

        if empty_file:
            item = files[empty_file]
            item['h'] = unicode(HASH_NULL)
            if sys.platform != 'win32':
                item['m'] = 288
            item['s'] = 0
            if with_time:
                item['T'] = True
                item.pop('t', None)
        return files
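For reference, each entry in the dict returned by _gen_files() is a small metadata map keyed by attribute letter. A sketch of the shape for a regular (non-symlink) file; the values are illustrative and the hash shown is the sha-1 of empty content:

expected_entry = {
    u'h': u'da39a3ee5e6b4b0d3255bfef95601890afd80709',  # hex digest of the file
    u's': 0,                                            # size in bytes
    u'm': 288,                                          # mode 0440, omitted on Windows
    # u't': 1378054321,   # mtime, only when with_time=True
    # u'l': u'target',    # symlink target replaces 'h'/'s' for links
}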
 def _gen_expected_tree(self, empty_file):
   expected = [
     unicode(v['h'])
     for v in self._gen_files(False, empty_file, False).itervalues()
   ]
   expected.append(
       unicode(isolateserver.hash_file(self.isolated, ALGO)))
   return expected
    def test_delete_quite_corrupted_cache_entry(self):
        # Test that an entry with an invalid file size properly gets removed and
        # fetched again. This test case also checks for file modes.
        isolated_file = os.path.join(self.data_dir, 'file_with_size.isolated')
        isolated_hash = isolateserver.hash_file(isolated_file, ALGO)
        file1_hash = self._store('file1.txt')
        # Note that <tempdir>/table/<file1_hash> has 640 mode.

        # Run the test once to generate the cache.
        out, err, returncode = self._run(
            self._generate_args_with_isolated(isolated_file))
        if VERBOSE:
            print out
            print err
        self.assertEqual(0, returncode)
        expected = {
            '.': (040775, 040755, 040777),
            'state.json': (0100664, 0100644, 0100666),
            # The reason for 0100666 on Windows is that the file node had to be
            # modified to delete the hardlinked node. The read only bit is reset on
            # load.
            file1_hash: (0100400, 0100400, 0100666),
            isolated_hash: (0100400, 0100400, 0100444),
        }
        self.assertTreeModes(self.cache, expected)

        # Modify one of the files in the cache to be invalid.
        cached_file_path = os.path.join(self.cache, file1_hash)
        previous_mode = os.stat(cached_file_path).st_mode
        os.chmod(cached_file_path, 0600)
        old_content = read_content(cached_file_path)
        write_content(cached_file_path, old_content + ' but now invalid size')
        os.chmod(cached_file_path, previous_mode)
        logging.info('Modified %s', cached_file_path)
        # Ensure that the cache has an invalid file.
        self.assertNotEqual(
            os.stat(os.path.join(self.data_dir, 'file1.txt')).st_size,
            os.stat(cached_file_path).st_size)

        # Rerun the test and make sure the cache contains the right file afterwards.
        out, err, returncode = self._run(
            self._generate_args_with_isolated(isolated_file))
        if VERBOSE:
            print out
            print err
        self.assertEqual(0, returncode)
        expected = {
            '.': (040700, 040700, 040777),
            'state.json': (0100600, 0100600, 0100666),
            file1_hash: (0100400, 0100400, 0100666),
            isolated_hash: (0100400, 0100400, 0100444),
        }
        self.assertTreeModes(self.cache, expected)

        self.assertEqual(
            os.stat(os.path.join(self.data_dir, 'file1.txt')).st_size,
            os.stat(cached_file_path).st_size)
        self.assertEqual(old_content, read_content(cached_file_path))
  def _store(self, filename):
    """Stores a test data file in the table.

    Returns its sha-1 hash.
    """
    filepath = os.path.join(self.data_dir, filename)
    h = isolateserver.hash_file(filepath, ALGO)
    shutil.copyfile(filepath, os.path.join(self.table, h))
    return h
  def test_delete_quite_corrupted_cache_entry(self):
    # Test that an entry with an invalid file size properly gets removed and
    # fetched again. This test case also checks for file modes.
    isolated_file = os.path.join(self.data_dir, 'file_with_size.isolated')
    isolated_hash = isolateserver.hash_file(isolated_file, ALGO)
    file1_hash = self._store('file1.txt')
    # Note that <tempdir>/table/<file1_hash> has 640 mode.

    # Run the test once to generate the cache.
    out, err, returncode = self._run(self._generate_args_with_isolated(
        isolated_file))
    if VERBOSE:
      print out
      print err
    self.assertEqual(0, returncode)
    expected = {
      '.': (040775, 040755, 040777),
      'state.json': (0100664, 0100644, 0100666),
      # The reason for 0100666 on Windows is that the file node had to be
      # modified to delete the hardlinked node. The read only bit is reset on
      # load.
      file1_hash: (0100400, 0100400, 0100666),
      isolated_hash: (0100400, 0100400, 0100444),
    }
    self.assertTreeModes(self.cache, expected)

    # Modify one of the files in the cache to be invalid.
    cached_file_path = os.path.join(self.cache, file1_hash)
    previous_mode = os.stat(cached_file_path).st_mode
    os.chmod(cached_file_path, 0600)
    old_content = read_content(cached_file_path)
    write_content(cached_file_path, old_content + ' but now invalid size')
    os.chmod(cached_file_path, previous_mode)
    logging.info('Modified %s', cached_file_path)
    # Ensure that the cache has an invalid file.
    self.assertNotEqual(
        os.stat(os.path.join(self.data_dir, 'file1.txt')).st_size,
        os.stat(cached_file_path).st_size)

    # Rerun the test and make sure the cache contains the right file afterwards.
    out, err, returncode = self._run(self._generate_args_with_isolated(
        isolated_file))
    if VERBOSE:
      print out
      print err
    self.assertEqual(0, returncode)
    expected = {
      '.': (040700, 040700, 040777),
      'state.json': (0100600, 0100600, 0100666),
      file1_hash: (0100400, 0100400, 0100666),
      isolated_hash: (0100400, 0100400, 0100444),
    }
    self.assertTreeModes(self.cache, expected)

    self.assertEqual(os.stat(os.path.join(self.data_dir, 'file1.txt')).st_size,
                     os.stat(cached_file_path).st_size)
    self.assertEqual(old_content, read_content(cached_file_path))
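The corrupted-cache test relies on read_content() and write_content() helpers that are not shown in this listing. A minimal sketch, assuming they simply read and write the whole file as bytes:

def read_content(path):
    """Reads the whole file as bytes (sketch of the helper assumed above)."""
    with open(path, 'rb') as f:
        return f.read()

def write_content(path, content):
    """Overwrites the file with the given bytes (sketch, not the original)."""
    with open(path, 'wb') as f:
        f.write(content)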
    def _store(self, filename):
        """Stores a test data file in the table.

    Returns its sha-1 hash.
    """
        filepath = os.path.join(self.data_dir, filename)
        h = isolateserver.hash_file(filepath, ALGO)
        shutil.copyfile(filepath, os.path.join(self.table, h))
        return h
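_store() implements a tiny content-addressed store: a file's name inside the table is its own sha-1 digest. The reverse lookup could look like the following sketch; the helper name and behaviour are illustrative, not part of the original tests:

import hashlib
import os
import shutil

def fetch_from_table(table_dir, content_hash, destination):
    """Copies a stored file back out of the table and verifies its digest."""
    source = os.path.join(table_dir, content_hash)
    shutil.copyfile(source, destination)
    with open(destination, 'rb') as f:
        if hashlib.sha1(f.read()).hexdigest() != content_hash:
            raise ValueError('corrupted entry %s' % content_hash)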
 def test_symlink_partial(self):
   self._execute('archive', 'symlink_partial.isolate', [], False)
   # Construct our own tree.
   expected = [
     str(v['h'])
     for v in self._gen_files(False, None, False).itervalues() if 'h' in v
   ]
   expected.append(isolateserver.hash_file(self.isolated, ALGO))
   self.assertEqual(sorted(expected), self._result_tree())
   self._expect_results(['symlink_partial.py'], None, None, None)
 def test_symlink_partial(self):
     self._execute('archive', 'symlink_partial.isolate', [], False)
     # Construct our own tree.
     expected = [
         str(v['h'])
         for v in self._gen_files(False, None, False).itervalues()
         if 'h' in v
     ]
     expected.append(isolateserver.hash_file(self.isolated, ALGO))
     self.assertEqual(sorted(expected), self._result_tree())
     self._expect_results(['symlink_partial.py'], None, None, None)
 def test_archive(self):
   self._execute('archive', ['--isolate', self.filename()], False)
   files = sorted([
     os.path.join(
         'hashtable',
         isolateserver.hash_file(os.path.join(ROOT_DIR, 'isolate.py'), ALGO)),
     os.path.join(
         'hashtable',
         isolateserver.hash_file(
             os.path.join(ROOT_DIR, 'tests', 'isolate', 'touch_root.py'),
             ALGO)),
     os.path.join(
         'hashtable',
         isolateserver.hash_file(os.path.join(self.isolated), ALGO)),
     'isolate_smoke_test.isolated',
     'isolate_smoke_test.isolated.state',
     os.path.join('root', 'tests', 'isolate', 'touch_root.isolate'),
     os.path.join('root', 'tests', 'isolate', 'touch_root.py'),
     os.path.join('root', 'isolate.py'),
   ])
   self.assertEqual(files, list_files_tree(self.tempdir))
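list_files_tree() is used throughout to flatten a directory into a sorted list of relative paths. A hedged sketch of such a helper; the original may differ in symlink or ordering details:

import os

def list_files_tree(directory):
    """Returns the sorted list of every file under directory as relative paths."""
    found = []
    for root, _dirs, files in os.walk(directory):
        for name in files:
            found.append(os.path.relpath(os.path.join(root, name), directory))
    return sorted(found)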
  def _archive_given_files(self, files):
    """Given a list of files, call isolateserver.py with them. Then
    verify they are all on the server."""
    args = [
        sys.executable,
        os.path.join(ROOT_DIR, 'isolateserver.py'),
        'archive',
        '--isolate-server', ISOLATE_SERVER,
        '--namespace', self.namespace
    ]
    if '-v' in sys.argv:
      args.append('--verbose')
    args.extend(os.path.join(TEST_DATA_DIR, filename) for filename in files)

    self.assertEqual(0, subprocess.call(args))

    # Try to download the files from the server.
    file_hashes = [
        isolateserver.hash_file(os.path.join(TEST_DATA_DIR, f), hashlib.sha1)
        for f in files
    ]
    for i in range(len(files)):
      download_url = '%scontent/retrieve/%s/%s' % (
          ISOLATE_SERVER, self.namespace, file_hashes[i])

      downloaded_file = isolateserver.net.url_read(download_url, retry_404=True)
      self.assertTrue(downloaded_file is not None,
                      'File %s was missing from the server' % files[i])

    # Ensure the files are listed as present on the server.
    contains_hash_url = '%scontent/contains/%s?token=%s&from_smoke_test=1' % (
        ISOLATE_SERVER, self.namespace, self.token)

    body = ''.join(binascii.unhexlify(h) for h in file_hashes)
    expected = chr(1) * len(files)
    MAX_ATTEMPTS = 10
    for i in xrange(MAX_ATTEMPTS):
      # AppEngine's datastore is eventually consistent and isolateserver does
      # not use transactions for performance reasons, so even if one request
      # was able to retrieve the file, a subsequent one may not see it yet.
      # Retry a few times until the datastore becomes consistent with regard
      # to these entities.
      response = isolateserver.net.url_read(
          contains_hash_url,
          data=body,
          content_type='application/octet-stream')
      if response == expected:
        break
      # GAE is exposing its internal data inconsistency.
      if i != (MAX_ATTEMPTS - 1):
        print('Visible datastore inconsistency, retrying.')
        time.sleep(0.1)
    self.assertEqual(expected, response)
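The contains check above packs every hash into one binary request body and expects one status byte per file in the response, 0x01 meaning present. A small Python 2 illustration of that encoding (the hash values are illustrative):

import binascii

file_hashes = ['da39a3ee5e6b4b0d3255bfef95601890afd80709'] * 2
body = ''.join(binascii.unhexlify(h) for h in file_hashes)  # 20 raw bytes per sha-1
expected = chr(1) * len(file_hashes)                        # one 0x01 byte per file
assert len(body) == 20 * len(file_hashes)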
 def test_archive(self):
     self._execute('archive', ['--isolate', self.filename()], False)
     files = sorted([
         os.path.join(
             'hashtable',
             isolateserver.hash_file(os.path.join(ROOT_DIR, 'isolate.py'),
                                     ALGO)),
         os.path.join(
             'hashtable',
             isolateserver.hash_file(
                 os.path.join(ROOT_DIR, 'tests', 'isolate',
                              'touch_root.py'), ALGO)),
         os.path.join(
             'hashtable',
             isolateserver.hash_file(os.path.join(self.isolated), ALGO)),
         'isolate_smoke_test.isolated',
         'isolate_smoke_test.isolated.state',
         os.path.join('root', 'tests', 'isolate', 'touch_root.isolate'),
         os.path.join('root', 'tests', 'isolate', 'touch_root.py'),
         os.path.join('root', 'isolate.py'),
     ])
     self.assertEqual(files, list_files_tree(self.tempdir))
 def test_result(self):
   # Loads an arbitrary .isolated on the file system.
   isolated = os.path.join(self.data_dir, 'repeated_files.isolated')
   expected = [
     'state.json',
     self._store('file1.txt'),
     self._store('file1_copy.txt'),
     self._store('repeated_files.py'),
     isolateserver.hash_file(isolated, ALGO),
   ]
   out, err, returncode = self._run(
       self._generate_args_with_isolated(isolated))
   if not VERBOSE:
     self.assertEqual('Success\n', out, (out, err))
   self.assertEqual(0, returncode)
   actual = list_files_tree(self.cache)
   self.assertEqual(sorted(set(expected)), actual)
 def test_result(self):
     # Loads an arbitrary .isolated on the file system.
     isolated = os.path.join(self.data_dir, 'repeated_files.isolated')
     expected = [
         'state.json',
         self._store('file1.txt'),
         self._store('file1_copy.txt'),
         self._store('repeated_files.py'),
         isolateserver.hash_file(isolated, ALGO),
     ]
     out, err, returncode = self._run(
         self._generate_args_with_isolated(isolated))
     if not VERBOSE:
         self.assertEqual('Success\n', out, (out, err))
     self.assertEqual(0, returncode)
     actual = list_files_tree(self.cache)
     self.assertEqual(sorted(set(expected)), actual)
  def _gen_files(self, read_only, empty_file, with_time):
    """Returns a dict of files like calling isolate.process_input() on each
    file.

    Arguments:
    - read_only: Mark all the 'm' modes without the writeable bit.
    - empty_file: Add a specific empty file (size 0).
    - with_time: Include 't' timestamps. For saved state .state files.
    """
    root_dir = ROOT_DIR
    if RELATIVE_CWD[self.case()] == '.':
      root_dir = os.path.join(root_dir, 'tests', 'isolate')

    files = dict((unicode(f), {}) for f in DEPENDENCIES[self.case()])

    for relfile, v in files.iteritems():
      filepath = os.path.join(root_dir, relfile)
      filestats = os.lstat(filepath)
      is_link = stat.S_ISLNK(filestats.st_mode)
      if not is_link:
        v[u's'] = filestats.st_size
        if isolate.get_flavor() != 'win':
          v[u'm'] = self._fix_file_mode(relfile, read_only)
      if with_time:
        # Used to skip recalculating the hash. Use the most recent update
        # time.
        v[u't'] = int(round(filestats.st_mtime))
      if is_link:
        v[u'l'] = os.readlink(filepath)  # pylint: disable=E1101
      else:
        # Upgrade the value to unicode so diffing the structure in case of
        # test failure is easier, since the basestring type must match,
        # str!=unicode.
        v[u'h'] = unicode(isolateserver.hash_file(filepath, ALGO))

    if empty_file:
      item = files[empty_file]
      item['h'] = unicode(HASH_NULL)
      if sys.platform != 'win32':
        item['m'] = 288
      item['s'] = 0
      if with_time:
        item['T'] = True
        item.pop('t', None)
    return files
def archive(isolated, isolate_server, os_slave, algo, verbose):
  """Archives a .isolated and all the dependencies on the CAC."""
  tempdir = None
  try:
    logging.info('archive(%s, %s)', isolated, isolate_server)
    cmd = [
      sys.executable,
      os.path.join(ROOT_DIR, 'isolate.py'),
      'archive',
      '--outdir', isolate_server,
      '--isolated', isolated,
      '-V', 'OS', PLATFORM_MAPPING_ISOLATE[os_slave],
    ]
    cmd.extend(['--verbose'] * verbose)
    logging.info(' '.join(cmd))
    if subprocess.call(cmd, verbose):
      return
    return isolateserver.hash_file(isolated, algo)
  finally:
    if tempdir:
      shutil.rmtree(tempdir)
  def test_simple(self):
    # Create a directory with nothing in it and progressively add more stuff.
    isolate = os.path.join(self.srcdir, 'gtest_fake_pass.isolate')
    condition = 'OS=="linux" and chromeos==1'
    with open(isolate, 'w') as f:
      # Write a minimal .isolate file.
      f.write(str({
        'conditions': [
          [condition, {
            'variables': {
              'command': [
                'run_test_cases.py', 'gtest_fake_pass.py',
              ],
            },
          }],
        ],
      }))
    def _copy(filename):
      shutil.copy(
          os.path.join(BASE_DIR, 'gtest_fake', filename),
          os.path.join(self.srcdir, filename))
    _copy('gtest_fake_base.py')
    _copy('gtest_fake_pass.py')
    shutil.copy(
        os.path.join(GOOGLETEST_DIR, 'run_test_cases.py'),
        os.path.join(self.srcdir, 'run_test_cases.py'))
    # Deploy run_isolated with dependencies as zip into srcdir.
    run_isolated.get_as_zip_package(executable=False).zip_into_file(
        os.path.join(self.srcdir, 'run_isolated.zip'))

    logging.debug('1. Create a .isolated file out of the .isolate file.')
    isolated = os.path.join(self.srcdir, 'gtest_fake_pass.isolated')
    out = self._run(
        [
          os.path.join(ROOT_DIR, 'isolate.py'),
          'check', '-i', isolate, '-s', isolated,
          '--config-variable', 'OS', 'linux',
          '--config-variable', 'chromeos', '1',
        ])
    if not VERBOSE:
      self.assertEqual('', out)

    logging.debug('2. Run fix_test_cases.py on it.')
    # Give up on looking at stdout.
    cmd = [
      os.path.join(GOOGLETEST_DIR, 'fix_test_cases.py'),
      '-s', isolated,
      '--trace-blacklist', '.*\\.run_test_cases',
    ]
    _ = self._run(cmd)

    logging.debug('3. Asserting the content of the .isolated file.')
    with open(isolated) as f:
      actual_isolated = json.load(f)
    gtest_fake_base_py = os.path.join(self.srcdir, 'gtest_fake_base.py')
    gtest_fake_pass_py = os.path.join(self.srcdir, 'gtest_fake_pass.py')
    run_isolated_zip = os.path.join(self.srcdir, 'run_isolated.zip')
    run_test_cases_py = os.path.join(self.srcdir, 'run_test_cases.py')
    algo = hashlib.sha1
    expected_isolated = {
      u'algo': u'sha-1',
      u'command': [u'run_test_cases.py', u'gtest_fake_pass.py'],
      u'files': {
        u'gtest_fake_base.py': {
          u'm': 416,
          u'h': unicode(isolateserver.hash_file(gtest_fake_base_py, algo)),
          u's': os.stat(gtest_fake_base_py).st_size,
        },
        u'gtest_fake_pass.py': {
          u'm': 488,
          u'h': unicode(isolateserver.hash_file(gtest_fake_pass_py, algo)),
          u's': os.stat(gtest_fake_pass_py).st_size,
        },
        u'run_isolated.zip': {
          u'm': 416,
          u'h': unicode(isolateserver.hash_file(run_isolated_zip, algo)),
          u's': os.stat(run_isolated_zip).st_size,
        },
        u'run_test_cases.py': {
          u'm': 488,
          u'h': unicode(isolateserver.hash_file(run_test_cases_py, algo)),
          u's': os.stat(run_test_cases_py).st_size,
        },
      },
      u'relative_cwd': u'.',
      u'version': unicode(isolateserver.ISOLATED_FILE_VERSION),
    }
    if sys.platform == 'win32':
      for value in expected_isolated['files'].itervalues():
        self.assertTrue(value.pop('m'))
    self.assertEqual(expected_isolated, actual_isolated)

    # Now verify the .isolate file was updated! (That's the magical part where
    # you say wow!)
    with open(isolate) as f:
      actual = eval(f.read(), {'__builtins__': None}, None)
    expected = {
      'conditions': [
        [condition, {
          'variables': {
            'command': [
              'run_test_cases.py', 'gtest_fake_pass.py'
            ],
            'isolate_dependency_tracked': [
              'gtest_fake_base.py',
              'gtest_fake_pass.py',
              'run_isolated.zip',
              'run_test_cases.py',
            ],
          },
        }],
      ],
    }
    self.assertEqual(expected, actual)
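The test reads the .isolate file back with a restricted eval() because the format is a Python literal. An equivalent, stricter way to load it, sketched with ast.literal_eval (the original tooling uses eval with empty builtins):

import ast

def load_isolate(path):
    """Parses a .isolate file as a Python literal without evaluating any code."""
    with open(path) as f:
        return ast.literal_eval(f.read())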
class IsolateModeBase(IsolateBase):
    def _expect_no_tree(self):
        self.assertFalse(os.path.exists(self.outdir))

    def _result_tree(self):
        return list_files_tree(self.outdir)

    def _expected_tree(self):
        """Verifies the files written in the temporary directory."""
        self.assertEqual(sorted(DEPENDENCIES[self.case()]),
                         self._result_tree())

    @staticmethod
    def _fix_file_mode(filename, read_only):
        """4 modes are supported, 0750 (rwx), 0640 (rw), 0550 (rx), 0440 (r)."""
        min_mode = 0440
        if not read_only:
            min_mode |= 0200
        return (min_mode | 0110) if filename.endswith('.py') else min_mode

    def _gen_files(self, read_only, empty_file, with_time):
        """Returns a dict of files like calling isolate.process_input() on each
    file.

    Arguments:
    - read_only: Mark all the 'm' modes without the writeable bit.
    - empty_file: Add a specific empty file (size 0).
    - with_time: Include 't' timestamps. For saved state .state files.
    """
        root_dir = ROOT_DIR
        if RELATIVE_CWD[self.case()] == '.':
            root_dir = os.path.join(root_dir, 'tests', 'isolate')

        files = dict((unicode(f), {}) for f in DEPENDENCIES[self.case()])

        for relfile, v in files.iteritems():
            filepath = os.path.join(root_dir, relfile)
            filestats = os.lstat(filepath)
            is_link = stat.S_ISLNK(filestats.st_mode)
            if not is_link:
                v[u's'] = filestats.st_size
                if isolate.get_flavor() != 'win':
                    v[u'm'] = self._fix_file_mode(relfile, read_only)
            if with_time:
                # Used to skip recalculating the hash. Use the most recent update
                # time.
                v[u't'] = int(round(filestats.st_mtime))
            if is_link:
                v[u'l'] = os.readlink(filepath)  # pylint: disable=E1101
            else:
                # Upgrade the value to unicode so diffing the structure in case of
                # test failure is easier, since the basestring type must match,
                # str!=unicode.
                v[u'h'] = unicode(isolateserver.hash_file(filepath, ALGO))

        if empty_file:
            item = files[empty_file]
            item['h'] = unicode(HASH_NULL)
            if sys.platform != 'win32':
                item['m'] = 288
            item['s'] = 0
            if with_time:
                item['T'] = True
                item.pop('t', None)
        return files
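As a quick check on _fix_file_mode() above, the four documented modes fall out of the bit arithmetic directly. A worked example, not part of the original tests (0oNNN denotes the same values as the 0NNN octal literals in the Python 2 code):

assert (0o440 | 0o200 | 0o110) == 0o750  # .py file, read_only=False -> rwx
assert (0o440 | 0o200) == 0o640          # data file, read_only=False -> rw
assert (0o440 | 0o110) == 0o550          # .py file, read_only=True  -> rx
# A read-only data file keeps the minimum mode 0o440 (r).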
 def test_all_items_invalid(self):
     out = self._test_all_items_invalid('archive')
     expected = ('%s  isolate_smoke_test.isolated\n' %
                 isolateserver.hash_file(self.isolated, ALGO))
     self.assertEqual(expected, out)
     self._expected_hash_tree(None)
    def test_simple(self):
        # Create a directory with nothing in it and progressively add more stuff.
        isolate = os.path.join(self.srcdir, 'gtest_fake_pass.isolate')
        condition = 'OS=="linux" and chromeos==1'
        with open(isolate, 'w') as f:
            # Write a minimal .isolate file.
            f.write(
                str({
                    'conditions': [
                        [
                            condition, {
                                'variables': {
                                    'command': [
                                        'run_test_cases.py',
                                        'gtest_fake_pass.py',
                                    ],
                                },
                            }
                        ],
                    ],
                }))

        def _copy(filename):
            shutil.copy(os.path.join(BASE_DIR, 'gtest_fake', filename),
                        os.path.join(self.srcdir, filename))

        _copy('gtest_fake_base.py')
        _copy('gtest_fake_pass.py')
        shutil.copy(os.path.join(GOOGLETEST_DIR, 'run_test_cases.py'),
                    os.path.join(self.srcdir, 'run_test_cases.py'))
        # Deploy run_isolated with dependencies as zip into srcdir.
        run_isolated.get_as_zip_package(executable=False).zip_into_file(
            os.path.join(self.srcdir, 'run_isolated.zip'))

        logging.debug('1. Create a .isolated file out of the .isolate file.')
        isolated = os.path.join(self.srcdir, 'gtest_fake_pass.isolated')
        out = self._run([
            os.path.join(ROOT_DIR, 'isolate.py'),
            'check',
            '-i',
            isolate,
            '-s',
            isolated,
            '--config-variable',
            'OS',
            'linux',
            '--config-variable',
            'chromeos',
            '1',
        ])
        if not VERBOSE:
            self.assertEqual('', out)

        logging.debug('2. Run fix_test_cases.py on it.')
        # Give up on looking at stdout.
        cmd = [
            os.path.join(GOOGLETEST_DIR, 'fix_test_cases.py'),
            '-s',
            isolated,
            '--trace-blacklist',
            '.*\\.run_test_cases',
        ]
        _ = self._run(cmd)

        logging.debug('3. Asserting the content of the .isolated file.')
        with open(isolated) as f:
            actual_isolated = json.load(f)
        gtest_fake_base_py = os.path.join(self.srcdir, 'gtest_fake_base.py')
        gtest_fake_pass_py = os.path.join(self.srcdir, 'gtest_fake_pass.py')
        run_isolated_zip = os.path.join(self.srcdir, 'run_isolated.zip')
        run_test_cases_py = os.path.join(self.srcdir, 'run_test_cases.py')
        algo = hashlib.sha1
        expected_isolated = {
            u'algo': u'sha-1',
            u'command': [u'run_test_cases.py', u'gtest_fake_pass.py'],
            u'files': {
                u'gtest_fake_base.py': {
                    u'm': 416,
                    u'h': unicode(
                        isolateserver.hash_file(gtest_fake_base_py, algo)),
                    u's': os.stat(gtest_fake_base_py).st_size,
                },
                u'gtest_fake_pass.py': {
                    u'm': 488,
                    u'h': unicode(
                        isolateserver.hash_file(gtest_fake_pass_py, algo)),
                    u's': os.stat(gtest_fake_pass_py).st_size,
                },
                u'run_isolated.zip': {
                    u'm': 416,
                    u'h': unicode(
                        isolateserver.hash_file(run_isolated_zip, algo)),
                    u's': os.stat(run_isolated_zip).st_size,
                },
                u'run_test_cases.py': {
                    u'm': 488,
                    u'h': unicode(
                        isolateserver.hash_file(run_test_cases_py, algo)),
                    u's': os.stat(run_test_cases_py).st_size,
                },
            },
            u'relative_cwd': u'.',
            u'version': unicode(isolateserver.ISOLATED_FILE_VERSION),
        }
        if sys.platform == 'win32':
            for value in expected_isolated['files'].itervalues():
                self.assertTrue(value.pop('m'))
        self.assertEqual(expected_isolated, actual_isolated)

        # Now verify the .isolate file was updated! (That's the magical part where
        # you say wow!)
        with open(isolate) as f:
            actual = eval(f.read(), {'__builtins__': None}, None)
        expected = {
            'conditions': [
                [condition, {
                    'variables': {
                        'command': ['run_test_cases.py', 'gtest_fake_pass.py'],
                        'isolate_dependency_tracked': [
                            'gtest_fake_base.py',
                            'gtest_fake_pass.py',
                            'run_isolated.zip',
                            'run_test_cases.py',
                        ],
                    },
                }],
            ],
        }
        self.assertEqual(expected, actual)