Ejemplo n.º 1
0
    def test_UseCachedResultsFalse(self):
        """use_cached_results=False must force recomputation despite a cache hit."""
        # Check that the use_cached_results=False does indeed cause computations
        # to be redone, even when present in the cache.
        with pynacl.working_directory.TemporaryWorkingDirectory() as work_dir:
            self.GenerateTestData('UseCachedResultsFalse', work_dir)
            self._tally = 0

            def Copy(logger, subst, src, dst):
                # Count invocations so the test can assert the copy ran twice.
                self._tally += 1
                shutil.copyfile(subst.SubstituteAbsPaths(src),
                                subst.SubstituteAbsPaths(dst))

            o = once.Once(storage=pynacl.fake_storage.FakeStorage(),
                          use_cached_results=False,
                          system_summary='test')
            o.Run('test', self._input_dirs, self._output_dirs[0], [
                command.Runnable(None, Copy, '%(input0)s/in0',
                                 '%(output)s/out')
            ])
            o.Run('test', self._input_dirs, self._output_dirs[1], [
                command.Runnable(None, Copy, '%(input0)s/in0',
                                 '%(output)s/out')
            ])
            # assertEquals is a deprecated alias (removed in Python 3.12);
            # use assertEqual instead.
            self.assertEqual(2, self._tally)
            self.assertEqual(
                pynacl.file_tools.ReadFile(self._input_files[0]),
                pynacl.file_tools.ReadFile(self._output_files[0]))
            self.assertEqual(
                pynacl.file_tools.ReadFile(self._input_files[0]),
                pynacl.file_tools.ReadFile(self._output_files[1]))
Ejemplo n.º 2
0
    def test_HitsCacheSecondTime(self):
        """A second identical Run must be served from the cache."""
        # Test that the computation is not performed on a second instance.
        with working_directory.TemporaryWorkingDirectory() as work_dir:
            self.GenerateTestData('HitsCacheSecondTime', work_dir)
            self._tally = 0

            def check_call(cmd, **kwargs):
                # Count subprocess invocations; the cached second Run adds none.
                self._tally += 1
                subprocess.check_call(cmd, **kwargs)

            self._url = None

            def stash_url(urls):
                self._url = urls

            o = once.Once(storage=fake_storage.FakeStorage(),
                          check_call=check_call,
                          print_url=stash_url)
            o.Run('test', self._input_dirs, self._output_dirs[0], [
                command.Copy('%(input0)s/in0', '%(output)s/out', cwd=work_dir)
            ])
            initial_url = self._url
            self._url = None
            o.Run('test', self._input_dirs, self._output_dirs[1], [
                command.Copy('%(input0)s/in0', '%(output)s/out', cwd=work_dir)
            ])
            # assertEquals is a deprecated alias (removed in Python 3.12).
            self.assertEqual(file_tools.ReadFile(self._input_files[0]),
                             file_tools.ReadFile(self._output_files[0]))
            self.assertEqual(file_tools.ReadFile(self._input_files[0]),
                             file_tools.ReadFile(self._output_files[1]))
            self.assertEqual(1, self._tally)
            self.assertEqual(initial_url, self._url)
Ejemplo n.º 3
0
    def __init__(self, packages, args):
        """Constructor.

    Args:
      packages: A dictionary with the following format:
        {
          '<package name>': {
            REPO_SRC_INFO,
            'commands':
              [<list of command.Command objects to run>],
            'dependencies':  # optional
              [<list of package dependencies>],
            'unpack_commands':  # optional
              [<list of command.Command objects for unpacking inputs
                before they are hashed>],
            'hashed_inputs':  # optional
              [<list of paths to use for build signature>],
          },
        }
        REPO_SRC_INFO is either:
         'git_url': '<git repo url>',
         'git_revision': '<git hex digest to sync>',
        OR:
         'tar_src': '<root relative path to source tarball>',
      args: sys.argv[1:] or equivalent.
    """
        self._packages = packages
        # DecodeArgs must run first: it populates self._options, which both
        # SetupLogging and the once.Once construction below depend on.
        self.DecodeArgs(packages, args)
        self.SetupLogging()
        self._build_once = once.Once(
            use_cached_results=self._options.use_cached_results,
            cache_results=self._options.cache_results,
            print_url=PrintAnnotatorURL,
            storage=self.CreateStorage())
Ejemplo n.º 4
0
    def test_HitsCacheSecondTime(self):
        """A second identical Run must be served from the cache."""
        # Test that the computation is not performed on a second instance.
        with working_directory.TemporaryWorkingDirectory() as work_dir:
            self.GenerateTestData('HitsCacheSecondTime', work_dir)
            self._tally = 0

            def Copy(subst, src, dst):
                # Count invocations; the cached second Run must not add any.
                self._tally += 1
                shutil.copyfile(subst.SubstituteAbsPaths(src),
                                subst.SubstituteAbsPaths(dst))

            self._url = None

            def stash_url(urls):
                self._url = urls

            o = once.Once(storage=fake_storage.FakeStorage(),
                          print_url=stash_url,
                          system_summary='test')
            o.Run('test', self._input_dirs, self._output_dirs[0],
                  [command.Runnable(Copy, '%(input0)s/in0', '%(output)s/out')])
            initial_url = self._url
            self._url = None
            o.Run('test', self._input_dirs, self._output_dirs[1],
                  [command.Runnable(Copy, '%(input0)s/in0', '%(output)s/out')])
            # assertEquals is a deprecated alias (removed in Python 3.12).
            self.assertEqual(file_tools.ReadFile(self._input_files[0]),
                             file_tools.ReadFile(self._output_files[0]))
            self.assertEqual(file_tools.ReadFile(self._input_files[0]),
                             file_tools.ReadFile(self._output_files[1]))
            self.assertEqual(1, self._tally)
            self.assertEqual(initial_url, self._url)
Ejemplo n.º 5
0
    def test_UseCachedResultsFalse(self):
        """use_cached_results=False must force recomputation despite a cache hit."""
        # Check that the use_cached_results=False does indeed cause computations
        # to be redone, even when present in the cache.
        with working_directory.TemporaryWorkingDirectory() as work_dir:
            self.GenerateTestData('UseCachedResultsFalse', work_dir)
            self._tally = 0

            def check_call(cmd, **kwargs):
                # Count successful subprocess invocations.
                subprocess.check_call(cmd, **kwargs)
                self._tally += 1

            o = once.Once(storage=fake_storage.FakeStorage(),
                          use_cached_results=False,
                          check_call=check_call)
            o.Run('test', self._input_dirs, self._output_dirs[0], [
                command.Copy('%(input0)s/in0', '%(output)s/out', cwd=work_dir)
            ])
            o.Run('test', self._input_dirs, self._output_dirs[1], [
                command.Copy('%(input0)s/in0', '%(output)s/out', cwd=work_dir)
            ])
            # assertEquals is a deprecated alias (removed in Python 3.12).
            self.assertEqual(2, self._tally)
            self.assertEqual(file_tools.ReadFile(self._input_files[0]),
                             file_tools.ReadFile(self._output_files[0]))
            self.assertEqual(file_tools.ReadFile(self._input_files[0]),
                             file_tools.ReadFile(self._output_files[1]))
Ejemplo n.º 6
0
 def test_CachedCommandRecorded(self):
     """A successfully cached Run is recorded as one cached cloud item."""
     with pynacl.working_directory.TemporaryWorkingDirectory() as work_dir:
         self.GenerateTestData('CachedCommand', work_dir)
         o = once.Once(storage=pynacl.fake_storage.FakeStorage(),
                       system_summary='test')
         o.Run('test', self._input_dirs, self._output_dirs[0],
               [command.Copy('%(input0)s/in0', '%(output)s/out')])
         # assertEquals is a deprecated alias (removed in Python 3.12).
         self.assertEqual(len(o.GetCachedCloudItems()), 1)
Ejemplo n.º 7
0
 def test_FirstTime(self):
     """The computation always runs when the cache is empty."""
     # Test that the computation is always performed if the cache is empty.
     with working_directory.TemporaryWorkingDirectory() as work_dir:
         self.GenerateTestData('FirstTime', work_dir)
         o = once.Once(storage=fake_storage.FakeStorage(),
                       system_summary='test')
         o.Run('test', self._input_dirs, self._output_dirs[0],
               [command.Copy('%(input0)s/in0', '%(output)s/out')])
         # assertEquals is a deprecated alias (removed in Python 3.12).
         self.assertEqual('FirstTimedata0',
                          file_tools.ReadFile(self._output_files[0]))
Ejemplo n.º 8
0
 def test_OutputsFlushPathHashCache(self):
     """Writing outputs into a path with a cached input hash raises UserError."""
     with pynacl.working_directory.TemporaryWorkingDirectory() as work_dir:
         self.GenerateTestData('CacheFlush', work_dir)
         builder = once.Once(storage=pynacl.fake_storage.FakeStorage(),
                             system_summary='test')
         # Point the output at input0 itself, creating an input/output cycle.
         cyclic_commands = [command.Copy('%(input0)s/in0', '%(output)s/out')]
         self.assertRaises(once.UserError, builder.Run, 'test',
                           self._input_dirs, self._input_dirs['input0'],
                           cyclic_commands)
Ejemplo n.º 9
0
 def test_Mkdir(self):
     """The Mkdir convenience wrapper creates the requested directory."""
     with working_directory.TemporaryWorkingDirectory() as work_dir:
         self.GenerateTestData('Mkdir', work_dir)
         target = os.path.join(work_dir, 'foo')
         builder = once.Once(storage=fake_storage.FakeStorage(),
                             cache_results=False,
                             system_summary='test')
         builder.Run('test', self._input_dirs, target,
                     [command.Mkdir('%(output)s/hi')])
         self.assertTrue(os.path.isdir(os.path.join(target, 'hi')))
Ejemplo n.º 10
0
    def test_NumCores(self):
        """%(cores)s substitutes to a non-zero integer core count."""
        # Test that the core count is substituted. Since we don't know how many
        # cores the test machine will have, just check that it's an integer.
        with working_directory.TemporaryWorkingDirectory() as work_dir:
            self.GenerateTestData('NumCores', work_dir)
            o = once.Once(storage=fake_storage.FakeStorage(),
                          system_summary='test')

            def CheckCores(subst):
                # assertNotEquals is a deprecated alias (removed in Python
                # 3.12); use assertNotEqual instead.
                self.assertNotEqual(0, int(subst.Substitute('%(cores)s')))

            o.Run('test', {}, self._output_dirs[0],
                  [command.Runnable(CheckCores)])
Ejemplo n.º 11
0
 def test_CacheResultsFalse(self):
     """cache_results=False must keep results out of storage."""
     # Check that setting cache_results=False prevents results from being
     # written to the cache.
     with working_directory.TemporaryWorkingDirectory() as work_dir:
         self.GenerateTestData('CacheResultsFalse', work_dir)
         storage = fake_storage.FakeStorage()
         o = once.Once(storage=storage, cache_results=False)
         o.Run('test', self._input_dirs, self._output_dirs[0], [
             command.Copy('%(input0)s/in0', '%(output)s/out', cwd=work_dir)
         ])
         # assertEquals is a deprecated alias (removed in Python 3.12).
         self.assertEqual(0, storage.ItemCount())
         self.assertEqual(file_tools.ReadFile(self._input_files[0]),
                          file_tools.ReadFile(self._output_files[0]))
Ejemplo n.º 12
0
 def test_Command(self):
     """A plain command.Command runs and produces its output file."""
     # Test a plain command.
     with working_directory.TemporaryWorkingDirectory() as work_dir:
         self.GenerateTestData('Command', work_dir)
         o = once.Once(storage=fake_storage.FakeStorage(),
                       system_summary='test')
         o.Run('test', self._input_dirs, self._output_dirs[0], [
             command.Command([
                 sys.executable, '-c',
                 'import sys; open(sys.argv[1], "wb").write("hello")',
                 '%(output)s/out'
             ])
         ])
         # assertEquals is a deprecated alias (removed in Python 3.12).
         self.assertEqual('hello',
                          file_tools.ReadFile(self._output_files[0]))
Ejemplo n.º 13
0
    def test_FailsWhenWritingFails(self):
        """Storage-layer write failures must propagate out of Run."""
        # Check that once doesn't eat the storage layer failures for writes.
        with working_directory.TemporaryWorkingDirectory() as work_dir:
            self.GenerateTestData('FailsWhenWritingFails', work_dir)

            def failing_call(cmd, **kwargs):
                # Simulate gsutil failure with a non-zero exit status.
                return 1

            bad_storage = gsd_storage.GSDStorage(gsutil=['mygsutil'],
                                                 write_bucket='mybucket',
                                                 read_buckets=[],
                                                 call=failing_call)
            builder = once.Once(storage=bad_storage, system_summary='test')
            copy_cmds = [command.Copy('%(input0)s/in0', '%(output)s/out')]
            self.assertRaises(gsd_storage.GSDStorageError, builder.Run,
                              'test', self._input_dirs, self._output_dirs[0],
                              copy_cmds)
Ejemplo n.º 14
0
    def test_RecomputeHashMatches(self):
        """Identical outputs are not re-stored; only the in->out mapping is."""
        # Test that things don't get stored to the output cache if they exist
        # already.
        with working_directory.TemporaryWorkingDirectory() as work_dir:
            # Setup test data in input0, input1 using memory storage.
            self.GenerateTestData('RecomputeHashMatches', work_dir)
            fs = fake_storage.FakeStorage()
            ds = directory_storage.DirectoryStorageAdapter(storage=fs)
            o = once.Once(storage=fs)

            # Run the computation (compute the length of a file) from input0 to
            # output0.
            o.Run('test', self._input_dirs, self._output_dirs[0], [
                self.FileLength(
                    '%(input0)s/in0', '%(output)s/out', cwd=work_dir)
            ])

            # Check that 2 writes have occurred. One to write a mapping from
            # in->out, and one for the output data.
            # (assertEquals is a deprecated alias, removed in Python 3.12.)
            self.assertEqual(2, fs.WriteCount())

            # Run the computation again from input1 to output1.
            # (These should have the same length.)
            o.Run('test', self._input_dirs, self._output_dirs[1], [
                self.FileLength(
                    '%(input1)s/in1', '%(output)s/out', cwd=work_dir)
            ])

            # Write count goes up by one as an in->out hash is added,
            # but no new output is stored (as it is the same).
            self.assertEqual(3, fs.WriteCount())

            # Check that the test is still valid:
            #   - in0 and in1 have equal length.
            #   - out0 and out1 have that length in them.
            #   - out0 and out1 agree.
            self.assertEqual(
                str(len(file_tools.ReadFile(self._input_files[0]))),
                file_tools.ReadFile(self._output_files[0]))
            self.assertEqual(
                str(len(file_tools.ReadFile(self._input_files[1]))),
                file_tools.ReadFile(self._output_files[1]))
            self.assertEqual(file_tools.ReadFile(self._output_files[0]),
                             file_tools.ReadFile(self._output_files[1]))
Ejemplo n.º 15
0
 def test_RunConditionsFalse(self):
     """A command with run_cond=False is skipped; run_cond=True executes."""
     # Test that a command uses run conditions to decide whether or not to run.
     with pynacl.working_directory.TemporaryWorkingDirectory() as work_dir:
         self.GenerateTestData('Command', work_dir)
         o = once.Once(storage=pynacl.fake_storage.FakeStorage(),
                       system_summary='test')
         o.Run('test', self._input_dirs, self._output_dirs[0], [
             command.Command([
                 sys.executable, '-c',
                 'import sys; open(sys.argv[1], "wb").write("hello")',
                 '%(output)s/out'
             ],
                             run_cond=lambda cmd_opts: True),
             command.Command([
                 sys.executable, '-c',
                 'import sys; open(sys.argv[1], "wb").write("not hello")',
                 '%(output)s/out'
             ],
                             run_cond=lambda cmd_opts: False)
         ])
         # assertEquals is a deprecated alias (removed in Python 3.12).
         self.assertEqual(
             'hello', pynacl.file_tools.ReadFile(self._output_files[0]))
Ejemplo n.º 16
0
    def test_UnpackCommands(self):
        """Unpack commands run first; hashed_inputs drive the signature."""
        # Test that unpack commands get run first and hashed_inputs get
        # used when present.
        with working_directory.TemporaryWorkingDirectory() as work_dir:
            self.GenerateTestData('UnpackCommands', work_dir)
            self._tally = 0

            def check_call(cmd, **kwargs):
                # Count subprocess invocations across both Runs.
                self._tally += 1
                subprocess.check_call(cmd, **kwargs)

            o = once.Once(storage=fake_storage.FakeStorage(),
                          check_call=check_call)
            alt_inputs = {'input0': os.path.join(work_dir, 'alt_input')}
            unpack_commands = [
                command.Copy('%(input0)s/in0', alt_inputs['input0'])
            ]
            commands = [
                command.Copy('%(input0)s', '%(output)s/out', cwd=work_dir)
            ]
            o.Run('test',
                  self._input_dirs,
                  self._output_dirs[0],
                  commands=commands,
                  unpack_commands=unpack_commands,
                  hashed_inputs=alt_inputs)
            o.Run('test',
                  self._input_dirs,
                  self._output_dirs[1],
                  commands=commands,
                  unpack_commands=unpack_commands,
                  hashed_inputs=alt_inputs)
            # assertEquals is a deprecated alias (removed in Python 3.12).
            self.assertEqual(file_tools.ReadFile(self._input_files[0]),
                             file_tools.ReadFile(self._output_files[0]))
            self.assertEqual(file_tools.ReadFile(self._input_files[0]),
                             file_tools.ReadFile(self._output_files[1]))
            self.assertEqual(3, self._tally)
Ejemplo n.º 17
0
  def __init__(self, packages, args):
    """Constructor.

    Args:
      packages: A dictionary with the following format. There are two types of
                packages: source and build (described below).
        {
          '<package name>': {
            'type': 'source',
                # Source packages are for sources; in particular remote sources
                # where it is not known whether they have changed until they are
                # synced (it can also be used for tarballs which need to be
                # unpacked). Source package commands are run unconditionally
                # unless sync is skipped via the command-line option. Source
                # package contents are not memoized.
            'dependencies':  # optional
              [<list of package dependencies>],
            'output_dirname': # optional
              '<directory name>', # Name of the directory to checkout sources
              # into (a subdirectory of the global source directory); defaults
              # to the package name.
            'commands':
              [<list of command.Runnable objects to run>],
            'inputs': # optional
              {<mapping whose keys are names, and whose values are files or
                directories (e.g. checked-in tarballs) used as input. Since
                source targets are unconditional, this is only useful as a
                convenience for commands, which may refer to the inputs by their
                key name>},
           },
           '<package name>': {
            'type': 'build',
                # Build packages are memoized, and will build only if their
                # inputs have changed. Their inputs consist of the output of
                # their package dependencies plus any file or directory inputs
                # given by their 'inputs' member
            'dependencies':  # optional
              [<list of package dependencies>],
            'inputs': # optional
              {<mapping whose keys are names, and whose values are files or
                directories (e.g. checked-in tarballs) used as input>},
            'output_subdir': # optional
              '<directory name>', # Name of a subdir to be created in the output
               # directory, into which all output will be placed. If not present
               # output will go into the root of the output directory.
            'commands':
              [<list of command.Command objects to run>],
              # Objects that have a 'skip_for_incremental' attribute that
              # evaluates to True will not be run on incremental builds unless
              # the working directory is empty.
          },
        }
      args: sys.argv[1:] or equivalent.
    """
    self._packages = packages
    # DecodeArgs must run before once.Once is built: it creates self._options.
    self.DecodeArgs(packages, args)
    self._build_once = once.Once(
        use_cached_results=self._options.use_cached_results,
        cache_results=self._options.cache_results,
        print_url=PrintAnnotatorURL,
        storage=self.CreateStorage())
    # Where to emit build signatures: None (disabled), stdout ('-'), or a file.
    self._signature_file = None
    if self._options.emit_signatures is not None:
      if self._options.emit_signatures == '-':
        self._signature_file = sys.stdout
      else:
        self._signature_file = open(self._options.emit_signatures, 'w')
Ejemplo n.º 18
0
    def __init__(self, packages, package_targets, args):
        """Constructor.

    Args:
      packages: A dictionary with the following format. There are two types of
                packages: source and build (described below).
        {
          '<package name>': {
            'type': 'source',
                # Source packages are for sources; in particular remote sources
                # where it is not known whether they have changed until they are
                # synced (it can also be used for tarballs which need to be
                # unpacked). Source package commands are run unconditionally
                # unless sync is skipped via the command-line option. Source
                # package contents are not memoized.
            'dependencies':  # optional
              [<list of package dependencies>],
            'output_dirname': # optional
              '<directory name>', # Name of the directory to checkout sources
              # into (a subdirectory of the global source directory); defaults
              # to the package name.
            'commands':
              [<list of command.Runnable objects to run>],
            'inputs': # optional
              {<mapping whose keys are names, and whose values are files or
                directories (e.g. checked-in tarballs) used as input. Since
                source targets are unconditional, this is only useful as a
                convenience for commands, which may refer to the inputs by their
                key name>},
          },
          '<package name>': {
            'type': 'build', [or 'build_noncanonical']
                # Build packages are memoized, and will build only if their
                # inputs have changed. Their inputs consist of the output of
                # their package dependencies plus any file or directory inputs
                # given by their 'inputs' member
                # build_noncanonical packages are memoized in the same way, but
                # their cache storage keys get the build platform name appended.
                # This means they can be built by multiple bots without
                # collisions, but only one will be canonical.
            'dependencies':  # optional
              [<list of package dependencies>],
            'inputs': # optional
              {<mapping whose keys are names, and whose values are files or
                directories (e.g. checked-in tarballs) used as input>},
            'output_subdir': # optional
              '<directory name>', # Name of a subdir to be created in the output
               # directory, into which all output will be placed. If not present
               # output will go into the root of the output directory.
            'commands':
              [<list of command.Command objects to run>],
          },
          '<package name>': {
            'type': 'work',
              # Work packages have the same keys as build packages. However,
              # they are intended to be intermediate targets, and are not
              # memoized or included for package_version.py. Therefore they will
              # always run, regardless of whether their inputs have changed or
              # of whether source syncing is skipped via the command line.
            <same keys as build-type packages>
          },
        }
      package_targets: A dictionary with the following format. This is a
                       description of output package targets the packages are
                       built for. Each output package should contain a list of
                       <package_name> referenced in the previous "packages"
                       dictionary. This list of targets is expected to stay
                       the same from build to build, so it should include
                       package names even if they aren't being built. A package
                       target is usually the platform, such as "$OS_$ARCH",
                       while the output package is usually the toolchain name,
                       such as "nacl_arm_glibc".
        {
          '<package_target>': {
            '<output_package>':
              [<list of package names included in output package>]
          }
        }
      args: sys.argv[1:] or equivalent.
    """
        self._packages = packages
        self._package_targets = package_targets
        # DecodeArgs must run before once.Once is built: it creates
        # self._options.
        self.DecodeArgs(packages, args)
        self._build_once = once.Once(
            use_cached_results=self._options.use_cached_results,
            cache_results=self._options.cache_results,
            print_url=PrintAnnotatorURL,
            storage=self.CreateStorage(),
            extra_paths=self.ExtraSubstitutionPaths())
        # Where to emit build signatures: None (disabled), stdout ('-'), or a
        # file opened for writing.
        self._signature_file = None
        if self._options.emit_signatures is not None:
            if self._options.emit_signatures == '-':
                self._signature_file = sys.stdout
            else:
                self._signature_file = open(self._options.emit_signatures, 'w')