Example #1
  def setUpClass(cls):
    cls.origin = safe_mkdtemp()
    with pushd(cls.origin):
      subprocess.check_call(['git', 'init', '--bare'])

    cls.gitdir = safe_mkdtemp()
    cls.worktree = safe_mkdtemp()

    cls.readme_file = os.path.join(cls.worktree, 'README')

    with environment_as(GIT_DIR=cls.gitdir, GIT_WORK_TREE=cls.worktree):
      cls.init_repo('depot', cls.origin)

      touch(cls.readme_file)
      subprocess.check_call(['git', 'add', 'README'])
      subprocess.check_call(['git', 'commit', '-am', 'initial commit with decode -> \x81b'])
      subprocess.check_call(['git', 'tag', 'first'])
      subprocess.check_call(['git', 'push', '--tags', 'depot', 'master'])
      subprocess.check_call(['git', 'branch', '--set-upstream', 'master', 'depot/master'])

      with safe_open(cls.readme_file, 'w') as readme:
        readme.write('Hello World.')
      subprocess.check_call(['git', 'commit', '-am', 'Update README.'])

    cls.clone2 = safe_mkdtemp()
    with pushd(cls.clone2):
      cls.init_repo('origin', cls.origin)
      subprocess.check_call(['git', 'pull', '--tags', 'origin', 'master:master'])

      with safe_open(os.path.realpath('README'), 'a') as readme:
        readme.write('--')
      subprocess.check_call(['git', 'commit', '-am', 'Update README 2.'])
      subprocess.check_call(['git', 'push', '--tags', 'origin', 'master'])

    cls.git = Git(gitdir=cls.gitdir, worktree=cls.worktree)
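Every example on this page exercises a small touch() helper (from the Twitter commons / Pants dirutil module) that behaves like the Unix utility: create the file if it does not exist and update its modification time. A minimal sketch of such a helper, assuming a plain os-based implementation rather than the exact library code:

import os

def touch(path, times=None):
  # Open in append mode so an existing file is not truncated; the open itself creates a missing file.
  with open(path, 'a'):
    os.utime(path, times)  # times=None sets both atime and mtime to "now"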
Example #2
 def workspace(self, *buildfiles):
   with temporary_dir() as root_dir:
     with BuildRoot().temporary(root_dir):
       with pushd(root_dir):
         for buildfile in buildfiles:
           touch(os.path.join(root_dir, buildfile))
         yield os.path.realpath(root_dir)
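The workspace helper above is a generator, so in the source it is presumably wrapped with contextlib.contextmanager (the decorator is not shown in the snippet). A self-contained sketch of the same pattern using only the standard library, assuming flat file names:

import os
import shutil
import tempfile
from contextlib import contextmanager

@contextmanager
def workspace(*filenames):
  # Create a throwaway build root, touch the requested files inside it,
  # hand the directory to the caller, and clean up when the with-block exits.
  root = tempfile.mkdtemp()
  try:
    for name in filenames:
      with open(os.path.join(root, name), 'a'):
        pass
    yield os.path.realpath(root)
  finally:
    shutil.rmtree(root)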
Example #3
    def installer(req):
      # Attempt to obtain the egg from the local cache.  If it's an exact match, we can use it.
      # If it's not an exact match, then if it's been resolved sufficiently recently, we still
      # use it.
      dist = egg_obtainer.obtain(req)
      if dist and (requirement_is_exact(req) or now - os.path.getmtime(dist.location) < ttl):
        return dist

      # Failed, so follow through to "remote" resolution
      source_translator = SourceTranslator(
           interpreter=interpreter,
           use_2to3=getattr(req, 'use_2to3', False),
           **shared_options)
      translator = ChainedTranslator(egg_translator, source_translator)
      obtainer = Obtainer(
          crawler,
          [Fetcher([req.repository])] if getattr(req, 'repository', None) else fetchers,
          translator)
      dist = obtainer.obtain(req)
      if dist:
        try:
          touch(dist.location)
        except OSError:
          pass
      return dist
Example #4
    def installer(req):
      # Attempt to obtain the egg from the local cache.  If it's an exact match, we can use it.
      # If it's not an exact match, then if it's been resolved sufficiently recently, we still
      # use it.
      dist = egg_obtainer.obtain(req)
      if dist and (requirement_is_exact(req) or now - os.path.getmtime(dist.location) < ttl):
        return dist

      # Failed, so follow through to "remote" resolution
      source_translator = SourceTranslator(
           interpreter=interpreter,
           use_2to3=getattr(req, 'use_2to3', False),
           **shared_options)
      translator = ChainedTranslator(egg_translator, source_translator)
      obtainer = Obtainer(
          crawler,
          [Fetcher([req.repository])] if getattr(req, 'repository', None) else fetchers,
          translator)
      dist = obtainer.obtain(req)
      if dist:
        try:
          touch(dist.location)
        except OSError:
          pass
      return dist
Example #5
def test_fnmatch():
    with Fileset.over(['.txt']):
        assert leq(Fileset.zglobs('*.txt'))
        assert leq(Fileset.zglobs('?.txt'))
        assert leq(Fileset.zglobs('[].txt'))
        assert leq(Fileset.zglobs('.*'), '.txt')
        assert leq(Fileset.zglobs('*.py', '.*'), '.txt')
        assert leq(Fileset.rglobs(''))
        assert leq(Fileset.rglobs('*.txt'))
        assert leq(Fileset.rglobs('?.txt'))
        assert leq(Fileset.rglobs('[].txt'))
        assert leq(Fileset.rglobs('.*'), '.txt')
        assert leq(Fileset.rglobs('*.py', '.*'), '.txt')
        assert leq(Fileset.rglobs('.*', '.*'), '.txt')

    with Fileset.over(['a.txt']):
        for operation in (Fileset.rglobs, Fileset.zglobs):
            assert leq(operation('*.txt'), 'a.txt')
            assert leq(operation('?.txt'), 'a.txt')
            assert leq(operation('[abcd].txt'), 'a.txt')

    with temporary_dir() as tempdir:
        touch(os.path.join(tempdir, '.txt'))
        assert leq(Fileset.globs('.txt', root=tempdir), '.txt')
        assert leq(Fileset.globs('*.txt', root=tempdir))
        assert leq(Fileset.globs('', root=tempdir))
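The leq helper used in these assertions is not part of the snippet; presumably it checks that a fileset expands to exactly the listed names. A minimal sketch under that assumption (name and signature are hypothetical):

def leq(fileset, *expected):
  # Hypothetical helper: compare the materialized fileset against the expected file names.
  return sorted(fileset) == sorted(expected)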
Example #6
    def _bootstrap_ivy(self, bootstrap_jar_path):
        if not os.path.exists(bootstrap_jar_path):
            with temporary_file() as bootstrap_jar:
                fetcher = Fetcher()
                checksummer = fetcher.ChecksumListener(digest=hashlib.sha1())
                try:
                    log.info('\nDownloading %s' % self._bootstrap_jar_url)
                    # TODO: Capture the stdout of the fetcher, instead of letting it output
                    # to the console directly.
                    fetcher.download(
                        self._bootstrap_jar_url,
                        listener=fetcher.ProgressListener().wrap(checksummer),
                        path_or_fd=bootstrap_jar,
                        timeout=self._timeout)
                    log.info('sha1: %s' % checksummer.checksum)
                    bootstrap_jar.close()
                    touch(bootstrap_jar_path)
                    shutil.move(bootstrap_jar.name, bootstrap_jar_path)
                except fetcher.Error as e:
                    raise self.Error(
                        'Problem fetching the ivy bootstrap jar! %s' % e)

        return Ivy(bootstrap_jar_path,
                   ivy_settings=self._ivy_settings,
                   ivy_cache_dir=self.ivy_cache_dir)
Example #7
def test_fnmatch():
    with Fileset.over([".txt"]):
        assert leq(Fileset.zglobs("*.txt"))
        assert leq(Fileset.zglobs("?.txt"))
        assert leq(Fileset.zglobs("[].txt"))
        assert leq(Fileset.zglobs(".*"), ".txt")
        assert leq(Fileset.zglobs("*.py", ".*"), ".txt")
        assert leq(Fileset.rglobs(""))
        assert leq(Fileset.rglobs("*.txt"))
        assert leq(Fileset.rglobs("?.txt"))
        assert leq(Fileset.rglobs("[].txt"))
        assert leq(Fileset.rglobs(".*"), ".txt")
        assert leq(Fileset.rglobs("*.py", ".*"), ".txt")
        assert leq(Fileset.rglobs(".*", ".*"), ".txt")

    with Fileset.over(["a.txt"]):
        for operation in (Fileset.rglobs, Fileset.zglobs):
            assert leq(operation("*.txt"), "a.txt")
            assert leq(operation("?.txt"), "a.txt")
            assert leq(operation("[abcd].txt"), "a.txt")

    with temporary_dir() as tempdir:
        touch(os.path.join(tempdir, ".txt"))
        assert leq(Fileset.globs(".txt", root=tempdir), ".txt")
        assert leq(Fileset.globs("*.txt", root=tempdir))
        assert leq(Fileset.globs("", root=tempdir))
Example #8
 def workspace(self, *buildfiles):
     with temporary_dir() as root_dir:
         with BuildRoot().temporary(root_dir):
             with pushd(root_dir):
                 for buildfile in buildfiles:
                     touch(os.path.join(root_dir, buildfile))
                 yield os.path.realpath(root_dir)
Example #9
 def distribution(self, files=None, executables=None):
   with temporary_dir() as jdk:
     for f in maybe_list(files or ()):
       touch(os.path.join(jdk, f))
     for exe in maybe_list(executables or (), expected_type=self.EXE):
       path = os.path.join(jdk, exe.name)
       with safe_open(path, 'w') as fp:
         fp.write(exe.contents or '')
       chmod_plus_x(path)
     yield jdk
Example #10
 def distribution(self, files=None, executables=None):
     with temporary_dir() as jdk:
         for f in maybe_list(files or ()):
             touch(os.path.join(jdk, f))
         for exe in maybe_list(executables or (), expected_type=self.EXE):
             path = os.path.join(jdk, exe.name)
             with safe_open(path, 'w') as fp:
                 fp.write(exe.contents or '')
             chmod_plus_x(path)
         yield jdk
Example #11
def profile_classpath(profile,
                      java_runner=None,
                      config=None,
                      ivy_jar=None,
                      ivy_settings=None,
                      workunit_factory=None):
    # TODO(John Sirois): consider rework when ant backend is gone and there is no more need to share
    # path structure

    java_runner = java_runner or runjava_indivisible

    config = config or Config.load()

    profile_dir = config.get('ivy-profiles', 'workdir')
    profile_libdir = os.path.join(profile_dir, '%s.libs' % profile)
    profile_check = '%s.checked' % profile_libdir
    if not os.path.exists(profile_check):
        # TODO(John Sirois): refactor IvyResolve to share ivy invocation command line bits
        ivy_classpath = [ivy_jar] if ivy_jar else config.getlist(
            'ivy', 'classpath')

        safe_mkdir(profile_libdir)
        ivy_settings = ivy_settings or config.get('ivy', 'ivy_settings')
        ivy_xml = os.path.join(profile_dir, '%s.ivy.xml' % profile)
        ivy_opts = [
            '-settings',
            ivy_settings,
            '-ivy',
            ivy_xml,

            # TODO(John Sirois): this pattern omits an [organisation]- prefix to satisfy IDEA jar naming
            # needs for scala - isolate this hack to idea.py where it belongs
            '-retrieve',
            '%s/[artifact]-[revision](-[classifier]).[ext]' % profile_libdir,
            '-sync',
            '-symlink',
            '-types',
            'jar',
            'bundle',
            '-confs',
            'default'
        ]
        result = java_runner(classpath=ivy_classpath,
                             main='org.apache.ivy.Main',
                             workunit_factory=workunit_factory,
                             workunit_name='%s:bootstrap' % profile,
                             opts=ivy_opts)
        if result != 0:
            raise TaskError('Failed to load profile %s, ivy exit code %d' %
                            (profile, result))
        touch(profile_check)

    return [
        os.path.join(profile_libdir, jar) for jar in os.listdir(profile_libdir)
    ]
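touch(profile_check) at the end is a marker-file idiom: an empty sentinel file records that the profile was already resolved, so subsequent runs skip the Ivy invocation. A minimal sketch of the same idiom, with hypothetical names:

import os

def run_once(marker_path, do_work):
  # Run do_work() only if its marker file is absent, then record completion.
  if not os.path.exists(marker_path):
    do_work()
    with open(marker_path, 'a'):
      os.utime(marker_path, None)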
Example #12
    def test_via_pantsini(self):
        with temporary_dir() as root:
            root = os.path.realpath(root)
            touch(os.path.join(root, 'pants.ini'))
            with pushd(root):
                self.assertEqual(root, BuildRoot().path)

            BuildRoot().reset()
            child = os.path.join(root, 'one', 'two')
            safe_mkdir(child)
            with pushd(child):
                self.assertEqual(root, BuildRoot().path)
Example #13
 def fill(self, slice_):
   log.debug('Disk filling %s' % slice_)
   if slice_.length == 0:
     return False
   touch(slice_.filename)
   with open(slice_.filename, 'r+b') as fp:
     if os.path.getsize(slice_.filename) < slice_.stop:
       fp.seek(slice_.stop - 1, 0)
       # write a sentinel byte which will fill the file at least up to stop.
       fp.write(b'\x00')
       return True
   return False
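The fill method relies on the fact that seeking past the end of a file and writing a single byte extends the file to that offset; on most filesystems the gap is stored sparsely, so little real disk space is consumed. A standalone illustration of the trick:

import os
import tempfile

fd, path = tempfile.mkstemp()
os.close(fd)
with open(path, 'r+b') as fp:
  fp.seek(1024 * 1024 - 1)  # jump to the last byte of the desired size
  fp.write(b'\x00')         # one sentinel byte extends the file to 1 MiB
print(os.path.getsize(path))  # 1048576
os.remove(path)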
Example #14
  def test_via_pantsini(self):
    with temporary_dir() as root:
      root = os.path.realpath(root)
      touch(os.path.join(root, 'pants.ini'))
      with pushd(root):
        self.assertEqual(root, BuildRoot().path)

      BuildRoot().reset()
      child = os.path.join(root, 'one', 'two')
      safe_mkdir(child)
      with pushd(child):
        self.assertEqual(root, BuildRoot().path)
Example #15
 def test_round_trip(prefix=None):
   with temporary_dir() as fromdir:
     safe_mkdir(os.path.join(fromdir, 'a/b/c'))
     touch(os.path.join(fromdir, 'a/b/d/e.txt'))
     with temporary_dir() as archivedir:
       archive = archiver.create(fromdir, archivedir, 'archive', prefix=prefix)
       with temporary_dir() as todir:
         archiver.extract(archive, todir)
         fromlisting = listtree(fromdir)
         if prefix:
           fromlisting = set(os.path.join(prefix, x) for x in fromlisting)
           if empty_dirs:
             fromlisting.add(prefix)
         self.assertEqual(fromlisting, listtree(todir))
Example #16
        def do_mount(source, destination):
            log.info('Mounting %s into task filesystem at %s.' %
                     (source, destination))

            # If we're mounting a file into the task filesystem, the mount call will fail if the mount
            # point doesn't exist. In that case we'll create an empty file to mount over.
            if os.path.isfile(source) and not os.path.exists(destination):
                safe_mkdir(os.path.dirname(destination))
                touch(destination)
            else:
                safe_mkdir(destination)

            # This mount call is meant to mimic what mesos does when mounting into the container. C.f.
            # https://github.com/apache/mesos/blob/c3228f3c3d1a1b2c145d1377185cfe22da6079eb/src/slave/containerizer/mesos/isolators/filesystem/linux.cpp#L521-L528
            subprocess.check_call(
                ['mount', '-n', '--rbind', source, destination])
Example #17
def profile_classpath(profile, java_runner=None, config=None, ivy_jar=None, ivy_settings=None, workunit_factory=None):
    # TODO(John Sirois): consider rework when ant backend is gone and there is no more need to share
    # path structure

    java_runner = java_runner or runjava_indivisible

    config = config or Config.load()

    profile_dir = config.get("ivy-profiles", "workdir")
    profile_libdir = os.path.join(profile_dir, "%s.libs" % profile)
    profile_check = "%s.checked" % profile_libdir
    if not os.path.exists(profile_check):
        # TODO(John Sirois): refactor IvyResolve to share ivy invocation command line bits
        ivy_classpath = [ivy_jar] if ivy_jar else config.getlist("ivy", "classpath")

        safe_mkdir(profile_libdir)
        ivy_settings = ivy_settings or config.get("ivy", "ivy_settings")
        ivy_xml = os.path.join(profile_dir, "%s.ivy.xml" % profile)
        ivy_opts = [
            "-settings",
            ivy_settings,
            "-ivy",
            ivy_xml,
            # TODO(John Sirois): this pattern omits an [organisation]- prefix to satisfy IDEA jar naming
            # needs for scala - isolate this hack to idea.py where it belongs
            "-retrieve",
            "%s/[artifact]-[revision](-[classifier]).[ext]" % profile_libdir,
            "-sync",
            "-symlink",
            "-types",
            "jar",
            "bundle",
            "-confs",
            "default",
        ]
        result = java_runner(
            classpath=ivy_classpath,
            main="org.apache.ivy.Main",
            workunit_factory=workunit_factory,
            workunit_name="%s:bootstrap" % profile,
            opts=ivy_opts,
        )
        if result != 0:
            raise TaskError("Failed to load profile %s, ivy exit code %s" % (profile, str(result)))
        touch(profile_check)

    return [os.path.join(profile_libdir, jar) for jar in os.listdir(profile_libdir)]
Example #18
  def execute_single_compilation(self, versioned_targets, cp):
    compilation_id = Target.maybe_readable_identify(versioned_targets.targets)

    # TODO: Use the artifact cache. In flat mode we may want to look for the artifact for all targets,
    # not just the invalid ones, as it might be more likely to be present. Or we could look for both.

    if self._flatten:
      # If compiling in flat mode, we let all dependencies aggregate into a single well-known depfile. This
      # allows us to build different targets in different invocations without losing dependency information
      # from any of them.
      depfile = os.path.join(self._depfile_dir, 'dependencies.flat')
    else:
      # If not in flat mode, we let each compilation have its own depfile, to avoid quadratic behavior (each
      # compilation will read in the entire depfile, add its stuff to it and write it out again).
      depfile = os.path.join(self._depfile_dir, compilation_id) + '.dependencies'

    if not versioned_targets.valid:
      self.context.log.info('Compiling targets %s' % str(versioned_targets.targets))
      sources_by_target, processors, fingerprint = self.calculate_sources(versioned_targets.targets)
      if sources_by_target:
        sources = reduce(lambda all, sources: all.union(sources), sources_by_target.values())
        if not sources:
          touch(depfile)  # Create an empty depfile, since downstream code may assume that one exists.
          self.context.log.warn('Skipping java compile for targets with no sources:\n  %s' %
                                '\n  '.join(str(t) for t in sources_by_target.keys()))
        else:
          classpath = [jar for conf, jar in cp if conf in self._confs]
          result = self.compile(classpath, sources, fingerprint, depfile)
          if result != 0:
            default_message = 'Unexpected error - %s returned %d' % (_JMAKE_MAIN, result)
            raise TaskError(_JMAKE_ERROR_CODES.get(result, default_message))

        if processors:
          # Produce a monolithic apt processor service info file for further compilation rounds
          # and the unit test classpath.
          processor_info_file = os.path.join(self._classes_dir, _PROCESSOR_INFO_FILE)
          if os.path.exists(processor_info_file):
            with safe_open(processor_info_file, 'r') as f:
              for processor in f:
                processors.add(processor.strip())
          self.write_processor_info(processor_info_file, processors)

    # Read in the deps created either just now or by a previous compiler run on these targets.
    deps = Dependencies(self._classes_dir)
    deps.load(depfile)
    self._deps.merge(deps)
Example #19
    def setUp(self):
        self.base_dir = tempfile.mkdtemp()

        def generate_path(name):
            return os.path.join(self.base_dir, name)

        test_class_path = generate_path('com/twitter/Test.class')
        duplicate_class_path = generate_path(
            'com/twitter/commons/Duplicate.class')
        unique_class_path = generate_path('org/apache/Unique.class')
        unicode_class_path = generate_path('cucumber/api/java/zh_cn/假如.class')

        touch(test_class_path)
        touch(duplicate_class_path)
        touch(unique_class_path)
        touch(unicode_class_path)

        def generate_jar(path, *class_name):
            with closing(ZipFile(generate_path(path), 'w')) as zipfile:
                for clazz in class_name:
                    zipfile.write(clazz)
                return zipfile.filename

        @contextmanager
        def jars():
            test_jar = generate_jar('test.jar', test_class_path,
                                    duplicate_class_path)
            jar_with_duplicates = generate_jar('dups.jar',
                                               duplicate_class_path,
                                               unique_class_path)
            jar_without_duplicates = generate_jar('no_dups.jar',
                                                  unique_class_path)
            jar_with_unicode = generate_jar('unicode_class.jar',
                                            unicode_class_path)

            yield test_jar, jar_with_duplicates, jar_without_duplicates, jar_with_unicode

        with jars() as jars:
            test_jar, jar_with_duplicates, jar_without_duplicates, jar_with_unicode = jars
            self.path_with_duplicates = {
                'com/twitter/Test.class':
                set([test_jar]),
                'com/twitter/commons/Duplicate.class':
                set([test_jar, jar_with_duplicates]),
                'org/apache/Unique.class':
                set([jar_with_duplicates]),
                'cucumber/api/java/zh_cn/假如.class':
                set([jar_with_unicode]),
            }
            self.path_without_duplicates = {
                'com/twitter/Test.class': set([test_jar]),
                'com/twitter/commons/Duplicate.class': set([test_jar]),
                'org/apache/Unique.class': set([jar_without_duplicates]),
                'cucumber/api/java/zh_cn/假如.class': set([jar_with_unicode]),
            }
Example #20
 def test_round_trip(prefix=None):
     with temporary_dir() as fromdir:
         safe_mkdir(os.path.join(fromdir, 'a/b/c'))
         touch(os.path.join(fromdir, 'a/b/d/e.txt'))
         with temporary_dir() as archivedir:
             archive = archiver.create(fromdir,
                                       archivedir,
                                       'archive',
                                       prefix=prefix)
             with temporary_dir() as todir:
                 archiver.extract(archive, todir)
                 fromlisting = listtree(fromdir)
                 if prefix:
                     fromlisting = set(
                         os.path.join(prefix, x) for x in fromlisting)
                     if empty_dirs:
                         fromlisting.add(prefix)
                 self.assertEqual(fromlisting, listtree(todir))
Example #21
    def setUpClass(cls):
        BuildFileTest.base_dir = tempfile.mkdtemp()

        # Seed a BUILD outside the build root that should not be detected
        touch(os.path.join(BuildFileTest.base_dir, 'BUILD'))

        BuildFileTest.root_dir = os.path.join(BuildFileTest.base_dir, 'root')

        BuildFileTest.touch('grandparent/parent/BUILD')
        BuildFileTest.touch('grandparent/parent/BUILD.twitter')
        BuildFileTest.makedirs('grandparent/parent/BUILD.dir')
        BuildFileTest.makedirs('grandparent/BUILD')
        BuildFileTest.touch('BUILD')
        BuildFileTest.touch('BUILD.twitter')
        BuildFileTest.touch('grandparent/parent/child1/BUILD')
        BuildFileTest.touch('grandparent/parent/child1/BUILD.twitter')
        BuildFileTest.touch('grandparent/parent/child2/child3/BUILD')
        BuildFileTest.makedirs('grandparent/parent/child2/BUILD')
        BuildFileTest.makedirs('grandparent/parent/child4')
Example #22
    def setUpClass(cls):
        BuildFileTest.base_dir = tempfile.mkdtemp()

        # Seed a BUILD outside the build root that should not be detected
        touch(os.path.join(BuildFileTest.base_dir, "BUILD"))

        BuildFileTest.root_dir = os.path.join(BuildFileTest.base_dir, "root")

        BuildFileTest.touch("grandparent/parent/BUILD")
        BuildFileTest.touch("grandparent/parent/BUILD.twitter")
        BuildFileTest.makedirs("grandparent/parent/BUILD.dir")
        BuildFileTest.makedirs("grandparent/BUILD")
        BuildFileTest.touch("BUILD")
        BuildFileTest.touch("BUILD.twitter")
        BuildFileTest.touch("grandparent/parent/child1/BUILD")
        BuildFileTest.touch("grandparent/parent/child1/BUILD.twitter")
        BuildFileTest.touch("grandparent/parent/child2/child3/BUILD")
        BuildFileTest.makedirs("grandparent/parent/child2/BUILD")
        BuildFileTest.makedirs("grandparent/parent/child4")
Example #23
    def do_mount(source, destination):
      log.info('Mounting %s into task filesystem at %s.' % (source, destination))

      # If we're mounting a file into the task filesystem, the mount call will fail if the mount
      # point doesn't exist. In that case we'll create an empty file to mount over.
      if os.path.isfile(source) and not os.path.exists(destination):
        safe_mkdir(os.path.dirname(destination))
        touch(destination)
      else:
        safe_mkdir(destination)

      # This mount call is meant to mimic what mesos does when mounting into the container. C.f.
      # https://github.com/apache/mesos/blob/c3228f3c3d1a1b2c145d1377185cfe22da6079eb/src/slave/containerizer/mesos/isolators/filesystem/linux.cpp#L521-L528
      subprocess.check_call([
          'mount',
          '-n',
          '--rbind',
          source,
          destination])
Example #24
  def setUpClass(cls):
    BuildFileTest.base_dir = tempfile.mkdtemp()

    # Seed a BUILD outside the build root that should not be detected
    touch(os.path.join(BuildFileTest.base_dir, 'BUILD'))

    BuildFileTest.root_dir = os.path.join(BuildFileTest.base_dir, 'root')

    BuildFileTest.touch('grandparent/parent/BUILD')
    BuildFileTest.touch('grandparent/parent/BUILD.twitter')
    # Tricky!  This is a directory
    BuildFileTest.makedirs('grandparent/parent/BUILD.dir')
    BuildFileTest.makedirs('grandparent/BUILD')
    BuildFileTest.touch('BUILD')
    BuildFileTest.touch('BUILD.twitter')
    BuildFileTest.touch('grandparent/parent/child1/BUILD')
    BuildFileTest.touch('grandparent/parent/child1/BUILD.twitter')
    BuildFileTest.touch('grandparent/parent/child2/child3/BUILD')
    BuildFileTest.makedirs('grandparent/parent/child2/BUILD')
    BuildFileTest.makedirs('grandparent/parent/child4')
Example #25
  def setUp(self):
    self.base_dir = tempfile.mkdtemp()

    def generate_path(name):
      return os.path.join(self.base_dir, name)

    test_class_path = generate_path('com/twitter/Test.class')
    duplicate_class_path = generate_path('com/twitter/commons/Duplicate.class')
    unique_class_path = generate_path('org/apache/Unique.class')

    touch(test_class_path)
    touch(duplicate_class_path)
    touch(unique_class_path)

    def generate_jar(path, *class_name):
      with closing(ZipFile(generate_path(path), 'w')) as zipfile:
        for clazz in class_name:
          zipfile.write(clazz)
        return zipfile.filename

    @contextmanager
    def jars():
      test_jar = generate_jar('test.jar', test_class_path, duplicate_class_path)
      jar_with_duplicates = generate_jar('dups.jar', duplicate_class_path, unique_class_path)
      jar_without_duplicates = generate_jar('no_dups.jar', unique_class_path)

      jars = []
      jars.append(test_jar)
      jars.append(jar_with_duplicates)
      jars.append(jar_without_duplicates)
      yield jars

    with jars() as jars:
      self.path_with_duplicates = [jars[0], jars[1]]
      self.path_without_duplicates = [jars[0], jars[2]]
Example #26
def profile_classpath(profile, java_runner=None, config=None, ivy_jar=None, ivy_settings=None,
                      workunit_factory=None):
  # TODO(John Sirois): consider rework when ant backend is gone and there is no more need to share
  # path structure

  java_runner = java_runner or runjava_indivisible

  config = config or Config.load()

  profile_dir = config.get('ivy-profiles', 'workdir')
  profile_libdir = os.path.join(profile_dir, '%s.libs' % profile)
  profile_check = '%s.checked' % profile_libdir
  if not os.path.exists(profile_check):
    # TODO(John Sirois): refactor IvyResolve to share ivy invocation command line bits
    ivy_classpath = [ivy_jar] if ivy_jar else config.getlist('ivy', 'classpath')

    safe_mkdir(profile_libdir)
    ivy_settings = ivy_settings or config.get('ivy', 'ivy_settings')
    ivy_xml = os.path.join(profile_dir, '%s.ivy.xml' % profile)
    ivy_opts = [
      '-settings', ivy_settings,
      '-ivy', ivy_xml,

      # TODO(John Sirois): this pattern omits an [organisation]- prefix to satisfy IDEA jar naming
      # needs for scala - isolate this hack to idea.py where it belongs
      '-retrieve', '%s/[artifact]-[revision](-[classifier]).[ext]' % profile_libdir,

      '-sync',
      '-symlink',
      '-types', 'jar', 'bundle',
      '-confs', 'default'
    ]
    result = java_runner(classpath=ivy_classpath, main='org.apache.ivy.Main',
                         workunit_factory=workunit_factory,
                         workunit_name='%s:bootstrap' % profile, opts=ivy_opts)
    if result != 0:
      raise TaskError('Failed to load profile %s, ivy exit code %s' % (profile, str(result)))
    touch(profile_check)

  return [os.path.join(profile_libdir, jar) for jar in os.listdir(profile_libdir)]
Example #27
  def _bootstrap_ivy(self, bootstrap_jar_path):
    if not os.path.exists(bootstrap_jar_path):
      with temporary_file() as bootstrap_jar:
        fetcher = Fetcher()
        checksummer = fetcher.ChecksumListener(digest=hashlib.sha1())
        try:
          log.info('\nDownloading %s' % self._bootstrap_jar_url)
          # TODO: Capture the stdout of the fetcher, instead of letting it output
          # to the console directly.
          fetcher.download(self._bootstrap_jar_url,
                           listener=fetcher.ProgressListener().wrap(checksummer),
                           path_or_fd=bootstrap_jar,
                           timeout=self._timeout)
          log.info('sha1: %s' % checksummer.checksum)
          bootstrap_jar.close()
          touch(bootstrap_jar_path)
          shutil.move(bootstrap_jar.name, bootstrap_jar_path)
        except fetcher.Error as e:
          raise self.Error('Problem fetching the ivy bootstrap jar! %s' % e)

    return Ivy(bootstrap_jar_path,
               ivy_settings=self._ivy_settings,
               ivy_cache_dir=self.ivy_cache_dir)
Example #28
  def setUp(self):
    self.base_dir = tempfile.mkdtemp()

    def generate_path(name):
      return os.path.join(self.base_dir, name)

    test_class_path = generate_path('com/twitter/Test.class')
    duplicate_class_path = generate_path('com/twitter/commons/Duplicate.class')
    unique_class_path = generate_path('org/apache/Unique.class')
    unicode_class_path = generate_path('cucumber/api/java/zh_cn/假如.class')

    touch(test_class_path)
    touch(duplicate_class_path)
    touch(unique_class_path)
    touch(unicode_class_path)

    def generate_jar(path, *class_name):
      with closing(ZipFile(generate_path(path), 'w')) as zipfile:
        for clazz in class_name:
          zipfile.write(clazz)
        return zipfile.filename

    @contextmanager
    def jars():
      test_jar = generate_jar('test.jar', test_class_path, duplicate_class_path)
      jar_with_duplicates = generate_jar('dups.jar', duplicate_class_path, unique_class_path)
      jar_without_duplicates = generate_jar('no_dups.jar', unique_class_path)
      jar_with_unicode = generate_jar('unicode_class.jar', unicode_class_path)

      yield test_jar, jar_with_duplicates, jar_without_duplicates, jar_with_unicode

    with jars() as jars:
      test_jar, jar_with_duplicates, jar_without_duplicates, jar_with_unicode = jars
      self.path_with_duplicates = {
          'com/twitter/Test.class': set([test_jar]),
          'com/twitter/commons/Duplicate.class': set([test_jar, jar_with_duplicates]),
          'org/apache/Unique.class': set([jar_with_duplicates]),
          'cucumber/api/java/zh_cn/假如.class' : set([jar_with_unicode]),
      }
      self.path_without_duplicates = {
          'com/twitter/Test.class': set([test_jar]),
          'com/twitter/commons/Duplicate.class': set([test_jar]),
          'org/apache/Unique.class': set([jar_without_duplicates]),
          'cucumber/api/java/zh_cn/假如.class' : set([jar_with_unicode]),
      }
Example #29
def test_garbage_collector(safe_rmtree, safe_delete):
    with temporary_dir() as sandbox, temporary_dir() as checkpoint_root, temporary_dir() as log_dir:

        path = TaskPath(root=checkpoint_root, task_id='test', log_dir=log_dir)

        touch(os.path.join(sandbox, 'test_file1'))
        touch(os.path.join(sandbox, 'test_file2'))
        safe_mkdir(
            os.path.dirname(path.given(state='finished').getpath('task_path')))
        safe_mkdir(os.path.dirname(path.getpath('runner_checkpoint')))
        touch(path.given(state='finished').getpath('task_path'))

        header = RunnerHeader(task_id='test', sandbox=sandbox, log_dir=log_dir)
        ckpt = TaskRunnerHelper.open_checkpoint(
            path.getpath('runner_checkpoint'))
        ckpt.write(RunnerCkpt(runner_header=header))
        ckpt.close()

        gc = TaskGarbageCollector(checkpoint_root, task_id='test')
        assert gc._state.header.log_dir == log_dir
        assert gc._state.header.sandbox == sandbox

        # erase metadata
        gc.erase_metadata()
        safe_delete.assert_has_calls([
            call(path.given(state='finished').getpath('task_path')),
            call(path.getpath('runner_checkpoint'))
        ], any_order=True)
        safe_rmtree.assert_has_calls([call(path.getpath('checkpoint_path'))])

        safe_delete.reset_mock()
        safe_rmtree.reset_mock()

        # erase logs
        gc.erase_logs()
        safe_rmtree.assert_has_calls([call(log_dir)])

        safe_delete.reset_mock()
        safe_rmtree.reset_mock()

        # erase sandbox
        gc.erase_data()

        safe_delete.assert_has_calls([
            call(os.path.join(sandbox, 'test_file1')),
            call(os.path.join(sandbox, 'test_file2'))
        ], any_order=True)
        safe_rmtree.assert_has_calls([call(sandbox)])
Example #30
def test_garbage_collector(safe_rmtree, safe_delete):
  with temporary_dir() as sandbox, temporary_dir() as checkpoint_root, temporary_dir() as log_dir:

    path = TaskPath(root=checkpoint_root, task_id='test', log_dir=log_dir)

    touch(os.path.join(sandbox, 'test_file1'))
    touch(os.path.join(sandbox, 'test_file2'))
    safe_mkdir(os.path.dirname(path.given(state='finished').getpath('task_path')))
    safe_mkdir(os.path.dirname(path.getpath('runner_checkpoint')))
    touch(path.given(state='finished').getpath('task_path'))

    header = RunnerHeader(task_id='test', sandbox=sandbox, log_dir=log_dir)
    ckpt = TaskRunnerHelper.open_checkpoint(path.getpath('runner_checkpoint'))
    ckpt.write(RunnerCkpt(runner_header=header))
    ckpt.close()

    gc = TaskGarbageCollector(checkpoint_root, task_id='test')
    assert gc._state.header.log_dir == log_dir
    assert gc._state.header.sandbox == sandbox

    # erase metadata
    gc.erase_metadata()
    safe_delete.assert_has_calls([
        call(path.given(state='finished').getpath('task_path')),
        call(path.getpath('runner_checkpoint'))], any_order=True)
    safe_rmtree.assert_has_calls([call(path.getpath('checkpoint_path'))])

    safe_delete.reset_mock()
    safe_rmtree.reset_mock()

    # erase logs
    gc.erase_logs()
    safe_rmtree.assert_has_calls([call(log_dir)])

    safe_delete.reset_mock()
    safe_rmtree.reset_mock()

    # erase sandbox
    gc.erase_data()

    safe_delete.assert_has_calls([
        call(os.path.join(sandbox, 'test_file1')),
        call(os.path.join(sandbox, 'test_file2'))], any_order=True)
    safe_rmtree.assert_has_calls([call(sandbox)])
Example #31
    def setUp(self):
        self.base_dir = tempfile.mkdtemp()

        def generate_path(name):
            return os.path.join(self.base_dir, name)

        test_class_path = generate_path('com/twitter/Test.class')
        duplicate_class_path = generate_path(
            'com/twitter/commons/Duplicate.class')
        unique_class_path = generate_path('org/apache/Unique.class')

        touch(test_class_path)
        touch(duplicate_class_path)
        touch(unique_class_path)

        def generate_jar(path, *class_name):
            with closing(ZipFile(generate_path(path), 'w')) as zipfile:
                for clazz in class_name:
                    zipfile.write(clazz)
                return zipfile.filename

        @contextmanager
        def jars():
            test_jar = generate_jar('test.jar', test_class_path,
                                    duplicate_class_path)
            jar_with_duplicates = generate_jar('dups.jar',
                                               duplicate_class_path,
                                               unique_class_path)
            jar_without_duplicates = generate_jar('no_dups.jar',
                                                  unique_class_path)

            jars = []
            jars.append(test_jar)
            jars.append(jar_with_duplicates)
            jars.append(jar_without_duplicates)
            yield jars

        with jars() as jars:
            self.path_with_duplicates = [jars[0], jars[1]]
            self.path_without_duplicates = [jars[0], jars[2]]
Example #32
    def execute_single_compilation(self, versioned_targets, cp):
        compilation_id = Target.maybe_readable_identify(
            versioned_targets.targets)

        # TODO: Use the artifact cache. In flat mode we may want to look for the artifact for all targets,
        # not just the invalid ones, as it might be more likely to be present. Or we could look for both.

        if self._flatten:
            # If compiling in flat mode, we let all dependencies aggregate into a single well-known depfile. This
            # allows us to build different targets in different invocations without losing dependency information
            # from any of them.
            depfile = os.path.join(self._depfile_dir, 'dependencies.flat')
        else:
            # If not in flat mode, we let each compilation have its own depfile, to avoid quadratic behavior (each
            # compilation will read in the entire depfile, add its stuff to it and write it out again).
            depfile = os.path.join(self._depfile_dir,
                                   compilation_id) + '.dependencies'

        if not versioned_targets.valid:
            self.context.log.info('Compiling targets %s' %
                                  str(versioned_targets.targets))
            sources_by_target, processors, fingerprint = self.calculate_sources(
                versioned_targets.targets)
            if sources_by_target:
                sources = reduce(lambda all, sources: all.union(sources),
                                 sources_by_target.values())
                if not sources:
                    # Create an empty depfile, since downstream code may assume that one exists.
                    touch(depfile)
                    self.context.log.warn(
                        'Skipping java compile for targets with no sources:\n  %s' %
                        '\n  '.join(str(t) for t in sources_by_target.keys()))
                else:
                    classpath = [
                        jar for conf, jar in cp if conf in self._confs
                    ]
                    result = self.compile(classpath, sources, fingerprint,
                                          depfile)
                    if result != 0:
                        default_message = 'Unexpected error - %s returned %d' % (
                            _JMAKE_MAIN, result)
                        raise TaskError(
                            _JMAKE_ERROR_CODES.get(result, default_message))

                if processors:
                    # Produce a monolithic apt processor service info file for further compilation rounds
                    # and the unit test classpath.
                    processor_info_file = os.path.join(self._classes_dir,
                                                       _PROCESSOR_INFO_FILE)
                    if os.path.exists(processor_info_file):
                        with safe_open(processor_info_file, 'r') as f:
                            for processor in f:
                                processors.add(processor.strip())
                    self.write_processor_info(processor_info_file, processors)

        # Read in the deps created either just now or by a previous compiler run on these targets.
        deps = Dependencies(self._classes_dir)
        deps.load(depfile)
        self._deps.merge(deps)
Example #33
def test_task_detector():
  with temporary_dir() as root:
    active_log_dir = os.path.join(root, 'active_log')
    finished_log_dir = os.path.join(root, 'finished_log')

    path = TaskPath(root=root)
    detector = TaskDetector(root)

    # test empty paths

    assert list(detector.get_task_ids(state='active')) == []
    assert list(detector.get_task_ids(state='finished')) == []
    assert set(detector.get_task_ids()) == set()

    assert detector.get_checkpoint(task_id='active_task') == path.given(
        task_id='active_task').getpath('runner_checkpoint')

    assert detector.get_checkpoint(task_id='finished_task') == path.given(
        task_id='finished_task').getpath('runner_checkpoint')

    assert set(detector.get_process_checkpoints('active_task')) == set()
    assert set(detector.get_process_checkpoints('finished_task')) == set()
    assert set(detector.get_process_runs('active_task', active_log_dir)) == set()
    assert set(detector.get_process_runs('finished_task', finished_log_dir)) == set()
    assert set(detector.get_process_logs('active_task', active_log_dir)) == set()
    assert set(detector.get_process_logs('finished_task', finished_log_dir)) == set()

    # create paths

    paths = [
        path.given(state='active', task_id='active_task').getpath('task_path'),
        path.given(state='finished', task_id='finished_task').getpath('task_path'),
        path.given(task_id='active_task').getpath('runner_checkpoint'),
        path.given(task_id='finished_task').getpath('runner_checkpoint'),
        path.given(
            task_id='active_task',
            process='hello_world',
            run='0',
            log_dir=active_log_dir
        ).with_filename('stdout').getpath('process_logdir'),
        path.given(
            task_id='finished_task',
            process='goodbye_world',
            run='1',
            log_dir=finished_log_dir
        ).with_filename('stderr').getpath('process_logdir'),
        path.given(task_id='active_task', process='hello_world').getpath('process_checkpoint'),
        path.given(task_id='finished_task', process='goodbye_world').getpath('process_checkpoint'),
    ]

    for p in paths:
      touch(p)

    detector = TaskDetector(root)

    assert list(detector.get_task_ids(state='active')) == list([('active', 'active_task')])
    assert list(detector.get_task_ids(state='finished')) == list([('finished', 'finished_task')])
    assert set(detector.get_task_ids()) == set(
        [('active', 'active_task'), ('finished', 'finished_task')])

    assert list(detector.get_process_checkpoints('active_task')) == [
        path.given(task_id='active_task', process='hello_world').getpath('process_checkpoint')]

    assert list(detector.get_process_checkpoints('finished_task')) == [
        path.given(task_id='finished_task', process='goodbye_world').getpath('process_checkpoint')]

    assert list(detector.get_process_runs('active_task', active_log_dir)) == [
        ('hello_world', 0)]
    assert list(detector.get_process_runs('finished_task', finished_log_dir)) == [
        ('goodbye_world', 1)]

    assert list(detector.get_process_logs('active_task', active_log_dir)) == [
        path.given(
            task_id='active_task',
            process='hello_world',
            run='0',
            log_dir=active_log_dir
        ).with_filename('stdout').getpath('process_logdir')]

    assert list(detector.get_process_logs('finished_task', finished_log_dir)) == [
        path.given(
            task_id='finished_task',
            process='goodbye_world',
            run='1',
            log_dir=finished_log_dir
        ).with_filename('stderr').getpath('process_logdir')]
Example #34
  def execute_single_compilation(self, versioned_target_set, cp, upstream_analysis_caches):
    """Execute a single compilation, updating upstream_analysis_caches if needed."""
    if self._flatten:
      compilation_id = 'flat'
      output_dir = self._flat_classes_dir
    else:
      compilation_id = Target.maybe_readable_identify(versioned_target_set.targets)
      # Each compilation must output to its own directory, so zinc can then associate those with the appropriate
      # analysis caches of previous compilations. We then copy the results out to the real output dir.
      output_dir = os.path.join(self._incremental_classes_dir, compilation_id)

    depfile = os.path.join(self._depfile_dir, compilation_id) + '.dependencies'
    analysis_cache = os.path.join(self._analysis_cache_dir, compilation_id) + '.analysis_cache'

    safe_mkdir(output_dir)

    if not versioned_target_set.valid:
      with self.check_artifact_cache(versioned_target_set,
                                     build_artifacts=[output_dir, depfile, analysis_cache],
                                     artifact_root=self._workdir) as needs_building:
        if needs_building:
          self.context.log.info('Compiling targets %s' % versioned_target_set.targets)
          sources_by_target = self.calculate_sources(versioned_target_set.targets)
          if sources_by_target:
            sources = reduce(lambda all, sources: all.union(sources), sources_by_target.values())
            if not sources:
              touch(depfile)  # Create an empty depfile, since downstream code may assume that one exists.
              self.context.log.warn('Skipping scala compile for targets with no sources:\n  %s' %
                                    '\n  '.join(str(t) for t in sources_by_target.keys()))
            else:
              classpath = [jar for conf, jar in cp if conf in self._confs]
              result = self.compile(classpath, sources, output_dir, analysis_cache, upstream_analysis_caches, depfile)
              if result != 0:
                raise TaskError('%s returned %d' % (self._main, result))

    # Note that the following post-processing steps must happen even for valid targets.

    # Read in the deps created either just now or by a previous compiler run on these targets.
    if self.context.products.isrequired('classes'):
      self.context.log.debug('Reading dependencies from ' + depfile)
      deps = Dependencies(output_dir)
      deps.load(depfile)

      genmap = self.context.products.get('classes')

      for target, classes_by_source in deps.findclasses(versioned_target_set.targets).items():
        for source, classes in classes_by_source.items():
          genmap.add(source, output_dir, classes)
          genmap.add(target, output_dir, classes)

      # TODO(John Sirois): Map target.resources in the same way
      # Create and Map scala plugin info files to the owning targets.
      for target in versioned_target_set.targets:
        if is_scalac_plugin(target) and target.classname:
          basedir = self.write_plugin_info(target)
          genmap.add(target, basedir, [_PLUGIN_INFO_FILE])

    # Update the upstream analysis map.
    analysis_cache_parts = os.path.split(analysis_cache)
    if not upstream_analysis_caches.has(output_dir):
      # A previous chunk might have already updated this. It is certainly possible for a later chunk to
      # independently depend on some target that a previous chunk already built.
      upstream_analysis_caches.add(output_dir, analysis_cache_parts[0], [ analysis_cache_parts[1] ])

    # Update the classpath.
    with self.context.state('classpath', []) as cp:
      for conf in self._confs:
        cp.insert(0, (conf, output_dir))
Example #35
 def touch(cls, path):
   touch(os.path.join(BuildFileTest.root_dir, path))
Example #36
 def touch(cls, path):
     touch(os.path.join(BuildFileTest.root_dir, path))
Example #37
    def execute_single_compilation(self, versioned_target_set, cp,
                                   upstream_analysis_caches):
        """Execute a single compilation, updating upstream_analysis_caches if needed."""
        if self._flatten:
            compilation_id = 'flat'
            output_dir = self._flat_classes_dir
        else:
            compilation_id = Target.maybe_readable_identify(
                versioned_target_set.targets)
            # Each compilation must output to its own directory, so zinc can then associate those with the appropriate
            # analysis caches of previous compilations. We then copy the results out to the real output dir.
            output_dir = os.path.join(self._incremental_classes_dir,
                                      compilation_id)

        depfile = os.path.join(self._depfile_dir,
                               compilation_id) + '.dependencies'
        analysis_cache = os.path.join(self._analysis_cache_dir,
                                      compilation_id) + '.analysis_cache'

        safe_mkdir(output_dir)

        if not versioned_target_set.valid:
            with self.check_artifact_cache(
                    versioned_target_set,
                    build_artifacts=[output_dir, depfile,
                                     analysis_cache]) as in_cache:
                if not in_cache:
                    self.context.log.info('Compiling targets %s' %
                                          versioned_target_set.targets)
                    sources_by_target = self.calculate_sources(
                        versioned_target_set.targets)
                    if sources_by_target:
                        sources = reduce(
                            lambda all, sources: all.union(sources),
                            sources_by_target.values())
                        if not sources:
                            # Create empty files, since downstream code may assume that these exist.
                            touch(depfile)
                            touch(analysis_cache)
                            self.context.log.warn(
                                'Skipping scala compile for targets with no sources:\n  %s'
                                % '\n  '.join(
                                    str(t) for t in sources_by_target.keys()))
                        else:
                            classpath = [
                                jar for conf, jar in cp if conf in self._confs
                            ]
                            result = self.compile(classpath, sources,
                                                  output_dir, analysis_cache,
                                                  upstream_analysis_caches,
                                                  depfile)
                            if result != 0:
                                raise TaskError('%s returned %d' %
                                                (self._main, result))

        # Note that the following post-processing steps must happen even for valid targets.

        # Read in the deps created either just now or by a previous compiler run on these targets.
        if self.context.products.isrequired('classes'):
            self.context.log.debug('Reading dependencies from ' + depfile)
            deps = Dependencies(output_dir)
            deps.load(depfile)

            genmap = self.context.products.get('classes')

            for target, classes_by_source in deps.findclasses(
                    versioned_target_set.targets).items():
                for source, classes in classes_by_source.items():
                    genmap.add(source, output_dir, classes)
                    genmap.add(target, output_dir, classes)

            # TODO(John Sirois): Map target.resources in the same way
            # Create and Map scala plugin info files to the owning targets.
            for target in versioned_target_set.targets:
                if is_scalac_plugin(target) and target.classname:
                    basedir = self.write_plugin_info(target)
                    genmap.add(target, basedir, [_PLUGIN_INFO_FILE])

        # Update the upstream analysis map.
        analysis_cache_parts = os.path.split(analysis_cache)
        if not upstream_analysis_caches.has(output_dir):
            # A previous chunk might have already updated this. It is certainly possible for a later chunk to
            # independently depend on some target that a previous chunk already built.
            upstream_analysis_caches.add(output_dir, analysis_cache_parts[0],
                                         [analysis_cache_parts[1]])

        # Update the classpath.
        with self.context.state('classpath', []) as cp:
            for conf in self._confs:
                cp.insert(0, (conf, output_dir))