Exemple #1
0
  def as_jar_with_version(self, target):
    """Given an internal target, return a JarDependency with the last published revision filled in.

    Returns a (jar_dep, semver, sha, fingerprint) tuple; sha and fingerprint
    are None when the target has never been published.
    """
    jar_dep, db_get, _ = self._accessors_for_target(target)

    # Never-published targets default to version 0.0.0.
    major = int(db_get('revision.major', '0'))
    minor = int(db_get('revision.minor', '0'))
    patch = int(db_get('revision.patch', '0'))
    # Coerce through str() before lower(): get_entry applies the same guard,
    # since the stored value may not be a string.
    snapshot = str(db_get('revision.snapshot', 'false')).lower() == 'true'
    sha = db_get('revision.sha', None)
    fingerprint = db_get('revision.fingerprint', None)
    semver = Semver(major, minor, patch, snapshot=snapshot)
    jar_dep.rev = semver.version()
    return jar_dep, semver, sha, fingerprint
Exemple #2
0
  def as_jar_with_version(self, target):
    """Return the target's JarDependency with the last published revision set.

    The result is a (jar_dependency, semver, sha, fingerprint) tuple.
    """
    jar_dep, db_get, _ = self._accessors_for_target(target)

    # Assemble the last published semantic version, defaulting to 0.0.0.
    revision = Semver(
      int(db_get('revision.major', '0')),
      int(db_get('revision.minor', '0')),
      int(db_get('revision.patch', '0')),
      snapshot=(db_get('revision.snapshot', 'false').lower() == 'true'))
    sha = db_get('revision.sha', None)
    fingerprint = db_get('revision.fingerprint', None)
    jar_dep.rev = revision.version()
    return jar_dep, revision, sha, fingerprint
Exemple #3
0
 def set_version(self, target, version, sha, fingerprint):
   """Record the published version, sha and fingerprint for a target.

   Accepts either a Semver instance or a string parseable by Semver.parse.
   """
   if not isinstance(version, Semver):
     version = Semver.parse(version)
   _, _, db_set = self._accessors_for_target(target)
   # Persist each revision component under its own key, in a fixed order.
   entries = (
     ('revision.major', version.major),
     ('revision.minor', version.minor),
     ('revision.patch', version.patch),
     ('revision.snapshot', str(version.snapshot).lower()),
     ('revision.sha', sha),
     ('revision.fingerprint', fingerprint),
   )
   for key, value in entries:
     db_set(key, value)
Exemple #4
0
 def set_version(self, target, version, sha, fingerprint):
   """Record the published version, sha and fingerprint for a target.

   Accepts either a Semver instance or a version string parseable by Semver.
   """
   version = version if isinstance(version, Semver) else Semver.parse(version)
   _, _, db_set = self._accessors_for_target(target)
   db_set('revision.major', version.major)
   db_set('revision.minor', version.minor)
   db_set('revision.patch', version.patch)
   # Normalize the boolean to a lowercase string so readers can compare to 'true'.
   db_set('revision.snapshot', str(version.snapshot).lower())
   db_set('revision.sha', sha)
   db_set('revision.fingerprint', fingerprint)
Exemple #5
0
 def parse_override(override):
   """Parse a 'coordinate=rev' override string into (jar coordinate, Semver).

   Raises TaskError when the string is malformed or the revision fails to parse.
   """
   try:
     coord, rev_str = override.split('=', 1)
     try:
       parsed_rev = Semver.parse(rev_str)
     except ValueError as e:
       raise TaskError('Invalid version %s: %s' % (rev_str, e))
     return parse_jarcoordinate(coord), parsed_rev
   except ValueError:
     # A missing '=' makes the unpacking above raise ValueError.
     raise TaskError('Invalid override: %s' % override)
Exemple #6
0
 def parse_override(override):
   """Parse a command-line override of the form 'coordinate=rev'.

   Returns a (jar coordinate, Semver) pair; raises TaskError on bad input.
   """
   try:
     coordinate, rev = override.split('=', 1)
     try:
       rev = Semver.parse(rev)
     except ValueError as e:
       raise TaskError('Invalid version %s: %s' % (rev, e))
     return parse_jarcoordinate(coordinate), rev
   except ValueError:
     # A missing '=' makes the unpacking above raise ValueError.
     raise TaskError('Invalid override: %s' % override)
Exemple #7
0
 def parse_override(override):
   """Parse a 'coordinate=rev' override into a (jar coordinate, Semver) pair.

   Raises TaskError when the string has no '=' or its revision cannot be parsed.
   """
   try:
     coordinate, rev = override.split('=', 1)
     try:
       # overrides imply semantic versioning
       rev = Semver.parse(rev)
     except ValueError as e:
       raise TaskError('Invalid version {}: {}'.format(rev, e))
     return parse_jarcoordinate(coordinate), rev
   except ValueError:
     # A missing '=' makes the unpacking above raise ValueError.
     raise TaskError('Invalid override: {}'.format(override))
Exemple #8
0
 def parse_override(override):
   """Parse a 'coordinate=rev' override string into its coordinate and Semver."""
   try:
     coord, rev_str = override.split('=', 1)
     try:
       # overrides imply semantic versioning
       sem_rev = Semver.parse(rev_str)
     except ValueError as e:
       raise TaskError('Invalid version {}: {}'.format(rev_str, e))
     return parse_jarcoordinate(coord), sem_rev
   except ValueError:
     raise TaskError('Invalid override: {}'.format(override))
Exemple #9
0
  def get_entry(self, target):
    """Given an internal target, return a PushDb.Entry, which might contain defaults."""
    db_get, _ = self._accessors_for_target(target)

    def read_bool(key):
      # Boolean values are persisted as lowercase 'true'/'false' strings;
      # str() guards against non-string stored values.
      return str(db_get(key, 'false')).lower() == 'true'

    sem_ver = Semver(int(db_get('revision.major', '0')),
                     int(db_get('revision.minor', '0')),
                     int(db_get('revision.patch', '0')),
                     snapshot=read_bool('revision.snapshot'))
    named_version = db_get('revision.named_version', None)
    named_ver = Namedver(named_version) if named_version else None
    return self.Entry(sem_ver,
                      named_ver,
                      read_bool('revision.named_is_latest'),
                      db_get('revision.sha', None),
                      db_get('revision.fingerprint', None))
Exemple #10
0
  def execute(self):
    """Publish jars for all exported targets.

    For each exported target this loads (or creates) its pushdb, picks the next
    version (named snapshot, override, or semver bump), stages the jar/sources/
    docs plus any configured extra artifacts, publishes via ivy, and finally
    dumps the pushdb, commits, pushes and tags in the SCM when commit is enabled.
    """
    self.check_clean_master(commit=(not self.dryrun and self.commit))

    exported_targets = self.exported_targets()
    self.check_targets(exported_targets)

    # Cache of dbfile path -> (PushDb, dbfile, repo) so each pushdb loads once.
    pushdbs = {}

    def get_db(tgt):
      # TODO(tdesai) Handle resource type in get_db.
      if tgt.provides is None:
        raise TaskError('trying to publish target %r which does not provide an artifact' % tgt)
      dbfile = tgt.provides.repo.push_db(tgt)
      result = pushdbs.get(dbfile)
      if not result:
        # Create an empty pushdb if no dbfile exists.
        if (os.path.exists(dbfile)):
          db = PushDb.load(dbfile)
        else:
          safe_mkdir(os.path.dirname(dbfile))
          db = PushDb()
        try:
          repo = self.repos[tgt.provides.repo.name]
        except KeyError:
          raise TaskError('Repository {0} has no entry in the --repos option.'.format(
            tgt.provides.repo.name))
        result = (db, dbfile, repo)
        pushdbs[dbfile] = result
      return result

    def get_pushdb(tgt):
      return get_db(tgt)[0]

    def fingerprint_internal(tgt):
      # Fingerprint of the last publish, or a default for never-published targets.
      pushdb = get_pushdb(tgt)
      entry = pushdb.get_entry(tgt)
      return entry.fingerprint or '0.0.0'

    def stage_artifact(tgt, jar, version, changelog, confs=None, artifact_ext='', extra_confs=None):
      # Write the changelog plus ivy/pom metadata next to the staged artifact.
      def path(name=None, suffix='', extension='jar'):
        return self.artifact_path(jar, version, name=name, suffix=suffix, extension=extension,
                                  artifact_ext=artifact_ext)

      with safe_open(path(suffix='-CHANGELOG', extension='txt'), 'wb') as changelog_file:
        changelog_file.write(changelog.encode('utf-8'))
      ivyxml = path(name='ivy', extension='xml')

      IvyWriter(get_pushdb).write(tgt, ivyxml, confs=confs, extra_confs=extra_confs)
      PomWriter(get_pushdb).write(tgt, path(extension='pom'))

      return ivyxml

    def stage_artifacts(tgt, jar, version, changelog):
      DEFAULT_IVY_TYPE = 'jar'
      DEFAULT_CLASSIFIER = ''
      DEFAULT_EXTENSION = 'jar'

      self._copy_artifact(tgt, jar, version, typename='jars')
      self.create_source_jar(tgt, jar, version)
      doc_jar = self.create_doc_jar(tgt, jar, version)

      confs = set(repo['confs'])
      extra_confs = []

      # Process any extra jars that might have been previously generated for this target, or a
      # target that it was derived from.
      for extra_product, extra_config in (self.get_options().publish_extras or {}).items():
        override_name = jar.name
        if 'override_name' in extra_config:
          # If the supplied string has a '{target_provides_name}' in it, replace it with the
          # current jar name. If not, the string will be taken verbatim.
          override_name = extra_config['override_name'].format(target_provides_name=jar.name)

        classifier = DEFAULT_CLASSIFIER
        suffix = ''
        ivy_type = DEFAULT_IVY_TYPE
        if 'classifier' in extra_config:
          classifier = extra_config['classifier']
          suffix = "-{0}".format(classifier)
          ivy_type = classifier

        extension = DEFAULT_EXTENSION
        if 'extension' in extra_config:
          extension = extra_config['extension']
          if ivy_type == DEFAULT_IVY_TYPE:
            ivy_type = extension

        # A lot of flexibility is allowed in naming the extra artifact. Because the name must be
        # unique, some extra logic is required to ensure that the user supplied at least one
        # non-default value (thus ensuring a uniquely-named artifact in the end).
        if override_name == jar.name and classifier == DEFAULT_CLASSIFIER and extension == DEFAULT_EXTENSION:
          raise TaskError("publish_extra for '{0}' must override one of name, classifier or "
                          "extension with a non-default value.".format(extra_product))

        # BUG FIX: the original applied .format() to '%s-%s', which contains no {} fields, so
        # every classifier-less extra got the literal conf key '%s-%s' and could collide.
        ivy_tmpl_key = classifier or '{}-{}'.format(override_name, extension)

        # Build a list of targets to check. This list will consist of the current target, plus the
        # entire derived_from chain.
        target_list = [tgt]
        target = tgt
        while target.derived_from != target:
          target_list.append(target.derived_from)
          target = target.derived_from
        for cur_tgt in target_list:
          if self.context.products.get(extra_product).has(cur_tgt):
            self._copy_artifact(cur_tgt, jar, version, typename=extra_product,
                                suffix=suffix, extension=extension,
                                override_name=override_name)
            confs.add(ivy_tmpl_key)
            # Supply extra data about this jar into the Ivy template, so that Ivy will publish it
            # to the final destination.
            extra_confs.append({'name': override_name,
                                'type': ivy_type,
                                'conf': ivy_tmpl_key,
                                'classifier': classifier,
                                'ext': extension})

      confs.add(IvyWriter.SOURCES_CONFIG)
      # don't request docs unless they are available for all transitive targets
      # TODO: doc products should be checked by an independent jar'ing task, and
      # conditionally enabled; see https://github.com/pantsbuild/pants/issues/568
      if doc_jar and self._java_doc(tgt) and self._scala_doc(tgt):
        confs.add(IvyWriter.JAVADOC_CONFIG)
      return stage_artifact(tgt, jar, version, changelog, confs, extra_confs=extra_confs)

    if self.overrides:
      print('Publishing with revision overrides:\n  %s' % '\n  '.join(
        '%s=%s' % (coordinate(org, name), rev) for (org, name), rev in self.overrides.items()
      ))

    head_sha = self.scm.commit_id

    safe_rmtree(self.workdir)
    published = []
    skip = (self.restart_at is not None)
    for target in exported_targets:
      pushdb, dbfile, repo = get_db(target)
      oldentry = pushdb.get_entry(target)

      # the jar version is ignored here, since it is overridden below with the new entry
      jar, _, _ = target.get_artifact_info()
      published.append(jar)

      if skip and (jar.org, jar.name) == self.restart_at:
        skip = False

      # select the next version: either a named version, or semver via the pushdb/overrides
      if self.named_snapshot:
        newentry = oldentry.with_named_ver(self.named_snapshot)
      else:
        override = self.overrides.get((jar.org, jar.name))
        sem_ver = Semver.parse(override) if override else oldentry.sem_ver.bump()
        if self.local_snapshot:
          sem_ver = sem_ver.make_snapshot()

        if sem_ver <= oldentry.sem_ver:
          raise TaskError('Requested version %s must be greater than the current version %s' % (
            sem_ver, oldentry.sem_ver
          ))
        newentry = oldentry.with_sem_ver(sem_ver)

      newfingerprint = self.fingerprint(target, fingerprint_internal)
      newentry = newentry.with_sha_and_fingerprint(head_sha, newfingerprint)
      no_changes = newentry.fingerprint == oldentry.fingerprint

      if no_changes:
        changelog = 'No changes for {0} - forced push.\n'.format(pushdb_coordinate(jar, oldentry))
      else:
        changelog = self.changelog(target, oldentry.sha) or 'Direct dependencies changed.\n'

      if no_changes and not self.force:
        print('No changes for {0}'.format(pushdb_coordinate(jar, oldentry)))
        stage_artifacts(target, jar, oldentry.version().version(), changelog)
      elif skip:
        print('Skipping %s to resume at %s' % (
          jar_coordinate(jar, (newentry.version() if self.force else oldentry.version()).version()),
          coordinate(self.restart_at[0], self.restart_at[1])
        ))
        stage_artifacts(target, jar, oldentry.version().version(), changelog)
      else:
        if not self.dryrun:
          # Confirm push looks good
          if no_changes:
            print(changelog)
          else:
            # The changelog may contain non-ascii text, but the print function can, under certain
            # circumstances, incorrectly detect the output encoding to be ascii and thus blow up on
            # non-ascii changelog characters.  Here we explicitly control the encoding to avoid
            # the print function's mis-interpretation.
            # TODO(John Sirois): Consider introducing a pants/util `print_safe` helper for this.
            message = '\nChanges for {} since {} @ {}:\n\n{}\n'.format(
                coordinate(jar.org, jar.name), oldentry.version(), oldentry.sha, changelog)
            # The stdout encoding can be detected as None when running without a tty (common in
            # tests), in which case we want to force encoding with a unicode-supporting codec.
            encoding = sys.stdout.encoding or 'utf-8'
            sys.stdout.write(message.encode(encoding))
          if not self.confirm_push(coordinate(jar.org, jar.name), newentry.version()):
            raise TaskError('User aborted push')

        pushdb.set_entry(target, newentry)
        ivyxml = stage_artifacts(target, jar, newentry.version().version(), changelog)

        if self.dryrun:
          print('Skipping publish of {0} in test mode.'.format(pushdb_coordinate(jar, newentry)))
        else:
          self.publish(ivyxml, jar=jar, entry=newentry, repo=repo, published=published)

          if self.commit:
            org = jar.org
            name = jar.name
            rev = newentry.version().version()
            args = dict(
              org=org,
              name=name,
              rev=rev,
              coordinate=coordinate(org, name, rev),
              user=getpass.getuser(),
              cause='with forced revision' if (org, name) in self.overrides else '(autoinc)'
            )

            pushdb.dump(dbfile)
            self.commit_pushdb(coordinate(org, name, rev))
            scm_exception = None
            for attempt in range(self.get_options().scm_push_attempts):
              try:
                self.context.log.debug("Trying scm push")
                self.scm.push()
                break # success
              except Scm.RemoteException as e:
                # Bind the exception explicitly: the `except ... as` name does not reliably
                # survive past the except block (Python 3 unbinds it), and the for/else below
                # re-raises the last failure after retries are exhausted.
                scm_exception = e
                self.context.log.debug("Scm push failed, trying to refresh")
                # This might fail in the event that there is a real conflict, throwing
                # a Scm.LocalException (in case of a rebase failure) or a Scm.RemoteException
                # in the case of a fetch failure.  We'll directly raise a local exception,
                # since we can't fix it by retrying, but if we do, we want to display the
                # remote exception that caused the refresh as well just in case the user cares.
                # Remote exceptions probably indicate network or configuration issues, so
                # we'll let them propagate
                try:
                  self.scm.refresh(leave_clean=True)
                except Scm.LocalException as local_exception:
                  # BUG FIX: traceback.format_exc takes an optional stack-depth limit, not an
                  # exception object; the original passed scm_exception as that argument.
                  exc = traceback.format_exc()
                  self.context.log.debug("SCM exception while pushing: %s" % exc)
                  raise local_exception

            else:
              raise scm_exception

            self.scm.tag('%(org)s-%(name)s-%(rev)s' % args,
                         message='Publish of %(coordinate)s initiated by %(user)s %(cause)s' % args)
Exemple #11
0
  def execute(self):
    """Publish jars for all exported targets.

    For each exported target this loads (or creates) its pushdb, picks the next
    version (named snapshot, override, or semver bump), stages the jar/sources/
    docs plus any configured extra artifacts, publishes via ivy, and finally
    dumps the pushdb, commits and tags in the SCM when commit is enabled.
    """
    self.check_clean_master(commit=(not self.dryrun and self.commit))

    exported_targets = self.exported_targets()
    self.check_targets(exported_targets)

    # Cache of dbfile path -> (PushDb, dbfile, repo) so each pushdb loads once.
    pushdbs = {}

    def get_db(tgt):
      # TODO(tdesai) Handle resource type in get_db.
      if tgt.provides is None:
        raise TaskError('trying to publish target %r which does not provide an artifact' % tgt)
      dbfile = tgt.provides.repo.push_db(tgt)
      result = pushdbs.get(dbfile)
      if not result:
        # Create an empty db file if none exists.
        touch(dbfile)

        db = PushDb.load(dbfile)
        repo = self.repos[tgt.provides.repo.name]
        result = (db, dbfile, repo)
        pushdbs[dbfile] = result
      return result

    def get_pushdb(tgt):
      return get_db(tgt)[0]

    def fingerprint_internal(tgt):
      # Fingerprint of the last publish, or a default for never-published targets.
      pushdb, _, _ = get_db(tgt)
      entry = pushdb.get_entry(tgt)
      return entry.fingerprint or '0.0.0'

    def stage_artifact(tgt, jar, version, changelog, confs=None, artifact_ext='', extra_confs=None):
      # Write the changelog plus ivy/pom metadata next to the staged artifact.
      def path(name=None, suffix='', extension='jar'):
        return self.artifact_path(jar, version, name=name, suffix=suffix, extension=extension,
                                  artifact_ext=artifact_ext)

      with safe_open(path(suffix='-CHANGELOG', extension='txt'), 'wb') as changelog_file:
        changelog_file.write(changelog.encode('utf-8'))
      ivyxml = path(name='ivy', extension='xml')

      IvyWriter(get_pushdb).write(tgt, ivyxml, confs=confs, extra_confs=extra_confs)
      PomWriter(get_pushdb).write(tgt, path(extension='pom'))

      return ivyxml

    def copy_artifact(tgt, jar, version, typename, suffix='', extension='jar', artifact_ext='',
                      override_name=None):
      # Copy every product of the given type for tgt into the artifact staging area.
      genmap = self.context.products.get(typename)
      for basedir, jars in genmap.get(tgt).items():
        for artifact in jars:
          path = self.artifact_path(jar, version, name=override_name, suffix=suffix,
                                    extension=extension, artifact_ext=artifact_ext)
          safe_mkdir(os.path.dirname(path))
          shutil.copy(os.path.join(basedir, artifact), path)

    def stage_artifacts(tgt, jar, version, changelog):
      DEFAULT_IVY_TYPE = 'jar'
      DEFAULT_CLASSIFIER = ''
      DEFAULT_EXTENSION = 'jar'

      copy_artifact(tgt, jar, version, typename='jars')
      self.create_source_jar(tgt, jar, version)
      doc_jar = self.create_doc_jar(tgt, jar, version)

      confs = set(repo['confs'])
      extra_confs = []

      # Process any extra jars that might have been previously generated for this target, or a
      # target that it was derived from.
      publish_extras = self.context.config.getdict(self._CONFIG_SECTION, 'publish_extras') or {}
      for extra_product in publish_extras:
        extra_config = publish_extras[extra_product]

        override_name = jar.name
        if 'override_name' in extra_config:
          # If the supplied string has a '{target_provides_name}' in it, replace it with the
          # current jar name. If not, the string will be taken verbatim.
          override_name = extra_config['override_name'].format(target_provides_name=jar.name)

        classifier = DEFAULT_CLASSIFIER
        suffix = ''
        ivy_type = DEFAULT_IVY_TYPE
        if 'classifier' in extra_config:
          classifier = extra_config['classifier']
          suffix = "-{0}".format(classifier)
          ivy_type = classifier

        extension = DEFAULT_EXTENSION
        if 'extension' in extra_config:
          extension = extra_config['extension']
          if ivy_type == DEFAULT_IVY_TYPE:
            ivy_type = extension

        # A lot of flexibility is allowed in naming the extra artifact. Because the name must be
        # unique, some extra logic is required to ensure that the user supplied at least one
        # non-default value (thus ensuring a uniquely-named artifact in the end).
        if override_name == jar.name and classifier == DEFAULT_CLASSIFIER and extension == DEFAULT_EXTENSION:
          raise TaskError("publish_extra for '{0}' must override one of name, classifier or "
                          "extension with a non-default value.".format(extra_product))

        ivy_tmpl_key = "publish_extra-{0}{1}{2}".format(override_name, classifier, extension)

        # Build a list of targets to check. This list will consist of the current target, plus the
        # entire derived_from chain.
        target_list = [tgt]
        target = tgt
        while target.derived_from != target:
          target_list.append(target.derived_from)
          target = target.derived_from
        for cur_tgt in target_list:
          if self.context.products.get(extra_product).has(cur_tgt):
            copy_artifact(cur_tgt, jar, version, typename=extra_product,
                          suffix=suffix, extension=extension,
                          override_name=override_name)
            confs.add(ivy_tmpl_key)
            # Supply extra data about this jar into the Ivy template, so that Ivy will publish it
            # to the final destination.
            extra_confs.append({'name': override_name,
                                'type': ivy_type,
                                'conf': ivy_tmpl_key,
                                'classifier': classifier,
                                'ext': extension})

      confs.add(IvyWriter.SOURCES_CONFIG)
      # don't request docs unless they are available for all transitive targets
      # TODO: doc products should be checked by an independent jar'ing task, and
      # conditionally enabled; see https://github.com/pantsbuild/pants/issues/568
      if doc_jar and self._java_doc(tgt) and self._scala_doc(tgt):
        confs.add(IvyWriter.JAVADOC_CONFIG)
      return stage_artifact(tgt, jar, version, changelog, confs, extra_confs=extra_confs)

    if self.overrides:
      print('Publishing with revision overrides:\n  %s' % '\n  '.join(
        '%s=%s' % (coordinate(org, name), rev) for (org, name), rev in self.overrides.items()
      ))

    head_sha = self.scm.commit_id

    safe_rmtree(self.workdir)
    published = []
    skip = (self.restart_at is not None)
    for target in exported_targets:
      pushdb, dbfile, repo = get_db(target)
      oldentry = pushdb.get_entry(target)

      # the jar version is ignored here, since it is overridden below with the new entry
      jar, _, _ = target.get_artifact_info()
      published.append(jar)

      if skip and (jar.org, jar.name) == self.restart_at:
        skip = False

      # select the next version: either a named version, or semver via the pushdb/overrides
      if self.named_snapshot:
        newentry = oldentry.with_named_ver(self.named_snapshot)
      else:
        override = self.overrides.get((jar.org, jar.name))
        sem_ver = Semver.parse(override) if override else oldentry.sem_ver.bump()
        if self.local_snapshot:
          sem_ver = sem_ver.make_snapshot()

        if sem_ver <= oldentry.sem_ver:
          raise TaskError('Requested version %s must be greater than the current version %s' % (
            sem_ver, oldentry.sem_ver
          ))
        newentry = oldentry.with_sem_ver(sem_ver)

      newfingerprint = self.fingerprint(target, fingerprint_internal)
      newentry = newentry.with_sha_and_fingerprint(head_sha, newfingerprint)
      no_changes = newentry.fingerprint == oldentry.fingerprint

      if no_changes:
        changelog = 'No changes for {0} - forced push.\n'.format(pushdb_coordinate(jar, oldentry))
      else:
        changelog = self.changelog(target, oldentry.sha) or 'Direct dependencies changed.\n'

      if no_changes and not self.force:
        print('No changes for {0}'.format(pushdb_coordinate(jar, oldentry)))
        stage_artifacts(target, jar, (newentry.version() if self.force else oldentry.version()).version(), changelog)
      elif skip:
        print('Skipping %s to resume at %s' % (
          jar_coordinate(jar, (newentry.version() if self.force else oldentry.version()).version()),
          coordinate(self.restart_at[0], self.restart_at[1])
        ))
        stage_artifacts(target, jar, oldentry.version().version(), changelog)
      else:
        if not self.dryrun:
          # Confirm push looks good
          if no_changes:
            print(changelog)
          else:
            print('\nChanges for %s since %s @ %s:\n\n%s' % (
              coordinate(jar.org, jar.name), oldentry.version(), oldentry.sha, changelog
            ))
          if os.isatty(sys.stdin.fileno()):
            push = raw_input('Publish %s with revision %s ? [y|N] ' % (
              coordinate(jar.org, jar.name), newentry.version()
            ))
            print('\n')
            if push.strip().lower() != 'y':
              raise TaskError('User aborted push')

        pushdb.set_entry(target, newentry)
        ivyxml = stage_artifacts(target, jar, newentry.version().version(), changelog)

        if self.dryrun:
          print('Skipping publish of {0} in test mode.'.format(pushdb_coordinate(jar, newentry)))
        else:
          resolver = repo['resolver']
          path = repo.get('path')

          # Get authentication for the publish repo if needed.
          # BUG FIX: copy self._jvmargs instead of aliasing it - appending to the shared
          # instance list accumulated duplicate -Dlogin/-Dpassword args across targets.
          jvm_args = list(self._jvmargs)
          if repo.get('auth'):
            user = repo.get('username')
            password = repo.get('password')
            if user and password:
              jvm_args.append('-Dlogin=%s' % user)
              jvm_args.append('-Dpassword=%s' % password)
            else:
              raise TaskError('Unable to publish to %s. %s' %
                              (repo['resolver'], repo.get('help', '')))

          # Do the publish
          def publish(ivyxml_path):
            try:
              ivy = Bootstrapper.default_ivy()
            except Bootstrapper.Error as e:
              raise TaskError('Failed to push {0}! {1}'.format(pushdb_coordinate(jar, newentry), e))

            ivysettings = self.generate_ivysettings(ivy, published, publish_local=path)
            args = [
              '-settings', ivysettings,
              '-ivy', ivyxml_path,
              '-deliverto', '%s/[organisation]/[module]/ivy-[revision].xml' % self.workdir,
              '-publish', resolver,
              '-publishpattern', '%s/[organisation]/[module]/'
                                 '[artifact]-[revision](-[classifier]).[ext]' % self.workdir,
              '-revision', newentry.version().version(),
              '-m2compatible',
            ]

            if LogOptions.stderr_log_level() == logging.DEBUG:
              args.append('-verbose')

            if self.local_snapshot:
              args.append('-overwrite')

            try:
              ivy.execute(jvm_options=jvm_args, args=args,
                          workunit_factory=self.context.new_workunit, workunit_name='jar-publish')
            except Ivy.Error as e:
              raise TaskError('Failed to push {0}! {1}'.format(pushdb_coordinate(jar, newentry), e))

          publish(ivyxml)

          if self.commit:
            org = jar.org
            name = jar.name
            rev = newentry.version().version()
            args = dict(
              org=org,
              name=name,
              rev=rev,
              coordinate=coordinate(org, name, rev),
              user=getpass.getuser(),
              cause='with forced revision' if (org, name) in self.overrides else '(autoinc)'
            )

            pushdb.dump(dbfile)
            self.commit_push(coordinate(org, name, rev))
            self.scm.refresh()
            self.scm.tag('%(org)s-%(name)s-%(rev)s' % args,
                         message='Publish of %(coordinate)s initiated by %(user)s %(cause)s' % args)
Exemple #12
0
  def execute(self):
    self.check_clean_master(commit=(not self.dryrun and self.commit))

    exported_targets = self.exported_targets()
    self.check_targets(exported_targets)

    pushdbs = {}

    def get_db(tgt):
      # TODO(tdesai) Handle resource type in get_db.
      if tgt.provides is None:
        raise TaskError('trying to publish target %r which does not provide an artifact' % tgt)
      dbfile = tgt.provides.repo.push_db
      result = pushdbs.get(dbfile)
      if not result:
        db = PushDb.load(dbfile)
        repo = self.repos[tgt.provides.repo.name]
        result = (db, dbfile, repo)
        pushdbs[dbfile] = result
      return result

    def get_pushdb(tgt):
      return get_db(tgt)[0]

    def fingerprint_internal(tgt):
      pushdb, _, _ = get_db(tgt)
      entry = pushdb.get_entry(tgt)
      return entry.fingerprint or '0.0.0'

    def stage_artifact(tgt, jar, version, changelog, confs=None, artifact_ext=''):
      def path(name=None, suffix='', extension='jar'):
        return self.artifact_path(jar, version, name=name, suffix=suffix, extension=extension,
                                  artifact_ext=artifact_ext)

      with safe_open(path(suffix='-CHANGELOG', extension='txt'), 'w') as changelog_file:
        changelog_file.write(changelog)
      ivyxml = path(name='ivy', extension='xml')

      IvyWriter(get_pushdb).write(tgt, ivyxml, confs=confs)
      PomWriter(get_pushdb).write(tgt, path(extension='pom'))

      return ivyxml

    def copy_artifact(tgt, jar, version, typename, suffix='', artifact_ext=''):
      genmap = self.context.products.get(typename)
      for basedir, jars in genmap.get(tgt).items():
        for artifact in jars:
          path = self.artifact_path(jar, version, suffix=suffix, artifact_ext=artifact_ext)
          safe_mkdir(os.path.dirname(path))
          shutil.copy(os.path.join(basedir, artifact), path)

    def stage_artifacts(tgt, jar, version, changelog):
      copy_artifact(tgt, jar, version, typename='jars')
      self.create_source_jar(tgt, jar, version)
      doc_jar = self.create_doc_jar(tgt, jar, version)

      confs = set(repo['confs'])
      confs.add(IvyWriter.SOURCES_CONFIG)
      if doc_jar:
        confs.add(IvyWriter.JAVADOC_CONFIG)
      return stage_artifact(tgt, jar, version, changelog, confs)

    if self.overrides:
      print('Publishing with revision overrides:\n  %s' % '\n  '.join(
        '%s=%s' % (coordinate(org, name), rev) for (org, name), rev in self.overrides.items()
      ))

    head_sha = self.scm.commit_id

    safe_rmtree(self.workdir)
    published = []
    skip = (self.restart_at is not None)
    for target in exported_targets:
      pushdb, dbfile, repo = get_db(target)
      oldentry = pushdb.get_entry(target)

      # the jar version is ignored here, since it is overridden below with the new entry
      jar, _, _ = target.get_artifact_info()
      published.append(jar)

      if skip and (jar.org, jar.name) == self.restart_at:
        skip = False

      # select the next version: either a named version, or semver via the pushdb/overrides
      if self.named_snapshot:
        newentry = oldentry.with_named_ver(self.named_snapshot)
      else:
        override = self.overrides.get((jar.org, jar.name))
        sem_ver = Semver.parse(override) if override else oldentry.sem_ver.bump()
        if self.local_snapshot:
          sem_ver = sem_ver.make_snapshot()

        if sem_ver <= oldentry.sem_ver:
          raise TaskError('Requested version %s must be greater than the current version %s' % (
            sem_ver, oldentry.sem_ver
          ))
        newentry = oldentry.with_sem_ver(sem_ver)

      newfingerprint = self.fingerprint(target, fingerprint_internal)
      newentry = newentry.with_sha_and_fingerprint(head_sha, newfingerprint)
      no_changes = newentry.fingerprint == oldentry.fingerprint

      if no_changes:
        changelog = 'No changes for %s - forced push.\n' % jar_coordinate(jar, oldentry.version())
      else:
        changelog = self.changelog(target, oldentry.sha) or 'Direct dependencies changed.\n'

      if no_changes and not self.force:
        print('No changes for %s' % jar_coordinate(jar, oldentry.version()))
        stage_artifacts(target, jar, (newentry.version() if self.force else oldentry.version()).version(), changelog)
      elif skip:
        print('Skipping %s to resume at %s' % (
          jar_coordinate(jar, (newentry.version() if self.force else oldentry.version()).version()),
          coordinate(self.restart_at[0], self.restart_at[1])
        ))
        stage_artifacts(target, jar, oldver.version(), changelog)
      else:
        if not self.dryrun:
          # Confirm push looks good
          if no_changes:
            print(changelog)
          else:
            print('\nChanges for %s since %s @ %s:\n\n%s' % (
              coordinate(jar.org, jar.name), oldentry.version(), oldentry.sha, changelog
            ))
          if os.isatty(sys.stdin.fileno()):
            push = raw_input('Publish %s with revision %s ? [y|N] ' % (
              coordinate(jar.org, jar.name), newentry.version()
            ))
            print('\n')
            if push.strip().lower() != 'y':
              raise TaskError('User aborted push')

        pushdb.set_entry(target, newentry)
        ivyxml = stage_artifacts(target, jar, newentry.version().version(), changelog)

        if self.dryrun:
          print('Skipping publish of %s in test mode.' % jar_coordinate(jar, newentry.version()))
        else:
          resolver = repo['resolver']
          path = repo.get('path')

          # Get authentication for the publish repo if needed
          jvm_args = self._jvmargs
          if repo.get('auth'):
            user = repo.get('username')
            password = repo.get('password')
            if user and password:
              jvm_args.append('-Dlogin=%s' % user)
              jvm_args.append('-Dpassword=%s' % password)
            else:
              raise TaskError('Unable to publish to %s. %s' %
                              (repo['resolver'], repo.get('help', '')))

          # Do the publish
          def publish(ivyxml_path):
            try:
              ivy = Bootstrapper.default_ivy()
            except Bootstrapper.Error as e:
              raise TaskError('Failed to push %s! %s' % (jar_coordinate(jar, newentry.version()), e))

            ivysettings = self.generate_ivysettings(ivy, published, publish_local=path)
            args = [
              '-settings', ivysettings,
              '-ivy', ivyxml_path,
              '-deliverto', '%s/[organisation]/[module]/ivy-[revision].xml' % self.workdir,
              '-publish', resolver,
              '-publishpattern', '%s/[organisation]/[module]/'
                                 '[artifact]-[revision](-[classifier]).[ext]' % self.workdir,
              '-revision', newentry.version().version(),
              '-m2compatible',
            ]

            if LogOptions.stderr_log_level() == logging.DEBUG:
              args.append('-verbose')

            if self.local_snapshot:
              args.append('-overwrite')

            try:
              ivy.execute(jvm_options=jvm_args, args=args,
                          workunit_factory=self.context.new_workunit, workunit_name='jar-publish')
            except Ivy.Error as e:
              raise TaskError('Failed to push %s! %s' % (jar_coordinate(jar, newentry.version()), e))

          publish(ivyxml)

          if self.commit:
            org = jar.org
            name = jar.name
            rev = newentry.version().version()
            args = dict(
              org=org,
              name=name,
              rev=rev,
              coordinate=coordinate(org, name, rev),
              user=getpass.getuser(),
              cause='with forced revision' if (org, name) in self.overrides else '(autoinc)'
            )

            pushdb.dump(dbfile)
            self.commit_push(coordinate(org, name, rev))
            self.scm.refresh()
            self.scm.tag('%(org)s-%(name)s-%(rev)s' % args,
                         message='Publish of %(coordinate)s initiated by %(user)s %(cause)s' % args)
Exemple #13
0
    def execute(self):
        """Stage and publish jar artifacts for all exported targets.

        For each exported target, selects the next version (a named snapshot,
        an explicit override, or a semver bump of the pushdb entry), stages
        the jar/sources/doc artifacts plus ivy.xml, pom and changelog, and
        pushes them via ivy.  Unless running in dry-run mode, the new version
        is recorded in the pushdb and committed/tagged in the scm.

        Raises:
          TaskError: when a target provides no artifact, a requested version
            does not advance, repo auth is incomplete, the user aborts an
            interactive confirmation, or the ivy push fails.
        """
        self.check_clean_master(commit=(not self.dryrun and self.commit))

        exported_targets = self.exported_targets()
        self.check_targets(exported_targets)

        # Cache of loaded pushdbs keyed by db file path, so each file is
        # loaded at most once even when many targets share a repo.
        pushdbs = {}

        def get_db(tgt):
            # Returns the (pushdb, dbfile, repo) triple for tgt, loading and
            # caching the pushdb on first use.
            # TODO(tdesai) Handle resource type in get_db.
            if tgt.provides is None:
                raise TaskError(
                    'trying to publish target %r which does not provide an artifact'
                    % tgt)
            dbfile = tgt.provides.repo.push_db(tgt)
            result = pushdbs.get(dbfile)
            if not result:
                # Create an empty db file if none exists.
                touch(dbfile)

                db = PushDb.load(dbfile)
                repo = self.repos[tgt.provides.repo.name]
                result = (db, dbfile, repo)
                pushdbs[dbfile] = result
            return result

        def get_pushdb(tgt):
            # Convenience accessor: just the pushdb for tgt.
            return get_db(tgt)[0]

        def fingerprint_internal(tgt):
            # Fingerprint of the last published state of tgt; '0.0.0' when
            # the target has never been published.
            pushdb, _, _ = get_db(tgt)
            entry = pushdb.get_entry(tgt)
            return entry.fingerprint or '0.0.0'

        def stage_artifact(tgt,
                           jar,
                           version,
                           changelog,
                           confs=None,
                           artifact_ext=''):
            # Writes the changelog, ivy.xml and pom for one artifact and
            # returns the path of the generated ivy.xml.
            def path(name=None, suffix='', extension='jar'):
                return self.artifact_path(jar,
                                          version,
                                          name=name,
                                          suffix=suffix,
                                          extension=extension,
                                          artifact_ext=artifact_ext)

            with safe_open(path(suffix='-CHANGELOG', extension='txt'),
                           'w') as changelog_file:
                changelog_file.write(changelog)
            ivyxml = path(name='ivy', extension='xml')

            IvyWriter(get_pushdb).write(tgt, ivyxml, confs=confs)
            PomWriter(get_pushdb).write(tgt, path(extension='pom'))

            return ivyxml

        def copy_artifact(tgt,
                          jar,
                          version,
                          typename,
                          suffix='',
                          artifact_ext=''):
            # Copies the built product jars of the given type into the
            # staging layout used for publishing.
            genmap = self.context.products.get(typename)
            for basedir, jars in genmap.get(tgt).items():
                for artifact in jars:
                    path = self.artifact_path(jar,
                                              version,
                                              suffix=suffix,
                                              artifact_ext=artifact_ext)
                    safe_mkdir(os.path.dirname(path))
                    shutil.copy(os.path.join(basedir, artifact), path)

        def stage_artifacts(tgt, jar, version, changelog):
            # Stages the main jar, sources jar and (when available) the doc
            # jar, then writes the publish metadata via stage_artifact.
            # NOTE(review): 'repo' below is the enclosing for-loop variable;
            # stage_artifacts is only ever called for the current target, so
            # it always corresponds to tgt.
            copy_artifact(tgt, jar, version, typename='jars')
            self.create_source_jar(tgt, jar, version)
            doc_jar = self.create_doc_jar(tgt, jar, version)

            confs = set(repo['confs'])
            confs.add(IvyWriter.SOURCES_CONFIG)
            # don't request docs unless they are available for all transitive targets
            # TODO: doc products should be checked by an independent jar'ing task, and
            # conditionally enabled; see https://github.com/pantsbuild/pants/issues/568
            # Fixed: use the tgt parameter instead of leaking the enclosing
            # loop variable 'target' (equivalent at every current call site,
            # but correct for any future caller).
            if doc_jar and self._java_doc(tgt) and self._scala_doc(tgt):
                confs.add(IvyWriter.JAVADOC_CONFIG)
            return stage_artifact(tgt, jar, version, changelog, confs)

        if self.overrides:
            print('Publishing with revision overrides:\n  %s' %
                  '\n  '.join('%s=%s' % (coordinate(org, name), rev)
                              for (org, name), rev in self.overrides.items()))

        head_sha = self.scm.commit_id

        safe_rmtree(self.workdir)
        published = []
        skip = (self.restart_at is not None)
        for target in exported_targets:
            pushdb, dbfile, repo = get_db(target)
            oldentry = pushdb.get_entry(target)

            # the jar version is ignored here, since it is overridden below with the new entry
            jar, _, _ = target.get_artifact_info()
            published.append(jar)

            if skip and (jar.org, jar.name) == self.restart_at:
                skip = False

            # select the next version: either a named version, or semver via the pushdb/overrides
            if self.named_snapshot:
                newentry = oldentry.with_named_ver(self.named_snapshot)
            else:
                override = self.overrides.get((jar.org, jar.name))
                sem_ver = Semver.parse(
                    override) if override else oldentry.sem_ver.bump()
                if self.local_snapshot:
                    sem_ver = sem_ver.make_snapshot()

                if sem_ver <= oldentry.sem_ver:
                    raise TaskError(
                        'Requested version %s must be greater than the current version %s'
                        % (sem_ver, oldentry.sem_ver))
                newentry = oldentry.with_sem_ver(sem_ver)

            newfingerprint = self.fingerprint(target, fingerprint_internal)
            newentry = newentry.with_sha_and_fingerprint(
                head_sha, newfingerprint)
            no_changes = newentry.fingerprint == oldentry.fingerprint

            if no_changes:
                changelog = 'No changes for {0} - forced push.\n'.format(
                    pushdb_coordinate(jar, oldentry))
            else:
                changelog = self.changelog(
                    target, oldentry.sha) or 'Direct dependencies changed.\n'

            if no_changes and not self.force:
                print('No changes for {0}'.format(
                    pushdb_coordinate(jar, oldentry)))
                stage_artifacts(target, jar,
                                (newentry.version() if self.force else
                                 oldentry.version()).version(), changelog)
            elif skip:
                print('Skipping %s to resume at %s' %
                      (jar_coordinate(jar,
                                      (newentry.version() if self.force else
                                       oldentry.version()).version()),
                       coordinate(self.restart_at[0], self.restart_at[1])))
                # BUG FIX: the original referenced the undefined name
                # 'oldver' here, raising NameError whenever a target was
                # skipped; stage with the same version the message above
                # reports.
                stage_artifacts(target, jar,
                                (newentry.version() if self.force else
                                 oldentry.version()).version(), changelog)
            else:
                if not self.dryrun:
                    # Confirm push looks good
                    if no_changes:
                        print(changelog)
                    else:
                        print('\nChanges for %s since %s @ %s:\n\n%s' %
                              (coordinate(jar.org, jar.name),
                               oldentry.version(), oldentry.sha, changelog))
                    # Only prompt when attached to a terminal.
                    if os.isatty(sys.stdin.fileno()):
                        push = raw_input(
                            'Publish %s with revision %s ? [y|N] ' %
                            (coordinate(jar.org,
                                        jar.name), newentry.version()))
                        print('\n')
                        if push.strip().lower() != 'y':
                            raise TaskError('User aborted push')

                pushdb.set_entry(target, newentry)
                ivyxml = stage_artifacts(target, jar,
                                         newentry.version().version(),
                                         changelog)

                if self.dryrun:
                    print('Skipping publish of {0} in test mode.'.format(
                        pushdb_coordinate(jar, newentry)))
                else:
                    resolver = repo['resolver']
                    path = repo.get('path')

                    # Get authentication for the publish repo if needed.
                    # BUG FIX: copy self._jvmargs instead of aliasing it, so
                    # the credential flags appended below do not accumulate
                    # on the shared list across loop iterations.
                    jvm_args = list(self._jvmargs)
                    if repo.get('auth'):
                        user = repo.get('username')
                        password = repo.get('password')
                        if user and password:
                            jvm_args.append('-Dlogin=%s' % user)
                            jvm_args.append('-Dpassword=%s' % password)
                        else:
                            raise TaskError(
                                'Unable to publish to %s. %s' %
                                (repo['resolver'], repo.get('help', '')))

                    # Do the publish
                    def publish(ivyxml_path):
                        # Runs ivy to deliver and publish the staged
                        # artifacts described by ivyxml_path.
                        try:
                            ivy = Bootstrapper.default_ivy()
                        except Bootstrapper.Error as e:
                            raise TaskError('Failed to push {0}! {1}'.format(
                                pushdb_coordinate(jar, newentry), e))

                        ivysettings = self.generate_ivysettings(
                            ivy, published, publish_local=path)
                        args = [
                            '-settings',
                            ivysettings,
                            '-ivy',
                            ivyxml_path,
                            '-deliverto',
                            '%s/[organisation]/[module]/ivy-[revision].xml' %
                            self.workdir,
                            '-publish',
                            resolver,
                            '-publishpattern',
                            '%s/[organisation]/[module]/'
                            '[artifact]-[revision](-[classifier]).[ext]' %
                            self.workdir,
                            '-revision',
                            newentry.version().version(),
                            '-m2compatible',
                        ]

                        if LogOptions.stderr_log_level() == logging.DEBUG:
                            args.append('-verbose')

                        if self.local_snapshot:
                            # Local snapshots may be republished in place.
                            args.append('-overwrite')

                        try:
                            ivy.execute(
                                jvm_options=jvm_args,
                                args=args,
                                workunit_factory=self.context.new_workunit,
                                workunit_name='jar-publish')
                        except Ivy.Error as e:
                            raise TaskError('Failed to push {0}! {1}'.format(
                                pushdb_coordinate(jar, newentry), e))

                    publish(ivyxml)

                    if self.commit:
                        org = jar.org
                        name = jar.name
                        rev = newentry.version().version()
                        args = dict(org=org,
                                    name=name,
                                    rev=rev,
                                    coordinate=coordinate(org, name, rev),
                                    user=getpass.getuser(),
                                    cause='with forced revision' if
                                    (org,
                                     name) in self.overrides else '(autoinc)')

                        # Persist the pushdb before committing/tagging so the
                        # scm commit includes the new version entry.
                        pushdb.dump(dbfile)
                        self.commit_push(coordinate(org, name, rev))
                        self.scm.refresh()
                        self.scm.tag(
                            '%(org)s-%(name)s-%(rev)s' % args,
                            message=
                            'Publish of %(coordinate)s initiated by %(user)s %(cause)s'
                            % args)