Example #1
    def __init__(self, package_name, package_version, input_dir_path,
                 artifact_paths):
        self._dry_run = os.environ.get("DRY_RUN", "")
        self._pkg_name = package_name
        self._pkg_version = package_version
        self._input_dir_path = input_dir_path

        self._universe_url_prefix = os.environ.get(
            "UNIVERSE_URL_PREFIX",
            "https://universe-converter.mesosphere.com/transform?url=")

        if not os.path.isdir(input_dir_path):
            raise Exception(
                "Provided package path is not a directory: {}".format(
                    input_dir_path))

        self._artifact_paths = []
        for artifact_path in artifact_paths:
            if not os.path.isfile(artifact_path):
                err = "Provided package path is not a file: {} (full list: {})".format(
                    artifact_path, artifact_paths)
                raise Exception(err)
            self._artifact_paths.append(artifact_path)

        s3_directory_url, self._http_directory_url = s3_urls_from_env(
            self._pkg_name)
        self._uploader = universe.S3Uploader(s3_directory_url, self._dry_run)
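Example #1 delegates URL construction to an s3_urls_from_env helper that is not shown here. Based on the environment variables handled inline in Examples #2 and #4 (S3_BUCKET, S3_DIR_PATH, S3_URL, ARTIFACT_DIR), a minimal sketch of such a helper might look like the following; the signature, defaults, and return order are assumptions rather than the repository's actual implementation.

import os
import random
import string
import time

def s3_urls_from_env(package_name):
    # Hypothetical sketch: derive (s3_directory_url, http_directory_url) from env vars,
    # mirroring the inline logic shown in Examples #2 and #4.
    s3_bucket = os.environ.get("S3_BUCKET", "infinity-artifacts")
    s3_dir_path = os.environ.get("S3_DIR_PATH", "autodelete7d")
    dir_name = "{}-{}".format(
        time.strftime("%Y%m%d-%H%M%S"),
        "".join(random.choice(string.ascii_letters + string.digits) for _ in range(16)))
    s3_directory_url = os.environ.get(
        "S3_URL",
        "s3://{}/{}/{}/{}".format(s3_bucket, s3_dir_path, package_name, dir_name))
    http_directory_url = os.environ.get(
        "ARTIFACT_DIR",
        "https://{}.s3.amazonaws.com/{}/{}/{}".format(
            s3_bucket, s3_dir_path, package_name, dir_name))
    return s3_directory_url, http_directory_url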
Example #2
    def __init__(self, package_name, package_version, input_dir_path, artifact_paths):
        self._dry_run = os.environ.get("DRY_RUN", "")
        self._pkg_name = package_name
        self._pkg_version = package_version
        self._artifact_paths = artifact_paths
        self._input_dir_path = input_dir_path
        self._directory_url = "."

        if not os.path.isdir(input_dir_path):
            raise Exception("Provided package path is not a directory: {}".format(input_dir_path))

        s3_bucket = os.environ.get("S3_BUCKET", "infinity-artifacts")
        logger.info("Using artifact bucket: {}".format(s3_bucket))
        self._s3_bucket = s3_bucket

        s3_dir_path = os.environ.get("S3_DIR_PATH")
        if not s3_dir_path:
            s3_dir_path = os.path.join(
                "autodelete7d",
                "{}-{}".format(
                    time.strftime("%Y%m%d-%H%M%S"),
                    "".join(
                        [random.choice(string.ascii_letters + string.digits) for _ in range(16)]
                    ),
                ),
            )

        # Sample S3 file url:
        # Dev : infinity-artifacts/autodelete7d/20160815-134747-S6vxd0gRQBw43NNy/kafka/stub-universe/kafka-stub-universe.dcos
        # Release : infinity-artifacts/permanent/kafka/1.2.3/kafka-1.2.3.dcos
        s3_directory_url = os.environ.get(
            "S3_URL",
            "s3://{}/{}/{}/{}".format(s3_bucket, s3_dir_path, package_name, package_version),
        )
        self._uploader = universe.S3Uploader(s3_directory_url, self._dry_run)
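With no S3_* environment variables set, Example #2 stages artifacts under a timestamped, auto-deleting prefix. A quick illustration of the resulting layout, using made-up values:

import os

# Hypothetical values for illustration only:
s3_bucket = "infinity-artifacts"                      # default when S3_BUCKET is unset
s3_dir_path = os.path.join("autodelete7d", "20160815-134747-S6vxd0gRQBw43NNy")
package_name, package_version = "kafka", "1.2.3"
print("s3://{}/{}/{}/{}".format(s3_bucket, s3_dir_path, package_name, package_version))
# -> s3://infinity-artifacts/autodelete7d/20160815-134747-S6vxd0gRQBw43NNy/kafka/1.2.3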
Example #3
    def __init__(self,
                 package_version,
                 stub_universe_url,
                 http_release_server=os.environ.get(
                     'HTTP_RELEASE_SERVER',
                     'https://downloads.mesosphere.com'),
                 s3_release_bucket=os.environ.get('S3_RELEASE_BUCKET',
                                                  'downloads.mesosphere.io'),
                 release_docker_image=os.environ.get('RELEASE_DOCKER_IMAGE'),
                 release_dir_path=os.environ.get('RELEASE_DIR_PATH', ''),
                 beta_release=os.environ.get('BETA', 'False'),
                 upgrades_from=os.environ.get('UPGRADES_FROM', '')):
        self._dry_run = os.environ.get('DRY_RUN', '')
        self._force_upload = os.environ.get('FORCE_ARTIFACT_UPLOAD',
                                            '').lower() == 'true'
        self._beta_release = beta_release.lower() == 'true'

        self._stub_universe_pkg_name = self.get_package_name(stub_universe_url)
        self._pkg_name = self.apply_beta_prefix(self._stub_universe_pkg_name,
                                                self._beta_release)
        self._pkg_version = self.apply_beta_version(package_version,
                                                    self._beta_release)

        # universe converter will return an HTTP 400 error because we aren't a DC/OS cluster. get the raw file instead.
        self._stub_universe_url = left_trim(stub_universe_url,
                                            universe_converter_url_prefix)

        if not release_dir_path:
            # determine release artifact directory based on (adjusted) package name
            # assets for beta-foo should always be uploaded to a 'foo' directory (with a '-beta' version)
            release_dir_path = left_trim(self._pkg_name, 'beta-') + '/assets'
            log.info("Uploading assets for %s to %s", self._pkg_name,
                     release_dir_path)

        s3_directory_url = 's3://{}/{}/{}'.format(s3_release_bucket,
                                                  release_dir_path,
                                                  self._pkg_version)
        self._uploader = universe.S3Uploader(s3_directory_url, self._dry_run)
        self._pkg_manager = universe.PackageManager()

        self._http_directory_url = '{}/{}/{}'.format(http_release_server,
                                                     release_dir_path,
                                                     self._pkg_version)

        self._release_docker_image = release_docker_image or None
        self._upgrades_from = list(
            filter(None, map(str.strip, upgrades_from.split(','))))

        log.info('''###
Source URL:      {}
Package name:    {}
Package version: {}
Artifact output: {}
Upgrades from:   {}
###'''.format(self._stub_universe_url, self._pkg_name, self._pkg_version,
              self._http_directory_url, self._upgrades_from))
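Example #3 factors the name/version handling into helpers (left_trim, apply_beta_prefix, apply_beta_version, get_package_name) that are not shown. Judging from the equivalent inline logic in Example #6 below, minimal sketches could look like this; the names come from the call sites, but the bodies are assumptions.

def left_trim(value, prefix):
    # Strip a leading prefix if present (mirrors the manual slicing in Example #6).
    return value[len(prefix):] if value.startswith(prefix) else value

def apply_beta_prefix(pkg_name, beta_release):
    # In beta mode release 'foo' as 'beta-foo'; otherwise release 'beta-foo' as 'foo'.
    if beta_release:
        return pkg_name if pkg_name.startswith('beta-') else 'beta-' + pkg_name
    return left_trim(pkg_name, 'beta-')

def apply_beta_version(pkg_version, beta_release):
    # Append '-beta' in beta mode; reject a '-beta' version when beta mode is off.
    if beta_release:
        return pkg_version if pkg_version.endswith('-beta') else pkg_version + '-beta'
    if pkg_version.endswith('-beta'):
        raise Exception(
            'Requested package version {} ends with "-beta", but BETA mode is disabled.'.format(
                pkg_version))
    return pkg_version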
Example #4
    def __init__(
            self,
            package_name,
            package_version,
            input_dir_path,
            artifact_paths):
        self._dry_run = os.environ.get('DRY_RUN', '')
        self._pkg_name = package_name
        self._pkg_version = package_version
        self._input_dir_path = input_dir_path

        self._universe_url_prefix = os.environ.get(
            'UNIVERSE_URL_PREFIX',
            'https://universe-converter.mesosphere.com/transform?url=')

        if not os.path.isdir(input_dir_path):
            raise Exception('Provided package path is not a directory: {}'.format(input_dir_path))

        self._artifact_paths = []
        for artifact_path in artifact_paths:
            if not os.path.isfile(artifact_path):
                err = 'Provided artifact path is not a file: {} (full list: {})'.format(artifact_path, artifact_paths)
                raise Exception(err)
            self._artifact_paths.append(artifact_path)

        s3_bucket = os.environ.get('S3_BUCKET')
        if not s3_bucket:
            s3_bucket = 'infinity-artifacts'
        logger.info('Using artifact bucket: {}'.format(s3_bucket))

        s3_dir_path = os.environ.get('S3_DIR_PATH', 'autodelete7d')
        dir_name = '{}-{}'.format(
            time.strftime("%Y%m%d-%H%M%S"),
            ''.join([random.SystemRandom().choice(string.ascii_letters + string.digits) for i in range(16)]))

        # sample s3_directory: 'infinity-artifacts/autodelete7d/kafka/20160815-134747-S6vxd0gRQBw43NNy'
        s3_directory_url = os.environ.get(
            'S3_URL',
            's3://{}/{}/{}/{}'.format(
                s3_bucket,
                s3_dir_path,
                package_name,
                dir_name))
        self._uploader = universe.S3Uploader(s3_directory_url, self._dry_run)

        self._http_directory_url = os.environ.get(
            'ARTIFACT_DIR',
            'https://{}.s3.amazonaws.com/{}/{}/{}'.format(
                s3_bucket,
                s3_dir_path,
                package_name,
                dir_name))
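The s3:// upload URL and the https:// download URL in Example #4 are meant to address the same bucket prefix. A small sanity check with hypothetical values makes that pairing explicit:

# Hypothetical values illustrating Example #4's URL pairing:
s3_bucket = "infinity-artifacts"
suffix = "autodelete7d/kafka/20160815-134747-S6vxd0gRQBw43NNy"

s3_directory_url = "s3://{}/{}".format(s3_bucket, suffix)
http_directory_url = "https://{}.s3.amazonaws.com/{}".format(s3_bucket, suffix)

# Both URLs share the same key prefix within the bucket:
assert s3_directory_url.split("/", 3)[3] == http_directory_url.split("/", 3)[3]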
Example #5
    def __init__(self, package_name, package_version, input_dir_path, artifact_paths):
        self._dry_run = os.environ.get("DRY_RUN", "")
        self._pkg_name = package_name
        self._pkg_version = package_version
        self._artifact_paths = artifact_paths
        self._input_dir_path = input_dir_path
        self._directory_url = "."

        if not os.path.isdir(input_dir_path):
            raise Exception("Provided package path is not a directory: {}".format(input_dir_path))

        s3_bucket = os.environ.get("S3_BUCKET", "infinity-artifacts")
        logger.info("Using artifact bucket: {}".format(s3_bucket))
        self._s3_bucket = s3_bucket

        s3_directory_url, _ = s3_urls_from_env(package_name)
        self._uploader = universe.S3Uploader(s3_directory_url, self._dry_run)
Example #6
    def __init__(self,
                 package_version,
                 stub_universe_url,
                 http_release_server=os.environ.get(
                     'HTTP_RELEASE_SERVER',
                     'https://downloads.mesosphere.com'),
                 s3_release_bucket=os.environ.get('S3_RELEASE_BUCKET',
                                                  'downloads.mesosphere.io'),
                 release_docker_image=os.environ.get('RELEASE_DOCKER_IMAGE'),
                 release_dir_path=os.environ.get('RELEASE_DIR_PATH', ''),
                 beta_release=os.environ.get('BETA', 'False')):
        self._dry_run = os.environ.get('DRY_RUN', '')
        self._force_upload = os.environ.get('FORCE_ARTIFACT_UPLOAD',
                                            '').lower() == 'true'
        self._beta_release = beta_release.lower() == 'true'

        name_match = re.match(r'.+/stub-universe-(.+)\.(json)$',
                              stub_universe_url)
        if not name_match:
            raise Exception(
                'Unable to extract package name from stub universe URL. ' +
                'Expected filename of form "stub-universe-[pkgname].json"')

        self._stub_universe_pkg_name = name_match.group(1)
        # update package name to reflect beta status (e.g. release 'beta-foo' as non-beta 'foo'):
        if self._beta_release:
            if self._stub_universe_pkg_name.startswith('beta-'):
                self._pkg_name = self._stub_universe_pkg_name
            else:
                self._pkg_name = 'beta-' + self._stub_universe_pkg_name
        else:
            if self._stub_universe_pkg_name.startswith('beta-'):
                self._pkg_name = self._stub_universe_pkg_name[len('beta-'):]
            else:
                self._pkg_name = self._stub_universe_pkg_name

        # update package version to reflect beta status
        if self._beta_release:
            if package_version.endswith('-beta'):
                self._pkg_version = package_version
            else:
                # helpfully add a '-beta' since the user likely just forgot:
                self._pkg_version = package_version + '-beta'
        else:
            # complain if version has '-beta' suffix but BETA mode was disabled:
            if package_version.endswith('-beta'):
                raise Exception(
                    'Requested package version {} ends with "-beta", but BETA mode is disabled. '
                    'Either remove the "-beta" suffix, or enable BETA mode.'.
                    format(package_version))
            else:
                self._pkg_version = package_version

        if stub_universe_url.startswith(universe_converter_url_prefix):
            # universe converter will return an HTTP 400 error because we aren't a DC/OS cluster. get the raw file instead.
            self._stub_universe_url = stub_universe_url[
                len(universe_converter_url_prefix):]
        else:
            self._stub_universe_url = stub_universe_url

        if not release_dir_path:
            # determine release artifact directory based on (adjusted) package name
            artifact_package_name = self._pkg_name
            if artifact_package_name.startswith('beta-'):
                # assets for beta-foo should always be uploaded to a 'foo' directory (with a '-beta' version)
                artifact_package_name = artifact_package_name[len('beta-'):]
            release_dir_path = artifact_package_name + '/assets'

        s3_directory_url = 's3://{}/{}/{}'.format(s3_release_bucket,
                                                  release_dir_path,
                                                  self._pkg_version)
        self._uploader = universe.S3Uploader(self._pkg_name, s3_directory_url,
                                             self._dry_run)
        self._pkg_manager = universe.PackageManager()

        self._http_directory_url = '{}/{}/{}'.format(http_release_server,
                                                     release_dir_path,
                                                     self._pkg_version)

        self._release_docker_image = release_docker_image or None

        log.info('''###
Source URL:      {}
Package name:    {}
Package version: {}
Artifact output: {}
###'''.format(self._stub_universe_url, self._pkg_name, self._pkg_version,
              self._http_directory_url))
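The regular expression in Example #6 only needs the package name embedded in the stub-universe filename. A quick check of the pattern against a typical URL (the URL itself is a made-up example):

import re

url = 'https://example.com/artifacts/stub-universe-kafka.json'
match = re.match(r'.+/stub-universe-(.+)\.(json)$', url)
assert match and match.group(1) == 'kafka'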