Example #1
    def upload(self):
        with tempfile.TemporaryDirectory() as scratch:
            builder = universe.UniversePackageBuilder(
                universe.Package(self._pkg_name, self._pkg_version),
                universe.PackageManager(dry_run=self._dry_run),
                self._input_dir_path,
                self._directory_url,
                self._artifact_paths,
                self._dry_run,
            )
            for filename, content in builder.build_package_files().items():
                with open(os.path.join(scratch, filename), "w") as f:
                    f.write(content)

            for artifact in self._artifact_paths:
                filename = os.path.basename(artifact)
                shutil.copy2(src=artifact, dst=os.path.join(scratch, filename))

            bundle = migrate_and_build(scratch)
            self._uploader.upload(bundle)
            bundle_url_s3 = os.path.join(
                self._uploader.get_s3_directory(), os.path.basename(bundle)
            )
            bundle_url_http = bundle_url_s3.replace(
                "s3://{}".format(self._s3_bucket),
                "https://{}.s3.amazonaws.com".format(self._s3_bucket),
            )
            logger.info("---")
            logger.info("[S3 URL] DCOS BUNDLE: {}".format(bundle_url_s3))
            logger.info("DCOS BUNDLE: {}".format(bundle_url_http))
            logger.info("---")
Example #2
    def upload(self):
        """Generates a container if not exists, then uploads artifacts and a new stub universe to that container"""
        version = Version(release_version=0, package_version=self._pkg_version)
        package_info = universe.Package(name=self._pkg_name, version=version)
        package_manager = universe.PackageManager(dry_run=self._dry_run)
        builder = universe.UniversePackageBuilder(
            package_info,
            package_manager,
            self._input_dir_path,
            "https://{}.blob.core.windows.net/{}".format(
                self._az_storage_account, self._az_container_name),
            self._artifact_paths,
            self._dry_run,
        )
        universe_path = builder.build_package()

        # upload universe package definition first and get its URL
        self._uploader.upload(
            universe_path,
            content_type="application/vnd.dcos.universe.repo+json;charset=utf-8"
        )

        # Get the stub-universe.json file URL from Azure CLI
        universe_url = subprocess.check_output([
            "az", "storage", "blob", "url", "-o", "tsv",
            "--account-name", self._az_storage_account,
            "--container-name", self._az_container_name,
            "--name", os.path.basename(universe_path),
        ]).decode('ascii').rstrip()

        logger.info("Uploading {} artifacts:".format(len(
            self._artifact_paths)))

        logger.info("---")
        logger.info("STUB UNIVERSE: {}".format(universe_url))
        logger.info("---")

        for path in self._artifact_paths:
            self._uploader.upload(path)

        self._spam_universe_url(universe_url)

        logger.info("---")
        logger.info("(Re)install your package using the following commands:")
        logger.info("dcos package uninstall {}".format(self._pkg_name))
        logger.info("\n- - - -\nFor 1.9 or older clusters only")
        logger.info(
            "dcos node ssh --master-proxy --leader " +
            '"docker run mesosphere/janitor /janitor.py -r {0}-role -p {0}-principal -z dcos-service-{0}"'
            .format(self._pkg_name))
        logger.info("- - - -\n")
        logger.info("dcos package repo remove {}-azure".format(self._pkg_name))
        logger.info("dcos package repo add --index=0 {}-azure '{}'".format(
            self._pkg_name, universe_url))
        logger.info("dcos package install --yes {}".format(self._pkg_name))

        return universe_url
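Since the builder is handed the container's base URL in the standard https://{account}.blob.core.windows.net/{container} form, the blob URL can also be derived without shelling out to the Azure CLI. A sketch under the assumption that the container allows anonymous reads (no SAS token needed); all values are hypothetical:

import os

az_storage_account = "mystorageaccount"            # hypothetical
az_container_name = "universe"                     # hypothetical
universe_path = "stub-universe-hello-world.json"   # as returned by build_package()

universe_url = "https://{}.blob.core.windows.net/{}/{}".format(
    az_storage_account, az_container_name, os.path.basename(universe_path))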
Example #3
    def __init__(self,
                 package_version,
                 stub_universe_url,
                 http_release_server=os.environ.get(
                     'HTTP_RELEASE_SERVER',
                     'https://downloads.mesosphere.com'),
                 s3_release_bucket=os.environ.get('S3_RELEASE_BUCKET',
                                                  'downloads.mesosphere.io'),
                 release_docker_image=os.environ.get('RELEASE_DOCKER_IMAGE'),
                 release_dir_path=os.environ.get('RELEASE_DIR_PATH', ''),
                 beta_release=os.environ.get('BETA', 'False'),
                 upgrades_from=os.environ.get('UPGRADES_FROM', '')):
        self._dry_run = os.environ.get('DRY_RUN', '')
        self._force_upload = os.environ.get('FORCE_ARTIFACT_UPLOAD',
                                            '').lower() == 'true'
        self._beta_release = beta_release.lower() == 'true'

        self._stub_universe_pkg_name = self.get_package_name(stub_universe_url)
        self._pkg_name = self.apply_beta_prefix(self._stub_universe_pkg_name,
                                                self._beta_release)
        self._pkg_version = self.apply_beta_version(package_version,
                                                    self._beta_release)

        # The universe converter will return an HTTP 400 error because we aren't a DC/OS cluster, so fetch the raw file instead.
        self._stub_universe_url = left_trim(stub_universe_url,
                                            universe_converter_url_prefix)

        if not release_dir_path:
            # determine release artifact directory based on (adjusted) package name
            # assets for beta-foo should always be uploaded to a 'foo' directory (with a '-beta' version)
            release_dir_path = left_trim(self._pkg_name, 'beta-') + '/assets'
            log.info("Uploading assets for %s to %s", self._pkg_name,
                     release_dir_path)

        s3_directory_url = 's3://{}/{}/{}'.format(s3_release_bucket,
                                                  release_dir_path,
                                                  self._pkg_version)
        self._uploader = universe.S3Uploader(s3_directory_url, self._dry_run)
        self._pkg_manager = universe.PackageManager()

        self._http_directory_url = '{}/{}/{}'.format(http_release_server,
                                                     release_dir_path,
                                                     self._pkg_version)

        self._release_docker_image = release_docker_image or None
        self._upgrades_from = list(
            filter(None, map(str.strip, upgrades_from.split(','))))

        log.info('''###
Source URL:      {}
Package name:    {}
Package version: {}
Artifact output: {}
Upgrades from:   {}
###'''.format(self._stub_universe_url, self._pkg_name, self._pkg_version,
              self._http_directory_url, self._upgrades_from))
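The constructor pulls most of its configuration from environment variables, falling back to defaults, and parses boolean flags by lowercase string comparison. The pattern in isolation (values come from whatever the environment provides):

import os

http_release_server = os.environ.get('HTTP_RELEASE_SERVER',
                                     'https://downloads.mesosphere.com')
# flags arrive as strings, so 'True', 'true', and 'TRUE' all enable them:
beta_release = os.environ.get('BETA', 'False').lower() == 'true'
force_upload = os.environ.get('FORCE_ARTIFACT_UPLOAD', '').lower() == 'true'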
Example #4
    def upload(self):
        """generates a unique directory, then uploads artifacts and a new stub universe to that directory"""
        version = Version(release_version=0, package_version=self._pkg_version)
        package_info = universe.Package(name=self._pkg_name, version=version)
        package_manager = universe.PackageManager(dry_run=self._dry_run)
        builder = universe.UniversePackageBuilder(
            package_info,
            package_manager,
            self._input_dir_path,
            self._http_directory_url,
            self._artifact_paths,
            self._dry_run,
        )
        universe_path = builder.build_package()

        # upload universe package definition first and get its URL
        self._uploader.upload(
            universe_path, content_type="application/vnd.dcos.universe.repo+json;charset=utf-8"
        )
        universe_url = (
            self._universe_url_prefix
            + self._http_directory_url
            + "/"
            + os.path.basename(universe_path)
        )
        logger.info("---")
        logger.info("STUB UNIVERSE: {}".format(universe_url))
        logger.info("---")
        logger.info("Uploading {} artifacts:".format(len(self._artifact_paths)))

        for path in self._artifact_paths:
            self._uploader.upload(path)

        self._spam_universe_url(universe_url)

        # print to stdout, while the rest is all stderr:
        print(universe_url)

        logger.info("---")
        logger.info("(Re)install your package using the following commands:")
        logger.info("dcos package uninstall {}".format(self._pkg_name))
        logger.info("\n- - - -\nFor 1.9 or older clusters only")
        logger.info(
            "dcos node ssh --master-proxy --leader "
            + '"docker run mesosphere/janitor /janitor.py -r {0}-role -p {0}-principal -z dcos-service-{0}"'.format(
                self._pkg_name
            )
        )
        logger.info("- - - -\n")
        logger.info("dcos package repo remove {}-aws".format(self._pkg_name))
        logger.info(
            "dcos package repo add --index=0 {}-aws {}".format(self._pkg_name, universe_url)
        )
        logger.info("dcos package install --yes {}".format(self._pkg_name))

        return universe_url
Example #5
    def upload(self):
        '''generates a unique directory, then uploads artifacts and a new stub universe to that directory'''
        package_info = universe.Package(self._pkg_name, self._pkg_version)
        package_manager = universe.PackageManager()
        builder = universe.UniversePackageBuilder(package_info,
                                                  package_manager,
                                                  self._input_dir_path,
                                                  self._http_directory,
                                                  self._artifact_paths)
        try:
            universe_path = builder.build_package()
        except Exception as e:
            err = 'Failed to create stub universe: {}'.format(str(e))
            self._github_updater.update('error', err)
            raise

        # upload universe package definition first and get its URL
        universe_url = self._universe_url_prefix + self._upload_artifact(
            universe_path,
            content_type='application/vnd.dcos.universe.repo+json;charset=utf-8'
        )
        logger.info('---')
        logger.info('STUB UNIVERSE: {}'.format(universe_url))
        logger.info('---')
        logger.info('Uploading {} artifacts:'.format(len(
            self._artifact_paths)))

        for path in self._artifact_paths:
            self._upload_artifact(path)

        self._spam_universe_url(universe_url)

        # print to stdout, while the rest is all stderr:
        print(universe_url)

        logger.info('---')
        logger.info('(Re)install your package using the following commands:')
        logger.info('dcos package uninstall {}'.format(self._pkg_name))
        logger.info('\n- - - -\nFor 1.9 or older clusters only')
        logger.info(
            'dcos node ssh --master-proxy --leader ' +
            '"docker run mesosphere/janitor /janitor.py -r {0}-role -p {0}-principal -z dcos-service-{0}"'
            .format(self._pkg_name))
        logger.info('- - - -\n')
        logger.info('dcos package repo remove {}-aws'.format(self._pkg_name))
        logger.info('dcos package repo add --index=0 {}-aws {}'.format(
            self._pkg_name, universe_url))
        logger.info('dcos package install --yes {}'.format(self._pkg_name))

        return universe_url
Example #6
def main(argv):
    # need at least a package name, version, and template path (argv[1:4]):
    if len(argv) < 4:
        print_help(argv)
        return 1
    # the package name:
    package_name = argv[1]
    # the package version:
    package_version = argv[2]
    # local path where the package template is located:
    package_dir_path = argv[3].rstrip('/')
    # artifact paths (to copy along with stub universe)
    artifact_paths = argv[4:]
    logger.info('''###
Package:         {}
Version:         {}
Template path:   {}
Artifacts:
{}
###'''.format(package_name, package_version, package_dir_path,
              '\n'.join(['- {}'.format(path) for path in artifact_paths])))

    publisher = FilesPublisher(package_name, package_version, package_dir_path,
                               artifact_paths)
    package_info = universe.Package(publisher._pkg_name,
                                    publisher._pkg_version)
    package_manager = universe.PackageManager()
    publisher._package_builder = universe.UniversePackageBuilder(
        package_info, package_manager, publisher._input_dir_path,
        publisher._http_url_root, publisher._artifact_paths)
    universe_url = publisher.build(publisher._http_url_root)

    logger.info('---')
    logger.info('(Re)install your package using the following commands:')
    logger.info('dcos package uninstall {}'.format(package_name))
    logger.info('\n- - - -\nFor 1.9 or older clusters only')
    logger.info(
        'dcos node ssh --master-proxy --leader ' +
        '"docker run mesosphere/janitor /janitor.py -r {0}-role -p {0}-principal -z dcos-service-{0}"'
        .format(package_name))
    logger.info('- - - -\n')

    logger.info('dcos package repo remove {}-local'.format(package_name))
    logger.info('dcos package repo add --index=0 {}-local {}'.format(
        package_name, universe_url))

    logger.info('dcos package install --yes {}'.format(package_name))
    return 0
Example #7
def main(argv):
    parser = argparse.ArgumentParser(description=DESCRIPTION_STRING,
                                     epilog=EPILOGUE_STRING)
    parser.add_argument('package_name', type=str, help='The package name')
    parser.add_argument('package_version',
                        type=str,
                        help='The package version string')
    parser.add_argument(
        'package_dir_path',
        type=str,
        help='The local path where the package template is located')
    parser.add_argument(
        'upload_dir_url',
        type=str,
        help='The URL of the directory where artifacts are located (S3, etc)')
    parser.add_argument('artifact_paths',
                        type=str,
                        nargs='+',
                        help='The artifact paths (for sha256 as needed)')

    args = parser.parse_args(argv)

    logger.info('''###
Package:         {} (version {})
Template path:   {}
Upload base dir: {}
Artifacts:       {}
###'''.format(args.package_name, args.package_version, args.package_dir_path,
              args.upload_dir_url, ','.join(args.artifact_paths)))

    package_info = universe.Package(args.package_name, args.package_version)
    package_manager = universe.PackageManager()
    builder = universe.UniversePackageBuilder(package_info, package_manager,
                                              args.package_dir_path,
                                              args.upload_dir_url,
                                              args.artifact_paths)
    package_path = builder.build_package()
    if not package_path:
        logger.error("Error building stub universe")
        return -1
    logger.info('---')
    logger.info('Built stub universe package:')
    # print the package location as stdout (the rest of the file is stderr):
    print(package_path)
    return 0
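Because parse_args(argv) is given the argument list directly, main() expects argv without the leading program name. A typical entry point would look like this (a sketch; the script's actual bottom matter is not shown above):

import sys

if __name__ == '__main__':
    sys.exit(main(sys.argv[1:]))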
Example #8
    def upload(self):
        '''generates a unique directory, then uploads artifacts and a new stub universe to that directory'''
        package_info = universe.Package(self._pkg_name, self._pkg_version)
        package_manager = universe.PackageManager(dry_run=self._dry_run)
        builder = universe.UniversePackageBuilder(
            package_info, package_manager,
            self._input_dir_path, self._http_directory_url, self._artifact_paths,
            self._dry_run)
        universe_path = builder.build_package()

        # upload universe package definition first and get its URL
        self._uploader.upload(
            universe_path,
            content_type='application/vnd.dcos.universe.repo+json;charset=utf-8')
        universe_url = self._universe_url_prefix + self._http_directory_url + '/' + os.path.basename(universe_path)
        logger.info('---')
        logger.info('STUB UNIVERSE: {}'.format(universe_url))
        logger.info('---')
        with open("stub-universe", "a") as stub_file:
                stub_file.write(universe_url)
        logger.info('Uploading {} artifacts:'.format(len(self._artifact_paths)))

        for path in self._artifact_paths:
            self._uploader.upload(path)

        self._spam_universe_url(universe_url)

        # print to stdout, while the rest is all stderr:
        print(universe_url)

        logger.info('---')
        logger.info('(Re)install your package using the following commands:')
        logger.info('dcos package uninstall {}'.format(self._pkg_name))
        logger.info('\n- - - -\nFor 1.9 or older clusters only')
        logger.info('dcos node ssh --master-proxy --leader ' +
                    '"docker run mesosphere/janitor /janitor.py -r {0}-role -p {0}-principal -z dcos-service-{0}"'.format(self._pkg_name))
        logger.info('- - - -\n')
        logger.info('dcos package repo remove {}-aws'.format(self._pkg_name))
        logger.info('export STUB_UNIVERSE_URL=\'{}\''.format(universe_url))
        logger.info('dcos package repo add --index=0 {}-aws $STUB_UNIVERSE_URL'.format(self._pkg_name))
        logger.info('dcos package install --yes {}'.format(self._pkg_name))

        return universe_url
Example #9
    def _update_package_json(self, pkgdir):
        '''Updates the package.json definition to contain the desired version string,
        and updates the package to reflect any beta or non-beta status as necessary.
        '''
        package_file_name = os.path.join(pkgdir, 'package.json')
        with open(package_file_name) as f:
            package_json = json.load(f,
                                     object_pairs_hook=collections.OrderedDict)
        orig_package_json = package_json.copy()

        # For beta releases, always clear 'selected'
        if self._beta_release:
            package_json['selected'] = False

        # Update package's name to reflect any changes due to BETA=on/off
        package_json['name'] = self._pkg_name
        # Update package's version to reflect the user's input
        package_json['version'] = self._pkg_version
        # Update package's upgradesFrom/downgradesTo to reflect any package name changes
        # due to enabling or disabling a beta bit.
        if self._stub_universe_pkg_name != self._pkg_name:
            last_release = universe.PackageManager().get_latest(self._pkg_name)
            if last_release is None:
                # nothing to upgrade from
                package_json['upgradesFrom'] = []
                package_json['downgradesTo'] = []
            else:
                package_json['upgradesFrom'] = [last_release.get_version()]
                package_json['downgradesTo'] = [last_release.get_version()]

        logger.info('Updated package.json:')
        logger.info('\n'.join(
            difflib.ndiff(
                json.dumps(orig_package_json, indent=4).split('\n'),
                json.dumps(package_json, indent=4).split('\n'))))

        # Update package.json with changes:
        with open(package_file_name, 'w') as f:
            json.dump(package_json, f, indent=4)
            f.write('\n')
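Loading with object_pairs_hook=collections.OrderedDict preserves the original key order, so the before/after diff printed via difflib.ndiff lines up cleanly. The read-modify-diff pattern in isolation (file name and edit are hypothetical):

import collections
import difflib
import json

with open('package.json') as f:
    package_json = json.load(f, object_pairs_hook=collections.OrderedDict)
orig_lines = json.dumps(package_json, indent=4).split('\n')

package_json['version'] = '1.2.3-beta'  # hypothetical edit
new_lines = json.dumps(package_json, indent=4).split('\n')

# lines prefixed with '-'/'+' show what changed; unprefixed lines are context:
print('\n'.join(difflib.ndiff(orig_lines, new_lines)))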
Example #10
    def launch_http(self):
        # kill any prior matching process
        procname = "publish_httpd_{}.py".format(self._pkg_name)
        try:
            subprocess.check_call("killall -9 {}".format(procname).split())
            logger.info("Killed previous HTTP process(es): {}".format(procname))
        except Exception:
            logger.info("No previous HTTP process found: {}".format(procname))

        if self._http_port == 0:
            # hack: grab/release a suitable ephemeral port and hope nobody steals it in the meantime
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            sock.bind((self._http_host, 0))
            port = sock.getsockname()[1]
            sock.close()
        else:
            port = self._http_port

        http_url_root = "http://{}:{}".format(self._http_host, port)

        version = Version(release_version=0, package_version=self._pkg_version)
        package_info = universe.Package(name=self._pkg_name, version=version)
        package_manager = universe.PackageManager()
        self._package_builder = universe.UniversePackageBuilder(
            package_info, package_manager, self._input_dir_path, http_url_root, self._artifact_paths
        )

        # hack: write httpd script then run it directly
        httpd_py_content = """#!/usr/bin/env python3
import os, socketserver
from http.server import SimpleHTTPRequestHandler
rootdir = '{}'
host = '{}'
port = {}

class CustomTypeHandler(SimpleHTTPRequestHandler):
    def __init__(self, req, client_addr, server):
        SimpleHTTPRequestHandler.__init__(self, req, client_addr, server)
    def guess_type(self, path):
        if path.endswith('.json'):
            return 'application/vnd.dcos.universe.repo+json;charset=utf-8'
        return SimpleHTTPRequestHandler.guess_type(self, path)

os.chdir(rootdir)
httpd = socketserver.TCPServer((host, port), CustomTypeHandler)
print('Serving %s at http://%s:%s' % (rootdir, host, port))
httpd.serve_forever()
""".format(
            self._http_dir, self._http_host, port
        )

        httpd_py_path = os.path.join(self._http_dir, procname)
        if not os.path.isdir(self._http_dir):
            os.makedirs(self._http_dir)
        with open(httpd_py_path, "w") as httpd_py_file:
            httpd_py_file.write(httpd_py_content)

        os.chmod(httpd_py_path, 0o744)
        logger.info("Launching HTTPD: {}".format(httpd_py_path))
        # run the server in the background, discarding its output:
        subprocess.Popen([httpd_py_path],
                         stdout=subprocess.DEVNULL,
                         stderr=subprocess.DEVNULL)

        return http_url_root
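The port-selection "hack" works by binding to port 0, which asks the OS for any free ephemeral port, then recording the assigned port and closing the socket. Isolated, the pattern looks like this (note the inherent race: another process could claim the port before the HTTP server starts):

import socket

sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.bind(('localhost', 0))    # port 0: let the OS pick a free port
port = sock.getsockname()[1]   # the port the OS actually assigned
sock.close()                   # released here, re-bound by the server later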
Example #11
    def __init__(self,
                 package_version,
                 stub_universe_url,
                 http_release_server=os.environ.get(
                     'HTTP_RELEASE_SERVER',
                     'https://downloads.mesosphere.com'),
                 s3_release_bucket=os.environ.get('S3_RELEASE_BUCKET',
                                                  'downloads.mesosphere.io'),
                 release_docker_image=os.environ.get('RELEASE_DOCKER_IMAGE'),
                 release_dir_path=os.environ.get('RELEASE_DIR_PATH', ''),
                 beta_release=os.environ.get('BETA', 'False')):
        self._dry_run = os.environ.get('DRY_RUN', '')
        self._force_upload = os.environ.get('FORCE_ARTIFACT_UPLOAD',
                                            '').lower() == 'true'
        self._beta_release = beta_release.lower() == 'true'

        name_match = re.match(r'.+/stub-universe-(.+)\.json$',
                              stub_universe_url)
        if not name_match:
            raise Exception(
                'Unable to extract package name from stub universe URL. ' +
                'Expected filename of form "stub-universe-[pkgname].json"')

        self._stub_universe_pkg_name = name_match.group(1)
        # update package name to reflect beta status (e.g. release 'beta-foo' as non-beta 'foo'):
        if self._beta_release:
            if self._stub_universe_pkg_name.startswith('beta-'):
                self._pkg_name = self._stub_universe_pkg_name
            else:
                self._pkg_name = 'beta-' + self._stub_universe_pkg_name
        else:
            if self._stub_universe_pkg_name.startswith('beta-'):
                self._pkg_name = self._stub_universe_pkg_name[len('beta-'):]
            else:
                self._pkg_name = self._stub_universe_pkg_name

        # update package version to reflect beta status
        if self._beta_release:
            if package_version.endswith('-beta'):
                self._pkg_version = package_version
            else:
                # helpfully add a '-beta' since the user likely just forgot:
                self._pkg_version = package_version + '-beta'
        else:
            # complain if version has '-beta' suffix but BETA mode was disabled:
            if package_version.endswith('-beta'):
                raise Exception(
                    'Requested package version {} ends with "-beta", but BETA mode is disabled. '
                    'Either remove the "-beta" suffix, or enable BETA mode.'.
                    format(package_version))
            else:
                self._pkg_version = package_version

        if stub_universe_url.startswith(universe_converter_url_prefix):
            # The universe converter will return an HTTP 400 error because we aren't a DC/OS cluster, so fetch the raw file instead.
            self._stub_universe_url = stub_universe_url[
                len(universe_converter_url_prefix):]
        else:
            self._stub_universe_url = stub_universe_url

        if not release_dir_path:
            # determine release artifact directory based on (adjusted) package name
            artifact_package_name = self._pkg_name
            if artifact_package_name.startswith('beta-'):
                # assets for beta-foo should always be uploaded to a 'foo' directory (with a '-beta' version)
                artifact_package_name = artifact_package_name[len('beta-'):]
            release_dir_path = artifact_package_name + '/assets'

        s3_directory_url = 's3://{}/{}/{}'.format(s3_release_bucket,
                                                  release_dir_path,
                                                  self._pkg_version)
        self._uploader = universe.S3Uploader(self._pkg_name, s3_directory_url,
                                             self._dry_run)
        self._pkg_manager = universe.PackageManager()

        self._http_directory_url = '{}/{}/{}'.format(http_release_server,
                                                     release_dir_path,
                                                     self._pkg_version)

        self._release_docker_image = release_docker_image or None

        log.info('''###
Source URL:      {}
Package name:    {}
Package version: {}
Artifact output: {}
###'''.format(self._stub_universe_url, self._pkg_name, self._pkg_version,
              self._http_directory_url))
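The package name is recovered purely from the stub universe URL's filename. A quick check of the regex against a hypothetical URL:

import re

url = 'https://example.com/autodelete7d/stub-universe-hello-world.json'  # hypothetical
m = re.match(r'.+/stub-universe-(.+)\.json$', url)
assert m is not None
assert m.group(1) == 'hello-world'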