def upload(self):
    """Assemble package files and artifacts in a scratch directory, build a
    DC/OS bundle from them, and upload the bundle to S3.

    Logs both the s3:// and https:// URLs of the uploaded bundle.
    """
    with tempfile.TemporaryDirectory() as scratch:
        pkg = universe.Package(self._pkg_name, self._pkg_version)
        manager = universe.PackageManager(dry_run=self._dry_run)
        builder = universe.UniversePackageBuilder(
            pkg,
            manager,
            self._input_dir_path,
            self._directory_url,
            self._artifact_paths,
            self._dry_run,
        )
        # Materialize each generated package file inside the scratch dir.
        for name, body in builder.build_package_files().items():
            with open(os.path.join(scratch, name), "w") as out:
                out.write(body)
        # Copy the artifacts alongside the generated package files.
        for artifact in self._artifact_paths:
            dest = os.path.join(scratch, os.path.basename(artifact))
            shutil.copy2(src=artifact, dst=dest)
        bundle = migrate_and_build(scratch)
        # NOTE(review): upload happens before the TemporaryDirectory is torn
        # down — presumably the bundle file lives under `scratch`; confirm.
        self._uploader.upload(bundle)
        bundle_url_s3 = os.path.join(
            self._uploader.get_s3_directory(), os.path.basename(bundle)
        )
        # Derive the public HTTPS URL from the s3:// URL.
        bundle_url_http = bundle_url_s3.replace(
            "s3://{}".format(self._s3_bucket),
            "https://{}.s3.amazonaws.com".format(self._s3_bucket),
        )
        logger.info("---")
        logger.info("[S3 URL] DCOS BUNDLE: {}".format(bundle_url_s3))
        logger.info("DCOS BUNDLE: {}".format(bundle_url_http))
        logger.info("---")
def upload(self):
    """Generates a container if not exists, then uploads artifacts and a new
    stub universe to that container.

    Returns:
        The URL of the uploaded stub-universe.json file.
    """
    version = Version(release_version=0, package_version=self._pkg_version)
    package_info = universe.Package(name=self._pkg_name, version=version)
    package_manager = universe.PackageManager(dry_run=self._dry_run)
    builder = universe.UniversePackageBuilder(
        package_info,
        package_manager,
        self._input_dir_path,
        "https://{}.blob.core.windows.net/{}".format(
            self._az_storage_account, self._az_container_name),
        self._artifact_paths,
        self._dry_run,
    )
    universe_path = builder.build_package()
    # upload universe package definition first and get its URL
    self._uploader.upload(
        universe_path,
        content_type="application/vnd.dcos.universe.repo+json;charset=utf-8"
    )
    # Get the stub-universe.json file URL from Azure CLI
    universe_url = subprocess.check_output(
        "az storage blob url -o tsv --account-name {} --container-name {} --name {}"
        .format(self._az_storage_account, self._az_container_name,
                os.path.basename(universe_path))
        .split()
    ).decode('ascii').rstrip()
    logger.info("Uploading {} artifacts:".format(len(
        self._artifact_paths)))
    logger.info("---")
    logger.info("STUB UNIVERSE: {}".format(universe_url))
    logger.info("---")
    for path in self._artifact_paths:
        self._uploader.upload(path)
    self._spam_universe_url(universe_url)
    logger.info("---")
    logger.info("(Re)install your package using the following commands:")
    logger.info("dcos package uninstall {}".format(self._pkg_name))
    logger.info("\n- - - -\nFor 1.9 or older clusters only")
    logger.info(
        "dcos node ssh --master-proxy --leader " +
        '"docker run mesosphere/janitor /janitor.py -r {0}-role -p {0}-principal -z dcos-service-{0}"'
        .format(self._pkg_name))
    logger.info("- - - -\n")
    logger.info("dcos package repo remove {}-azure".format(self._pkg_name))
    logger.info("dcos package repo add --index=0 {}-azure '{}'".format(
        self._pkg_name, universe_url))
    logger.info("dcos package install --yes {}".format(self._pkg_name))
    # BUG FIX: the original ended with a bare `return` followed by a dangling
    # `universe_url` expression on the next line, so callers received None.
    return universe_url
def upload(self):
    """generates a unique directory, then uploads artifacts and a new stub
    universe to that directory

    Returns the URL of the uploaded stub universe definition.
    """
    pkg_version = Version(release_version=0, package_version=self._pkg_version)
    pkg = universe.Package(name=self._pkg_name, version=pkg_version)
    manager = universe.PackageManager(dry_run=self._dry_run)
    builder = universe.UniversePackageBuilder(
        pkg,
        manager,
        self._input_dir_path,
        self._http_directory_url,
        self._artifact_paths,
        self._dry_run,
    )
    universe_path = builder.build_package()
    # The universe package definition goes up first so its URL can be shown
    # before the (potentially slow) artifact uploads.
    self._uploader.upload(
        universe_path,
        content_type="application/vnd.dcos.universe.repo+json;charset=utf-8"
    )
    universe_url = (
        self._universe_url_prefix
        + self._http_directory_url
        + "/"
        + os.path.basename(universe_path)
    )
    logger.info("---")
    logger.info("STUB UNIVERSE: {}".format(universe_url))
    logger.info("---")
    logger.info("Uploading {} artifacts:".format(len(self._artifact_paths)))
    for artifact_path in self._artifact_paths:
        self._uploader.upload(artifact_path)
    self._spam_universe_url(universe_url)
    # print to stdout, while the rest is all stderr:
    print(universe_url)
    logger.info("---")
    logger.info("(Re)install your package using the following commands:")
    logger.info("dcos package uninstall {}".format(self._pkg_name))
    logger.info("\n- - - -\nFor 1.9 or older clusters only")
    logger.info(
        "dcos node ssh --master-proxy --leader "
        + '"docker run mesosphere/janitor /janitor.py -r {0}-role -p {0}-principal -z dcos-service-{0}"'.format(
            self._pkg_name
        )
    )
    logger.info("- - - -\n")
    logger.info("dcos package repo remove {}-aws".format(self._pkg_name))
    logger.info(
        "dcos package repo add --index=0 {}-aws {}".format(self._pkg_name, universe_url)
    )
    logger.info("dcos package install --yes {}".format(self._pkg_name))
    return universe_url
def upload(self):
    '''generates a unique directory, then uploads artifacts and a new stub
    universe to that directory, returning the stub universe URL'''
    manager = universe.PackageManager()
    pkg = universe.Package(self._pkg_name, self._pkg_version)
    builder = universe.UniversePackageBuilder(pkg, manager,
                                              self._input_dir_path,
                                              self._http_directory,
                                              self._artifact_paths)
    try:
        universe_path = builder.build_package()
    except Exception as e:
        # Surface the failure on the GitHub status before re-raising.
        message = 'Failed to create stub universe: {}'.format(str(e))
        self._github_updater.update('error', message)
        raise

    # upload universe package definition first and get its URL
    universe_url = self._universe_url_prefix + self._upload_artifact(
        universe_path,
        content_type='application/vnd.dcos.universe.repo+json;charset=utf-8')
    logger.info('---')
    logger.info('STUB UNIVERSE: {}'.format(universe_url))
    logger.info('---')
    logger.info('Uploading {} artifacts:'.format(len(self._artifact_paths)))
    for artifact_path in self._artifact_paths:
        self._upload_artifact(artifact_path)
    self._spam_universe_url(universe_url)

    # print to stdout, while the rest is all stderr:
    print(universe_url)

    logger.info('---')
    logger.info('(Re)install your package using the following commands:')
    logger.info('dcos package uninstall {}'.format(self._pkg_name))
    logger.info('\n- - - -\nFor 1.9 or older clusters only')
    logger.info(
        'dcos node ssh --master-proxy --leader ' +
        '"docker run mesosphere/janitor /janitor.py -r {0}-role -p {0}-principal -z dcos-service-{0}"'
        .format(self._pkg_name))
    logger.info('- - - -\n')
    logger.info('dcos package repo remove {}-aws'.format(self._pkg_name))
    logger.info('dcos package repo add --index=0 {}-aws {}'.format(
        self._pkg_name, universe_url))
    logger.info('dcos package install --yes {}'.format(self._pkg_name))
    return universe_url
def main(argv):
    """CLI entry point: build a stub universe from local package files and
    print the install instructions.

    argv layout: [script, package_name, package_version, package_dir_path,
    artifact_path...]. Returns 0 on success, 1 on usage error.
    """
    # BUG FIX: argv[3] is accessed unconditionally below, so at least four
    # entries are required; the old `< 3` check let a 3-element argv raise
    # IndexError instead of printing help.
    if len(argv) < 4:
        print_help(argv)
        return 1
    # the package name:
    package_name = argv[1]
    # the package version:
    package_version = argv[2]
    # local path where the package template is located:
    package_dir_path = argv[3].rstrip('/')
    # artifact paths (to copy along with stub universe)
    artifact_paths = argv[4:]
    logger.info('''###
Package:         {}
Version:         {}
Template path:   {}
Artifacts:
{}
###'''.format(package_name, package_version, package_dir_path,
              '\n'.join(['- {}'.format(path) for path in artifact_paths])))
    publisher = FilesPublisher(package_name, package_version,
                               package_dir_path, artifact_paths)
    package_info = universe.Package(publisher._pkg_name,
                                    publisher._pkg_version)
    package_manager = universe.PackageManager()
    publisher._package_builder = universe.UniversePackageBuilder(
        package_info, package_manager, publisher._input_dir_path,
        publisher._http_url_root, publisher._artifact_paths)
    universe_url = publisher.build(publisher._http_url_root)
    logger.info('---')
    logger.info('(Re)install your package using the following commands:')
    logger.info('dcos package uninstall {}'.format(package_name))
    logger.info('\n- - - -\nFor 1.9 or older clusters only')
    logger.info(
        'dcos node ssh --master-proxy --leader ' +
        '"docker run mesosphere/janitor /janitor.py -r {0}-role -p {0}-principal -z dcos-service-{0}"'
        .format(package_name))
    logger.info('- - - -\n')
    logger.info('dcos package repo remove {}-local'.format(package_name))
    logger.info('dcos package repo add --index=0 {}-local {}'.format(
        package_name, universe_url))
    logger.info('dcos package install --yes {}'.format(package_name))
    return 0
def main(argv):
    """CLI entry point: parse arguments, build a stub universe package, and
    print its path to stdout. Returns 0 on success, -1 on build failure."""
    arg_parser = argparse.ArgumentParser(description=DESCRIPTION_STRING,
                                         epilog=EPILOGUE_STRING)
    arg_parser.add_argument('package_name', type=str,
                            help='The package name')
    arg_parser.add_argument('package_version', type=str,
                            help='The package version string')
    arg_parser.add_argument(
        'package_dir_path', type=str,
        help='The local path where the package template is located')
    arg_parser.add_argument(
        'upload_dir_url', type=str,
        help='The URL of the directory where artifacts are located (S3, etc)')
    arg_parser.add_argument('artifact_paths', type=str, nargs='+',
                            help='The artifact paths (for sha256 as needed)')
    args = arg_parser.parse_args(argv)
    logger.info('''###
Package:         {} (version {})
Template path:   {}
Upload base dir: {}
Artifacts:       {}
###'''.format(args.package_name, args.package_version,
              args.package_dir_path, args.upload_dir_url,
              ','.join(args.artifact_paths)))
    pkg = universe.Package(args.package_name, args.package_version)
    manager = universe.PackageManager()
    builder = universe.UniversePackageBuilder(pkg, manager,
                                              args.package_dir_path,
                                              args.upload_dir_url,
                                              args.artifact_paths)
    package_path = builder.build_package()
    if not package_path:
        logger.error("Error building stub universe")
        return -1
    logger.info('---')
    logger.info('Built stub universe package:')
    # print the package location as stdout (the rest of the file is stderr):
    print(package_path)
    return 0
def upload(self):
    '''generates a unique directory, then uploads artifacts and a new stub
    universe to that directory; returns the stub universe URL'''
    pkg = universe.Package(self._pkg_name, self._pkg_version)
    manager = universe.PackageManager(dry_run=self._dry_run)
    builder = universe.UniversePackageBuilder(
        pkg, manager, self._input_dir_path, self._http_directory_url,
        self._artifact_paths, self._dry_run)
    universe_path = builder.build_package()

    # upload universe package definition first and get its URL
    self._uploader.upload(
        universe_path,
        content_type='application/vnd.dcos.universe.repo+json;charset=utf-8')
    universe_url = (self._universe_url_prefix + self._http_directory_url
                    + '/' + os.path.basename(universe_path))
    logger.info('---')
    logger.info('STUB UNIVERSE: {}'.format(universe_url))
    logger.info('---')
    # Record the URL in a local marker file for downstream tooling.
    with open("stub-universe", "a") as stub_file:
        stub_file.write(universe_url)
    logger.info('Uploading {} artifacts:'.format(len(self._artifact_paths)))
    for artifact_path in self._artifact_paths:
        self._uploader.upload(artifact_path)
    self._spam_universe_url(universe_url)

    # print to stdout, while the rest is all stderr:
    print(universe_url)

    logger.info('---')
    logger.info('(Re)install your package using the following commands:')
    logger.info('dcos package uninstall {}'.format(self._pkg_name))
    logger.info('\n- - - -\nFor 1.9 or older clusters only')
    logger.info('dcos node ssh --master-proxy --leader ' +
                '"docker run mesosphere/janitor /janitor.py -r {0}-role -p {0}-principal -z dcos-service-{0}"'.format(self._pkg_name))
    logger.info('- - - -\n')
    logger.info('dcos package repo remove {}-aws'.format(self._pkg_name))
    logger.info('export STUB_UNIVERSE_URL=\'{}\''.format(universe_url))
    logger.info('dcos package repo add --index=0 {}-aws $STUB_UNIVERSE_URL'.format(self._pkg_name))
    logger.info('dcos package install --yes {}'.format(self._pkg_name))
    return universe_url
def launch_http(self):
    """Launch a local HTTP server that serves the package directory, after
    killing any previous instance for this package.

    Returns:
        The root URL ("http://host:port") of the launched server.
    """
    # kill any prior matching process
    procname = "publish_httpd_{}.py".format(self._pkg_name)
    try:
        subprocess.check_call("killall -9 {}".format(procname).split())
        logger.info("Killed previous HTTP process(es): {}".format(procname))
    except Exception:
        logger.info("No previous HTTP process found: {}".format(procname))
    if self._http_port == 0:
        # hack: grab/release a suitable ephemeral port and hope nobody steals it in the meantime
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.bind((self._http_host, 0))
        port = sock.getsockname()[1]
        sock.close()
    else:
        port = self._http_port
    http_url_root = "http://{}:{}".format(self._http_host, port)
    version = Version(release_version=0, package_version=self._pkg_version)
    package_info = universe.Package(name=self._pkg_name, version=version)
    package_manager = universe.PackageManager()
    self._package_builder = universe.UniversePackageBuilder(
        package_info, package_manager, self._input_dir_path, http_url_root, self._artifact_paths
    )
    # hack: write httpd script then run it directly
    httpd_py_content = """#!/usr/bin/env python3
import os, socketserver
from http.server import SimpleHTTPRequestHandler
rootdir = '{}'
host = '{}'
port = {}
class CustomTypeHandler(SimpleHTTPRequestHandler):
    def __init__(self, req, client_addr, server):
        SimpleHTTPRequestHandler.__init__(self, req, client_addr, server)
    def guess_type(self, path):
        if path.endswith('.json'):
            return 'application/vnd.dcos.universe.repo+json;charset=utf-8'
        return SimpleHTTPRequestHandler.guess_type(self, path)
os.chdir(rootdir)
httpd = socketserver.TCPServer((host, port), CustomTypeHandler)
print('Serving %s at http://%s:%s' % (rootdir, host, port))
httpd.serve_forever()
""".format(
        self._http_dir, self._http_host, port
    )
    httpd_py_path = os.path.join(self._http_dir, procname)
    if not os.path.isdir(self._http_dir):
        os.makedirs(self._http_dir)
    # BUG FIX: the file handle was previously left open (no close guarantee
    # on error); use a context manager so the script is flushed and closed.
    with open(httpd_py_path, "w") as httpd_py_file:
        httpd_py_file.write(httpd_py_content)
    os.chmod(httpd_py_path, 0o744)
    logger.info("Launching HTTPD: {}".format(httpd_py_path))
    # BUG FIX: the original passed "2&1>" and "/dev/null" as argv entries to
    # the script — shell redirection tokens do nothing without shell=True.
    # Redirect the child's output via subprocess instead.
    subprocess.Popen(
        [httpd_py_path],
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL,
    )
    return http_url_root