def get_build_template(template_name, params=None, to_file=None):
    '''get_build_template returns a string or file for a particular build
    template, intended to build a version of a Singularity image on a
    cloud resource.

    :param template_name: name of the template to retrieve in build/scripts
    :param params: (if needed) a dictionary of parameters to substitute in
                   the file (substitution not implemented yet)
    :param to_file: if defined, write the template to this file and return
                    the path. Default returns the template as a string.
    '''
    template_file = "%s/build/scripts/%s" % (get_installdir(), template_name)

    if not os.path.exists(template_file):
        bot.warning("Template %s not found." % template_file)
        return None

    bot.debug("Found template %s" % template_file)

    # Parameter substitution would happen here, once implemented.

    if to_file is not None:
        shutil.copyfile(template_file, to_file)
        bot.debug("Template file saved to %s" % to_file)
        return to_file

    # The user wants a string
    return ''.join(read_file(template_file))
def unpack_node(image_path,name=None,output_folder=None,size=None):
    '''unpackage node is intended to unpackage a node that was packaged
    with package_node. The image should be a .tgz file. The general steps
    are to:
    1. Package the node using the package_node function
    2. Transfer the package somewhere that Singularity is installed

    :param image_path: path to the .tgz package created by package_node
    :param name: output image name; derived from image_path if None
    :param output_folder: destination folder; defaults to image_path's folder
    :param size: accepted but unused in this variant -- presumably intended
                 for image creation; TODO confirm against the variant that
                 calls S.create
    '''
    if not image_path.endswith(".tgz"):
        bot.error("The image_path should end with .tgz. Did you create with package_node?")
        sys.exit(1)

    if output_folder is None:
        output_folder = os.path.dirname(os.path.abspath(image_path))

    image_name = os.path.basename(image_path)
    if name is None:
        name = image_name.replace('.tgz','.img')
    if not name.endswith('.img'):
        name = "%s.img" %(name)

    bot.debug("Preparing to unpack %s to %s." %(image_name,name))
    unpacked_image = "%s/%s" %(output_folder,name)
    if not os.path.exists(unpacked_image):
        os.mkdir(unpacked_image)

    # NOTE(review): the "|" element in an argv list is passed as a literal
    # argument to gunzip, not interpreted as a shell pipe -- verify that
    # run_command invokes a shell here, otherwise this pipeline cannot work.
    cmd = ["gunzip","-dc",image_path,"|","sudo","singularity","import", unpacked_image]
    output = run_command(cmd)

    # TODO: singularity mount the container, cleanup files (/etc/fstab,...)
    # and add your custom singularity files.
    return unpacked_image
def get_build_template(template_name, params=None, to_file=None):
    '''get_build_template returns a string or file for a particular build
    template, intended to build a version of a Singularity image on a
    cloud resource.

    :param template_name: name of the template to retrieve in build/scripts
    :param params: (if needed) a dictionary of parameters to substitute in
                   the file (substitution not implemented yet)
    :param to_file: if defined, write the template to this file and return
                    the path. Default returns the template as a string.
    :returns: the file path (to_file given), the template string, or None
              when the template does not exist.
    '''
    base = get_installdir()
    template_file = "%s/build/scripts/%s" % (base, template_name)

    if not os.path.exists(template_file):
        bot.warning("Template %s not found." % template_file)
        return None  # was an implicit None; made explicit for clarity

    bot.debug("Found template %s" % template_file)

    if to_file is not None:
        shutil.copyfile(template_file, to_file)
        bot.debug("Template file saved to %s" % to_file)
        return to_file

    # If the user wants a string
    return ''.join(read_file(template_file))
def container_similarity_vector(container1=None, packages_set=None, by=None):
    '''container_similarity_vector is similar to compare_packages, but
    intended to compare a container object (singularity image or singularity
    hub container) to a list of packages.

    :param container1: singularity image or singularity hub container.
    :param packages_set: a name of a package set; defaults to docker-os
                         (fix: the default promised by the docstring was
                         never applied, so a missing packages_set compared
                         against [None])
    :by: metrics to compare by (files.txt and or folders.txt)
    :returns: dict mapping each metric to a one-row pandas DataFrame
    '''
    if packages_set is None:
        packages_set = get_packages('docker-os')
    if by is None:
        by = ['files.txt']
    if not isinstance(by, list):
        by = [by]
    if not isinstance(packages_set, list):
        packages_set = [packages_set]

    comparisons = dict()
    for b in by:
        bot.debug("Starting comparisons for %s" % b)
        df = pandas.DataFrame(columns=packages_set)
        for package2 in packages_set:
            sim = calculate_similarity(container1=container1,
                                       image_package2=package2,
                                       by=b)[b]
            name1 = os.path.basename(package2).replace('.img.zip', '')
            bot.debug("container vs. %s: %s" % (name1, sim))
            df.loc["container", package2] = sim
        # Strip paths/extensions so columns read as package names
        df.columns = [os.path.basename(x).replace('.img.zip', '')
                      for x in df.columns.tolist()]
        comparisons[b] = df
    return comparisons
def package(image_path, spec_path=None, output_folder=None,
            remove_image=False, verbose=False, S=None):
    '''package generates a zip (including the image) in a user specified
    output_folder.

    :param image_path: full path to singularity image file
    :param spec_path: if provided, the Singularity spec file is included
    :param output_folder: folder for the zip; a temporary folder if None
    :param remove_image: if True, will not include original image in package
                         (default False)
    :param verbose: be verbose when using singularity --export (default False)
    :param S: the Singularity object (optional) will be created if not provided
    '''
    # Fix: honor a caller-supplied Singularity client (it was previously
    # discarded and unconditionally recreated).
    if S is None:
        S = Singularity(debug=verbose)

    file_obj, tar = get_image_tar(image_path, S=S)
    members = tar.getmembers()

    image_name, ext = os.path.splitext(os.path.basename(image_path))
    zip_name = "%s.zip" % (image_name.replace(" ", "_"))

    # Include the image in the package?
    to_package = dict()
    if not remove_image:
        to_package["files"] = [image_path]

    # If the specfile is provided, it should also be packaged
    if spec_path is not None:
        to_package['Singularity'] = "".join(read_file(spec_path))

    to_package["VERSION"] = get_image_file_hash(image_path)

    try:
        inspection = S.inspect(image_path)
        to_package["inspect.json"] = inspection
        inspection = json.loads(inspection)
        to_package['runscript'] = inspection['data']['attributes']['runscript']
    except Exception:  # narrowed from bare except
        bot.warning("Trouble extracting container metadata with inspect.")

    bot.info("Adding software list to package.")
    to_package["files.txt"] = [x.path for x in members if x.isfile()]
    to_package["folders.txt"] = [x.path for x in members if x.isdir()]

    # Renamed from "zipfile" to avoid shadowing the zipfile module
    zip_path = zip_up(to_package, zip_name=zip_name, output_folder=output_folder)
    bot.debug("Package created at %s" % (zip_path))

    if not delete_image_tar(file_obj, tar):
        # Fix: this warning string was broken by a literal line break
        bot.warning("Could not clean up temporary tarfile.")

    # return package to user
    return zip_path
def get_cache(subfolder=None, quiet=False):
    '''get_cache will return the user's cache for singularity, creating
    the folder if it does not yet exist.

    :param subfolder: a subfolder in the cache base to retrieve, specifically
    :param quiet: if True, suppress the debug message about the chosen folder
    '''
    disabled = convert2boolean(getenv("SINGULARITY_DISABLE_CACHE",
                                      default=False))
    if disabled:
        # Caching disabled: use a throwaway temporary directory instead
        cache = tempfile.mkdtemp()
    else:
        home = pwd.getpwuid(os.getuid())[5]
        cache = getenv("SINGULARITY_CACHEDIR",
                       default=os.path.join(home, ".singularity"))

    # Clean up the path, append any requested subfolder, and create it
    cache_base = clean_path(cache)
    if subfolder is not None:
        cache_base = "%s/%s" % (cache_base, subfolder)
    mkdir_p(cache_base)

    if not quiet:
        bot.debug("Cache folder set to %s" % cache_base)
    return cache_base
def estimate_image_size(spec_file, sudopw=None, padding=None):
    '''estimate_image_size will generate an image in a directory, and add
    some padding to it to estimate the size of the image file to generate.

    :param spec_file: the spec file, called "Singularity"
    :param sudopw: the sudopw for Singularity, root should provide ''
    :param padding: the padding (MB) to add to the image (default 200)
    :returns: measured folder size plus padding, in MB
    '''
    from .build import build_from_spec

    if padding is None:  # fix: identity comparison for None
        padding = 200
    if not isinstance(padding, int):
        padding = int(padding)

    image_folder = build_from_spec(
        spec_file=spec_file,   # default will package the image
        sudopw=sudopw,         # with root should not need sudo
        build_folder=True,
        debug=False)

    original_size = calculate_folder_size(image_folder)
    bot.debug("Original image size calculated as %s" % original_size)
    padded_size = original_size + padding
    bot.debug("Size with padding will be %s" % padded_size)
    return padded_size
def zip_up(file_list, zip_name, output_folder=None):
    '''zip_up will zip up some list of files into a package (.zip)

    :param file_list: a dict of content to include in the zip; the key
                      "files" maps to file paths moved into the archive
    :param zip_name: the name of the zipfile to return.
    :param output_folder: the output folder to create the zip in. If not
                          specified, a temporary folder will be given.
    '''
    staging = tempfile.mkdtemp()
    output_zip = "%s/%s" % (staging, zip_name)
    archive = zipfile.ZipFile(output_zip, "w",
                              zipfile.ZIP_DEFLATED,
                              allowZip64=True)

    for member, content in file_list.items():
        bot.debug("Adding %s to package..." % member)

        # Files on disk are moved into the archive directly
        if member.lower() == "files":
            paths = content if isinstance(content, list) else [content]
            for path in paths:
                archive.write(path, os.path.basename(path))
                os.remove(path)
            continue

        # Everything else is staged to a temp file first
        staged = "%s/%s" % (staging, member)
        if isinstance(content, list):
            write_file(staged, "\n".join(content))
        elif isinstance(content, dict):
            write_json(content, staged)
        elif isinstance(content, bytes):
            write_file(staged, content.decode('utf-8'))
        else:
            staged = write_file(staged, content)

        if os.path.exists(staged):
            archive.write(staged, member)
            os.remove(staged)

    archive.close()

    # Move the finished zip to the requested folder, if any
    if output_folder is not None:
        shutil.copyfile(output_zip, "%s/%s" % (output_folder, zip_name))
        shutil.rmtree(staging)
        output_zip = "%s/%s" % (output_folder, zip_name)

    return output_zip
def post(self, url, data=None, return_json=True):
    '''Issue an HTTP POST to url via the shared call handler.

    :param url: the url to POST to
    :param data: payload forwarded to self.call
    :param return_json: if True, parse the response as json (handled by call)
    '''
    bot.debug("POST %s" % url)
    response = self.call(url,
                         func=requests.post,
                         data=data,
                         return_json=return_json)
    return response
def get(self, url, headers=None, token=None, data=None, return_json=True):
    '''get will use requests to get a particular url

    :param url: the url to GET
    :param headers: NOTE(review): accepted but never forwarded to self.call
        -- confirm whether self.call applies self.headers internally
    :param token: NOTE(review): accepted but unused in this body
    :param data: payload forwarded to self.call
    :param return_json: if True, parse the response as json (handled by call)
    '''
    bot.debug("GET %s" % url)
    return self.call(url,
                     func=requests.get,
                     data=data,
                     return_json=return_json)
def put(self, url, data=None, return_json=True):
    '''Issue an HTTP PUT to url via the shared call handler.

    :param url: the url to PUT to
    :param data: payload forwarded to self.call
    :param return_json: if True, parse the response as json (handled by call)
    '''
    bot.debug("PUT %s" % url)
    response = self.call(url,
                         func=requests.put,
                         data=data,
                         return_json=return_json)
    return response
def test_container(image_path):
    '''test_container sends a simple command to a container and returns
    the status code and any message produced, as given by run_command.

    :param image_path: path to the container image
    '''
    from singularity.utils import run_command
    bot.debug('Testing container exec with a list command.')
    cmd = ["singularity", "exec", image_path, 'ls']
    return run_command(cmd)
def test_container(image_path):
    '''Smoke-test a container by exec-ing `ls` inside it and returning
    the run_command result (status code plus any message).

    :param image_path: path to the container image
    '''
    from singularity.utils import run_command
    bot.debug('Testing container exec with a list command.')
    return run_command(["singularity", "exec", image_path, 'ls'])
def delete_image_tar(file_obj, tar):
    '''delete_image_tar will close a file object (if extracted into memory)
    or delete from the file system (if saved to disk).

    :param file_obj: the file object or path to the temporary tar
        (NOTE(review): os.path.exists expects a path -- confirm file_obj
        is always a string path when it reaches that check)
    :param tar: the open tarfile, closed if file_obj.close() fails
    :returns: True if the file was removed, False otherwise
        (fix: `deleted` was previously unbound when the file did not
        exist, raising NameError instead of returning False)
    '''
    deleted = False
    try:
        file_obj.close()
    except Exception:  # narrowed from bare except
        tar.close()
    if os.path.exists(file_obj):
        os.remove(file_obj)
        deleted = True
        bot.debug('Deleted temporary tar.')
    return deleted
def delete_image_tar(file_obj, tar):
    '''delete_image_tar will close a file object (if extracted into memory)
    or delete from the file system (if saved to disk).

    :param file_obj: the file object or path to the temporary tar
        (NOTE(review): os.path.exists expects a path -- confirm file_obj
        is always a string path when it reaches that check)
    :param tar: the open tarfile, closed if file_obj.close() fails
    :returns: True if the file was removed, False otherwise
        (fix: `deleted` was previously unbound when the file did not
        exist, raising NameError instead of returning False)
    '''
    deleted = False
    try:
        file_obj.close()
    except Exception:  # narrowed from bare except
        tar.close()
    if os.path.exists(file_obj):
        os.remove(file_obj)
        deleted = True
        bot.debug('Deleted temporary tar.')
    return deleted
def get_image_tar(image_path):
    '''Export an image to a tar, either written in memory or to the file
    system; file_obj will either be the file object, or the file itself.

    :param image_path: the image to export
    :returns: (file_obj, opened tarfile)
    '''
    bot.debug('Generate file system tar...')
    file_obj = Client.image.export(image_path=image_path)
    if file_obj is None:
        bot.error("Error generating tar, exiting.")
        sys.exit(1)
    return file_obj, tarfile.open(file_obj)
def main(args, parser, subparser):
    '''Entry point for comparing two images: pulls any image that is not
    local, then builds a similarity tree, a difference tree, or prints a
    similarity score depending on the flags.

    :param args: parsed argparse namespace (images, debug, simtree,
                 subtract, outfolder)
    :param parser: the top-level parser (unused here)
    :param subparser: used to print help when --images is missing
    '''
    # We can only continue if singularity is installed
    if check_install() is not True:
        bot.error("Cannot find Singularity! Is it installed?")
        sys.exit(1)

    # Output folder will be pwd if not specified
    output_folder = os.getcwd()
    if args.outfolder is not None:
        # fix: previously re-assigned os.getcwd(), ignoring the flag
        output_folder = args.outfolder

    if args.images is not None:
        image1, image2 = args.images.split(',')
        bot.debug("Image1: %s" % image1)
        bot.debug("Image2: %s" % image2)

        # Track which images already existed so we only clean up pulls
        images = dict()
        cli = Singularity(debug=args.debug)
        for image in [image1, image2]:
            existed = True
            if not os.path.exists(image):
                image = cli.pull(image)
                existed = False
            images[image] = existed

        # Just for clarity
        image1, image2 = list(images.keys())

        # the user wants to make a similarity tree
        if args.simtree is True:
            from singularity.cli.app import make_sim_tree
            make_sim_tree(image1, image2)

        # the user wants to make a difference tree
        elif args.subtract is True:
            from singularity.cli.app import make_diff_tree
            make_diff_tree(image1, image2)

        else:  # If none specified, just print score
            from singularity.analysis.compare import calculate_similarity
            score = calculate_similarity(image1, image2, by="files.txt")
            print(score["files.txt"])

        for image, existed in images.items():
            clean_up(image, existed)
    else:
        print("Please specify images to compare with --images")
        subparser.print_help()
def compare_packages(packages_set1=None, packages_set2=None, by=None):
    '''compare_packages will compare one image or package to one image or
    package. If the folder isn't specified, the default singularity packages
    (included with install) will be used (os vs. docker library).

    :param packages_set1: a list of package files, not defined uses
                          docker-library
    :param packages_set2: a list of package files, not defined uses docker-os
    :by: metrics to compare by (files.txt and or folders.txt)
    :returns: dict mapping each metric to a pandas DataFrame of similarities
    '''
    # fix: identity comparisons for None throughout
    if packages_set1 is None:
        packages_set1 = get_packages('docker-library')
    if packages_set2 is None:
        packages_set2 = get_packages('docker-os')
    if by is None:
        by = ['files.txt']

    if not isinstance(by, list):
        by = [by]
    if not isinstance(packages_set1, list):
        packages_set1 = [packages_set1]
    if not isinstance(packages_set2, list):
        packages_set2 = [packages_set2]

    comparisons = dict()
    for b in by:
        bot.debug("Starting comparisons for %s" % b)
        df = pandas.DataFrame(index=packages_set1, columns=packages_set2)
        for package1 in packages_set1:
            for package2 in packages_set2:
                if package1 != package2:
                    sim = calculate_similarity(image_package1=package1,
                                               image_package2=package2,
                                               by=b)[b]
                else:
                    # identical packages are trivially similar
                    sim = 1.0
                name1 = os.path.basename(package1).replace('.img.zip', '')
                name2 = os.path.basename(package2).replace('.img.zip', '')
                bot.debug("%s vs. %s: %s" % (name1, name2, sim))
                df.loc[package1, package2] = sim
        # Strip paths/extensions so axes read as package names
        df.index = [
            os.path.basename(x).replace('.img.zip', '')
            for x in df.index.tolist()
        ]
        df.columns = [
            os.path.basename(x).replace('.img.zip', '')
            for x in df.columns.tolist()
        ]
        comparisons[b] = df
    return comparisons
def get_image_tar(image_path, S=None):
    '''Export an image to a tar, either written in memory or to the file
    system; file_obj will either be the file object, or the file itself.

    :param image_path: the image to export
    :param S: an optional Singularity client, created if not provided
    :returns: (file_obj, opened tarfile)
    '''
    bot.debug('Generate file system tar...')
    if S is None:
        S = Singularity()
    file_obj = S.export(image_path=image_path)
    if file_obj is None:
        bot.error("Error generating tar, exiting.")
        sys.exit(1)
    return file_obj, tarfile.open(file_obj)
def main(args, parser, subparser):
    '''Entry point for generating a Singularity recipe from a bundled
    template, substituting app name, bootstrap agent, and bootstrap source.

    :param args: parsed argparse namespace (recipe, outfolder, bootstrap,
                 bootstrap_from, app)
    :param parser: the top-level parser (unused here)
    :param subparser: used to print help when --recipe is missing
    '''
    if args.recipe is None:
        subparser.print_help()
        bot.newline()
        print("Please specify creating a recipe with --recipe")
        sys.exit(0)

    # Output folder will be pwd if not specified
    output_folder = os.getcwd()
    if args.outfolder is not None:
        # fix: previously re-assigned os.getcwd(), ignoring the flag
        output_folder = args.outfolder

    bootstrap = ''
    if args.bootstrap is not None:
        bootstrap = args.bootstrap
    bot.debug("bootstrap: %s" % bootstrap)

    bootstrap_from = ''
    if args.bootstrap_from is not None:
        bootstrap_from = args.bootstrap_from
    bot.debug("from: %s" % bootstrap_from)

    template = "Singularity"
    output_file = template
    app = ''
    if args.app is not None:
        app = args.app.lower()
        template = "Singularity.app"
        output_file = "Singularity.%s" % app

    input_file = "%s/cli/app/templates/recipes/%s" % (get_installdir(),
                                                      template)
    output_file = "%s/%s" % (output_folder, output_file)

    # Never clobber an existing recipe: add a short random suffix
    if os.path.exists(output_file):
        ext = str(uuid.uuid4())[0:4]
        output_file = "%s.%s" % (output_file, ext)

    # Read the file, make substitutions
    contents = read_file(input_file, readlines=False)

    # Replace all occurrences of app
    contents = contents.replace('{{ app }}', app)
    contents = contents.replace('{{ bootstrap }}', bootstrap)
    contents = contents.replace('{{ from }}', bootstrap_from)

    write_file(output_file, contents)
    bot.info("Output file written to %s" % output_file)
def update_headers(self, fields=None):
    '''Merge optional fields into the client headers, initializing them
    first when no headers exist yet.

    :param fields: optional dict of header names to values to apply
    '''
    headers = self._init_headers() if self.headers is None else self.headers

    if fields is not None:
        for name, value in fields.items():
            headers[name] = value

    bot.debug("Headers found: %s" % ",".join(list(headers.keys())))
    self.headers = headers
def pull(self, image_path, pull_folder='',
         name_by_hash=False, name_by_commit=False,
         image_name=None, size=None):
    '''pull will pull a singularity hub or docker image.

    :param image_path: full path to image / uri (shub:// or docker://)
    :param pull_folder: if set, exported as SINGULARITY_PULLFOLDER
    :param name_by_hash: name the pulled image by hash (shub only)
    :param name_by_commit: name the pulled image by commit (shub only)
    :param image_name: explicit name; overrides hash/commit naming
    :param size: image size (docker pulls only)
    :returns: the path of the final pulled image
    '''
    # An explicit name takes precedence over hash/commit naming
    if image_name is not None:
        name_by_hash = False
        name_by_commit = False

    final_image = None

    if not image_path.startswith('shub://') and \
       not image_path.startswith('docker://'):
        # fix: the message previously interpolated image_name (often None)
        # instead of the invalid image_path being rejected
        bot.error("pull is only valid for docker and shub, %s is invalid."
                  % image_path)
        sys.exit(1)

    if self.debug is True:
        cmd = ['singularity', '--debug', 'pull']
    else:
        cmd = ['singularity', 'pull']

    if pull_folder not in [None, '']:
        os.environ['SINGULARITY_PULLFOLDER'] = pull_folder
        pull_folder = "%s/" % pull_folder

    if image_path.startswith('shub://'):
        if image_name is not None:
            bot.debug("user specified naming pulled image %s" % image_name)
            cmd = cmd + ["--name", image_name]
        elif name_by_commit is True:
            bot.debug("user specified naming by commit.")
            cmd.append("--commit")
        elif name_by_hash is True:
            bot.debug("user specified naming by hash.")
            cmd.append("--hash")
        # otherwise let the Singularity client determine own name

    elif image_path.startswith('docker://'):
        if size is not None:
            cmd = cmd + ["--size", size]
        if image_name is None:
            image_name = "%s" % image_path.replace("docker://",
                                                   "").replace("/", "-")
        final_image = "%s%s.img" % (pull_folder, image_name)
        cmd = cmd + ["--name", image_name]

    cmd.append(image_path)
    bot.debug(' '.join(cmd))
    output = self.run_command(cmd)
    self.println(output)

    if final_image is None:  # shub: parse the path from the output
        final_image = output.split('Container is at:')[-1].strip('\n').strip()
    return final_image
def container_similarity_vector(container1=None, packages_set=None,
                                by=None, custom_set=None):
    '''container_similarity_vector is similar to compare_packages, but
    intended to compare a container object (singularity image or singularity
    hub container) to a list of packages. If packages_set is not provided,
    the default used is 'docker-os'. This can be changed to
    'docker-library', or if the user wants a custom list, should define
    custom_set.

    :param container1: singularity image or singularity hub container.
    :param packages_set: a name of a package set, provided are docker-os
                         and docker-library
    :param custom_set: a list of package files, used first if provided.
    :by: metrics to compare by (files.txt and or folders.txt)
    :returns: dict mapping each metric to a one-row pandas DataFrame
    '''
    # fix: identity comparisons for None throughout
    if custom_set is None:
        if packages_set is None:
            packages_set = get_packages('docker-os')
    else:
        packages_set = custom_set

    if by is None:
        by = ['files.txt']
    if not isinstance(by, list):
        by = [by]
    if not isinstance(packages_set, list):
        packages_set = [packages_set]

    comparisons = dict()
    for b in by:
        bot.debug("Starting comparisons for %s" % b)
        df = pandas.DataFrame(columns=packages_set)
        for package2 in packages_set:
            sim = calculate_similarity(container1=container1,
                                       image_package2=package2,
                                       by=b)[b]
            name1 = os.path.basename(package2).replace('.img.zip', '')
            bot.debug("container vs. %s: %s" % (name1, sim))
            df.loc["container", package2] = sim
        # Strip paths/extensions so columns read as package names
        df.columns = [
            os.path.basename(x).replace('.img.zip', '')
            for x in df.columns.tolist()
        ]
        comparisons[b] = df
    return comparisons
def load_package(package_path, get=None):
    '''load_package will return the contents of a package, read into memory.

    :param package_path: the full path to the package (.zip)
    :param get: the files to load. If none specified, all things loaded
    :returns: dict mapping each requested member name to its content
              (extracted path for .img, list of lines for .txt/runscript,
              string for VERSION/NAME, parsed object for .json)
    '''
    if get is None:  # fix: identity comparison for None
        get = list_package(package_path)

    # Open the zipfile
    zf = zipfile.ZipFile(package_path, 'r')

    # The user might have provided a string and not a list
    if isinstance(get, str):
        get = [get]

    retrieved = dict()
    for g in get:
        filename, ext = os.path.splitext(g)

        # Extract image to a temporary directory
        if ext in [".img"]:
            tmpdir = tempfile.mkdtemp()
            print("Extracting image %s to %s..." % (g, tmpdir))
            retrieved[g] = zf.extract(g, tmpdir)

        # Extract text as a list of lines
        elif ext in [".txt"] or g == "runscript":
            retrieved[g] = zf.read(g).decode('utf-8').split('\n')

        elif g in ["VERSION", "NAME"]:
            retrieved[g] = zf.read(g).decode('utf-8')

        # Extract json or metadata
        elif ext in [".json"]:
            retrieved[g] = json.loads(zf.read(g).decode('utf-8'))

        else:
            bot.debug("Unknown extension %s, skipping %s" % (ext, g))

    return retrieved
def build_from_spec(spec_file=None, build_dir=None, build_folder=False,
                    sandbox=False, isolated=False, debug=False):
    '''build_from_spec will build a "spec" file in a "build_dir" and
    return the directory.

    :param spec_file: the spec file, called "Singularity" (default)
    :param build_dir: the directory to build in. If not defined, uses tmpdir.
    :param build_folder: accepted for interface compatibility
    :param sandbox: ask for a sandbox build
    :param isolated: "build" the image inside an isolated environment (>2.4)
    :param debug: ask for verbose output from builder
    :returns: path to the built image (hash-named .simg unless sandbox)
    '''
    # fix: identity comparisons for None
    if spec_file is None:
        spec_file = "Singularity"
    if build_dir is None:
        build_dir = tempfile.mkdtemp()

    bot.debug("Building in directory %s" % build_dir)

    # Copy the spec to a temporary directory
    bot.debug("Spec file set to %s" % spec_file)
    spec_path = os.path.abspath(spec_file)
    bot.debug("Spec file for build should be in %s" % spec_path)
    image_path = "%s/build.simg" % (build_dir)

    # Run create image and bootstrap with Singularity command line tool.
    cli = Singularity(debug=debug)
    print("\nBuilding image...")

    # Does the user want to "build" into a folder or image?
    result = cli.build(image_path=image_path,
                       spec_path=spec_path,
                       sandbox=sandbox,
                       isolated=isolated)
    print(result)

    # If image, rename based on hash
    if sandbox is False:
        version = get_image_file_hash(image_path)
        final_path = "%s/%s.simg" % (build_dir, version)
        os.rename(image_path, final_path)
        image_path = final_path

    bot.debug("Built image: %s" % image_path)
    return image_path
def send_build_close(params, response_url):
    '''send_build_close sends a final response (post) to the server to
    bring down the instance. The following must be included in params:
    repo_url, logfile, repo_id, secret, log_file, token

    :param params: dict of build metadata (see above)
    :param response_url: the url to post the final response to
    :returns: the requests response object
    '''
    # Finally, package everything to send back to shub
    response = {"log": json.dumps(params['log_file']),
                "repo_url": params['repo_url'],
                "logfile": params['logfile'],
                "repo_id": params['repo_id'],
                "container_id": params['container_id']}

    payload = '%s|%s|%s|%s|%s' % (params['container_id'],
                                  params['commit'],
                                  params['branch'],
                                  params['token'],
                                  params['tag'])

    signature = generate_header_signature(secret=params['token'],
                                          payload=payload,
                                          request_type="finish")

    finish = requests.post(response_url,
                           data=response,
                           headers={'Authorization': signature})

    bot.debug("FINISH POST TO SINGULARITY HUB ---------------------")
    bot.debug(finish.status_code)
    bot.debug(finish.reason)
    return finish
def send_build_data(build_dir, data, secret, response_url=None,
                    clean_up=True):
    '''send_build_data sends the build and data (response) to a response url.

    :param build_dir: the directory of the build
    :param data: the data object to send as a post
    :param secret: the secret used to sign the request
    :response_url: where to send the response. If None, won't send
    :param clean_up: If true (default) removes build directory
    '''
    # Send with Authentication header
    body = '%s|%s|%s|%s|%s' % (data['container_id'],
                               data['commit'],
                               data['branch'],
                               data['token'],
                               data['tag'])

    signature = generate_header_signature(secret=secret,
                                          payload=body,
                                          request_type="push")
    headers = {'Authorization': signature}

    if response_url is not None:
        finish = requests.post(response_url, data=data, headers=headers)
        bot.debug("RECEIVE POST TO SINGULARITY HUB ---------------------")
        bot.debug(finish.status_code)
        bot.debug(finish.reason)
    else:
        bot.warning("response_url set to None, skipping sending of build.")

    if clean_up:  # fix: idiomatic truth test instead of == True
        shutil.rmtree(build_dir)

    # Delay a bit, to give buffer between bringing instance down
    time.sleep(20)
def send_build_data(build_dir, data, secret, response_url=None,
                    clean_up=True):
    '''send_build_data sends the build and data (response) to a response url.

    :param build_dir: the directory of the build
    :param data: the data object to send as a post
    :param secret: the secret used to sign the request
    :response_url: where to send the response. If None, won't send
    :param clean_up: If true (default) removes build directory
    '''
    # Send with Authentication header
    body = '%s|%s|%s|%s|%s' % (data['container_id'],
                               data['commit'],
                               data['branch'],
                               data['token'],
                               data['tag'])

    signature = generate_header_signature(secret=secret,
                                          payload=body,
                                          request_type="push")
    headers = {'Authorization': signature}

    if response_url is not None:
        finish = requests.post(response_url, data=data, headers=headers)
        bot.debug("RECEIVE POST TO SINGULARITY HUB ---------------------")
        bot.debug(finish.status_code)
        bot.debug(finish.reason)
    else:
        bot.warning("response_url set to None, skipping sending of build.")

    if clean_up:  # fix: idiomatic truth test instead of == True
        shutil.rmtree(build_dir)

    # Delay a bit, to give buffer between bringing instance down
    time.sleep(20)
def send_build_close(params, response_url):
    '''Post the final build response to the server so the instance can be
    brought down. The following must be included in params: repo_url,
    logfile, repo_id, secret, log_file, token

    :param params: dict of build metadata (see above)
    :param response_url: the url to post the final response to
    :returns: the requests response object
    '''
    # Package everything to send back to shub
    fields = {"log": json.dumps(params['log_file']),
              "repo_url": params['repo_url'],
              "logfile": params['logfile'],
              "repo_id": params['repo_id'],
              "container_id": params['container_id']}

    signed_body = '%s|%s|%s|%s|%s' % (params['container_id'],
                                      params['commit'],
                                      params['branch'],
                                      params['token'],
                                      params['tag'])

    auth = generate_header_signature(secret=params['token'],
                                     payload=signed_body,
                                     request_type="finish")

    result = requests.post(response_url,
                           data=fields,
                           headers={'Authorization': auth})

    bot.debug("FINISH POST TO SINGULARITY HUB ---------------------")
    bot.debug(result.status_code)
    bot.debug(result.reason)
    return result
def stream(self, url, headers=None, stream_to=None):
    '''stream is a GET that will stream the response body to stream_to.

    :param url: the url to GET
    :param headers: optional headers; initialized from the client if None
    :param stream_to: file path to write the streamed content to
    :returns: stream_to on success; exits on non-200 responses
    '''
    bot.debug("GET %s" % url)

    if headers is None:  # fix: identity comparison for None
        headers = self._init_headers()

    response = requests.get(url, headers=headers, stream=True)

    if response.status_code == 200:

        # Keep user updated with Progress Bar?
        content_size = None
        if 'Content-Length' in response.headers:
            progress = 0
            content_size = int(response.headers['Content-Length'])
            bot.show_progress(progress, content_size, length=35)

        chunk_size = 1 << 20
        with open(stream_to, 'wb') as filey:
            for chunk in response.iter_content(chunk_size=chunk_size):
                filey.write(chunk)
                if content_size is not None:
                    progress += chunk_size
                    # fix: clamp so the bar never overshoots the total
                    # (the last chunk is usually smaller than chunk_size)
                    bot.show_progress(iteration=min(progress, content_size),
                                      total=content_size,
                                      length=35,
                                      carriage_return=False)

        # Newline to finish download
        sys.stdout.write('\n')
        return stream_to

    bot.error("Problem with stream, response %s" % (response.status_code))
    sys.exit(1)
def unpack_node(image_path, name=None, output_folder=None, size=None):
    '''unpackage node is intended to unpackage a node that was packaged
    with package_node. The image should be a .tgz file. The general steps
    are to:
    1. Package the node using the package_node function
    2. Transfer the package somewhere that Singularity is installed

    :param image_path: path to the .tgz package created by package_node
    :param name: output image name; derived from image_path if None
    :param output_folder: destination folder; defaults to image_path's folder
    :param size: forwarded to S.create for the new image
    '''
    if not image_path.endswith(".tgz"):
        bot.error(
            "The image_path should end with .tgz. Did you create with package_node?"
        )
        sys.exit(1)

    if output_folder is None:
        output_folder = os.path.dirname(os.path.abspath(image_path))

    image_name = os.path.basename(image_path)
    if name is None:
        name = image_name.replace('.tgz', '.img')
    if not name.endswith('.img'):
        name = "%s.img" % (name)

    bot.debug("Preparing to unpack %s to %s." % (image_name, name))
    unpacked_image = "%s/%s" % (output_folder, name)

    # Create an empty image to import into (requires sudo)
    S = Singularity(sudo=True)
    S.create(image_path=unpacked_image, size=size)

    # NOTE(review): the "|" element in an argv list is passed as a literal
    # argument to gunzip, not interpreted as a shell pipe -- verify that
    # run_command uses a shell here, otherwise this pipeline cannot work.
    cmd = [
        "gunzip", "-dc", image_path, "|", "sudo", "singularity", "import",
        unpacked_image
    ]
    output = run_command(cmd)

    # TODO: singularity mount the container, cleanup files (/etc/fstab,...)
    # and add your custom singularity files.
    return unpacked_image
def pull(self, images, file_name=None, decompress=True):
    '''pull one or more images from a Singularity Registry.

    :param images: list of image uris to pull
    :param file_name: optional output file name (applied to every image;
                      the per-image default is derived from storage)
    :param decompress: if True, expect the old ext3 (img.gz) format and
                       decompress after download; False expects squashfs
    '''
    bot.debug('Execution of PULL for %s images' % len(images))

    for image in images:

        # If we need to decompress, it's old ext3 format
        if decompress is True:
            ext = 'img.gz'
        else:
            ext = 'simg'  # squashfs

        q = parse_image_name(image, ext=ext)

        # Verify image existence, and obtain id
        url = "%s/container/%s/%s:%s" % (self.base, q['collection'],
                                         q['image'], q['tag'])
        bot.debug('Retrieving manifest at %s' % url)

        manifest = self.get(url)
        bot.debug(manifest)

        # fix: compute the default name per image; previously file_name
        # was set on the first iteration and reused for every subsequent
        # image, so multi-image pulls overwrote the same file
        name = file_name
        if name is None:
            name = q['storage'].replace('/', '-')

        image_file = self.download(url=manifest['image'],
                                   file_name=name,
                                   show_progress=True)

        bot.debug('Retrieved image file %s' % image_file)
        if os.path.exists(image_file) and decompress is True:

            # If compressed, decompress
            try:
                cli = Singularity()
                sys.stdout.write('Decompressing image ')
                bot.spinner.start()
                image_file = cli.decompress(image_file, quiet=True)
            except KeyboardInterrupt:
                bot.warning('Decompression cancelled.')
            except Exception:  # narrowed from bare except
                bot.info('Image is not compressed.')
                image_name = image_file.replace('.gz', '')
                image_file = shutil.move(image_file, image_name)

            bot.spinner.stop()

        bot.custom(prefix="Success!", message=image_file)
def push(self, path, name, tag=None, compress=False):
    '''push an image to Singularity Registry

    :param path: path to the image file on disk
    :param name: name (uri) to push to, parsed by parse_image_name
    :param tag: optional tag for the image
    :param compress: if True, gzip the image before upload (old ext3 format)
    '''
    path = os.path.abspath(path)
    image = os.path.basename(path)
    bot.debug("PUSH %s" % path)

    if not os.path.exists(path):
        bot.error('%s does not exist.' %path)
        sys.exit(1)

    cli = Singularity()
    metadata = cli.inspect(image_path=path, quiet=True)
    metadata = json.loads(metadata)

    # Try to add the size (in MB, via right-shift of the byte count)
    try:
        image_size = os.path.getsize(path) >> 20
        if metadata['data']['attributes']['labels'] is None:
            metadata['data']['attributes']['labels'] = {'SREGISTRY_SIZE_MB': image_size }
        else:
            metadata['data']['attributes']['labels']['SREGISTRY_SIZE_MB'] = image_size
    except:
        # NOTE(review): bare except -- also swallows unrelated errors while
        # reading the metadata dict; consider narrowing to (KeyError, OSError)
        bot.warning("Cannot load metadata to add calculated size.")
        pass

    # Record the bootstrap source when a deffile is present
    if "deffile" in metadata['data']['attributes']:
        if metadata['data']['attributes']['deffile'] is not None:
            fromimage = parse_header(metadata['data']['attributes']['deffile'],
                                     header="from",
                                     remove_header=True)
            # NOTE(review): assumes labels is a dict here -- if inspect
            # returned labels=None and the try block above failed, this
            # raises TypeError; confirm against the inspect output shape
            metadata['data']['attributes']['labels']['SREGISTRY_FROM'] = fromimage
            bot.debug("%s was built from a definition file."
                      % image)

    if compress is True:
        ext = 'img.gz'  # old ext3 format, gzipped
    else:
        ext = 'simg'    # squashfs format

    metadata = json.dumps(metadata)
    names = parse_image_name(name,tag=tag, ext=ext)
    url = '%s/push/' % self.base

    if compress is True:
        try:
            sys.stdout.write('Compressing image ')
            bot.spinner.start()
            upload_from = cli.compress(path)
            bot.spinner.stop()
        except KeyboardInterrupt:
            # Clean up the partial .gz before exiting
            print('Upload cancelled')
            if os.path.exists("%s.gz" %path):
                os.remove("%s.gz" %path)
            sys.exit(1)
    else:
        upload_from = path

    upload_to = os.path.basename(names['storage'])

    # Authorize the push and stream the multipart upload with progress
    SREGISTRY_EVENT = self.authorize(request_type="push", names=names)
    encoder = MultipartEncoder(fields={'collection': names['collection'],
                                       'name':names['image'],
                                       'metadata':metadata,
                                       'tag': names['tag'],
                                       'datafile': (upload_to, open(upload_from, 'rb'), 'text/plain')})
    progress_callback = create_callback(encoder)
    monitor = MultipartEncoderMonitor(encoder, progress_callback)
    headers = {'Content-Type': monitor.content_type,
               'Authorization': SREGISTRY_EVENT }

    try:
        r = requests.post(url, data=monitor, headers=headers)
        message = self.read_response(r)
        print('\n[Return status {0} {1}]'.format(r.status_code, message))
    except KeyboardInterrupt:
        print('\nUpload cancelled.')

    # Clean up the compressed copy, if one was made
    if compress is True:
        if os.path.exists("%s.gz" %path):
            os.remove("%s.gz" %path)
def zip_up(file_list, zip_name, output_folder=None):
    '''zip_up will zip up some list of files into a package (.zip)

    :param file_list: a dict of content to include in the zip; the key
                      "files" maps to file paths moved into the archive
    :param zip_name: the name of the zipfile to return.
    :param output_folder: the output folder to create the zip in. If not
                          specified, a temporary folder will be given.
    '''
    workdir = tempfile.mkdtemp()

    # Make a new archive in the working directory
    output_zip = "%s/%s" % (workdir, zip_name)
    zipper = zipfile.ZipFile(output_zip, "w",
                             zipfile.ZIP_DEFLATED,
                             allowZip64=True)

    # Write files to zip, depending on type
    for entry_name, content in file_list.items():

        bot.debug("Adding %s to package..." % entry_name)

        if entry_name.lower() == "files":
            # Files on disk are moved into the archive as-is
            if not isinstance(content, list):
                content = [content]
            for source_path in content:
                zipper.write(source_path, os.path.basename(source_path))
                os.remove(source_path)
        else:
            target = "%s/%s" % (workdir, entry_name)

            if isinstance(content, list):
                # list -> newline-joined text file
                write_file(target, "\n".join(content))
            elif isinstance(content, dict):
                # dict -> json file
                write_json(content, target)
            elif isinstance(content, bytes):
                # bytes -> decoded text file
                write_file(target, content.decode('utf-8'))
            else:
                # string or other
                target = write_file(target, content)

            if os.path.exists(target):
                zipper.write(target, entry_name)
                os.remove(target)

    # Close the zip file
    zipper.close()

    if output_folder is not None:
        shutil.copyfile(output_zip, "%s/%s" % (output_folder, zip_name))
        shutil.rmtree(workdir)
        output_zip = "%s/%s" % (output_folder, zip_name)

    return output_zip
def delete(self, url, return_json=True):
    '''Issue an HTTP DELETE to url via the shared call handler.
    Use with caution.

    :param url: the url to DELETE
    :param return_json: if True, parse the response as json (handled by call)
    '''
    bot.debug('DELETE %s' % url)
    response = self.call(url,
                         func=requests.delete,
                         return_json=return_json)
    return response