def add(backend, variable, value, force=False):
    '''add a variable to the backend section of the sregistry config.

    Parameters
    ==========
    backend: the client backend (e.g., aws) the setting belongs to
    variable: the variable name; prefixed with SREGISTRY_<BACKEND>_ if missing
    value: the value to set for the variable
    force: if True, overwrite an existing value (default False)
    '''
    print('[add]')
    settings = read_client_secrets()

    # Fix: uppercase first so the prefix check is case-insensitive;
    # previously a lowercase, already-prefixed variable was double-prefixed
    variable = variable.upper()

    # If the variable doesn't begin with SREGISTRY_<CLIENT>_, add the prefix
    prefix = 'SREGISTRY_%s_' % backend.upper()
    if not variable.startswith(prefix):
        variable = '%s%s' % (prefix, variable)

    bot.info("%s %s" % (variable, value))

    # Refuse to clobber an existing setting unless --force is given
    if backend in settings:
        if variable in settings[backend] and not force:
            previous = settings[backend][variable]
            bot.error('%s is already set as %s. Use --force to override.'
                      % (variable, previous))
            sys.exit(1)

    if backend not in settings:
        settings[backend] = {}

    settings[backend][variable] = value
    update_secrets(settings)
def update_token(headers):
    '''use the AWS CLI to fetch a fresh ECR authorization token after a
    401 response, and update the request headers with it.

    Parameters
    ==========
    headers: the previous request headers dict, updated in place and
             returned. (Docstring fixed: the parameter is headers, not
             a response object.)
    '''
    # awscli is an optional dependency installed with sregistry[aws].
    # Fix: catch ImportError specifically instead of a bare except.
    try:
        from awscli.clidriver import create_clidriver
    except ImportError:
        bot.exit('Please install pip install sregistry[aws]')

    driver = create_clidriver()
    aws = driver.session.create_client('ecr')
    tokens = aws.get_authorization_token()

    # The ECR token is used directly as HTTP Basic credentials
    token = tokens['authorizationData'][0]['authorizationToken']

    try:
        token = {"Authorization": "Basic %s" % token}
        headers.update(token)
    except Exception:
        bot.error("Error getting token.")
        sys.exit(1)

    return headers
def _extract_tar(archive, output_folder):
    '''extract an archive into a sandbox folder via the blob2oci helper,
    which knows how to handle whiteout files. Credit for this script goes
    to docker2oci by Olivier Freyermouth; see script folder for license.

    Parameters
    ==========
    archive: the archive to extract
    output_folder: the output folder (sandbox) to extract to
    '''
    from .terminal import (run_command, which)

    # blob2oci must be discoverable on the PATH
    located = which('blob2oci')
    if located['return_code'] != 0:
        bot.error('Cannot find blob2oci script on path, exiting.')
        sys.exit(1)

    command = ['exec', located['message'],
               '--layer', archive,
               '--extract', output_folder]

    if not bot.is_quiet():
        print("Extracting %s" % archive)

    return run_command(command)
def download_task(url, headers, download_to, download_type='layer'):
    '''download a single item (by default a .tar.gz layer) to a target
    path, using local versions of the same stream/download functions
    that are used for the client.

    Parameters
    ==========
    url: the url to retrieve the item from
    headers: headers to include with the download request
    download_to: the final path to download to
    download_type: a label for the progress message (default "layer")
    '''
    # Update the user what we are doing
    bot.verbose("Downloading %s from %s" % (download_type, url))

    # Download under a temporary name, then move atomically into place
    tmp_name = "%s.%s" % (download_to, next(tempfile._get_candidate_names()))
    tar_download = download(url, tmp_name, headers=headers)

    try:
        shutil.move(tar_download, download_to)
    except Exception:
        msg = "Cannot untar layer %s," % tar_download
        msg += " was there a problem with download?"
        bot.error(msg)
        sys.exit(1)

    return download_to
def get_layer(self, image_id, repo_name, download_folder=None):
    '''download a single image layer (.tar.gz) into a download folder.

    Parameters
    ==========
    image_id: the shasum id of the layer to retrieve
    repo_name: the image name (library/ubuntu) to retrieve
    download_folder: download to this folder. If not set, uses temp.
    '''
    url = self._get_layerLink(repo_name, image_id)
    bot.verbose("Downloading layers from %s" % url)

    # Resolve the final destination path for the layer tarball
    destination = "%s/%s.tar.gz" % (get_tmpdir(download_folder), image_id)

    # Update user what we are doing
    bot.debug("Downloading layer %s" % image_id)

    # Download under a temporary name, then move atomically into place
    tmp_name = "%s.%s" % (destination, next(tempfile._get_candidate_names()))
    tar_download = self.download(url, tmp_name)

    try:
        shutil.move(tar_download, destination)
    except Exception:
        msg = "Cannot untar layer %s," % tar_download
        msg += " was there a problem with download?"
        bot.error(msg)
        sys.exit(1)

    return destination
def share(self, query, share_to):
    '''share will use the client to get an image based on a query, and
    then share the link with an email or endpoint (share_to) of choice.

    Parameters
    ==========
    query: the query string used to find the remote image
    share_to: the email address to grant read access to
    '''
    images = self._container_query(query, quiet=True)
    if len(images) == 0:
        bot.error('Cannot find a remote image matching %s' % query)
        # Fix: exit nonzero on failure (was sys.exit(0)), consistent
        # with every other error path in this module
        sys.exit(1)

    image = images[0]

    def callback(request_id, response, exception):
        '''batch callback: report the new share id, or show the error.'''
        if exception:
            # Handle error
            print(exception)
        else:
            share_id = response.get('id')
            bot.info('Share to %s complete: %s!' % (share_to, share_id))

    batch = self._service.new_batch_http_request(callback=callback)

    # Grant the target user read-only access to the image file
    user_permission = {
        'type': 'user',
        'role': 'reader',
        'emailAddress': share_to
    }
    batch.add(self._service.permissions().create(
        fileId=image['id'],
        body=user_permission,
        fields='id',
    ))
    batch.execute()
    return image
def push(self, path, name, tag=None):
    '''push an image to Singularity Registry.

    Parameters
    ==========
    path: should correspond to an absolute image path (or derive it)
    name: should be the complete uri that the user has requested to push.
    tag: should correspond with an image tag. This is provided to mirror Docker
    '''
    path = os.path.abspath(path)
    bot.debug("PUSH %s" % path)

    # The image must exist locally before we can push it
    if not os.path.exists(path):
        bot.error('%s does not exist.' % path)
        sys.exit(1)

    # This returns a data structure with collection, container, based on uri
    names = parse_image_name(remove_uri(name), tag=tag)

    # use Singularity client, if exists, to inspect to extract metadata
    metadata = self.get_metadata(path, names=names)

    # If you want a spinner
    bot.spinner.start()

    # do your push request here. Generally you want to except a
    # KeyboardInterrupt and give the user a status from the response
    bot.spinner.stop()
def push(self, path, name, tag=None):
    '''push an image to Google Cloud Storage, meaning uploading it.

    Parameters
    ==========
    path: should correspond to an absolute image path (or derive it)
    name: should be the complete uri that the user has requested to push.
    tag: should correspond with an image tag. This is provided to mirror Docker
    '''
    path = os.path.abspath(path)
    bot.debug("PUSH %s" % path)

    # The image must exist locally before we can upload it
    if not os.path.exists(path):
        bot.error('%s does not exist.' % path)
        sys.exit(1)

    # This returns a data structure with collection, container, based on uri
    names = parse_image_name(remove_uri(name), tag=tag)

    # If no version was parsed, fall back to the image file hash
    if names['version'] is None:
        version = get_image_hash(path)
        names = parse_image_name(remove_uri(name), tag=tag, version=version)

    # Update metadata with names
    metadata = self.get_metadata(path, names=names)['data']
    metadata.update(names)

    # Upload, then show the user the public media link
    manifest = self._upload(source=path,
                            destination=names['storage'],
                            metadata=metadata)
    print(manifest['mediaLink'])
def _check_install(self, message=''):
    '''exit with an error if singularity is not installed, since some
    commands cannot work without it.

    Parameters
    ==========
    message: extra context appended to the error message
    '''
    if check_install():
        return
    bot.error('singularity is required for this action %s' % message)
    sys.exit(1)
def download(self, url, file_name, headers=None, show_progress=True):
    '''stream to a temporary file, rename on successful completion.

    Parameters
    ==========
    url: the url to stream from
    file_name: the file name to stream to
    headers: additional headers to add
    show_progress: boolean to show progress bar

    Returns
    =======
    file_name: the requested final path (only exists on success)
    '''
    tmp_file = get_tmpfile(prefix="%s.tmp." % file_name)

    # Should we verify the request?
    verify = self._verify()

    # Check here if exists (401 is accepted: stream can refresh a token)
    if requests.head(url, verify=verify).status_code in [200, 401]:
        response = self.stream(url, headers=headers, stream_to=tmp_file,
                               show_progress=show_progress)

        if isinstance(response, HTTPError):
            bot.exit("Error downloading %s, exiting." % url)
        shutil.move(tmp_file, file_name)
    else:
        bot.error("Invalid url or permissions %s" % url)
        # Fix: don't leave the temporary file behind on the failure path
        if os.path.exists(tmp_file):
            os.remove(tmp_file)

    return file_name
def create(self, image_path, size=None, sudo=False):
    '''create a new Singularity image.

    Parameters
    ==========
    image_path: full path to image
    size: image size in MiB, default is 1024 MiB
    sudo: run the command with sudo (default False)

    Returns
    =======
    image_path: the created image; exits with error if creation failed
    '''
    self._check_install('[image.create]')

    # Fix: compare to None with "is"; also avoid duplicating the
    # command list for the debug / non-debug cases
    if size is None:
        size = 1024

    cmd = ['singularity']
    if self.debug:
        cmd.append('--debug')
    cmd += ['image.create', '--size', str(size), image_path]

    output = self.run_command(cmd, sudo=sudo)
    self.println(output)

    # The command can fail silently, so verify the image exists on disk
    if not os.path.exists(image_path):
        bot.error("Could not create image %s" % image_path)
        sys.exit(1)

    return image_path
def download(url, file_name, headers=None, show_progress=True):
    '''stream a url to a temporary file, renaming it to the final file
    name on successful completion.

    Parameters
    ==========
    url: the url to stream from
    file_name: the file name to stream to
    headers: additional headers to add
    show_progress: accepted for interface parity with the client version
    '''
    # Create the temporary download target next to the final destination
    fd, tmp_file = tempfile.mkstemp(prefix=("%s.tmp." % file_name))
    os.close(fd)

    if DISABLE_SSL_CHECK is True:
        bot.warning('Verify of certificates disabled! ::TESTING USE ONLY::')

    verify = not DISABLE_SSL_CHECK

    # Does the url being requested exist? (401 may be fixable via token)
    if requests.head(url, verify=verify).status_code not in [200, 401]:
        bot.error("Invalid url or permissions %s" % url)
        return file_name

    response = stream(url, headers=headers, stream_to=tmp_file)
    if isinstance(response, HTTPError):
        bot.exit("Error downloading %s, exiting." % url)
    shutil.move(tmp_file, file_name)
    return file_name
def get_size(self, add_padding=True, round_up=True, return_mb=True):
    '''return the image size computed from the manifests (must use the
    v.2.0 manifest to find layer sizes).

    Parameters
    ==========
    add_padding: if true, return reported size * 5
    round_up: if true, round up to nearest integer
    return_mb: if true, defaults bytes are converted to MB
    '''
    if not hasattr(self, 'manifests'):
        bot.error('Please retrieve manifests for an image first.')
        sys.exit(1)

    size = 768  # default size

    # Sum layer sizes from any manifest that reports layers
    for schemaVersion, manifest in self.manifests.items():
        if "layers" in manifest:
            size = sum(layer.get('size', 0) for layer in manifest["layers"])

    if add_padding is True:
        size = size * 5

    if return_mb is True:
        size = size / (1024 * 1024)  # 1MB = 1024*1024 bytes

    if round_up is True:
        size = math.ceil(size)

    return int(size)
def get_endpoint_path(self, endpoint_id):
    '''return the first fullpath to a folder in the endpoint based on
    expanding the user's home from the globus config file. This function
    is fragile but I don't see any other way to do it.

    Parameters
    ==========
    endpoint_id: the endpoint id to look up the path for
    '''
    config = os.path.expanduser("~/.globusonline/lta/config-paths")
    if not os.path.exists(config):
        # Fix: the original message had an unfilled %s placeholder
        bot.error('%s not found for a local Globus endpoint.' % config)
        sys.exit(1)

    # Read in the config and get the root paths (first csv column)
    candidates = [x.split(',')[0] for x in read_file(config)]

    # Fix: only accept a path that exists on disk. Previously the loop
    # variable retained the *last* candidate when none existed, so the
    # "path is None" guard never fired and a bad path was returned.
    path = None
    for candidate in candidates:
        if os.path.exists(candidate):
            path = candidate
            break

    # If we don't have an existing path, exit
    if path is None:
        bot.error('No path was found for a local Globus endpoint.')
        sys.exit(1)

    return path
def require_secrets(self, params=None):
    '''ensure that the client has a secrets file loaded and, optionally,
    that one or more parameters are defined within it. If params is
    None, only the file itself is checked. Exits with error on any
    missing piece.

    Parameters
    ==========
    params: a list of keys to lookup in the client secrets, eg:
            secrets[client_name][params1] should not be in [None,'']
            or not set
    '''
    name = self.client_name

    # Check 1: the client must have secrets, period. Missing attribute,
    # a None value, or a missing client section all count as failure.
    secrets = getattr(self, 'secrets', None)
    if secrets is None or name not in secrets:
        bot.error('%s requires client secrets.' % name)
        sys.exit(1)

    # Check 2: we have secrets and lookup, do we have all needed params?
    if params is None:
        return

    # A single key is accepted and treated as a one-element list
    if not isinstance(params, list):
        params = [params]

    for param in params:
        # Missing entirely, or present but empty/undefined: exit on the
        # first offending parameter
        if secrets[name].get(param) in [None, '']:
            bot.error('Missing %s in client secrets.' % param)
            sys.exit(1)
def get_metadata(self, image_file, names=None):
    '''extract metadata using Singularity inspect, if the executable is
    found. If not, return a reasonable default (the parsed image name).

    Parameters
    ==========
    image_file: the full path to a Singularity image
    names: optional, an extracted or otherwise created dictionary of
           variables for the image, likely from utils.parse_image_name
    '''
    if names is None:
        names = {}

    metadata = {}

    # We can't return anything without image_file or names
    if image_file:
        if not os.path.exists(image_file):
            bot.error('Cannot find %s.' % image_file)
            return names or metadata

    # The user provided a file, but no names
    if not names:
        names = parse_image_name(remove_uri(image_file))

    # Look for the Singularity Executable
    singularity = which('singularity')['message']

    # Inspect the image, or return names only
    if os.path.exists(singularity) and image_file:
        from spython.main import Client as Singularity

        # Store the original quiet setting
        is_quiet = Singularity.quiet

        # We try and inspect, but not required (wont work within Docker).
        # Fix: catch Exception (not a bare except, which also swallowed
        # KeyboardInterrupt/SystemExit), and restore quiet in finally so
        # it is reset even if inspect raises.
        try:
            Singularity.quiet = True
            updates = Singularity.inspect(image=image_file)
        except Exception:
            bot.warning(
                'Inspect command not supported, metadata not included.')
            updates = None
        finally:
            # Restore the original quiet setting
            Singularity.quiet = is_quiet

        # Try loading the metadata; ignore unparseable inspect output
        if updates is not None:
            try:
                metadata.update(json.loads(updates))
            except (TypeError, ValueError):
                pass

    metadata.update(names)
    return metadata
def compress(self, image_path):
    '''(properly) compress an image with gzip, returning the path of the
    compressed file (<image_path>.gz). Exits with error if the image
    does not exist.

    Parameters
    ==========
    image_path: the path of the image to compress
    '''
    from shlex import quote

    if not os.path.exists(image_path):
        bot.error("Cannot find image %s" % image_path)
        sys.exit(1)

    compressed_image = "%s.gz" % image_path

    # Fix: quote both paths so names containing spaces or shell
    # metacharacters cannot break (or inject into) the shell command
    os.system('gzip -c -6 %s > %s' % (quote(image_path),
                                      quote(compressed_image)))
    return compressed_image
def _update_secrets(self):
    '''The user is required to have an application secrets file in his
    or her environment. The information isn't saved to the secrets file,
    but the client exists with error if the variable isn't found.
    '''
    env = 'GOOGLE_APPLICATION_CREDENTIALS'
    self._secrets = self._get_and_update_setting(env)

    # Without credentials, the storage client cannot be used at all
    if self._secrets is not None:
        return
    bot.error('You must export %s to use Google Storage client' % env)
    sys.exit(1)
def mkdir_p(path):
    '''attempt to get the same functionality as mkdir -p: create the
    directory along with any missing parents, tolerating a directory
    that already exists.

    :param path: the path to create.
    '''
    try:
        os.makedirs(path)
    except OSError as e:
        # An already-existing directory is fine; any other error is fatal
        if e.errno != errno.EEXIST or not os.path.isdir(path):
            bot.error("Error creating path %s, exiting." % path)
            sys.exit(1)
def decompress(self, image_path, quiet=True):
    '''(properly) decompress an image with gzip, returning the path of
    the decompressed file. Exits with error if the image does not exist.

    Parameters
    ==========
    image_path: the .gz file to decompress in place
    quiet: suppress command output (default True)
    '''
    if not os.path.exists(image_path):
        bot.error("Cannot find image %s" % image_path)
        sys.exit(1)

    extracted_file = image_path.replace('.gz', '')
    cmd = ['gzip', '-d', '-f', image_path]

    # run_command exits if return code != 0, so there is nothing to
    # check here (the previously unused result variable is removed)
    self.run_command(cmd, quiet=quiet)
    return extracted_file
def stream(self, url, headers=None, stream_to=None, retry=True,
           default_headers=True, show_progress=True):
    '''a get that will stream to stream_to (a file). This stream is
    intended to take a url and (optionally) a set of headers and file to
    stream to, and will generate a response with requests.get.

    Parameters
    ==========
    url: the url to do a requests.get to
    headers: any updated headers to use for the requets
    stream_to: the file to stream to
    retry: should the client retry? (intended for use after token
           refresh) by default we retry once after token refresh, then
           fail.
    default_headers: kept for interface compatibility
    show_progress: boolean to show progress bar
    '''
    bot.debug("GET %s" % url)

    # Ensure headers are present, update if not.
    # Fix: compare against None with "is", not "=="
    if headers is None:
        if self.headers is None:
            self._reset_headers()
        headers = self.headers.copy()

    response = requests.get(url,
                            headers=headers,
                            verify=self._verify(),
                            stream=True)

    # Deal with token if necessary; retry exactly once after refresh
    if response.status_code == 401 and retry is True:
        if hasattr(self, '_update_token'):
            self._update_token(response)
            return self.stream(url, headers, stream_to, retry=False,
                               show_progress=show_progress)

    if response.status_code == 200:
        return self._stream(response, stream_to=stream_to,
                            show_progress=show_progress)

    bot.error("Problem with stream, response %s" % (response.status_code))
    sys.exit(1)
def push(self, path, name, tag=None):
    '''push an image to your Storage. If the collection doesn't exist,
    it is created.

    Parameters
    ==========
    path: should correspond to an absolute image path (or derive it)
    name: should be the complete uri that the user has requested to push.
    tag: should correspond with an image tag. This is provided to mirror Docker
    '''
    path = os.path.abspath(path)
    bot.debug("PUSH %s" % path)
    if not os.path.exists(path):
        bot.error('%s does not exist.' % path)
        sys.exit(1)

    # Parse image names
    names = parse_image_name(remove_uri(name), tag=tag)

    # Get the size of the file
    file_size = os.path.getsize(path)
    # NOTE(review): chunk_size is never used below — the whole file is
    # read into memory by F.read(); consider a chunked upload for large
    # images. storage_path is also unused — verify intent.
    chunk_size = 4 * 1024 * 1024
    storage_path = "/%s" % names['storage']

    # Create / get the collection
    collection = self._get_or_create_collection(names['collection'])

    # The image name is the name followed by tag
    image_name = os.path.basename(names['storage'])

    # prepare the progress bar (shown at 0, completed after the upload —
    # no incremental updates happen during put_object)
    progress = 0
    bot.show_progress(progress, file_size, length=35)

    # Put the (actual) container into the collection
    with open(path, 'rb') as F:
        self.conn.put_object(names['collection'],
                             image_name,
                             contents=F.read(),
                             content_type='application/octet-stream')

    # Finish up
    bot.show_progress(iteration=file_size,
                      total=file_size,
                      length=35,
                      carriage_return=True)

    # Newline to finish download
    sys.stdout.write('\n')
def stream(url, headers, stream_to=None, retry=True):
    '''a get that will stream to stream_to. Since this is a worker task,
    it differs from the client provided version in that it requires
    headers.

    Parameters
    ==========
    url: the url to stream from
    headers: headers to send with the request (required)
    stream_to: the file to write streamed chunks to
    retry: allow one retry with a refreshed token on 401/403
    '''
    bot.debug("GET %s" % url)

    if DISABLE_SSL_CHECK is True:
        bot.warning('Verify of certificates disabled! ::TESTING USE ONLY::')

    # Ensure headers are present, update if not
    response = requests.get(url,
                            headers=headers,
                            verify=not DISABLE_SSL_CHECK,
                            stream=True)

    # If we get permissions error, one more try with updated token.
    # Fix: honor the retry flag — previously it was never checked, so a
    # second 401/403 after the token refresh recursed without bound.
    if response.status_code in [401, 403] and retry is True:
        headers = update_token(headers)
        return stream(url, headers, stream_to, retry=False)

    # Successful Response
    elif response.status_code == 200:

        # Keep user updated with Progress Bar
        content_size = None
        if 'Content-Length' in response.headers:
            progress = 0
            content_size = int(response.headers['Content-Length'])
            bot.show_progress(progress, content_size, length=35)

        chunk_size = 1 << 20
        with open(stream_to, 'wb') as filey:
            for chunk in response.iter_content(chunk_size=chunk_size):
                filey.write(chunk)
                if content_size is not None:
                    progress += chunk_size
                    bot.show_progress(iteration=progress,
                                      total=content_size,
                                      length=35,
                                      carriage_return=False)

        # Newline to finish download
        sys.stdout.write('\n')
        return stream_to

    bot.error("Problem with stream, response %s" % (response.status_code))
    sys.exit(1)
def main(args, parser, subparser):
    '''push a local image to a remote, after verifying it exists.'''
    from sregistry.main import get_client

    # Does the user have a valid image?
    image = args.image[0]
    if not os.path.exists(image):
        subparser.print_help()
        bot.error("Please supply one or more paths to existing images.")
        sys.exit(1)

    # Authenticate the client, announce the action, then push
    cli = get_client(args.name, quiet=args.quiet)
    cli.announce(args.command)
    cli.push(path=image, name=args.name, tag=args.tag)
def healthy(self, url):
    '''determine if a resource is healthy based on the final response to
    a GET being 200 (requests follows redirects, so a redirect that
    resolves to 200 counts as healthy).

    Parameters
    ==========
    url: the URL to check status for

    Returns
    =======
    True if the final status code is 200, False otherwise — including
    when the host cannot be reached at all.
    '''
    # Fix: an unreachable host or connection failure previously raised
    # out of this function instead of reporting the resource unhealthy
    try:
        response = requests.get(url)
    except requests.exceptions.RequestException as e:
        bot.error('%s, %s.' % (url, e))
        return False

    status_code = response.status_code
    if status_code != 200:
        bot.error('%s, response status code %s.' % (url, status_code))
        return False
    return True
def _update_secrets(self):
    '''The user is required to have an application secrets file in his
    or her environment. The client exists with error if the variable
    isn't found.
    '''
    envar = 'SREGISTRY_GOOGLE_DRIVE_CREDENTIALS'
    self._secrets = self._get_and_update_setting(envar)

    # The Drive root folder falls back to "sregistry" when not configured
    self._base = self._get_and_update_setting('SREGISTRY_GOOGLE_DRIVE_ROOT')
    if self._base is None:
        self._base = 'sregistry'

    # Credentials are required; point the user at the docs and exit
    if self._secrets is None:
        bot.error('You must export %s to use Google Drive client' % envar)
        bot.info("https://singularityhub.github.io/sregistry-cli/client-google-drive")
        sys.exit(1)
def stream_response(self, response, stream_to=None, show_progress=True):
    '''stream response is one level higher up than stream, starting with
    a response object and then performing the stream without making the
    requests.get. The expectation is that the request was successful
    (status code 20*).

    Parameters
    ==========
    response: a response that is ready to be iterated over to download
              in streamed chunks
    stream_to: the file to stream to
    show_progress: boolean to show progress bar
    '''
    if response.status_code == 200:

        # NOTE(review): this silences the bot globally and never restores
        # it — confirm callers expect bot.quiet to remain True afterwards
        if show_progress is False:
            bot.quiet = True

        # Keep user updated with Progress Bar, if not quiet
        content_size = None
        if 'Content-Length' in response.headers:
            progress = 0
            content_size = int(response.headers['Content-Length'])
            bot.show_progress(progress, content_size, length=35)

        # Stream in 1 MiB chunks; progress advances by a full chunk_size
        # each iteration, so the bar may slightly overshoot on the last
        # (partial) chunk
        chunk_size = 1 << 20
        with open(stream_to, 'wb') as filey:
            for chunk in response.iter_content(chunk_size=chunk_size):
                filey.write(chunk)
                if content_size is not None:
                    progress += chunk_size
                    bot.show_progress(iteration=progress,
                                      total=content_size,
                                      length=35,
                                      carriage_return=False)

        # Newline to finish download
        sys.stdout.write('\n')
        return stream_to

    # Any non-200 response is fatal
    bot.error("Problem with stream, response %s" % (response.status_code))
    sys.exit(1)
def _get_project(self, project=None):
    '''return the active Google project, exiting if none can be found.

    Parameters
    ==========
    project: a project to default to, if not found in the environment
    '''
    project = self._get_and_update_setting('SREGISTRY_GOOGLE_PROJECT',
                                           project)
    if project:
        return project
    bot.error('Export your SREGISTRY_GOOGLE_PROJECT to build.')
    sys.exit(1)
def cp(self, move_to, image_name=None, container=None, command="copy"):
    '''_cp is the shared function between mv (move) and rename, and
    performs the move, and returns the updated container

    Parameters
    ==========
    move_to: the full path to move it to
    image_name: an image_uri to look up a container in the database
    container: the container object to move (must have a container.image)
    command: label used in the log/error messages ("copy" by default)
    '''
    # We need at least one way to identify the container
    if container is None and image_name is None:
        bot.error('A container or image_name must be provided to %s' % command)
        sys.exit(1)

    # If a container isn't provided, look for it from image_uri
    if container is None:
        container = self.get(image_name, quiet=True)

    # A remote-only container has no local image file on disk
    image = container.image or ''
    if os.path.exists(image):
        filedir = os.path.dirname(move_to)

        # If the two are the same, doesn't make sense
        if move_to == image:
            bot.warning('%s is already the name.' % image)
            sys.exit(1)

        # Ensure directory exists
        if not os.path.exists(filedir):
            bot.error('%s does not exist. Ensure exists first.' % filedir)
            sys.exit(1)

        # Ensure writable for user
        if not os.access(filedir, os.W_OK):
            bot.error('%s is not writable' % filedir)
            sys.exit(1)

        original = os.path.basename(image)
        try:
            # Move the file first, then persist the new path to the
            # database so the record matches what is on disk
            shutil.move(image, move_to)
            container.image = move_to
            self.session.commit()
            bot.info('[%s] %s => %s' % (command, original, move_to))
            return container
        except:
            bot.error('Cannot %s %s to %s' % (command, original, move_to))
            sys.exit(1)

    # Only reached when the image path does not exist locally (remote)
    bot.warning('''This operation is not permitted on a remote image. Please pull %s and then %s to the appropriate location.''' % (container.uri, command))
def _client_tagged(self, tags): '''ensure that the client name is included in a list of tags. This is important for matching builders to the correct client. We exit on fail. Parameters ========== tags: a list of tags to look for client name in ''' # We must match the client to a tag name = self.client_name.lower() tags = [t.lower() for t in tags] if name not in tags: bot.error('%s not found in %s, must match!' % (name, tags)) sys.exit(1)