def inspect(self, name):
    """Inspect a local image in the database, which typically includes the
    basic fields in the model.
    """
    container = self.get(name)

    if container is not None:
        collection = container.collection.name
        fields = container.__dict__.copy()
        fields["collection"] = collection
        fields["metrics"] = json.loads(fields["metrics"])
        del fields["_sa_instance_state"]
        fields["created_at"] = str(fields["created_at"])
        print(json.dumps(fields, indent=4, sort_keys=True))
        return fields

    bot.exit("image {} was not found in the database".format(name))
def _update_secrets(self):
    '''update secrets creates the aws (ECR) client. The awscli module is
       required, and the user is directed to install the aws extras if the
       import fails.
    '''
    bot.debug('Creating aws client...')

    try:
        from awscli.clidriver import create_clidriver
    except ImportError:
        bot.exit('Please install the aws client: pip install sregistry[aws]')

    driver = create_clidriver()
    self.aws = driver.session.create_client('ecr')
def stream_response(self, response, stream_to=None, show_progress=True):
    """
    stream response is one level higher up than stream, starting with a
    response object and then performing the stream without making the
    requests.get. The expectation is that the request was successful
    (status code 20*).

    Parameters
    ==========
    response: a response that is ready to be iterated over to download in
              streamed chunks
    stream_to: the file to stream to
    show_progress: boolean to show progress bar
    """
    if response.status_code == 200:

        if show_progress is False:
            bot.quiet = True

        # Keep user updated with Progress Bar, if not quiet
        content_size = None
        if "Content-Length" in response.headers:
            progress = 0
            content_size = int(response.headers["Content-Length"])
            bot.show_progress(progress, content_size, length=35)

        chunk_size = 1 << 20
        with open(stream_to, "wb") as filey:
            for chunk in response.iter_content(chunk_size=chunk_size):
                filey.write(chunk)
                if content_size is not None:
                    progress += chunk_size
                    bot.show_progress(
                        iteration=progress,
                        total=content_size,
                        length=35,
                        carriage_return=False,
                    )

        # Newline to finish download
        sys.stdout.write("\n")
        return stream_to

    bot.exit("Problem with stream, response %s" % (response.status_code))
def authorize(self, names, payload=None, request_type="push"):
    """Authorize a client based on encrypting the payload with the client
       token, which should be matched on the receiving server"""

    self.username = self._get_and_update_setting("SREGISTRY_REGISTRY_USERNAME")
    if self.username is None:
        # backwards compatibility
        self.username = self._get_and_update_setting("username")
        if self.username is None:
            bot.exit(
                "Failed to authorize: please set SREGISTRY_REGISTRY_USERNAME to an appropriate value"
            )
        else:
            self._update_setting("SREGISTRY_REGISTRY_USERNAME", self.username)

    self.token = self._get_and_update_setting("SREGISTRY_REGISTRY_TOKEN")
    if self.token is None:
        # backwards compatibility
        self.token = self._get_and_update_setting("token")
        if self.token is None:
            bot.exit(
                "Failed to authorize: please set SREGISTRY_REGISTRY_TOKEN to an appropriate value"
            )
        else:
            self._update_setting("SREGISTRY_REGISTRY_TOKEN", self.token)

    # Use the payload to generate a digest of the form
    # <request_type>|<collection>|<timestamp>|<image>|<tag>|
    timestamp = generate_timestamp()
    credential = generate_credential(self.username)
    credential = "%s/%s/%s" % (request_type, credential, timestamp)

    if payload is None:
        payload = "%s|%s|%s|%s|%s|" % (
            request_type,
            names["collection"],
            timestamp,
            names["image"],
            names["tag"],
        )

    signature = generate_signature(payload, self.token)
    return "SREGISTRY-HMAC-SHA256 Credential=%s,Signature=%s" % (credential, signature)
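# A minimal sketch of the HMAC-SHA256 signing that authorize relies on. The
# helper name (example_generate_signature) and the hexdigest encoding are
# assumptions for illustration only; the real generate_signature must match
# whatever the receiving Singularity Registry Server verifies.
import hashlib
import hmac

def example_generate_signature(payload, secret):
    """Return a hex-encoded HMAC-SHA256 digest of payload keyed by secret."""
    return hmac.new(
        secret.encode("utf-8"),
        msg=payload.encode("utf-8"),
        digestmod=hashlib.sha256,
    ).hexdigest()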
def main(args, parser, extra):
    """the images entrypoint is intended to list images locally in the user
    database, optionally taking one or more query strings to subset the
    search
    """
    from sregistry.main import get_client

    cli = get_client(quiet=args.quiet)

    # If the client doesn't have the command, exit
    if not hasattr(cli, "images"):
        bot.exit("listing images requires using the sqlite database.")

    for query in args.query:
        if query in ["", "*"]:
            query = None
        cli.images(query=query)
def main(args, parser, extra):
    '''sharing an image means sending a remote share from an image you
       control to a contact, usually an email.
    '''
    from sregistry.main import get_client

    image = args.image

    # Detect any uri, and refresh client if necessary
    cli = get_client(image, quiet=args.quiet)
    cli.announce(args.command)

    # If the client doesn't have the command, exit
    if not hasattr(cli, 'share'):
        msg = "share is not implemented for %s. Why don't you add it?"
        bot.exit(msg % cli.client_name)

    cli.share(image, share_to=args.share_to)
def _client_tagged(self, tags):
    '''ensure that the client name is included in a list of tags. This is
       important for matching builders to the correct client. We exit on
       failure.

       Parameters
       ==========
       tags: a list of tags to look for the client name in
    '''
    # We must match the client to a tag
    name = self.client_name.lower()
    tags = [t.lower() for t in tags]

    if name not in tags:
        bot.exit('%s not found in %s, must match!' % (name, tags))
def _update_secrets(self):
    """update secrets will look for a dropbox token in the environment at
    SREGISTRY_DROPBOX_TOKEN and if found, create a client. If not, an
    error message is returned and the client exits.
    """
    # Retrieve the user token. Exit if not found
    token = self._required_get_and_update("SREGISTRY_DROPBOX_TOKEN")

    # Create the dropbox client
    self.dbx = Dropbox(token)

    # Verify that the account is valid
    try:
        self.account = self.dbx.users_get_current_account()
    except:
        bot.exit("Account invalid. Exiting.")
def _get_bucket(self):
    """get a bucket based on a bucket name. If it doesn't exist, create it.
    """
    # Case 1: The bucket already exists
    try:
        self._bucket = self._bucket_service.get_bucket(self._bucket_name)

    # Case 2: The bucket needs to be created
    except google.cloud.exceptions.NotFound:
        self._bucket = self._bucket_service.create_bucket(self._bucket_name)

    # Case 3: The bucket name is already taken
    except:
        bot.exit("Cannot get or create %s" % self._bucket_name)

    return self._bucket
def main(args, parser, extra):
    from sregistry.main import get_client

    image = args.image
    cli = get_client(image, quiet=args.quiet)
    cli.announce(args.command)

    if not hasattr(cli, 'rm'):
        msg = "remove is not implemented for %s. Why don't you add it?"
        bot.exit(msg % cli.client_name)

    result = cli.rm(image)
    if result is None:
        bot.exit("No {} record found in the database".format(image))
def run_compute_build(cli, args):
    """a compute based build is the oldest version of build - here we bring
    up our own instance, and then provide control to it. The helper
    functions below (kill, instances, templates) support this version.
    """
    # Does the user want to save the image?
    command = args.commands.pop(0)

    # Option 1: The user wants to kill an instance
    if command == "kill":
        kill(args)

    # Option 2: Just list running instances
    elif command == "instances":
        instances(args)

    # Option 3: The user wants to list templates
    elif "template" in command:
        templates(args)

    # Option 4: View a specific or latest log
    elif command == "logs":
        list_logs(args)

    # Option 5: The user is providing a Github repo!
    recipe = "Singularity"

    if "github" in command:

        # One argument indicates a recipe
        if len(args.commands) == 1:
            recipe = args.commands.pop(0)

    else:
        # If a command is provided, but not a Github repo
        bot.exit("%s is not a recognized option." % command)

    # Does the user want to specify a name for the collection?
    name = args.name

    # No image is needed, we are creating in the cloud
    return cli.build(repo=command, name=name, recipe=recipe, preview=args.preview)
def stream(
    self,
    url,
    headers=None,
    stream_to=None,
    retry=True,
    default_headers=True,
    show_progress=True,
):
    """
    stream is a get that will stream to file_name. This stream is intended
    to take a url and (optionally) a set of headers and file to stream to,
    and will generate a response with requests.get.

    Parameters
    ==========
    url: the url to do a requests.get to
    headers: any updated headers to use for the requests
    stream_to: the file to stream to
    show_progress: boolean to show progress bar
    retry: should the client retry? (intended for use after token refresh)
           by default we retry once after token refresh, then fail.
    """
    bot.debug("GET %s" % url)

    # Ensure headers are present, update if not
    if headers is None:
        if self.headers is None:
            self._reset_headers()
        headers = self.headers.copy()

    response = requests.get(url, headers=headers, verify=self._verify(), stream=True)

    # Deal with token if necessary
    if response.status_code == 401 and retry is True:
        if hasattr(self, "_update_token"):
            self._update_token(response)
            return self.stream(
                url, headers, stream_to, retry=False, show_progress=show_progress
            )

    if response.status_code == 200:
        return self._stream(response, stream_to=stream_to, show_progress=show_progress)

    bot.exit("Problem with stream, response %s" % (response.status_code))
def main(args, parser, extra):
    from sregistry.main import get_client

    for query in args.query:
        original = query
        query = remove_uri(query)

        if query in ['', '*']:
            query = None

        try:
            cli = get_client(original, quiet=args.quiet)
            cli.announce(args.command)
            cli.search(query, args=args)
        except NotImplementedError:
            msg = "search is not implemented for %s. Why don't you add it?"
            bot.exit(msg % cli.client_name)
def main(args, parser, extra):
    '''the list command corresponds with listing images for an external
       resource. This is different from listing images that are local to
       the database, which should be done with "images"
    '''
    from sregistry.main import get_client

    cli = get_client(quiet=args.quiet)

    # If the client doesn't have the command, exit
    if not hasattr(cli, 'ls'):
        msg = "list is not implemented for %s. Why don't you add it?"
        bot.exit(msg % cli.client_name)

    for query in args.query:
        if query in ['', '*']:
            query = None
        cli.ls(query=query)
def _update_secrets(self):
    """The user is required to have an application secrets file in his or
    her environment. The client exits with an error if the variable isn't
    found.
    """
    env = "SREGISTRY_GOOGLE_DRIVE_CREDENTIALS"
    self._secrets = self._get_and_update_setting(env)
    self._base = self._get_and_update_setting("SREGISTRY_GOOGLE_DRIVE_ROOT")

    if self._base is None:
        self._base = "sregistry"

    if self._secrets is None:
        bot.error("You must export %s to use Google Drive client" % env)
        bot.exit(
            "https://singularityhub.github.io/sregistry-cli/client-google-drive"
        )
def run_registry_build(cli, args, extra):
    """a registry build pushes a recipe file to Singularity Registry Server,
    or, if a GitHub url is provided, builds from there. For regular
    building, we suggest directly connecting the repository to Singularity
    Registry Server; this command can serve as a one time build.
    """
    # The uri can also contain github, which indicates a Github build
    if args.name is None:
        bot.exit("Please provide a container identifier with --name")

    recipe = args.commands.pop(0)
    response = cli.build(name=args.name, recipe=recipe, extra=extra)

    # Print output to the console
    if response is not None:
        print_output(response, args.outfile)

    return response
def main(args, parser, extra):
    from sregistry.main import get_client

    # Does the user have a valid image?
    image = args.image
    if not os.path.exists(image):
        bot.exit("%s does not exist" % image)

    # Authenticate
    cli = get_client(args.name, quiet=args.quiet)
    cli.announce(args.command)

    # If the client doesn't have the command, exit
    if not hasattr(cli, "push"):
        msg = "push is not implemented for %s. Why don't you add it?"
        bot.exit(msg % cli.client_name)

    cli.push(path=image, name=args.name, tag=args.tag)
def get_file_hash(image_path, algorithm='sha256'):
    '''return a hash of the file using the requested algorithm (sha256 by
       default). This is intended to give the file a reasonable version.

       Parameters
       ==========
       image_path: full path to the singularity image
       algorithm: the hashlib algorithm to use (default is sha256)
    '''
    try:
        hasher = getattr(hashlib, algorithm)()
    except AttributeError:
        bot.error("%s is an invalid algorithm." % algorithm)
        bot.exit(' '.join(hashlib.algorithms_guaranteed))

    with open(image_path, "rb") as f:
        for chunk in iter(lambda: f.read(4096), b""):
            hasher.update(chunk)

    return hasher.hexdigest()
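# Usage sketch for get_file_hash (the image path is hypothetical): the digest
# doubles as a content-based version string for the image.
version = get_file_hash("/tmp/busybox_latest.sif", algorithm="sha256")
print("version sha256.%s" % version)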
def init_db(self, db_path):
    '''initialize the database, with the default database path or a custom
       one of the format

           sqlite:////home/<username>/sregistry.db

       The custom path can be set with the environment variable
       SREGISTRY_DATABASE when a user creates the client. The database
       should use the .singularity cache folder to cache layers and images,
       and .singularity/sregistry.db as a database
    '''
    # Database Setup, use default if uri not provided
    self.database = 'sqlite:///%s' % db_path
    self.storage = SREGISTRY_STORAGE

    # If the path isn't defined, cut out early
    if not db_path:
        return

    # Ensure that the parent folder exists
    parent_folder = os.path.dirname(db_path)

    # Case 1: Does not exist
    if not os.path.exists(parent_folder):
        bot.exit("Database location {} does not exist.".format(parent_folder))

    # Case 2: Insufficient permission to write
    if not os.access(parent_folder, os.W_OK):
        bot.exit("Insufficient permission to write to {}".format(parent_folder))

    bot.debug("Database located at %s" % self.database)
    self.engine = create_engine(self.database, convert_unicode=True)
    self.session = scoped_session(
        sessionmaker(autocommit=False, autoflush=False, bind=self.engine))
    Base.query = self.session.query_property()

    # import all modules here that might define models so that
    # they will be registered properly on the metadata. Otherwise
    # you will have to import them first before calling init_db()
    Base.metadata.create_all(bind=self.engine)
    self.Base = Base
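# Aside on the URI built above: the path is prefixed with sqlite:///, so an
# absolute path (this one is hypothetical) yields the four-slash form shown
# in the docstring.
db_path = "/home/user/sregistry.db"
print('sqlite:///%s' % db_path)  # sqlite:////home/user/sregistry.db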
def get_digests(self, repo_name, tag):
    """
    return a list of layers from a manifest. The function is intended to
    work with both version 1 and 2 of the schema. All layers (including
    redundant) are returned. By default, we try version 2 first, then fall
    back to version 1. For version 1 manifests, extraction is reversed.

    Parameters
    ==========
    repo_name: the repository name the manifest was retrieved for
    tag: the tag associated with the manifest
    """
    if not hasattr(self, "manifest"):
        bot.exit("Please retrieve manifest for the image first.")

    # version 2 manifest here!
    return self.manifest["layers"]
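# Illustrative only: the rough shape of the version 2 schema manifest that
# get_digests reads from self.manifest. The digest value is invented.
example_manifest = {
    "schemaVersion": 2,
    "layers": [
        {
            "mediaType": "application/vnd.docker.image.rootfs.diff.tar.gzip",
            "size": 2757034,
            "digest": "sha256:0000000000000000000000000000000000000000000000000000000000000000",
        }
    ],
}
layer_digests = [layer["digest"] for layer in example_manifest["layers"]]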
def get_bucket(self):
    '''given a bucket name and a client that is initialized, get or create
       the bucket.
    '''
    for attr in ['bucket_name', 's3']:
        if not hasattr(self, attr):
            bot.exit('client is missing attribute %s' % (attr))

    # See if the bucket already exists
    self.bucket = None

    for bucket in self.s3.buckets.all():
        if bucket.name == self.bucket_name:
            self.bucket = bucket

    # If the bucket doesn't exist, create it
    if self.bucket is None:
        self.bucket = self.s3.create_bucket(Bucket=self.bucket_name)
        bot.info('Created bucket %s' % self.bucket.name)

    return self.bucket
def get_config(self, key="Entrypoint", delim=None):
    """get_config returns a particular key (default is Entrypoint) from a
    VERSION 1 manifest obtained with get_manifest.

    Parameters
    ==========
    key: the key to return from the manifest config
    delim: given a list, the delimiter to use to join the entries.
           Default is newline
    """
    if not hasattr(self, "manifests"):
        bot.exit("Please retrieve manifests for an image first.")

    cmd = None

    # If we didn't find the config value in version 2
    if cmd is None and "config" in self.manifests:

        # First try, version 2.0 manifest config has upper level config
        manifest = self.manifests["config"]
        if "config" in manifest:
            if key in manifest["config"]:
                cmd = manifest["config"][key]

        # Second try, config manifest (not from version 2.0 schema blob)
        if cmd is None and "history" in manifest:
            for entry in manifest["history"]:
                if "v1Compatibility" in entry:
                    entry = json.loads(entry["v1Compatibility"])
                    if "config" in entry:
                        if key in entry["config"]:
                            cmd = entry["config"][key]

    # Standard is to include commands like ['/bin/sh']
    if isinstance(cmd, list):
        if delim is not None:
            cmd = delim.join(cmd)

    bot.verbose("Found Docker config (%s) %s" % (key, cmd))
    return cmd
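# Minimal sketch of the version 1 fallback that get_config walks: each
# history entry holds a JSON string under "v1Compatibility" whose "config"
# section can contain keys such as Entrypoint or Cmd. The values below are
# invented for illustration.
import json

history_entry = {
    "v1Compatibility": json.dumps({"config": {"Entrypoint": ["/bin/sh", "-c"]}})
}
entry_config = json.loads(history_entry["v1Compatibility"]).get("config", {})
print("\n".join(entry_config.get("Entrypoint", [])))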
def add(
    self,
    image_path=None,
    image_uri=None,
    image_name=None,
    url=None,
    metadata=None,
    save=True,
    copy=False,
):
    """dummy add simply returns an object that mimics a database entry, so
    the calling function (in push or pull) can interact with it equally.
    Most variables (other than image_path) are not used."""

    # We can only save if the image is provided
    if image_path is not None:
        if not os.path.exists(image_path):
            bot.exit("Cannot find %s" % image_path)

    if image_uri is None:
        bot.exit("You must provide an image uri <collection>/<namespace>")

    names = parse_image_name(remove_uri(image_uri))
    bot.debug("Added %s to filesystem" % names["uri"])

    # Create a dummy container on the fly
    class DummyContainer:
        def __init__(self, image_path, client_name, url, names):
            self.image = image_path
            self.client = client_name
            self.url = url
            self.name = names["image"]
            self.tag = names["tag"]
            self.uri = names["uri"]

    container = DummyContainer(image_path, self.client_name, url, names)

    bot.info("[container][add] %s" % names["uri"])
    return container
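# Toy illustration (not the library's parse_image_name) of the fields the
# DummyContainer above expects in names: "image", "tag", and "uri".
def toy_parse_image_name(uri):
    collection, _, rest = uri.partition("/")
    image, _, tag = rest.partition(":")
    tag = tag or "latest"
    return {
        "collection": collection,
        "image": image,
        "tag": tag,
        "uri": "%s/%s:%s" % (collection, image, tag),
    }

# toy_parse_image_name("dinosaur/avocado:latest")
# -> {'collection': 'dinosaur', 'image': 'avocado', 'tag': 'latest',
#     'uri': 'dinosaur/avocado:latest'}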
def get_blob_location(response, bucket):
    """return a relative path for a blob based on finding the build step,
    and the container built from it.

    Parameters
    ==========
    response: the response from client._build_status(build_id)
    bucket: the name of the build bucket.
    """
    # Find the build step, it uses singularityware as a builder
    build_step = [
        x
        for x in response["steps"]
        if x["name"] == "gcr.io/cloud-builders/gsutil" and x["args"][0] == "cp"
    ]

    # This is fragile, but we have to get the container name
    if len(build_step) > 0:
        location = build_step[0]["args"][-1]
        return re.sub("(gs://%s|%s)" % (bucket, bucket), "", location).strip("/")

    bot.exit("Cannot find build step with image name.")
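# Usage sketch with a toy Cloud Build response; the step name matches the
# filter in get_blob_location above, and the bucket and paths are invented.
toy_response = {
    "steps": [
        {
            "name": "gcr.io/cloud-builders/gsutil",
            "args": ["cp", "container.sif", "gs://my-bucket/collection/container.sif"],
        }
    ]
}
# get_blob_location(toy_response, "my-bucket") -> "collection/container.sif"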
def main(args, parser, extra):
    from sregistry.main import get_client

    image = args.image
    name = args.name

    # Customize client based on uri
    cli = get_client(image, quiet=args.quiet)
    cli.announce(args.command)

    # Does the user want to save the image?
    do_save = True
    if args.nocache is True or not hasattr(cli, "storage"):
        do_save = False

    # If the client doesn't have the command, exit
    if not hasattr(cli, "pull"):
        msg = "pull is not implemented for %s. Why don't you add it?"
        bot.exit(msg % cli.client_name)

    cli.pull(images=image, file_name=name, force=args.force, save=do_save)
def update_token(self):
    """update_token uses HTTP basic authentication to get a token for
    Docker registry API V2 operations. We get here if a 401 is returned
    for a request. The token comes from the Amazon ECR authorization API
    rather than from parsing a challenge header.

    https://docs.docker.com/registry/spec/auth/token/
    """
    # Add Amazon headers
    tokens = self.aws.get_authorization_token()
    token = tokens["authorizationData"][0]["authorizationToken"]

    try:
        token = {"Authorization": "Basic %s" % token}
        self.headers.update(token)
    except:
        bot.exit("Error getting token.")
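# Aside (per AWS ECR documentation): the authorizationToken is the base64
# encoding of "AWS:<password>", which is why it can be dropped straight into
# a Basic authorization header above. Decoding it for inspection might look
# like this; the helper name is ours, not part of the client.
import base64

def split_ecr_token(authorization_token):
    user, _, password = (
        base64.b64decode(authorization_token).decode("utf-8").partition(":")
    )
    return user, password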
def get_manifest(self, repo_name, tag):
    """return the image manifest via the aws client, saved in self.manifest
    """
    image = None
    repo = self.aws.describe_images(repositoryName=repo_name)

    if "imageDetails" in repo:
        for contender in repo.get("imageDetails"):
            if tag in contender["imageTags"]:
                image = contender
                break

    # if the image isn't found, we need to exit
    if image is None:
        bot.exit("Cannot find %s:%s, is the uri correct?" % (repo_name, tag))

    digest = image["imageDigest"]
    digests = self.aws.batch_get_image(
        repositoryName=repo_name, imageIds=[{"imageDigest": digest, "imageTag": tag}]
    )
    self.manifest = json.loads(digests["images"][0]["imageManifest"])
    return self.manifest
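# Illustrative shape of the describe_images response that get_manifest scans
# for a matching tag (digest and tags invented); the keys mirror those used
# in the function above.
toy_repo = {
    "imageDetails": [
        {"imageDigest": "sha256:0000", "imageTags": ["latest", "1.0.0"]}
    ]
}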
def load_build_config(self, config=None):
    """load a google compute config, meaning that we have the following
    cases:

    1. the user has not provided a config file directly, we look in env.
    2. the environment is not set, so we use a reasonable default
    3. if the final string is not found as a file, we look for it in library
    4. we load the library name, or the user file, else error

    Parameters
    ==========
    config: the config file the user has provided, or the library URI
    """
    # If the config is already a dictionary, it's loaded
    if isinstance(config, dict):
        bot.debug("Config is already loaded.")
        return config

    # if the config is not defined, look in environment, then choose a default
    if config is None:
        config = self._get_and_update_setting(
            "SREGISTRY_COMPUTE_CONFIG", "google/compute/ubuntu/securebuild-2.4.3"
        )

    # If the config is a file, we read it
    elif os.path.exists(config):
        return read_json(config)

    # otherwise, try to look it up in the library
    configs = self._load_templates(config)
    if configs is None:
        bot.exit("%s is not a valid config." % config)

    bot.info("Found config %s in library!" % config)
    config = configs[0]

    return config
def _extract_tar(archive, output_folder):
    '''use blob2oci to handle whiteout files for extraction. Credit for this
       script goes to docker2oci by Olivier Freyermouth; see the script
       folder for the license.

       Parameters
       ==========
       archive: the archive to extract
       output_folder: the output folder (sandbox) to extract to
    '''
    from .terminal import (run_command, which)

    result = which('blob2oci')
    if result['return_code'] != 0:
        bot.exit('Cannot find blob2oci script on path, exiting.')
    script = result['message']

    command = ['exec', script, '--layer', archive, '--extract', output_folder]
    if not bot.is_quiet():
        print("Extracting %s" % archive)
    return run_command(command)