Example No. 1
def get_cache(subfolder=None, quiet=False):
    '''get_cache will return the user's cache for singularity. The path
    returned is generated at the start of the run, and returned optionally
    with a subfolder
    :param subfolder: a subfolder in the cache base to retrieve
    :param quiet: if True, skip the informational message about the cache path
    '''

    # Clean up the path and create
    cache_base = clean_path(SINGULARITY_CACHE)

    # Does the user want to get a subfolder in cache base?
    if subfolder is not None:
        cache_base = "%s/%s" % (cache_base, subfolder)
        
    # Create the cache folder(s), if they don't exist
    create_folders(cache_base)

    if not quiet:
        bot.info("Cache folder set to %s" %cache_base)
    return cache_base
Example No. 2
def get_cache(subfolder=None, quiet=False):
    '''get_cache will return the user's cache for singularity.
    The path returned is generated at the start of the run,
    and returned optionally with a subfolder
    :param subfolder: a subfolder in the cache base to retrieve
    :param quiet: if True, skip the informational message about the cache path
    '''

    # Clean up the path and create
    cache_base = clean_path(SINGULARITY_CACHE)

    # Does the user want to get a subfolder in cache base?
    if subfolder is not None:
        cache_base = "%s/%s" % (cache_base, subfolder)

    # Create the cache folder(s), if they don't exist
    create_folders(cache_base)

    if not quiet:
        bot.info("Cache folder set to %s" % cache_base)
    return cache_base
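Both versions of get_cache above lean on helpers from the surrounding Singularity python package: clean_path, create_folders, the SINGULARITY_CACHE constant, and the bot logger. Below is a minimal sketch of stand-ins for those names so the snippet can be exercised on its own; the behavior shown is an assumption, not the package's actual implementation.

import logging
import os

# Assumed default cache location (stand-in for the package's setting)
SINGULARITY_CACHE = os.environ.get("SINGULARITY_CACHE",
                                   os.path.join(os.path.expanduser("~"), ".singularity"))

# Stand-in for the package's message bot
bot = logging.getLogger("singularity")


def clean_path(path):
    # Expand ~ and environment variables and normalize the path
    return os.path.abspath(os.path.expandvars(os.path.expanduser(path)))


def create_folders(path):
    # Create the folder tree if it does not already exist
    if not os.path.exists(path):
        os.makedirs(path)


# Example call, mirroring how IMPORT uses it further down:
# cache_base = get_cache(subfolder="docker")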
Example No. 3
def main():
    '''parse configuration options and produce
       configuration output file
    '''
    bot.info("\n*** STARTING PYTHON CONFIGURATION HELPER ****")
    parser = get_parser()

    try:
        (args, options) = parser.parse_args()
    except Exception:
        bot.error("Input args to %s improperly set, exiting."
                  % os.path.abspath(__file__))
        parser.print_help()
        sys.exit(1)

    # Check for required args
    for arg in [args.defaults, args.infile, args.outfile]:
        check_required(parser, arg)

    # Run the configuration
    configure(args)
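main() assumes that get_parser() returns an optparse-style parser (its parse_args() hands back a two-tuple) with defaults, infile, and outfile options, and that check_required() exits when a required value is missing. A rough sketch of what those two helpers could look like follows; the option flags and help strings are guesses, and only the attribute names are taken from the code above.

import sys
from optparse import OptionParser


def get_parser():
    # Hypothetical parser; only the dest names are dictated by main()
    parser = OptionParser(description="Singularity python configuration helper")
    parser.add_option("--defaults", dest="defaults",
                      help="path to the defaults header with #define statements")
    parser.add_option("--infile", dest="infile",
                      help="template input file, e.g. singularity.conf.in")
    parser.add_option("--outfile", dest="outfile",
                      help="path to write the rendered configuration file")
    return parser


def check_required(parser, arg):
    # Bail out with usage information if a required option was not provided
    if arg in [None, ""]:
        parser.print_help()
        sys.exit(1)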
Example No. 4
def configure(args):

    # Get the full path to each file, checking along the way that it exists
    defaultfile = get_fullpath(args.defaults)  # ../src/lib/config_defaults.h
    infile = get_fullpath(args.infile)         # singularity.conf.in

    # Find define statements
    define_re = re.compile("#define ([A-Z_]+) (.*)")

    # Read in input and default files
    defaultfile = read_file(defaultfile)
    data = "".join(read_file(infile))

    # Lookup for values we want replaced
    lookup = {'0': 'no',
              '1': 'yes'}

    defaults = {}
    # Read in defaults to dictionary
    for line in defaultfile:
        match = define_re.match(line)
        if match:
            key, value = match.groups()

            # Maintain the original default set by user
            defaults[key] = value

            # Use parsed value for final config
            new_value = value.replace('"', '')
            if new_value in lookup:
                new_value = lookup[new_value]
            data = data.replace("@" + key + "@", new_value)

    # Write to output file
    outfile = "%s.tmp" % args.outfile
    write_file(outfile, data)
    os.rename(outfile, args.outfile)

    bot.info("*** FINISHED PYTHON CONFIGURATION HELPER ****\n")
Example No. 5
def IMPORT(image, auth=None, layerfile=None):
    '''IMPORT is the main script that will obtain docker layers,
    runscript information (either entrypoint or cmd), and environment
    and return a list of tarballs to extract into the image
    :param auth: if needed, an authentication header (default None)
    :param layerfile: the file where the list of layers to extract is written
    '''
    bot.debug("Starting Docker IMPORT, includes env, runscript, and metadata.")
    bot.verbose("Docker image: %s" % image)

    # Does the user want to override default of using ENTRYPOINT?
    if INCLUDE_CMD:
        bot.verbose2("Specified Docker CMD as %runscript.")
    else:
        bot.verbose2("Specified Docker ENTRYPOINT as %runscript.")

    # Input Parsing ----------------------------
    # Parse image name, repo name, and namespace
    client = DockerApiConnection(image=image, auth=auth)

    docker_image_uri = "Docker image path: %s" % client.assemble_uri("/")
    bot.info(docker_image_uri)

    # IMAGE METADATA -------------------------------------------
    # Use Docker Registry API (version 2.0) to get image ids and the manifest

    images = client.get_images()

    #  DOWNLOAD LAYERS -------------------------------------------
    # Each is a .tar.gz file, obtained from registry with curl

    # Get the cache (or temporary one) for docker
    cache_base = get_cache(subfolder="docker")
    download_client = MultiProcess()

    # Generate a queue of tasks to run with MultiProcess
    layers = []
    tasks = []
    for image_id in images:
        targz = "%s/%s.tar.gz" % (cache_base, image_id)
        if not os.path.exists(targz):
            tasks.append((client, image_id, cache_base))
        layers.append(targz)

    # Does the user want to change permissions of tar?
    func2 = None
    if PLUGIN_FIXPERMS:
        func2 = change_permissions

    if len(tasks) > 0:
        download_layers = download_client.run(func=download_layer,
                                              func2=func2,
                                              tasks=tasks)

    # Get Docker runscript
    runscript = extract_runscript(manifest=client.manifestv1,
                                  includecmd=INCLUDE_CMD)

    # Add the environment export
    tar_file = extract_metadata_tar(client.manifestv1,
                                    client.assemble_uri(),
                                    runscript=runscript)

    bot.verbose2('Tar file with Docker env and labels: %s' % tar_file)

    # Write all layers to the layerfile
    if layerfile is not None:
        bot.verbose3("Writing Docker layers files to %s" % layerfile)
        write_file(layerfile, "\n".join(layers), mode="w")
        if tar_file is not None:
            write_file(layerfile, "\n%s" % tar_file, mode="a")

    # Return additions dictionary
    additions = {"layers": layers,
                 "image": image,
                 "manifest": client.manifest,
                 "manifestv1": client.manifestv1,
                 "cache_base": cache_base,
                 "metadata": tar_file}

    bot.debug("*** FINISHING DOCKER IMPORT PYTHON PORTION ****\n")

    return additions
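A hedged example of calling IMPORT, assuming the surrounding bootstrap code has already defined INCLUDE_CMD and PLUGIN_FIXPERMS and that the Docker registry is reachable; the image name and layerfile path are placeholders:

# Hypothetical invocation; in practice this module is driven by the
# Singularity bootstrap flow, which reads the layerfile afterwards.
additions = IMPORT(image="ubuntu:16.04", layerfile="/tmp/layers.txt")

# additions["layers"] lists one cached .tar.gz path per Docker layer, and
# additions["metadata"] points to the tar file carrying the Docker
# environment, labels, and runscript.
for layer in additions["layers"]:
    print(layer)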
Example No. 6
def INSPECT(inspect_labels=None, inspect_def=None, inspect_runscript=None,
            inspect_test=None, inspect_env=None, pretty_print=True):
    '''INSPECT will print a "manifest" for an image, with one or more sections
    selected by the user. The default prints a human-readable format (text and
    json); if pretty_print is False, the entire result is returned as json via
    the JSON API standard. The base is checked for the metadata folder, and if
    it does not exist, the links are searched for and parsed (to support older
    versions).

    :param inspect_runscript: if not None, will include runscript, if it exists
    :param inspect_labels: if not None, will include labels, if they exist.
    :param inspect_def: if not None, will include definition file, if it exists
    :param inspect_test: if not None, will include test, if it exists.
    :param inspect_env: if not None, will include environment, if it exists.
    :param pretty_print: if False, return everything as a JSON API spec document
    '''

    data = dict()
    errors = dict()

    # Labels
    if inspect_labels:
        bot.verbose2("Inspection of labels selected.")
        if os.path.exists(LABELFILE):
            data["labels"] = read_json(LABELFILE)
        else:
            data["labels"] = None
            errors["labels"] = generate_error(404, detail="This container does not have labels",
                                              title="Labels Undefined")

    # Definition File
    if inspect_def:
        bot.verbose2("Inspection of deffile selected.")
        if os.path.exists(DEFFILE):
            data["deffile"] = read_file(DEFFILE,readlines=False)
        else:
            data["deffile"] = None
            errors["deffile"] = generate_error(404,title="Definition File Undefined",
                                               detail="This container does not include the bootstrap definition")

    # Runscript
    if inspect_runscript:
        bot.verbose2("Inspection of runscript selected.")
        if os.path.exists(RUNSCRIPT):
            data["runscript"] = read_file(RUNSCRIPT,readlines=False)
        else:
            data["runscript"] = None
            errors["runscript"] = generate_error(404,title="Runscript Undefined",
                                                 detail="This container does not have any runscript defined")

    # Test
    if inspect_test:
        bot.verbose2("Inspection of test selected.")
        if os.path.exists(TESTFILE):
            data["test"] = read_file(TESTFILE,readlines=False)
        else:
            data["test"] = None
            errors["test"] = generate_error(404,title="Tests Undefined",
                                            detail="This container does not have any tests defined")


    # Environment
    if inspect_env:
        bot.verbose2("Inspection of environment selected.")
        if os.path.exists(ENVIRONMENT):
            data["environment"] = read_file(ENVIRONMENT,readlines=False)
        else:
            data["environment"] = None
            errors["environment"] = generate_error(404,title="Tests Undefined",
                                                detail="This container does not have any custom environment defined")

    if pretty_print:
        bot.verbose2("Structured printing specified.")
        for dtype, content in data.items():
            if content is not None:
                if isinstance(content, dict):
                    print_json(content, print_console=True)
                else:
                    bot.info(content)
            else:
                print_json(errors[dtype], print_console=True)

    else:
        bot.verbose2("Unstructed printing specified")
        # Only define a type if there is data to return, else return errors
        result = dict()
        if len(data) > 0:
            result["data"] = {"attributes": data,
                              "type": "container" }
        else:
            result["errors"] = []
            for dtype, content in errors.items():
                result["errors"].append(content)
        print_json(result, print_console=True)
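For context, a hedged sketch of how INSPECT might be invoked and what the pretty_print=False output looks like, based only on how result is assembled above; the flag values are placeholders, and the shape of each error object comes from generate_error and is not spelled out here.

# Hypothetical call selecting two sections and asking for JSON API output
INSPECT(inspect_labels=True,
        inspect_runscript=True,
        pretty_print=False)

# If at least one requested section exists, print_json receives:
#   {"data": {"type": "container",
#             "attributes": {"labels": {...}, "runscript": "..."}}}
#
# If none of the requested sections exist, only the errors are collected:
#   {"errors": [<generate_error() result for each missing section>]}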