def test_write_read_files(self):
        '''test_write_read_files tests the functions
        write_file, read_file, and write_json
        '''
        print("Testing utils.write_file...")
        from sutils import write_file
        import json
        tmpfile = tempfile.mkstemp()[1]
        os.remove(tmpfile)
        write_file(tmpfile, "hello!")
        self.assertTrue(os.path.exists(tmpfile))

        print("Testing utils.read_file...")
        from sutils import read_file
        content = read_file(tmpfile)[0]
        self.assertEqual("hello!", content)

        from sutils import write_json
        print("Testing utils.write_json...")
        print("Case 1: Providing bad json")
        bad_json = {"Wakkawakkawakka'}": [{True}, "2", 3]}
        tmpfile = tempfile.mkstemp()[1]
        os.remove(tmpfile)
        with self.assertRaises(TypeError):
            write_json(bad_json, tmpfile)

        print("Case 2: Providing good json")
        good_json = {"Wakkawakkawakka": [True, "2", 3]}
        tmpfile = tempfile.mkstemp()[1]
        os.remove(tmpfile)
        write_json(good_json, tmpfile)
        with open(tmpfile, 'r') as json_file:
            content = json.load(json_file)
        self.assertTrue(isinstance(content, dict))
        self.assertTrue("Wakkawakkawakka" in content)
Example #3
def SIZE(image, auth=None, contentfile=None):
    '''SIZE is intended to be run before an import: it obtains, from the
    image manifest, the size of the layers that will be downloaded for
    image, and writes that size to contentfile if one is provided.
    '''
    bot.debug("Starting Docker SIZE, will get size from manifest")
    bot.verbose("Docker image: %s" % image)
    client = DockerApiConnection(image=image, auth=auth)
    size = client.get_size()
    if contentfile is not None:
        write_file(contentfile, str(size), mode="w")
    return size
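A hedged usage sketch of this helper; the image name and contentfile path below are placeholders, and the call assumes the module-level imports used above (DockerApiConnection, write_file, bot) are in place:

size = SIZE("ubuntu:latest", contentfile="/tmp/docker-size.txt")
print("Download size reported by the manifest: %s" % size)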
Example #4
def PULL(image, download_folder=None, layerfile=None):
    '''PULL will retrieve a Singularity Hub image and download it
    to the local file system, either to download_folder or, if that
    is not given, to the location specified by SINGULARITY_PULLFOLDER.
    :param image: the singularity hub image name
    :param download_folder: the folder to pull the image to
    :param layerfile: if defined, write the pulled image path to this file
    '''
    client = SingularityApiConnection(image=image)
    manifest = client.get_manifest()

    if download_folder is None:
        cache_base = get_cache(subfolder="shub")
    else:
        cache_base = os.path.abspath(download_folder)

    bot.debug("Pull folder set to %s" % cache_base)

    # The image name is the md5 hash, download if it's not there
    image_name = get_image_name(manifest)

    # Did the user specify an absolute path?
    custom_folder = os.path.dirname(image_name)
    if custom_folder not in [None, ""]:
        cache_base = custom_folder
        image_name = os.path.basename(image_name)

    image_file = "%s/%s" % (cache_base, image_name)

    bot.debug('Pulling to %s' % image_file)
    if not os.path.exists(image_file):
        image_file = client.download_image(manifest=manifest,
                                           download_folder=cache_base,
                                           image_name=image_name)
    else:
        if not bot.is_quiet():  # not --quiet
            print("Image already exists at %s, skipping download" % image_file)

    if not bot.is_quiet():  # not --quiet
        print("Singularity Hub Image Download: %s" % image_file)

    manifest = {
        'image_file': image_file,
        'manifest': manifest,
        'cache_base': cache_base,
        'image': image
    }

    if layerfile is not None:
        bot.debug("Writing Singularity Hub image path to %s" % layerfile)
        write_file(layerfile, image_file, mode="w")

    return manifest
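A hedged usage sketch of PULL; the image name and paths are placeholders, and the call assumes the module-level imports used above (SingularityApiConnection, get_cache, get_image_name, write_file, bot) are available:

result = PULL("vsoch/hello-world",
              download_folder="/tmp/shub",
              layerfile="/tmp/shub-layerfile.txt")
print(result['image_file'])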
Example #5
def SIZE(image, contentfile=None):
    '''SIZE is intended to be run before an import: it reads the image
    manifest and, if contentfile is provided, writes the image size
    (taken from the manifest metrics) to it.
    '''
    bot.debug("Starting Singularity Hub SIZE, will get size from manifest")
    bot.debug("Singularity Hub image: %s" % image)
    client = SingularityApiConnection(image=image)
    manifest = client.get_manifest()
    size = json.loads(manifest['metrics'].replace("'", '"'))['size']
    if contentfile is not None:
        write_file(contentfile, str(size), mode="w")
    return size
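Note that manifest['metrics'] is parsed above by swapping single quotes for double quotes before json.loads, which breaks if any value contains an apostrophe. A slightly more forgiving sketch, assuming the field is a Python-style dict repr, would be:

import ast
metrics = ast.literal_eval(manifest['metrics'])
size = metrics['size']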
Example #7
def PULL(image, download_folder=None, layerfile=None):
    '''PULL will retrieve a Singularity Hub image and download it
    to the local file system, either to download_folder or, if that
    is not given, to the location specified by SINGULARITY_PULLFOLDER.
    :param image: the singularity hub image name
    :param download_folder: the folder to pull the image to
    :param layerfile: if defined, write the pulled image path to this file
    '''
    client = SingularityApiConnection(image=image)
    manifest = client.get_manifest()

    if download_folder is None:
        cache_base = get_cache(subfolder="shub")
    else:
        cache_base = os.path.abspath(download_folder)

    bot.debug("Pull folder set to %s" % cache_base)

    # The image name is the md5 hash, download if it's not there
    image_name = get_image_name(manifest)

    # Did the user specify an absolute path?
    custom_folder = os.path.dirname(image_name)
    if custom_folder not in [None, ""]:
        cache_base = custom_folder
        image_name = os.path.basename(image_name)

    image_file = "%s/%s" % (cache_base, image_name)

    bot.debug('Pulling to %s' % image_file)
    if not os.path.exists(image_file):
        image_file = client.download_image(manifest=manifest,
                                           download_folder=cache_base)
    else:
        if not bot.is_quiet():  # not --quiet
            print("Image already exists at %s, skipping download" % image_file)

    if not bot.is_quiet():  # not --quiet
        print("Singularity Hub Image Download: %s" % image_file)

    manifest = {'image_file': image_file,
                'manifest': manifest,
                'cache_base': cache_base,
                'image': image}

    if layerfile is not None:
        bot.debug("Writing Singularity Hub image path to %s" % layerfile)
        write_file(layerfile, image_file, mode="w")

    return manifest
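Example #8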
def configure(args):
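    '''configure reads #define defaults from args.defaults
    (config_defaults.h), substitutes them for the matching @KEY@
    placeholders in args.infile (singularity.conf.in), and writes
    the result to args.outfile.
    '''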

    # Get the full path to each file, and check that each exists
    defaultfile = get_fullpath(args.defaults)  # ../src/lib/config_defaults.h
    infile = get_fullpath(args.infile)         # singularity.conf.in

    # Find define statements
    define_re = re.compile("#define ([A-Z_]+) (.*)")

    # Read in input and default files
    defaultfile = read_file(defaultfile)
    data = "".join(read_file(infile))

    # Lookup for values we want replaced
    lookup = {'0': 'no',
              '1': 'yes'}

    defaults = {}
    # Read in defaults to dictionary
    for line in defaultfile:
        match = define_re.match(line)
        if match:
            key, value = match.groups()

            # Maintain the original default set by user
            defaults[key] = value

            # Use parsed value for final config
            new_value = value.replace('"', '')
            if new_value in lookup:
                new_value = lookup[new_value]
            data = data.replace("@" + key + "@", new_value)

    # Write to output file
    outfile = "%s.tmp" % args.outfile
    write_file(outfile, data)
    os.rename(outfile, args.outfile)

    bot.info("*** FINISHED PYTHON CONFIGURATION HELPER ****\n")
Example #9
def IMPORT(image, auth=None, layerfile=None):
    '''IMPORT is the main script that will obtain docker layers,
    runscript information (either entrypoint or cmd), and environment,
    and return a list of tarballs to extract into the image
    :param image: the docker image name
    :param auth: if needed, an authentication header (default None)
    :param layerfile: the file to write the layers to extract into
    '''
    bot.debug("Starting Docker IMPORT, includes env, runscript, and metadata.")
    bot.verbose("Docker image: %s" % image)

    # Does the user want to override default of using ENTRYPOINT?
    if INCLUDE_CMD:
        bot.verbose2("Specified Docker CMD as %runscript.")
    else:
        bot.verbose2("Specified Docker ENTRYPOINT as %runscript.")

    # Input Parsing ----------------------------
    # Parse image name, repo name, and namespace
    client = DockerApiConnection(image=image, auth=auth)

    docker_image_uri = "Docker image path: %s" % client.assemble_uri("/")
    bot.info(docker_image_uri)

    # IMAGE METADATA -------------------------------------------
    # Use Docker Registry API (version 2.0) to get images ids, manifest

    images = client.get_images()

    #  DOWNLOAD LAYERS -------------------------------------------
    # Each is a .tar.gz file, obtained from registry with curl

    # Get the cache (or temporary one) for docker
    cache_base = get_cache(subfolder="docker")
    download_client = MultiProcess()

    # Generate a queue of tasks to run with MultiProcess
    layers = []
    tasks = []
    for image_id in images:
        targz = "%s/%s.tar.gz" % (cache_base, image_id)
        if not os.path.exists(targz):
            tasks.append((client, image_id, cache_base))
        layers.append(targz)

    # Does the user want to change permissions of tar?
    func2 = None
    if PLUGIN_FIXPERMS:
        func2 = change_permissions

    if len(tasks) > 0:
        download_layers = download_client.run(func=download_layer,
                                              func2=func2,
                                              tasks=tasks)

    # Get Docker runscript
    runscript = extract_runscript(manifest=client.manifestv1,
                                  includecmd=INCLUDE_CMD)

    # Add the environment export
    tar_file = extract_metadata_tar(client.manifestv1,
                                    client.assemble_uri(),
                                    runscript=runscript)

    bot.verbose2('Tar file with Docker env and labels: %s' % tar_file)

    # Write all layers to the layerfile
    if layerfile is not None:
        bot.verbose3("Writing Docker layers files to %s" % layerfile)
        write_file(layerfile, "\n".join(layers), mode="w")
        if tar_file is not None:
            write_file(layerfile, "\n%s" % tar_file, mode="a")

    # Return additions dictionary
    additions = {"layers": layers,
                 "image": image,
                 "manifest": client.manifest,
                 "manifestv1": client.manifestv1,
                 "cache_base": cache_base,
                 "metadata": tar_file}

    bot.debug("*** FINISHING DOCKER IMPORT PYTHON PORTION ****\n")

    return additions
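A hedged example of calling IMPORT; the image name and layerfile path are placeholders, and the call assumes the module-level names used above (DockerApiConnection, MultiProcess, get_cache, INCLUDE_CMD, PLUGIN_FIXPERMS, bot) are importable:

additions = IMPORT("ubuntu:latest", layerfile="/tmp/docker-layers.txt")
for layer in additions['layers']:
    print(layer)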