Example 1
def create_build_package(package_files, working_dir=None):
    """given a list of files, copy them to a temporary folder,
       compress into a .tar.gz, and rename based on the file hash.
       Return the full path to the .tar.gz in the temporary folder.

       Parameters
       ==========
       package_files: a list of files to include in the tar.gz
       working_dir: if set, paths for the recipe and files are made
                    relative to this directory.

    """
    # Ensure package files all exist
    for package_file in package_files:
        if not os.path.exists(package_file):
            bot.exit("Cannot find %s." % package_file)

    bot.log("Generating build package for %s files..." % len(package_files))
    build_dir = get_tmpdir(prefix="sregistry-build")
    build_tar = "%s/build.tar.gz" % build_dir
    tar = tarfile.open(build_tar, "w:gz")

    # Create the tar.gz, making sure relative to working_dir
    for package_file in package_files:

        # Get a relative path
        relative_path = get_relative_path(package_file, working_dir)
        tar.add(package_file, arcname=relative_path)
    tar.close()

    # Get hash (sha256), and rename file
    sha256 = get_file_hash(build_tar)
    hash_tar = "%s/%s.tar.gz" % (build_dir, sha256)
    shutil.move(build_tar, hash_tar)
    return hash_tar
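
A self-contained sketch of the same tar-then-rename-by-hash pattern using only the standard library. The helpers get_tmpdir, get_file_hash, and get_relative_path are not shown in these examples, so this sketch assumes they wrap tempfile.mkdtemp, a sha256 digest of the archive, and os.path.relpath.

import hashlib
import os
import shutil
import tarfile
import tempfile

def sketch_build_package(package_files, working_dir=None):
    # create the archive in a fresh temporary directory
    build_dir = tempfile.mkdtemp(prefix="sregistry-build")
    build_tar = os.path.join(build_dir, "build.tar.gz")

    with tarfile.open(build_tar, "w:gz") as tar:
        for package_file in package_files:
            # store entries relative to working_dir (or the current directory)
            arcname = os.path.relpath(package_file, working_dir or os.getcwd())
            tar.add(package_file, arcname=arcname)

    # hash the finished archive and rename it to <sha256>.tar.gz
    sha256 = hashlib.sha256()
    with open(build_tar, "rb") as handle:
        for chunk in iter(lambda: handle.read(4096), b""):
            sha256.update(chunk)

    hash_tar = os.path.join(build_dir, "%s.tar.gz" % sha256.hexdigest())
    shutil.move(build_tar, hash_tar)
    return hash_tar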
Example 2
def create_build_package(package_files):
    '''given a list of files, add them to a .tar.gz archive in a temporary
       folder and rename the archive based on its sha256 hash.
       Return the full path to the .tar.gz in the temporary folder.

       Parameters
       ==========
       package_files: a list of files to include in the tar.gz

    '''
    # Ensure package files all exist
    for package_file in package_files:
        if not os.path.exists(package_file):
            bot.exit('Cannot find %s.' % package_file)

    bot.log('Generating build package for %s files...' % len(package_files))
    build_dir = get_tmpdir(prefix="sregistry-build")
    build_tar = '%s/build.tar.gz' % build_dir
    tar = tarfile.open(build_tar, "w:gz")

    # Create the tar.gz
    for package_file in package_files:
        tar.add(package_file)
    tar.close()

    # Get hash (sha256), and rename file
    sha256 = get_file_hash(build_tar)
    hash_tar = "%s/%s.tar.gz" % (build_dir, sha256)
    shutil.move(build_tar, hash_tar)
    return hash_tar
Example 3
def build_status(self, build_id):
    """get a build status based on a build id. We return the entire response
       object for the client to parse.
    """
    project = self._get_project()
    response = (self._build_service.projects().builds().get(
        id=build_id, projectId=project).execute())

    build_id = response["id"]
    status = response["status"]
    bot.log("build %s: %s" % (build_id, status))

    return response
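
The snippets here call self._build_service.projects().builds() without showing how the service object is created. A plausible construction (an assumption, not confirmed by these examples) uses the google-api-python-client discovery interface for the Cloud Build v1 API.

from googleapiclient.discovery import build as discovery_build

def sketch_build_status(project, build_id):
    # uses application default credentials; pass credentials= explicitly if needed
    service = discovery_build("cloudbuild", "v1")
    response = (
        service.projects().builds().get(projectId=project, id=build_id).execute()
    )
    print("build %s: %s" % (response["id"], response["status"]))
    return response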
Example 4
def run_build(self, config):
    """run a build, meaning creating a build. Retry if there is failure
    """

    response = self._submit_build(config)
    status = response["metadata"]["build"]["status"]
    build_id = response["metadata"]["build"]["id"]

    start = time.time()
    while status not in ["COMPLETE", "FAILURE", "SUCCESS", "TIMEOUT"]:
        time.sleep(15)
        response = self._build_status(build_id)
        status = response["status"]

    end = time.time()
    bot.log("Total build time: %s seconds" % (round(end - start, 2)))
    return self._finish_build(build_id, response=response, config=config)
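
The loop above polls every 15 seconds until the reported status is terminal. A variant with an overall deadline (not part of the source) avoids waiting forever if the API never reports a terminal state; status_fn here stands in for self._build_status.

import time

def sketch_wait_for_build(build_id, status_fn, poll_seconds=15, max_seconds=10800):
    terminal = {"COMPLETE", "FAILURE", "SUCCESS", "TIMEOUT"}
    response = status_fn(build_id)
    deadline = time.time() + max_seconds
    while response["status"] not in terminal and time.time() < deadline:
        time.sleep(poll_seconds)
        response = status_fn(build_id)
    return response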
Example 5
def run_build(self, config):
    '''run a build: submit the build request, then poll until it reaches a
       terminal status.
    '''

    response = self._submit_build(config)
    status = response['metadata']['build']['status']
    build_id = response['metadata']['build']['id']

    start = time.time()
    while status not in ['COMPLETE', 'FAILURE', 'SUCCESS', 'TIMEOUT']:
        time.sleep(15)
        response = self._build_status(build_id)
        status = response['status']

    end = time.time()
    bot.log('Total build time: %s seconds' % (round(end - start, 2)))
    return self._finish_build(build_id, response=response, config=config)
Example 6
def submit_build(self, config):
    """run a build, meaning creating a build. Retry if there is failure
    """

    project = self._get_project()

    #          prefix,    message, color
    bot.custom("PROJECT", project, "CYAN")
    for i, step in enumerate(config["steps"]):
        bot.custom("BUILD %s" % i, step["name"], "CYAN")

    response = (self._build_service.projects().builds().create(
        body=config, projectId=project).execute())

    build_id = response["metadata"]["build"]["id"]
    status = response["metadata"]["build"]["status"]
    bot.log("build %s: %s" % (build_id, status))

    return response
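
submit_build expects config to be a Cloud Build build resource. A minimal sketch of such a dictionary follows; the builder image and arguments are illustrative only, since _load_build_config is not shown in these examples, and the storageSource fields are normally filled in by the caller.

config = {
    "steps": [
        {
            # assumed builder image; sregistry derives the real image and args
            "name": "singularityware/singularity:v3.2.1-slim",
            "args": ["build", "container.sif", "Singularity"],
        }
    ],
    "source": {
        "storageSource": {
            "bucket": "my-cloudbuild-bucket",
            "object": "source/build.tar.gz",
        }
    },
    "timeout": "10800s",
}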
Example 7
def submit_build(self, config):
    '''submit a build request to Google Cloud Build and return the response.
    '''

    project = self._get_project()

    #          prefix,    message, color
    bot.custom('PROJECT', project, "CYAN")
    for i, step in enumerate(config['steps']):
        bot.custom('BUILD %s' % i, step['name'], "CYAN")

    response = self._build_service.projects().builds().create(
        body=config, projectId=project).execute()

    build_id = response['metadata']['build']['id']
    status = response['metadata']['build']['status']
    bot.log("build %s: %s" % (build_id, status))

    return response
Example 8
def run_build(self, config, bucket, names):
    '''run a build: submit the request, poll until completion, and on success
       update the blob metadata and visibility.
    '''

    project = self._get_project()

    #          prefix,    message, color
    bot.custom('PROJECT', project, "CYAN")
    bot.custom('BUILD  ', config['steps'][0]['name'], "CYAN")

    response = self._build_service.projects().builds().create(
        body=config, projectId=project).execute()

    build_id = response['metadata']['build']['id']
    status = response['metadata']['build']['status']
    bot.log("build %s: %s" % (build_id, status))

    start = time.time()
    while status not in ['COMPLETE', 'FAILURE', 'SUCCESS']:
        time.sleep(15)
        response = self._build_service.projects().builds().get(
            id=build_id, projectId=project).execute()

        build_id = response['id']
        status = response['status']
        bot.log("build %s: %s" % (build_id, status))

    end = time.time()
    bot.log('Total build time: %s seconds' % (round(end - start, 2)))

    # If successful, update blob metadata and visibility
    if status == 'SUCCESS':

        # Does the user want to keep the container private?
        env = 'SREGISTRY_GOOGLE_STORAGE_PRIVATE'
        blob = bucket.blob(response['artifacts']['objects']['paths'][0])

        # Make Public, if desired
        if self._get_and_update_setting(env) is None:
            blob.make_public()
            response['public_url'] = blob.public_url

        # Add the metadata directly to the object
        update_blob_metadata(blob, response, config, bucket, names)
        response['media_link'] = blob.media_link
        response['size'] = blob.size
        response['file_hash'] = blob.md5_hash

    return response
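
update_blob_metadata is not shown in these examples. A plausible sketch (an assumption) using the google-cloud-storage API would attach selected build details as custom metadata and persist them with blob.patch().

def sketch_update_blob_metadata(blob, response, config, bucket, names):
    # metadata keys are illustrative; the real helper may store more fields
    blob.metadata = {
        "buildId": response["id"],
        "status": response["status"],
        "collection": names["collection"],
        "uri": names["uri"],
    }
    blob.patch()  # write the updated metadata back to the object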
Example 9
def build(
    self,
    name,
    recipe="Singularity",
    context=None,
    preview=False,
    headless=False,
    working_dir=None,
    webhook=None,
    timeout=10800,
    extra_data=None,
):
    """trigger a build on Google Cloud (builder then storage) given a name
       recipe, and Github URI where the recipe can be found. This means
       creating and uploading a build package to use for the build.
    
       Parameters
       ==========
       recipe: the local recipe to build.
       name: should be the complete uri that the user has requested to push.
       context: the dependency files needed for the build. If not defined, only
                the recipe is uploaded.
       preview: if True, preview but don't run the build
       working_dir: The working directory for the build. Defaults to pwd.
       timeout: the number of seconds before the build times out. The default
                is 3 hours and the maximum is 24 hours. If unset (None),
                Google Cloud Build applies its own 10 minute default.
       webhook: if not None, add a curl POST to finish the build. 
       headless: If true, don't track the build, but submit and provide
                 an endpoint to send a response to.
       extra_data: a dictionary of extra_data to send back to the webhook (they
                   are passed in the environment)

       Environment
       ===========
       SREGISTRY_GOOGLE_BUILD_SINGULARITY_VERSION: the version of Singularity
           to use, defaults to v3.2.1-slim
       SREGISTRY_GOOGLE_BUILD_CLEANUP: after build, delete intermediate 
           dependencies in cloudbuild bucket.

    """
    bot.debug("BUILD %s" % recipe)

    build_package = [recipe]

    if context:

        # If the user gives a ., include recursive $PWD
        if "." in context:
            context = glob(os.getcwd() + "/**/*", recursive=True)
        build_package = build_package + context

    # We need to get and save relative paths for the config.
    package = create_build_package(build_package, working_dir)

    # Does the package already exist? If the user cached, it might
    destination = "source/%s" % os.path.basename(package)
    blob = self._build_bucket.blob(destination)

    # if it doesn't exist, upload it
    if not blob.exists() and preview is False:
        bot.log("Uploading build package!")
        self._upload(source=package,
                     bucket=self._build_bucket,
                     destination=destination)
    else:
        bot.log("Build package found in %s." % self._build_bucket.name)

    # This returns a data structure with collection, container, based on uri
    names = parse_image_name(remove_uri(name))

    prefix = "%s/" % names["collection"]

    # The name should include the complete uri so it's searchable
    name = os.path.basename(names["uri"])

    # Update the recipe name to use a relative path
    recipe = get_relative_path(recipe, working_dir)

    # Load the build configuration (defaults to local)
    config = self._load_build_config(name=name, prefix=prefix, recipe=recipe)
    # Add a webhook, if defined
    if webhook and headless:
        config = add_webhook(config=config,
                             webhook=webhook,
                             extra_data=extra_data)

    # The source should point to the bucket with the .tar.gz, latest generation
    config["source"]["storageSource"]["bucket"] = self._build_bucket.name
    config["source"]["storageSource"]["object"] = destination

    # If the user wants a timeout
    if timeout is not None:
        config["timeout"] = "%ss" % timeout

    # If not a preview, run the build and return the response
    if not preview:
        if not headless:
            config = self._run_build(config)
        else:
            config = self._submit_build(config)

        # If the user wants to cache cloudbuild files, this will be set
        env = "SREGISTRY_GOOGLE_BUILD_CACHE"
        if not self._get_and_update_setting(env, self.envars.get(env)):
            if headless is False:
                blob.delete()

    # Clean up either way, return config or response
    shutil.rmtree(os.path.dirname(package))
    return config
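
add_webhook is also not shown above. One hypothetical implementation would append a final step that POSTs the build result to the webhook using the gcr.io/cloud-builders/curl image, exposing extra_data to that step through its environment; the real sregistry helper may work differently.

def sketch_add_webhook(config, webhook, extra_data=None):
    step = {
        "name": "gcr.io/cloud-builders/curl",
        "args": ["-X", "POST", "-d", "status=done", webhook],
    }
    if extra_data:
        # hypothetical: pass extra_data as KEY=VALUE environment variables
        step["env"] = ["%s=%s" % (key, value) for key, value in extra_data.items()]
    config["steps"].append(step)
    return config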
Example 10
def build(self, name, recipe="Singularity", context=None, preview=False):
    '''trigger a build on Google Cloud (builder then storage) given a name,
       recipe, and GitHub URI where the recipe can be found.
    
       Parameters
       ==========
       recipe: the local recipe to build.
       name: should be the complete uri that the user has requested to push.
       context: the dependency files needed for the build. If not defined, only
                the recipe is uploaded.
       preview: if True, preview but don't run the build

       Environment
       ===========
       SREGISTRY_GOOGLE_BUILD_SINGULARITY_VERSION: the version of Singularity
           to use, defaults to 3.0.2-slim
       SREGISTRY_GOOGLE_BUILD_CLEANUP: after build, delete intermediate 
           dependencies in cloudbuild bucket.

    '''
    bot.debug("BUILD %s" % recipe)

    # This returns a data structure with collection, container, based on uri
    names = parse_image_name(remove_uri(name))

    # Load the build configuration
    config = self._load_build_config(name=names['uri'], recipe=recipe)

    build_package = [recipe]
    if context not in [None, '', []]:

        # If the user gives a ., include recursive $PWD
        if '.' in context:
            context = glob(os.getcwd() + '/**/*', recursive=True)
        build_package = build_package + context

    package = create_build_package(build_package)

    # Does the package already exist? If the user cached, it might
    destination = 'source/%s' % os.path.basename(package)
    blob = self._build_bucket.blob(destination)

    # if it doesn't exist, upload it
    if not blob.exists() and preview is False:
        bot.log('Uploading build package!')
        manifest = self._upload(source=package,
                                bucket=self._build_bucket,
                                destination=destination)
    else:
        bot.log('Build package found in %s.' % self._build_bucket.name)

    # The source should point to the bucket with the .tar.gz, latest generation
    config["source"]["storageSource"]['bucket'] = self._build_bucket.name
    config["source"]["storageSource"]['object'] = destination

    # If not a preview, run the build and return the response
    if preview is False:
        config = self._run_build(config, self._bucket, names)

    # If the user wants to cache cloudbuild files, this will be set
    if not self._get_and_update_setting('SREGISTRY_GOOGLE_BUILD_CACHE'):
        blob.delete()

    # Clean up either way, return config or response
    shutil.rmtree(os.path.dirname(package))
    return config
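
A hedged usage sketch for the build() method shown in Examples 9 and 10. It assumes sregistry exposes a get_client() factory and selects the google-build backend through the SREGISTRY_CLIENT environment variable; check the sregistry documentation for the exact entry point, since only the build() method itself appears above.

import os

os.environ["SREGISTRY_CLIENT"] = "google-build"
from sregistry.main import get_client

client = get_client()
response = client.build(
    name="myuser/mycontainer:latest",  # hypothetical image URI
    recipe="Singularity",
    context=["."],  # "." pulls in the working directory recursively
    preview=False,
)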