class AppendBlob:
    '''Append blob used to accumulate data: each call to appendText appends
    the given content to the end of the blob.'''

    def __init__(self):
        # Prefer the connection string from the environment; fall back to the
        # first storage account configured in secretconf.
        connstr = os.getenv("AZURE_STORAGE_CONNECTION_STRING") \
            or secretconf["azure"]["storage"][0]["connection_string"]
        self.abservice = AppendBlobService(connection_string=connstr)

    def create(self, container, blob, metadata=None):
        '''Create an empty append blob.

        Args:
            container: name of the container
            blob: name of the blob; use '/' in the name to create a folder
            metadata: blob metadata (dict; values must be str)

        Returns:
            URL of the blob, including a SAS token
        '''
        # if_none_match="*" makes the call fail if the blob already exists.
        self.abservice.create_blob(container, blob, metadata=metadata,
                                   content_settings=textcontent,
                                   if_none_match="*")
        now = datetime.now()
        start = now + timedelta(days=-1)    # back-date to tolerate clock skew
        expiry = now + timedelta(days=365)  # token valid for one year
        sastoken = self.abservice.generate_blob_shared_access_signature(
            container, blob, permission=blobpermission,
            expiry=expiry, start=start)
        return self.abservice.make_blob_url(container, blob,
                                            sas_token=sastoken)

    def appendText(self, container, blob, text, metadata=None):
        '''Append text to the blob. metadata is accepted for interface
        symmetry with create() but is not used here.'''
        self.abservice.append_blob_from_text(container, blob, text)
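# A minimal usage sketch for AppendBlob, assuming AZURE_STORAGE_CONNECTION_STRING
# (or secretconf) and the module-level textcontent/blobpermission settings are
# configured as elsewhere in this file. The container and blob names below are
# hypothetical examples:
#
#     ab = AppendBlob()
#     url = ab.create("logs", "builds/run1.log")   # fails if the blob exists
#     ab.appendText("logs", "builds/run1.log", "build started\n")
#     print(url)  # read URL carrying the SAS token, valid for one year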
def main():
    logging.basicConfig(level=logging.DEBUG)
    with open(TASKDATA) as taskdata_file:
        taskdata = json.loads(taskdata_file.read())

    github = GithubAdapter(taskdata)
    gh_commit = github.get_commit()
    config = github.get_config()

    blob_service = AppendBlobService(
        account_name=taskdata["storage_account_name"],
        account_key=taskdata["storage_account_key"])
    queue_service = QueueService(
        connection_string=taskdata["queue_connection_string"])
    loop = asyncio.get_event_loop()
    ctx = Context(loop=loop, config=config, blob_service=blob_service,
                  queue_service=queue_service, taskdata=taskdata)

    # Publish the build log as a publicly readable append blob and mark the
    # commit as "pending" on GitHub, linking to that log.
    blob_service.create_container("logs", fail_on_exist=False,
                                  public_access=PublicAccess.Blob)
    blob_service.create_blob(
        "logs", ctx.pid,
        content_settings=ContentSettings(
            content_type="text/plain; charset=utf-8"))
    gh_commit.create_status(
        "pending",
        target_url=blob_service.make_blob_url("logs", ctx.pid),
        description="Build started",
        context=config.get("configuration_name",
                           "configuration%s" % taskdata["config_num"]))

    os.makedirs(REPOSDIR, exist_ok=True)

    # Update the git cache on the SMB share only if no other process is doing
    # so already. O_CREAT | O_EXCL makes the check-and-create atomic, so two
    # builders cannot both acquire the lock.
    try:
        lockfd = os.open(LOCKFILENAME, os.O_CREAT | os.O_EXCL)
    except FileExistsError:
        pass  # another process holds the lock; skip updating the cache
    else:
        os.close(lockfd)
        update_git_cache(ctx)
        os.unlink(LOCKFILENAME)

    if os.path.exists(SRCDIR):
        shutil.rmtree(SRCDIR)
    os.makedirs(os.path.join(SRCDIR, "build/conf"))
    with open(os.path.join(SRCDIR, "build/conf/auto.conf"), "a") as localconf:
        localconf.write("\n%s\n" % config.get("localconf", ""))
        localconf.write(AUTOCONFIG)

    # Clone the dependencies plus the repository under test, using the local
    # cache as a reference to speed up cloning, then pin any requested refs.
    repos = get_repos(config)
    repos.append((repodirname(taskdata["gh"]["repository"]["clone_url"]),
                  taskdata["gh"]["repository"]["clone_url"], None, None))
    for reponame, repourl, reporef, _ in repos:
        refrepopath = os.path.join(REPOSDIR, reponame)
        run(ctx, ["git", "clone", "--reference", refrepopath, repourl,
                  reponame], cwd=SRCDIR)
        if reporef:
            LOG.info("Checkout %s to %s" % (reponame, reporef))
            run(ctx, ["git", "checkout", reporef],
                cwd=os.path.join(SRCDIR, reponame))

    # Check out the commit under test. For pull requests the commit lives in
    # the contributor's repository, so fetch it first.
    mainrepodir = os.path.join(
        SRCDIR, repodirname(taskdata["gh"]["repository"]["clone_url"]))
    if taskdata["gh"]["type"] == "pull_request":
        LOG.info("Add remote repo %s" % taskdata["gh"]["clone_url"])
        run(ctx, ["git", "remote", "add", "contributor",
                  taskdata["gh"]["clone_url"]], cwd=mainrepodir)
        LOG.info("Fetch contributor's repo")
        run(ctx, ["git", "fetch", "contributor"], cwd=mainrepodir)
    LOG.info("Checkout %s to %s" % (
        repodirname(taskdata["gh"]["repository"]["clone_url"]),
        taskdata["gh"]["sha"]))
    run(ctx, ["git", "checkout", taskdata["gh"]["sha"]], cwd=mainrepodir)

    # Restore the shared-state cache from a previous build, if there is one.
    if os.path.exists(get_sstate_archive_path(ctx)):
        with tarfile.open(name=get_sstate_archive_path(ctx),
                          mode="r:gz") as sstate_tar:
            sstate_tar.extractall(path=SRCDIR)

    # Register the layers of every dependency, then the repository under test.
    addlayers = []
    for dep in config["dependencies"]:
        repodir = repodirname(dep["url"])
        layers = dep.get("layers", None)
        if layers:
            addlayers.extend(["bitbake-layers add-layer ../%s/%s"
                              % (repodir, layer) for layer in layers])
        else:
            addlayers.append("bitbake-layers add-layer ../%s" % repodir)
    addlayers.append("bitbake-layers add-layer ../%s"
                     % repodirname(taskdata["gh"]["repository"]["clone_url"]))

    run_script(ctx,
               BUILDSCRIPT % ("\n".join(addlayers), config["bitbake_target"]),
               cwd=SRCDIR)
    save_sstate(ctx)

    # The GitHub auth token has most probably expired by now => renew it
    # before reporting success.
    github = GithubAdapter(taskdata)
    gh_commit = github.get_commit()
    gh_commit.create_status(
        "success",
        target_url=blob_service.make_blob_url("logs", ctx.pid),
        description="Target has been built successfully",
        context=config.get("configuration_name",
                           "configuration%s" % taskdata["config_num"]))
    loop.close()

    # TODO: copy cloud-init log files to share
    taskdata["build_result"] = "success"
    queue_service.put_message(
        "buildresults",
        base64.b64encode(json.dumps(taskdata).encode("utf-8")).decode("utf-8"))
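# Entry-point guard (an assumption: not part of the original section). If this
# module is meant to be run directly, the conventional hook would be:

if __name__ == "__main__":
    main()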