def prepare_storage_account(storage_account_name, storage_access_key, endpoint_suffix, protocol="https"):
    blob_service = AppendBlobService(account_name=storage_account_name, account_key=storage_access_key, endpoint_suffix=endpoint_suffix, protocol=protocol)
    blob_service.create_container('bosh')
    blob_service.create_container(
        container_name='stemcell',
        public_access='blob'
    )

    # Prepare the table for storing metadata of the storage account and stemcells
    table_service = TableService(account_name=storage_account_name, account_key=storage_access_key, endpoint_suffix=endpoint_suffix, protocol=protocol)
    table_service.create_table('stemcells')
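
These snippets assume the legacy azure-storage SDK (before azure-storage-blob v12). A minimal sketch of the imports and a call they rely on; the credential values below are hypothetical placeholders, and depending on SDK version TableService may live in azure.cosmosdb.table instead:

from azure.storage.blob import AppendBlobService
from azure.storage.table import TableService  # or: from azure.cosmosdb.table import TableService

# Hypothetical placeholder values -- substitute real credentials.
prepare_storage_account(storage_account_name='mystorageaccount',
                        storage_access_key='<access-key>',
                        endpoint_suffix='core.windows.net')
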
def prepare_storage(settings):
    default_storage_account_name = settings["DEFAULT_STORAGE_ACCOUNT_NAME"]
    storage_access_key = settings["STORAGE_ACCESS_KEY"]

    blob_service = AppendBlobService(default_storage_account_name, storage_access_key)
    blob_service.create_container('bosh')
    blob_service.create_container(
        container_name='stemcell',
        public_access='blob'
    )

    # Prepare the table for storing metadata of the storage account and stemcells
    table_service = TableService(default_storage_account_name, storage_access_key)
    table_service.create_table('stemcells')
Example #3
def prepare_storage(settings):
    default_storage_account_name = settings["DEFAULT_STORAGE_ACCOUNT_NAME"]
    storage_access_key = settings["STORAGE_ACCESS_KEY"]
    endpoint_suffix = settings["SERVICE_HOST_BASE"]

    blob_service = AppendBlobService(account_name=default_storage_account_name,
                                     account_key=storage_access_key,
                                     endpoint_suffix=endpoint_suffix)
    blob_service.create_container('bosh')
    blob_service.create_container(container_name='stemcell',
                                  public_access='blob')

    # Prepare the table for storing metadata of the storage account and stemcells
    table_service = TableService(account_name=default_storage_account_name,
                                 account_key=storage_access_key,
                                 endpoint_suffix=endpoint_suffix)
    table_service.create_table('stemcells')
def prepare_storage_account(storage_account_name,
                            storage_access_key,
                            endpoint_suffix,
                            protocol="https"):
    blob_service = AppendBlobService(account_name=storage_account_name,
                                     account_key=storage_access_key,
                                     endpoint_suffix=endpoint_suffix,
                                     protocol=protocol)
    blob_service.create_container('bosh')
    blob_service.create_container(container_name='stemcell',
                                  public_access='blob')

    # Prepare the table for storing metadata of the storage account and stemcells
    table_service = TableService(account_name=storage_account_name,
                                 account_key=storage_access_key,
                                 endpoint_suffix=endpoint_suffix,
                                 protocol=protocol)
    table_service.create_table('stemcells')
def prepare_storage(settings):
    default_storage_account_name = settings["DEFAULT_STORAGE_ACCOUNT_NAME"]
    storage_access_key = settings["DEFAULT_STORAGE_ACCESS_KEY"]
    endpoint_suffix = settings["SERVICE_HOST_BASE"]
    protocol = "https"
    if settings["ENVIRONMENT"] == "AzureStack":
        protocol = "http"

    blob_service = AppendBlobService(account_name=default_storage_account_name, account_key=storage_access_key, endpoint_suffix=endpoint_suffix, protocol=protocol)
    blob_service.create_container('bosh')
    blob_service.create_container(
        container_name='stemcell',
        public_access='blob'
    )

    # Prepare the table for storing metadata of the storage account and stemcells
    table_service = TableService(account_name=default_storage_account_name, account_key=storage_access_key, endpoint_suffix=endpoint_suffix, protocol=protocol)
    table_service.create_table('stemcells')
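
For reference, a minimal settings dict that this variant reads might look like the following; every value here is a hypothetical placeholder:

settings = {
    "DEFAULT_STORAGE_ACCOUNT_NAME": "mydefaultaccount",  # hypothetical
    "DEFAULT_STORAGE_ACCESS_KEY": "<access-key>",        # hypothetical
    "SERVICE_HOST_BASE": "core.windows.net",             # endpoint suffix
    "ENVIRONMENT": "AzureStack",                         # selects http instead of https
}
prepare_storage(settings)
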
Example #6
def CreateLogFile():
    """ Create the log file. Call this before calling WriteLog. """
    szRet = ""
    if DEBUG_MODE:
        return "Skipped because debug mode is enabled."

    try:
        szRet = "AppendBlobService"
        blob_service = AppendBlobService(
            account_name,
            account_key
        )
        szRet = "create_container"
        bIsExists = blob_service.exists(
            log_container_name
        )
        if not bIsExists:
            blob_service.create_container(
                log_container_name,
                public_access=PublicAccess.Blob
            )
        bIsExists = blob_service.exists(
            log_container_name,
            log_file_name
        )
        if bIsExists:
            szRet = "already blob."
        else:
            szRet = "create_blob"
            blob_service.create_blob(
                log_container_name,
                log_file_name
            )
        szRet = "OK"
    except Exception:
        # Swallow logging failures; szRet records the last step reached.
        pass
    return szRet
Example #7
def CreateLogFile():
    """ ログファイルを作成する。WriteLog を呼び出す前に実行すること。 """
    szRet = ""
    try:
        szRet = "AppendBlobService"
        blob_service = AppendBlobService(account_name, account_key)
        szRet = "create_container"
        bIsExists = blob_service.exists(log_container_name)
        if not bIsExists:
            blob_service.create_container(log_container_name,
                                          public_access=PublicAccess.Blob)
        bIsExists = blob_service.exists(log_container_name, log_file_name)
        if bIsExists:
            szRet = "already blob."
        else:
            szRet = "create_blob"
            blob_service.create_blob(log_container_name, log_file_name)
        szRet = "OK"
    except Exception:
        # Swallow logging failures; szRet records the last step reached.
        pass
    return szRet
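
Once CreateLogFile has run, the matching WriteLog only needs to append to the same blob. A minimal sketch, assuming the same module-level account_name, account_key, log_container_name, and log_file_name globals as above:

import datetime

def WriteLog(txt):
    """ Append one timestamped line to the log blob created by CreateLogFile. """
    blob_service = AppendBlobService(account_name, account_key)
    line = "{0:%Y-%m-%d %H:%M:%S}".format(datetime.datetime.now()) + " : " + txt + "\r\n"
    blob_service.append_blob_from_text(log_container_name, log_file_name, line)
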
Example #8
import csv
import sys
import subprocess
import datetime
import time

from azure.storage.blob import AppendBlobService

# Configure with the Azure Storage account name and account key (from Storage Explorer).
append_blob_service = AppendBlobService(
    account_name='storage_account_name',
    account_key='storage_account_key')

# Creates an append blob for this app.
append_blob_service.create_container('distances')
append_blob_service.create_blob('distances', 'log.txt')

append_blob_service.append_blob_from_text(
    'distances', 'log.txt', "Starting: " + datetime.datetime.fromtimestamp(
        time.time()).strftime('%Y-%m-%d %H:%M:%S') + "\n")

LatLongDict = {}

# Reads the number of jobs from the command line.
jobCount = int(sys.argv[1])

# Reads the airport data into a dict for easy access.
with open('airports-world.csv') as csvfile:
    reader = csv.DictReader(csvfile)
    for row in reader:
        LatLongDict[row['LocationID']] = [row['Latitude'], row['Longitude']]
Example #9
import csv
import sys
import subprocess
import datetime
import time
import storageconfig as cfg

from azure.storage.blob import AppendBlobService

# Configure with the Azure Storage account name and account key (from Storage Explorer).
append_blob_service = AppendBlobService(
    account_name=cfg.storage['storage_account_name'],
    account_key=cfg.storage['storage_account_key'])

# Creates an append blob for this app.
append_blob_service.create_container(cfg.storage['container_name'])
append_blob_service.create_blob(
    cfg.storage['container_name'], cfg.storage['blob_name'])

append_blob_service.append_blob_from_text(cfg.storage['container_name'],
                                          cfg.storage['blob_name'], "Starting: " +
                                          datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%d %H:%M:%S') + "\n")

LatLongDict = {}

# Reads the number of jobs from the command line.
jobCount = int(sys.argv[1])

# Reads the airport data into a dict for easy access.
with open('airports-world.csv') as csvfile:
    reader = csv.DictReader(csvfile)
Example #10
def prepare_storage(settings):
    default_storage_account_name = settings["DEFAULT_STORAGE_ACCOUNT_NAME"]
    storage_access_key = settings["STORAGE_ACCESS_KEY"]
    endpoint_suffix = settings["SERVICE_HOST_BASE"]

    blob_service = AppendBlobService(account_name=default_storage_account_name, account_key=storage_access_key, endpoint_suffix=endpoint_suffix)
    blob_service.create_container('bosh')
    blob_service.create_container(
        container_name='stemcell',
        public_access='blob'
    )

    # Prepare the table for storing metadata of the storage account and stemcells
    table_service = TableService(account_name=default_storage_account_name, account_key=storage_access_key, endpoint_suffix=endpoint_suffix)
    table_service.create_table('stemcells')

    # Prepare the secondary storage account
    default_storage_account_name_secondary = settings["DEFAULT_STORAGE_ACCOUNT_NAME_SECONDARY"]
    default_storage_access_key_secondary = settings["DEFAULT_STORAGE_ACCESS_KEY_SECONDARY"]
    endpoint_suffix = settings["SERVICE_HOST_BASE"]

    blob_service = AppendBlobService(account_name=default_storage_account_name_secondary, account_key=default_storage_access_key_secondary, endpoint_suffix=endpoint_suffix)
    blob_service.create_container('bosh')
    blob_service.create_container(
        container_name='stemcell',
        public_access='blob'
    )

    # Prepare the table for storing metadata of the storage account and stemcells
    table_service = TableService(account_name=default_storage_account_name_secondary, account_key=default_storage_access_key_secondary, endpoint_suffix=endpoint_suffix)
    table_service.create_table('stemcells')


    # Prepare primary premium storage account
    storage_account_name_primary = settings["STORAGE_ACCOUNT_NAME_PRIMARY"]
    storage_access_key_primary = settings["STORAGE_ACCESS_KEY_PRIMARY"]
    endpoint_suffix = settings["SERVICE_HOST_BASE"]

    blob_service = AppendBlobService(account_name=storage_account_name_primary, account_key=storage_access_key_primary, endpoint_suffix=endpoint_suffix)
    blob_service.create_container('bosh')
    blob_service.create_container('stemcell')

    # Prepare secondary premium storage account
    storage_account_name_secondary = settings["STORAGE_ACCOUNT_NAME_SECONDARY"]
    storage_access_key_secondary = settings["STORAGE_ACCESS_KEY_SECONDARY"]
    endpoint_suffix = settings["SERVICE_HOST_BASE"]

    blob_service = AppendBlobService(account_name=storage_account_name_secondary, account_key=storage_access_key_secondary, endpoint_suffix=endpoint_suffix)
    blob_service.create_container('bosh')
    blob_service.create_container('stemcell')
#from azure.storage.blob import ContentSettings
#block_blob_service.create_blob_from_path(
#    'myseccontainer',
#    'remotesecfiles3.txt',
#    'localfiles3.txt',
#    content_settings=ContentSettings(content_type='text/html')
#            )

#### To list the blobs in a container, use the list_blobs method. This method returns a generator. 
#### The following code outputs the name of each blob in a container to the console.
#generator = block_blob_service.list_blobs('myseccontainer')
#for blob in generator:
#    print(blob.name)

#### The following example demonstrates using get_blob_to_path to download the contents of the myblob blob and store it to the out-sunset.png file.
#block_blob_service.get_blob_to_path('myseccontainer', 'remotesecf.txt', 'fromazure-out.txt')

#### Finally, to delete a blob, call delete_blob.
#block_blob_service.delete_blob('myseccontainer', 'remotesecf.txt')

#### The example below creates a new append blob and appends some data to it, simulating a simple logging operation.
from azure.storage.blob import AppendBlobService
append_blob_service = AppendBlobService(account_name='myaccount', account_key='mykey')
#The same containers can hold all types of blobs
append_blob_service.create_container('mycontainer')
#Append blobs must be created before they are appended to
append_blob_service.create_blob('mycontainer', 'myappendblob')
append_blob_service.append_blob_from_text('mycontainer', 'myappendblob', u'Sinaq, cumle!')
append_blob = append_blob_service.get_blob_to_text('mycontainer', 'myappendblob')
print(append_blob.content)
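
Each append_blob_from_text call adds to the end of the blob, which is what makes append blobs suitable for logging. A short sketch using the same service and container as above:

for i in range(3):
    append_blob_service.append_blob_from_text('mycontainer', 'myappendblob',
                                              u'line %d\n' % i)
print(append_blob_service.get_blob_to_text('mycontainer', 'myappendblob').content)
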
Example #12
# generator = block_blob_service.list_blobs('mycontainer')
# for blob in generator:
#     print(blob.name)


# block_blob_service.get_blob_to_path('mycontainer', 'myblockblob', 'out-sunset.png')


# block_blob_service.delete_blob('mycontainer', 'myblockblob')





from azure.storage.blob import AppendBlobService

append_blob_service = AppendBlobService(account_name='bobur', account_key='<storage-account-key>')

# The same containers can hold all types of blobs
append_blob_service.create_container('myappendcontainer')

# Append blobs must be created before they are appended to
append_blob_service.create_blob('myappendcontainer', 'myappendblob')
append_blob_service.append_blob_from_text('myappendcontainer', 'myappendblob', u'Hello, world!')

append_blob = append_blob_service.get_blob_to_text('myappendcontainer', 'myappendblob')

print(append_blob.content)

Example #13
class LogWriter(object):
    """description of class"""

    LOG_CONTAINER_NAME = r'log-files'
    DEBUG_MODE = bool(os.getenv('DEBUG_MODE', False))  # note: any non-empty env value is truthy

    # Constructor
    def __init__(self, name, key, subFolderName=None):
        super(LogWriter, self).__init__()

        self._name = name
        self._key = key
        self.m_szLogFileName = ""
        self.m_szSubFolderName = subFolderName
        self.m_pBlobService = AppendBlobService(name, key)

    #}def __init__

    def _CreateLogFile(self):
        """ ログファイルを作成する。WriteLog を呼び出す前に実行すること。 """

        szRet = ""
        if (LogWriter.DEBUG_MODE):
            return ("Debug モードのためスキップします。")

        try:
            if (0 == len(self.m_szLogFileName)):
                szRet = "create_container"
                bIsExists = self.m_pBlobService.exists(
                    LogWriter.LOG_CONTAINER_NAME)
                if bIsExists:
                    pass
                else:
                    self.m_pBlobService.create_container(
                        LogWriter.LOG_CONTAINER_NAME,
                        public_access=PublicAccess.Blob)

                #Decide the log file name
                #// It is appended to below; empty here because len was checked to be 0
                if ((self.m_szSubFolderName is not None)
                        and (0 < len(self.m_szSubFolderName))):
                    #// If a subfolder name is specified, prepend it
                    self.m_szLogFileName += self.m_szSubFolderName + "\\"
                #}if
                self.m_szLogFileName += r"{0:%Y-%m-%dT%H-%M-%S.log}".format(
                    datetime.datetime.now())

                bIsExists = self.m_pBlobService.exists(
                    LogWriter.LOG_CONTAINER_NAME, self.m_szLogFileName)
                if bIsExists:
                    szRet = "already blob."
                else:
                    szRet = "create_blob"
                    self.m_pBlobService.create_blob(
                        LogWriter.LOG_CONTAINER_NAME, self.m_szLogFileName)
                szRet = "OK"
            else:
                szRet = "Already called."
                szRet = "OK"
            #}if

        except Exception as e:
            #szRet = "Log exception";
            szRet = szRet + "\r\n" + str(e)
            pass
        return szRet

    #}def

    def WriteLog(self, txt):
        """ ログファイルにテキストを出力する。末尾に改行コードが追加される。 """
        szRet = ""
        szLogText = r"{0:%Y-%m-%d %H:%M:%S}".format(
            datetime.datetime.now()) + r" : " + txt + "\r\n"
        if (LogWriter.DEBUG_MODE):
            print(szLogText)
            return ("Debug モードのためスキップしました。")

        try:
            #Create the log file
            self._CreateLogFile()

            szRet = "append_blob_from_text"
            self.m_pBlobService.append_blob_from_text(
                LogWriter.LOG_CONTAINER_NAME, self.m_szLogFileName, szLogText)
            szRet = "OK"
        except Exception as e:
            #szRet = "Log exception";
            szRet = szRet + "\r\n" + str(e)
        #try

        return szRet

    #}def

    def WriteBlob(self, blob_name, value):
        """ 単一 BLOB ファイルを作成しテキストを保存する。 """
        szRet = ""
        if (LogWriter.DEBUG_MODE):
            return ("Debug モードのため書き込みをしません。")

        try:
            #blob_name = r'sample.txt';

            szRet = "BlockBlobService"
            blob_service = BlockBlobService(self._name, self._key)

            szRet = "create_container"
            blob_service.create_container(LogWriter.LOG_CONTAINER_NAME,
                                          public_access=PublicAccess.Blob)

            szRet = "create_blob_from_bytes"
            #blob_service.create_blob_from_bytes(
            #    log_container_name,
            #    log_blob_name,
            #    b'<center><h1>Hello World!</h1></center>',
            #    content_settings=ContentSettings('text/html')
            #)

            if (isinstance(value, str)):
                szRet = "create_blob_from_text"
                blob_service.create_blob_from_text(
                    LogWriter.LOG_CONTAINER_NAME, blob_name, value)
            else:
                szRet = "create_blob_from_stream"
                blob_service.create_blob_from_stream(
                    LogWriter.LOG_CONTAINER_NAME, blob_name, io.BytesIO(value))
            #}if

            #szRet = "make_blob_url"
            #print(blob_service.make_blob_url(log_container_name, log_blob_name))

            szRet = "OK"
        except Exception:
            print(r"Exception.")
        #try

        return szRet

    #def WriteBlob( blob_name, txt ):

    def MakeBlobUri(self, blob_name):
        blob_service = BlockBlobService(self._name, self._key)
        szRet = blob_service.make_blob_url(LogWriter.LOG_CONTAINER_NAME,
                                           blob_name)

        return (szRet)

    #}def


#}class
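
A minimal usage sketch for this class, assuming the module imports AppendBlobService, BlockBlobService, PublicAccess, os, io, and datetime; the account name and key are hypothetical placeholders:

writer = LogWriter('mystorageaccount', '<access-key>', subFolderName='worker01')
print(writer.WriteLog('service started'))       # creates the log file on first call
print(writer.WriteBlob('sample.txt', 'hello'))  # one-off block blob upload
print(writer.MakeBlobUri('sample.txt'))         # public URL for that blob
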
Example #14
# Jobs for SLURM to spawn on the SLURM nodes

import csv
import sys
import subprocess
import datetime
import time

from azure.storage.blob import AppendBlobService

# Configure with the Azure Storage account name and account key (from Storage Explorer).
append_blob_service = AppendBlobService(account_name='storage_account_name',
                                        account_key='storage_account_key')

# Creates an append blob for this app.
append_blob_service.create_container('distances')
append_blob_service.create_blob('distances', 'log.txt')

append_blob_service.append_blob_from_text(
    'distances', 'log.txt', "Starting: " + datetime.datetime.fromtimestamp(
        time.time()).strftime('%Y-%m-%d %H:%M:%S') + "\n")

LatLongDict = {}

# Reads the number of jobs from the command line.
jobCount = int(sys.argv[1])

# Reads the airport data into a dict for easy access.
with open('airports-world.csv') as csvfile:
    reader = csv.DictReader(csvfile)
    for row in reader:
Example #15
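The snippet below depends on several imports and on project-local helpers (GithubAdapter, Context, run, run_script, and the various path constants) that are not shown. The standard-library and SDK imports it appears to assume are:

import asyncio
import base64
import json
import logging
import os
import shutil
import tarfile

from azure.storage.blob import AppendBlobService, ContentSettings, PublicAccess
from azure.storage.queue import QueueService
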
def main():
    logging.basicConfig(level=logging.DEBUG)
    with open(TASKDATA) as taskdata_file:
        taskdata = json.loads(taskdata_file.read())
    github = GithubAdapter(taskdata)
    gh_commit = github.get_commit()
    config = github.get_config()
    blob_service = AppendBlobService(
        account_name=taskdata["storage_account_name"],
        account_key=taskdata["storage_account_key"])
    queue_service = QueueService(
        connection_string=taskdata["queue_connection_string"])
    loop = asyncio.get_event_loop()
    ctx = Context(loop=loop,
                  config=config,
                  blob_service=blob_service,
                  queue_service=queue_service,
                  taskdata=taskdata)

    blob_service.create_container("logs",
                                  fail_on_exist=False,
                                  public_access=PublicAccess.Blob)
    blob_service.create_blob("logs",
                             ctx.pid,
                             content_settings=ContentSettings(
                                 content_type="text/plain; charset=utf-8"))
    gh_commit.create_status(
        "pending",
        target_url=blob_service.make_blob_url("logs", ctx.pid),
        description="Build started",
        context=config.get("configuration_name",
                           "configuration%s" % taskdata["config_num"]))
    os.makedirs(REPOSDIR, exist_ok=True)
    # Check if we're the only process who updates the git cache on SMB share.
    # Otherwise skip updating.
    if not os.path.exists(LOCKFILENAME):
        lock = open(LOCKFILENAME, "w")
        lock.close()
        update_git_cache(ctx)
        os.unlink(LOCKFILENAME)

    if os.path.exists(SRCDIR):
        shutil.rmtree(SRCDIR)
    os.makedirs(os.path.join(SRCDIR, "build/conf"))
    with open(os.path.join(SRCDIR, "build/conf/auto.conf"), "a") as localconf:
        localconf.write("\n%s\n" % config.get("localconf", ""))
        localconf.write(AUTOCONFIG)

    repos = get_repos(config)
    repos.append((repodirname(taskdata["gh"]["repository"]["clone_url"]),
                  taskdata["gh"]["repository"]["clone_url"], None, None))
    for reponame, repourl, reporef, _ in repos:
        refrepopath = os.path.join(REPOSDIR, reponame)
        run(ctx,
            ["git", "clone", "--reference", refrepopath, repourl, reponame],
            cwd=SRCDIR)
        if reporef:
            LOG.info("Checkout %s to %s" % (reponame, reporef))
            run(ctx, ["git", "checkout", "%s" % reporef],
                cwd=os.path.join(SRCDIR, reponame))

    # Do checkout
    if taskdata["gh"]["type"] == "pull_request":
        LOG.info("Add remote repo %s" % taskdata["gh"]["clone_url"])
        run(ctx, [
            "git", "remote", "add", "contributor", taskdata["gh"]["clone_url"]
        ],
            cwd=os.path.join(
                SRCDIR,
                repodirname(taskdata["gh"]["repository"]["clone_url"])))
        LOG.info("Fetch contributor's repo")
        run(ctx, ["git", "fetch", "contributor"],
            cwd=os.path.join(
                SRCDIR,
                repodirname(taskdata["gh"]["repository"]["clone_url"])))
    LOG.info("Checkout %s to %s" % (repodirname(
        taskdata["gh"]["repository"]["clone_url"]), taskdata["gh"]["sha"]))
    run(ctx, ["git", "checkout", taskdata["gh"]["sha"]],
        cwd=os.path.join(
            SRCDIR, repodirname(taskdata["gh"]["repository"]["clone_url"])))

    # Fetch sstate if any
    if os.path.exists(get_sstate_archive_path(ctx)):
        with tarfile.open(name=get_sstate_archive_path(ctx),
                          mode="r:gz") as sstate_tar:
            sstate_tar.extractall(path=SRCDIR)

    addlayers = []
    for dep in config["dependencies"]:
        repodir = repodirname(dep["url"])
        layers = dep.get("layers", None)
        if layers:
            addlayers.extend([
                "bitbake-layers add-layer ../%s/%s" % (repodir, layer)
                for layer in layers
            ])
        else:
            addlayers.append("bitbake-layers add-layer ../%s" % repodir)
    addlayers.append("bitbake-layers add-layer ../%s" %
                     repodirname(taskdata["gh"]["repository"]["clone_url"]))

    run_script(ctx,
               BUILDSCRIPT % ("\n".join(addlayers), config["bitbake_target"]),
               cwd=SRCDIR)
    save_sstate(ctx)

    # Github auth token has expired by now most probably => renew
    github = GithubAdapter(taskdata)
    gh_commit = github.get_commit()
    gh_commit.create_status(
        "success",
        target_url=blob_service.make_blob_url("logs", ctx.pid),
        description="Target has been built successfully",
        context=config.get("configuration_name",
                           "configuration%s" % taskdata["config_num"]))
    loop.close()
    # TODO: copy cloud-init log files to share
    taskdata["build_result"] = "success"
    queue_service.put_message(
        "buildresults",
        base64.b64encode(json.dumps(taskdata).encode("utf")).decode("utf"))