Code Example #1
# project, client, environments, services, and output are modules from
# catalyze-paas-cli, assumed to be imported at module level.
def status():
    """Check the status of the environment and every service in it."""
    settings = project.read_settings()
    session = client.acquire_session(settings)
    env = environments.retrieve(session, settings["environmentId"])
    output.write("environment state: " + env["state"])
    codes = []
    noncodes = []
    for service in services.list(session, settings["environmentId"]):
        if service["type"] == "utility":
            continue
        if service["type"] == "code":
            codes.append("\t%s (size = %s, build status = %s, deploy status = %s)"
                         % (service["label"], service["size"],
                            service["build_status"], service["deploy_status"]))
        else:
            noncodes.append("\t%s (size = %s, image = %s, status = %s)"
                            % (service["label"], service["size"],
                               service["name"], service["deploy_status"]))
    for item in (codes + noncodes):
        output.write(item)
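
Given the format strings above, status() prints code services first, then the remaining non-utility services. The output looks roughly like this (the service names and values below are illustrative, not from the source):

environment state: running
	app01 (size = medium, build status = finished, deploy status = running)
	db01 (size = small, image = postgresql, status = running)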
Code Example #2
File: db.py  Project: catalyzeio/catalyze-paas-cli
import base64
import binascii
import os
import shutil
import struct
import sys
import tempfile

from Crypto import Random
from Crypto.Cipher import AES

# project, client, services, environments, pods, output, tasks, and logs are
# modules from catalyze-paas-cli, assumed to be imported alongside the above.
def cmd_import(database_label, filepath, mongo_collection, mongo_database, wipe_first, postgres_database=None, mysql_database=None):
    """Imports a file into a chosen database service.

The file is encrypted and uploaded to Catalyze, where an automated service processes it according to the given parameters. The command can either wait until processing finishes (and report the end result) or simply kick it off.

The type of file depends on the database. For postgres and mysql, this should be a single SQL script with the extension "sql". For mongo, this should be a tar'd, gzipped archive of the dump that you wish to import, with the extension "tar.gz".

If there is an unexpected error, please contact Catalyze support ([email protected]).
"""
    settings = project.read_settings()
    session = client.acquire_session(settings)
    output.write("Looking up service...")
    service_id = services.get_by_label(session, settings["environmentId"], database_label)

    environment = environments.retrieve(session, settings["environmentId"])
    pod = pods.metadata(session, environment["podId"])
    # Some pods require the plaintext length to be prepended to the upload.
    padding_required = pod["importRequiresLength"]

    output.write("Importing '%s' to %s (%s)" % (filepath, database_label, service_id))
    basename = os.path.basename(filepath)
    tmp_dir = tempfile.mkdtemp()
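    # 32-byte key (AES-256) and a random IV for CBC mode, via PyCrypto.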
    key = Random.new().read(32)
    iv = Random.new().read(AES.block_size)
    output.write("Encrypting...")
    try:
        enc_filepath = os.path.join(tmp_dir, basename)
        with open(filepath, 'rb') as src:
            with open(enc_filepath, 'wb') as tf:
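                # Prepend the original file size as an unencrypted
                # little-endian uint64 so the exact plaintext length
                # can be recovered after decryption.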
                if padding_required:
                    filesize = os.path.getsize(filepath)
                    output.write("File size = %d" % (filesize,))
                    tf.write(struct.pack("<Q", filesize))
                
                contents = src.read()
                # Zero-pad the plaintext to a multiple of the AES block size
                # (a full block of zeros is appended when already aligned).
                contents += b'\0' * (AES.block_size - len(contents) % AES.block_size)
                cipher = AES.new(key, mode=AES.MODE_CBC, IV=iv)
                tf.write(cipher.encrypt(contents))

        with open(enc_filepath, 'rb') as enc_file:
            options = {}
            if mongo_collection is not None:
                options["mongoCollection"] = mongo_collection
            if mongo_database is not None:
                options["mongoDatabase"] = mongo_database
            if postgres_database is not None:
                options["pgDatabase"] = postgres_database
            if mysql_database is not None:
                options["mysqlDatabase"] = mysql_database

            output.write("Uploading...")
            upload_url = services.get_temporary_upload_url(session, settings["environmentId"], service_id)
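            # The key and IV are hex-encoded, then base64-encoded, for the API.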
            resp = services.initiate_import(session, settings["environmentId"],
                    service_id, upload_url, enc_file,
                    base64.b64encode(binascii.hexlify(key)),
                    base64.b64encode(binascii.hexlify(iv)),
                    wipe_first, options)

            task_id = resp["id"]
            output.write("Processing import... (id = %s)" % (task_id,))
            job = tasks.poll_status(session, settings["environmentId"], task_id, exit_on_error=False)
            output.write("\nImport complete (end status = '%s')" % (job["status"],))
            logs.dump(session, settings, database_label, service_id, task_id, "restore", None)
            if job["status"] != "finished":
                sys.exit(-1)
    finally:
        # Always remove the temp directory holding the encrypted copy.
        shutil.rmtree(tmp_dir)
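
As a companion sketch (not part of catalyze-paas-cli), the inverse of the encryption step above would look like this, assuming you hold the raw 32-byte key and 16-byte IV and know whether the pod required the length prefix:

import struct

from Crypto.Cipher import AES

def decrypt_import_file(enc_filepath, out_filepath, key, iv, padding_required):
    """Reverse cmd_import's encryption: an optional plaintext-length prefix,
    then AES-256-CBC ciphertext zero-padded to the block size."""
    with open(enc_filepath, 'rb') as f:
        filesize = None
        if padding_required:
            # The first 8 bytes are the unencrypted little-endian plaintext size.
            (filesize,) = struct.unpack("<Q", f.read(8))
        ciphertext = f.read()
    plaintext = AES.new(key, mode=AES.MODE_CBC, IV=iv).decrypt(ciphertext)
    if filesize is not None:
        plaintext = plaintext[:filesize]  # strip the zero padding exactly
    with open(out_filepath, 'wb') as f:
        f.write(plaintext)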