Example #1
def download(service_label, backup_id, filepath):
    settings = project.read_settings()
    session = client.acquire_session(settings)
    service_id = services.get_by_label(session, settings["environmentId"], service_label)

    job = jobs.retrieve(session, settings["environmentId"], service_id, backup_id)
    if job["type"] != "backup" or job["status"] != "finished":
        output.error("Only 'finished' 'backup' jobs may be downloaded with this command")

    output.write("Downloading backup %s" % (backup_id,))
    url = services.get_temporary_url(session, settings["environmentId"], service_id, backup_id)
    r = requests.get(url, stream=True)
    r.raise_for_status()  # fail fast on a bad or expired temporary URL
    basename = os.path.basename(filepath)
    tmp_dir = tempfile.mkdtemp()
    tmp_filepath = os.path.join(tmp_dir, basename)
    with open(tmp_filepath, 'wb') as f:
        for chunk in r.iter_content(chunk_size=1024):
            if chunk:
                f.write(chunk)
    output.write("Decrypting...")
    decryption = AESCrypto.Decryption(tmp_filepath, job["backup"]["key"], job["backup"]["iv"])
    decryption.decrypt(filepath)
    os.remove(tmp_filepath)
    os.rmdir(tmp_dir)  # clean up the temporary directory as well
    output.write("%s downloaded successfully to %s" % (service_label, filepath))
Example #2
def metrics(service_label, format, stream, mins):
    """Print out metrics about a single service or all services in an environment."""
    if stream and (format or mins != 1):
        output.error("--stream cannot be used with a custom format or multiple records.")

    if format is None:
        transformer = TextTransformer()
    elif format == "csv":
        transformer = CSVTransformer()
    elif format == "json":
        transformer = JSONTransformer()
    else:
        output.error("unrecognized format '%s'" % (format,))

    settings = project.read_settings()
    session = client.acquire_session(settings)

    if service_label is None:
        transformer.set_group_mode()
        transformer.set_retriever(lambda: environments.retrieve_metrics(session, settings["environmentId"], mins))
    else:
        service_id = services.get_by_label(session, settings["environmentId"], service_label)
        transformer.set_retriever(
            lambda: services.retrieve_metrics(session, settings["environmentId"], service_id, mins)
        )

    transformer.process(stream)
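The metrics command only touches its transformer through three calls: set_group_mode, set_retriever, and process. The real TextTransformer, CSVTransformer, and JSONTransformer classes are not included in these examples, so the following is a minimal sketch of what that shared interface could look like; everything here is inferred from the call sites above.

import json

class BaseTransformer(object):
    def __init__(self):
        self.retriever = None
        self.group_mode = False  # True when reporting on every service in the environment

    def set_group_mode(self):
        self.group_mode = True

    def set_retriever(self, retriever):
        self.retriever = retriever  # zero-argument callable returning the raw metrics payload

    def process(self, stream):
        # A real implementation would presumably loop and re-fetch while stream is true.
        self.render(self.retriever())

    def render(self, data):
        raise NotImplementedError

class JSONTransformer(BaseTransformer):
    def render(self, data):
        print(json.dumps(data, indent=2))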
Example #3
def restore(service_label, backup_id, skip_poll):
    settings = project.read_settings()
    session = client.acquire_session(settings)
    service_id = services.get_by_label(session, settings["environmentId"], service_label)
    task_id = services.restore_backup(session, settings["environmentId"], service_id, backup_id)
    output.write("Restoring (task = %s)" % (task_id,))
    if not skip_poll:
        output.write("Polling until restore is complete.")
        task = tasks.poll_status(session, settings["environmentId"], task_id, exit_on_error=False)
        output.write("\nEnded in status '%s'" % (task["status"],))
        logs.dump(session, settings, service_label, service_id, task_id, "restore", None)
        if task["status"] != "finished":
            sys.exit(-1)
Example #4
def create(service_label, skip_poll):
    settings = project.read_settings()
    session = client.acquire_session(settings)
    service_id = services.get_by_label(session, settings["environmentId"], service_label)
    task_id = services.create_backup(session, settings["environmentId"], service_id)
    print("Backup started (task ID = %s)" % (task_id,))
    if not skip_poll:
        output.write("Polling until backup finishes.")
        task = tasks.poll_status(session, settings["environmentId"], task_id, exit_on_error=False)
        output.write("\nEnded in status '%s'" % (task["status"],))
        logs.dump(session, settings, service_label, service_id, task_id, "backup", None)
        if task["status"] != "finished":
            sys.exit(-1)
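Examples #3 and #4 share the same pattern: kick off a task, block on tasks.poll_status, dump the service logs, and exit non-zero on any end status other than 'finished'. tasks.poll_status itself is not shown in these examples; a rough sketch of such a loop, assuming a hypothetical tasks.retrieve(session, env_id, task_id) lookup, might be:

import sys
import time

TERMINAL_STATUSES = ("finished", "failed")  # assumed set of terminal states

def poll_status(session, env_id, task_id, exit_on_error=True):
    # Illustrative only; tasks.retrieve is an assumed helper, not a
    # documented call from these examples.
    while True:
        task = tasks.retrieve(session, env_id, task_id)
        if task["status"] in TERMINAL_STATUSES:
            if task["status"] != "finished" and exit_on_error:
                sys.exit(-1)
            return task
        sys.stdout.write(".")  # progress marker while waiting
        sys.stdout.flush()
        time.sleep(2)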
Example #5
def open_console(service_label, command):
    """
Opens a secure console to a code or database service.

For code services, a command is required. This command is executed as root in the context of the application root directory.

For database services, no command is needed - instead, the appropriate command for the database type is run. For example, for a postgres database, psql is run.
"""
    global console_closed
    settings = project.read_settings()
    session = client.acquire_session(settings)

    service_id = services.get_by_label(session, settings["environmentId"], service_label)

    output.write("Opening console to service '%s'" % (service_id))

    task_id = services.request_console(session, settings["environmentId"], service_id, command)["taskId"]

    output.write("Waiting for the console to be ready... This might take a bit.")

    job_id = services.poll_console_job(session, settings["environmentId"], service_id, task_id)
    creds = services.get_console_tokens(session, settings["environmentId"], service_id, job_id)

    try:
        url = creds["url"].replace("http", "ws")
        token = creds["token"]
        output.write("Connecting...")

        sslopt = {
            "ssl_version": ssl.PROTOCOL_TLSv1
        }
        if "skip_cert_validation" in config.behavior:
            sslopt["check_hostname"] = False
        ws = ConsoleClient(url, ssl_options = sslopt, headers = [("X-Console-Token", token)])
        ws.daemon = False
        ws.connect()

        with ContextedConsole() as c:
            while not console_closed:
                data = c.get_data()
                if data:
                    ws.send(data)
    finally:
        output.write("Cleaning up")
        services.destroy_console(session, settings["environmentId"], service_id, job_id)
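open_console reads a module-level console_closed flag (note the global declaration) but never sets it, so something else in the module must flip it when the remote side hangs up. A plausible sketch, assuming the websocket client exposes a close callback (the hook name here is hypothetical):

# Module-level flag polled by the loop in open_console above.
console_closed = False

def _on_console_close():
    # Hypothetical callback; how ConsoleClient reports a closed socket
    # is not visible in these examples.
    global console_closed
    console_closed = True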
Example #6
def exec_list(service_label, page, page_size):
    """List all created backups for the service, sorted from oldest to newest."""
    settings = project.read_settings()
    session = client.acquire_session(settings)
    service_id = services.get_by_label(session, settings["environmentId"], service_label)
    raw_backups = services.list_backups(session, settings["environmentId"], service_id, page, page_size)
    backup_list = []
    for backup_id, body in raw_backups.items():
        body["id"] = backup_id
        backup_list.append(body)
    backup_list.sort(key=lambda backup: parse_date(backup["created_at"]))
    if len(backup_list) > 0:
        for item in backup_list:
            output.write("%s %s (status = %s)" % (item["id"], item["created_at"], item["status"]))
        if len(backup_list) == page_size and page == 1:
            output.write("(for older backups, try with --page=2 or adjust --page-size)")
    elif page == 1:
        output.write("No backups created yet for this service.")
Example #7
def cmd_export(database_label, filepath):
    """Exports all data from a chosen database service.

The export is accomplished by first creating a backup of the database, then requesting a temporary access URL for the encrypted backup file. The file is downloaded, decrypted, and stored at the provided location.

If there is an unexpected error, please contact Catalyze support ([email protected]).
"""
    settings = project.read_settings()
    session = client.acquire_session(settings)
    service_id = services.get_by_label(session, settings["environmentId"], database_label)
    task_id = services.create_backup(session, settings["environmentId"], service_id)
    print("Export started (task ID = %s)" % (task_id,))
    output.write("Polling until export finishes.")
    job = tasks.poll_status(session, settings["environmentId"], task_id, exit_on_error=False)
    if job["status"] != "finished":
        output.write("\nExport finished with illegal status \"%s\", aborting." % (job["status"],))
        logs.dump(session, settings, database_label, service_id, task_id, "backup", None)
        sys.exit(-1)
    output.write("\nEnded in status '%s'" % (job["status"],))
    backup_id = job["id"]
    output.write("Downloading...")
    url = services.get_temporary_url(session, settings["environmentId"], service_id, backup_id)
    r = requests.get(url, stream=True)
    r.raise_for_status()  # fail fast on a bad or expired temporary URL
    basename = os.path.basename(filepath)
    tmp_dir = tempfile.mkdtemp()
    tmp_filepath = os.path.join(tmp_dir, basename)
    with open(tmp_filepath, 'wb') as f:
        for chunk in r.iter_content(chunk_size=1024):
            if chunk:
                f.write(chunk)
    output.write("Decrypting...")
    decryption = AESCrypto.Decryption(tmp_filepath, job["backup"]["key"], job["backup"]["iv"])
    decryption.decrypt(filepath)
    os.remove(tmp_filepath)
    os.rmdir(tmp_dir)  # clean up the temporary directory as well
    output.write("%s exported successfully to %s" % (database_label, filepath))
    logs.dump(session, settings, database_label, service_id, task_id, "backup", None)
Example #8
def cmd_import(database_label, filepath, mongo_collection, mongo_database, wipe_first, postgres_database=None, mysql_database=None):
    """Imports a file into a chosen database service.

The import is accomplished by encrypting the file and uploading it to Catalyze. An automated service processes the file according to the passed parameters. The command offers the option to either wait until the processing is finished (and be notified of the end result), or to just kick it off.

The type of file depends on the database. For postgres and mysql, this should be a single SQL script with the extension "sql". For mongo, this should be a tar'd, gzipped archive of the dump that you wish to import, with the extension "tar.gz".

If there is an unexpected error, please contact Catalyze support ([email protected]).
"""
    settings = project.read_settings()
    session = client.acquire_session(settings)
    output.write("Looking up service...")
    service_id = services.get_by_label(session, settings["environmentId"], database_label)

    environment = environments.retrieve(session, settings["environmentId"])
    pod = pods.metadata(session, environment["podId"])
    padding_required = pod["importRequiresLength"]

    output.write("Importing '%s' to %s (%s)" % (filepath, database_label, service_id))
    basename = os.path.basename(filepath)
    tmp_dir = tempfile.mkdtemp()
    key = Random.new().read(32)  # random 256-bit AES key
    iv = Random.new().read(AES.block_size)
    output.write("Encrypting...")
    try:
        enc_filepath = os.path.join(tmp_dir, basename)
        with open(filepath, 'rb') as source, open(enc_filepath, 'wb') as tf:
            if padding_required:
                # Some pods need the plaintext length up front, since CBC
                # padding obscures the original file size.
                filesize = os.path.getsize(filepath)
                output.write("File size = %d" % (filesize,))
                tf.write(struct.pack("<Q", filesize))

            contents = source.read()
            # Zero-pad the plaintext up to a multiple of the AES block size.
            contents += b'\0' * (AES.block_size - len(contents) % AES.block_size)
            cipher = AES.new(key, mode=AES.MODE_CBC, IV=iv)
            tf.write(cipher.encrypt(contents))

        with open(enc_filepath, 'rb') as encrypted:
            options = {}
            if mongo_collection is not None:
                options["mongoCollection"] = mongo_collection
            if mongo_database is not None:
                options["mongoDatabase"] = mongo_database
            if postgres_database is not None:
                options["pgDatabase"] = postgres_database
            if mysql_database is not None:
                options["mysqlDatabase"] = mysql_database

            output.write("Uploading...")
            upload_url = services.get_temporary_upload_url(session, settings["environmentId"], service_id)
            # The key and IV travel hex-encoded, then base64-wrapped.
            resp = services.initiate_import(session, settings["environmentId"],
                    service_id, upload_url, encrypted,
                    base64.b64encode(binascii.hexlify(key)),
                    base64.b64encode(binascii.hexlify(iv)),
                    wipe_first, options)

            task_id = resp["id"]
            output.write("Processing import... (id = %s)" % (task_id,))
            job = tasks.poll_status(session, settings["environmentId"], task_id, exit_on_error=False)
            output.write("\nImport complete (end status = '%s')" % (job["status"],))
            logs.dump(session, settings, database_label, service_id, task_id, "restore", None)
            if job["status"] != "finished":
                sys.exit(-1)
    finally:
        shutil.rmtree(tmp_dir)
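Example #8 pins down the encryption format used for imports: an optional unencrypted 8-byte little-endian length header, zero padding up to the AES block size, and AES-CBC. The AESCrypto.Decryption helper used in Examples #1 and #7 is not shown; assuming backups use the same layout, a minimal sketch of the counterpart could look like this (PyCrypto, with key and IV taken as raw bytes, though the actual encoding of job["backup"]["key"] is not visible in these examples):

import struct
from Crypto.Cipher import AES

class Decryption(object):
    """Illustrative counterpart to the encryption in Example #8; the real
    AESCrypto.Decryption implementation may differ."""

    def __init__(self, filepath, key, iv, length_prefixed=True):
        self.filepath = filepath
        self.key = key  # assumed raw 32-byte key
        self.iv = iv    # assumed raw 16-byte IV
        self.length_prefixed = length_prefixed

    def decrypt(self, out_filepath):
        with open(self.filepath, 'rb') as f:
            data = f.read()
        original_size = None
        if self.length_prefixed:
            # Mirrors the struct.pack("<Q", filesize) header written on import.
            original_size = struct.unpack("<Q", data[:8])[0]
            data = data[8:]
        cipher = AES.new(self.key, mode=AES.MODE_CBC, IV=self.iv)
        plaintext = cipher.decrypt(data)
        if original_size is not None:
            plaintext = plaintext[:original_size]
        else:
            plaintext = plaintext.rstrip(b'\0')  # strip the zero padding
        with open(out_filepath, 'wb') as f:
            f.write(plaintext)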