Example 1
def __init__(self,
             resource_instance_id,
             access_key_id=None,
             secret_access_key=None):
    self.id = str(uuid.uuid4().hex)
    self.resource_instance_id = resource_instance_id
    # COS credentials are encrypted before being stored on the model.
    self.access_key_id = encrypt_api_key(
        access_key_id) if access_key_id else None
    self.secret_access_key = encrypt_api_key(
        secret_access_key) if secret_access_key else None
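These snippets all lean on encrypt_api_key / decrypt_api_key, whose implementation is not shown. A minimal sketch, assuming a Fernet-based symmetric scheme (the key source here is a placeholder, not the real configuration):

from cryptography.fernet import Fernet

# Assumption: the real key comes from configuration; a fresh key is generated
# here only so the sketch is self-contained and runnable.
_FERNET = Fernet(Fernet.generate_key())

def encrypt_api_key(api_key):
    # Encrypt a plaintext secret before it is stored on a model.
    return _FERNET.encrypt(api_key.encode()).decode()

def decrypt_api_key(encrypted_api_key):
    # Reverse of encrypt_api_key, for values read back from the database.
    return _FERNET.decrypt(encrypted_api_key.encode()).decode()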
Example 2
def __init__(self, name, username, api_key, project_id,
             ibm_cloud_account_id):
    self.id = str(uuid.uuid4().hex)
    self.name = name
    self.username = username
    self.api_key = encrypt_api_key(api_key)
    self.ibm_cloud_account_id = ibm_cloud_account_id
    self.status = "AUTHENTICATING"
    self.project_id = project_id
Example 3
def add_ibm_cloud_account(user_id, user):
    """
    Add an IBM Cloud Account
    :param user_id: ID of the user initiating the request
    :param user: object of the user initiating the request
    :return: Response object from flask package
    """
    from doosra.tasks import task_process_new_ibm_cloud_account

    data = request.get_json(force=True)
    existing_cloud = doosradb.session.query(IBMCloud).filter(
        IBMCloud.name == data["name"], IBMCloud.project_id == user.project.id,
        IBMCloud.status != DELETING).first()
    if existing_cloud:
        return Response("ERROR_SAME_NAME", status=409)

    existing_clouds = doosradb.session.query(IBMCloud).filter(
        IBMCloud.project_id == user.project.id,
        IBMCloud.status != DELETING).all()
    for cloud in existing_clouds:
        if cloud.verify_api_key(data['api_key']):
            return Response("ERROR_SAME_API_KEY, cloud_id={}".format(cloud.id),
                            status=409)

    cloud = IBMCloud(data["name"], data["api_key"], user.project.id)
    if data.get("resource_instance_id"):
        cloud.service_credentials = IBMServiceCredentials(
            data["resource_instance_id"])

    # service_credentials only exists if a resource_instance_id was given.
    if cloud.service_credentials and data.get("access_key_id") and data.get(
            "secret_access_key"):
        cloud.service_credentials.access_key_id = encrypt_api_key(
            data["access_key_id"])
        cloud.service_credentials.secret_access_key = encrypt_api_key(
            data["secret_access_key"])

    doosradb.session.add(cloud)
    doosradb.session.commit()
    task_process_new_ibm_cloud_account.apply_async(queue='sync_queue',
                                                   args=[cloud.id])
    return Response(json.dumps(cloud.to_json()),
                    status=201,
                    mimetype="application/json")
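For context, a hypothetical client call against this endpoint; the URL path is an assumption, but the payload keys mirror those read from request.get_json() above:

import requests

payload = {
    "name": "my-ibm-cloud",
    "api_key": "<IBM Cloud API key>",
    "resource_instance_id": "<COS resource instance ID>",  # optional
    "access_key_id": "<HMAC access key>",                  # optional, only
    "secret_access_key": "<HMAC secret key>",              # used as a pair
}
response = requests.post("https://<host>/v1/ibm/clouds", json=payload)
print(response.status_code)  # 201 on success, 409 on a duplicate name or API key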
Example 4
def construct_nas_migration_user_data(instance, region, instance_id, cloud_id):
    """
    Create a User Data Script for NAS Migration and create Volumes as per NAS Volumes
    """
    volume_attachments = []
    for ind_, disk in enumerate(
            instance["nas_migration_info"]["cm_meta_data"].get("disks", [])):
        # disk["size"] appears to carry a unit suffix; [:-1] drops it to get
        # the numeric capacity.
        volume_attachments.append(
            get_volume_attachment_dict(capacity=disk["size"][:-1],
                                       zone=instance["zone"],
                                       name=instance["name"],
                                       index_=ind_))

    insert_volume_in_db(instance_id,
                        volumes_json=volume_attachments,
                        region=region,
                        cloud_id=cloud_id)
    migration_host = os.environ.get("DB_MIGRATION_CONTROLLER_HOST", "")
    # Strip the URL scheme and any trailing slash to get a bare host.
    if migration_host.startswith("https://"):
        migration_host = migration_host.replace("https://", "")
    elif migration_host.startswith("http://"):
        migration_host = migration_host.replace("http://", "")
    if migration_host.endswith("/"):
        migration_host = migration_host[:-1]

    nas_migration_script = NAS_MIG_CONSTS.format(
        user_id=instance["nas_migration_info"]["cm_meta_data"]["user_id"],
        migration_host=migration_host,
        vpc_backend_host=os.environ.get("VPCPLUS_LINK"),
        trg_migrator_name=f"trg-{region}-{instance['name']}",
        src_migrator_name=instance["nas_migration_info"]["cm_meta_data"]
        ["migrator_name"],
        instance_type=os.environ.get("DB_MIGRATION_INSTANCE_TYPE"),
        disks=json.dumps(
            instance["nas_migration_info"]["cm_meta_data"]["disks"]))

    ibm_instance = IBMInstance.query.get(instance_id)
    ibm_instance.user_data = encrypt_api_key(nas_migration_script)
    doosradb.session.commit()
    LOGGER.info(
        f"Volume migration requirements added for instance {instance_id} (NAS migration)"
    )
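get_volume_attachment_dict is not shown in these examples. A hypothetical reconstruction of the shape it returns, inferred only from the keys the surrounding code reads (capacity, zone, name, volume_index, is_migration_enabled):

def get_volume_attachment_dict(capacity, zone, name, index_):
    # Hypothetical sketch: every key below is inferred from usage in these
    # examples, not taken from the real helper.
    return {
        "name": "{}-volume-{}".format(name, index_),
        "capacity": int(capacity),
        "zone": zone,
        "volume_index": index_ + 1,
        "is_migration_enabled": True,
    }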
Example 5
def __init__(self, name, api_key, project_id):
    self.id = str(uuid.uuid4().hex)
    self.name = name
    self.api_key = encrypt_api_key(api_key)
    self.status = "AUTHENTICATING"
    self.project_id = project_id
Example 6
def construct_user_data_script(instance, ibm_cloud, region, instance_id):
    """
    Create user_data script from COS files and a disk for linux helper migration.
    Fifth Volume is added directly to instance as IBM is supporting more than four volumes with all profiles.
    :return:
    """
    if not instance.get("volume_attachments"):
        return

    api_key = decrypt_api_key(ibm_cloud.api_key)

    # sorting in ascending order by volume capacity
    volumes = instance["volume_attachments"]
    sorted_volumes = sorted(volumes, key=lambda i: i['capacity'])
    instance["volume_attachments"] = sorted_volumes

    # TODO: This is what was considered for Linux (A1)
    attach_volumes = ' '.join([
        str(volume["volume_index"])
        for volume in instance["volume_attachments"]
        if volume.get("volume_index") and volume.get("is_migration_enabled")
    ])
    try:
        attach_volumes_capacity = ' '.join([
            str(volume["capacity"])
            for volume in instance["volume_attachments"] if
            volume.get("volume_index") and volume.get("is_migration_enabled")
        ])
    except KeyError:
        return

    # TODO: This was considered for Windows (A2)
    window_vhds_index = [
        volume["volume_index"] for volume in instance["volume_attachments"]
        if volume.get("volume_index")
    ]
    # TODO: Need to consider any of (A1, A2)

    volume_mig_task = SecondaryVolumeMigrationTask(instance_id=instance_id)
    doosradb.session.add(volume_mig_task)
    doosradb.session.commit()

    if "WINDOWS" in instance.get("original_operating_system_name", "").upper() or \
            "WINDOWS" in instance.get("original_image", "").upper() or \
            "WINDOWS" in instance["image"].get("public_image", "").upper() or \
            "WINDOWS" in instance["image"].get("vpc_image_name", "").upper():
        web_hook_uri = os.environ.get(
            "VPCPLUS_LINK"
        ) + "v1/ibm/instances/secondary-volume-migration/windows/" + volume_mig_task.id
        user_data_script = WINDOWS_MIG_REQ.format(
            API_KEY=api_key,
            REGION=region,
            BUCKET=instance["image"]["bucket_name"],
            VHDS_INDEX=", ".join(repr(item) for item in window_vhds_index),
            INSTANCE_ID=instance_id,
            WEB_HOOK_URI=web_hook_uri,
            VERSION=VERSION,
            GENERATION=GENERATION)
    else:
        new_volume_json = attach_additional_volume(
            instance["volume_attachments"], instance_id, ibm_cloud.id, region)

        operating_system = return_class(
            instance["image"].get("public_image")
            or instance["image"].get("vpc_image_name")
            or instance.get("original_operating_system_name"))

        packages = operating_system.qemu_package
        for pkg in operating_system.PACKAGES:
            packages = packages + " " + pkg

        data_mig_req_string = DATA_MIG_REQUIREMENTS.format(
            SVM_WORKING_DISK=str(new_volume_json[VOLUME][CAPACITY]) + "G",
            ATTACHED_VOLUME_COUNT=attach_volumes,
            ATTACHED_VOLUMES_CAPACITY=attach_volumes_capacity,
            INSTANCE_NAME=instance["name"],
            VOLUME_NAME=new_volume_json["name"],
            PACKAGES=packages,
            REGION=region,
            VERSION=VERSION,
            BUCKET=instance["image"]["bucket_name"],
            WEB_HOOK_URI=os.environ.get("VPCPLUS_LINK") +
            "v1/ibm/instances/secondary_volume_migration/" +
            volume_mig_task.id,
            API_KEY=api_key,
        )
        user_data_script = "{data_mig_req_string}\n{packages}".format(
            data_mig_req_string=data_mig_req_string,
            packages=operating_system.bash_installation_string)
        insert_volume_in_db(instance_id,
                            volumes_json=[new_volume_json],
                            region=region,
                            cloud_id=ibm_cloud.id)

    ibm_instance = IBMInstance.query.get(instance_id)
    if ibm_instance.user_data:
        user_data_script = f"{decrypt_api_key(ibm_instance.user_data)}\n{user_data_script}"
    ibm_instance.user_data = encrypt_api_key(user_data_script)
    doosradb.session.commit()
    LOGGER.info(
        f"Volume migration requirements added for instance {instance_id} (secondary volume migration)"
    )
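To make the index/capacity filtering above concrete, a small self-contained illustration; note that a volume with volume_index 0 or with is_migration_enabled falsy is skipped by the truthiness checks:

volumes = [
    {"volume_index": 2, "capacity": 100, "is_migration_enabled": True},
    {"volume_index": 1, "capacity": 50, "is_migration_enabled": False},
    {"volume_index": 3, "capacity": 250, "is_migration_enabled": True},
]
enabled = [v for v in volumes
           if v.get("volume_index") and v.get("is_migration_enabled")]
print(' '.join(str(v["volume_index"]) for v in enabled))  # 2 3
print(' '.join(str(v["capacity"]) for v in enabled))      # 100 250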
Example 7
def update_ibm_cloud_account(user_id, user, cloud_id):
    """
    Update an IBM Cloud Account
    :param user_id: ID of the user initiating the request
    :param user: object of the user initiating the request
    :param cloud_id: cloud_id for Cloud object
    :return: Response object from flask package
    """
    from doosra.tasks import task_process_new_ibm_cloud_account

    data = request.get_json(force=True)
    force = request.args.get("force")

    cloud_account = doosradb.session.query(IBMCloud).filter_by(
        id=cloud_id, project_id=user.project.id).first()
    if not cloud_account:
        current_app.logger.info(
            "No IBM cloud account found with ID {}".format(cloud_id))
        return Response(status=404)

    if force:
        cloud_account.status = AUTHENTICATING
        doosradb.session.commit()

    if not cloud_account.service_credentials:
        if data.get("resource_instance_id"):
            cloud_account.service_credentials = IBMServiceCredentials(
                data["resource_instance_id"])
            doosradb.session.commit()

        # service_credentials may still be None here if no resource_instance_id
        # was sent; guard before setting the HMAC keys on it.
        if cloud_account.service_credentials and data.get(
                "access_key_id") and data.get("secret_access_key"):
            cloud_account.service_credentials.access_key_id = encrypt_api_key(
                data["access_key_id"])
            cloud_account.service_credentials.secret_access_key = encrypt_api_key(
                data["secret_access_key"])
            doosradb.session.commit()

    if cloud_account.service_credentials and \
            not cloud_account.service_credentials.access_key_id and \
            not cloud_account.service_credentials.secret_access_key:
        if data.get("access_key_id") and data.get("secret_access_key"):
            cloud_account.service_credentials.access_key_id = encrypt_api_key(
                data["access_key_id"])
            cloud_account.service_credentials.secret_access_key = encrypt_api_key(
                data["secret_access_key"])
            doosradb.session.commit()

    elif data.get("resource_instance_id") and data["resource_instance_id"] != \
            cloud_account.service_credentials.resource_instance_id:
        cloud_account.service_credentials.resource_instance_id = data[
            "resource_instance_id"]
        doosradb.session.commit()

    elif cloud_account.service_credentials and \
            data.get("access_key_id") and data["access_key_id"] != \
            decrypt_api_key(cloud_account.service_credentials.access_key_id) and \
            data.get("secret_access_key") and data["secret_access_key"] != \
            decrypt_api_key(cloud_account.service_credentials.secret_access_key):

        cloud_account.service_credentials.access_key_id = encrypt_api_key(
            data.get("access_key_id"))
        cloud_account.service_credentials.secret_access_key = encrypt_api_key(
            data.get("secret_access_key"))
        cloud_account.status = AUTHENTICATING

    if data.get("name") and data["name"] != cloud_account.name:
        existing_cloud = doosradb.session.query(IBMCloud).filter_by(
            name=data["name"], project_id=user.project.id).first()
        if existing_cloud:
            return Response("ERROR_SAME_NAME", status=409)

        cloud_account.name = data["name"]
        doosradb.session.commit()

    if data.get("api_key") and data["api_key"] != decrypt_api_key(
            cloud_account.api_key):
        existing_clouds = doosradb.session.query(IBMCloud).filter_by(
            project_id=user.project.id).all()
        for cloud in existing_clouds:
            if cloud.verify_api_key(data['api_key']):
                return Response("ERROR_SAME_API_KEY, cloud_id={}".format(
                    cloud.id),
                                status=409)

        cloud_account.api_key = encrypt_api_key(data["api_key"])
        cloud_account.status = AUTHENTICATING

    if data.get("resource_instance_id") and cloud_account.service_credentials:
        cloud_account.status = AUTHENTICATING
        cloud_account.service_credentials.resource_instance_id = data[
            "resource_instance_id"]

    if data.get("access_key_id") and data.get("secret_access_key") and \
            cloud_account.service_credentials:
        cloud_account.status = AUTHENTICATING
        cloud_account.service_credentials.access_key_id = encrypt_api_key(
            data["access_key_id"])
        cloud_account.service_credentials.secret_access_key = encrypt_api_key(
            data["secret_access_key"])

    doosradb.session.commit()
    if cloud_account.status == AUTHENTICATING:
        task_process_new_ibm_cloud_account.apply_async(queue='sync_queue',
                                                       args=[cloud_account.id])

    return jsonify(cloud_account.to_json())
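As with Example 3, a hypothetical client call; the URL, HTTP method, and the semantics of the force flag are assumptions drawn from the handler above:

import requests

# Hypothetical endpoint path; ?force=true re-triggers authentication.
response = requests.patch(
    "https://<host>/v1/ibm/clouds/<cloud_id>?force=true",
    json={"name": "renamed-cloud", "api_key": "<rotated API key>"},
)
print(response.status_code)  # 200 with the cloud JSON, or 404 if not found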