def export_instance(request, instance_id, instance_name, status_check_interval):
    """Export an EC2 instance image to the project's S3 bucket.

    Ensures the per-project bucket exists (creating it and granting the
    required ACL if missing), starts an instance-export task, then polls
    the task until it reports ``completed``.

    :param request: request object carrying ``user.tenant_id``
    :param instance_id: ID of the EC2 instance to export
    :param instance_name: name used for the exported image object
    :param status_check_interval: seconds to wait between status polls
    :returns: the final export-task dict (``State`` == ``"completed"``)
    :raises KeyError: if a polled task has no ``State`` field
    """
    project_id = request.user.tenant_id
    # API keys are looked up for side-effect parity with siblings; only
    # region_name is needed here (for bucket creation).
    aws_access_key_id, aws_secret_access_key, region_name = utils.get_api_keys(
        project_id)

    # Ensure the per-project bucket exists before the export writes to it.
    buckets = s3.list_buckets(request)
    if project_id not in buckets:
        s3.create_bucket(request, project_id, region_name)
        s3.grant_bucket_acl(request, project_id)

    task_id = ec2.export_instance_to_s3(request, instance_id, project_id,
                                        instance_name)

    # TODO: replace this polling loop with
    # oslo_service.loopingcall.FixedIntervalLoopingCall.
    while True:
        task = transport.get_export_task(request, task_id)
        task_state = task.get("State")
        if task_state is None:
            raise KeyError("export instance task state is None")
        LOG.debug("Export Task State : {}".format(task_state))
        # Check completion BEFORE sleeping so we do not wait one extra
        # interval after the export has already finished.
        if task_state == "completed":
            LOG.debug("VM export ready!!")
            break
        sleep(status_check_interval)
    return task
def ec2_resource(request):
    """Return a boto3 EC2 resource bound to the project's AWS credentials.

    Credentials and region are resolved from the tenant's stored API keys.

    :param request: request object carrying ``user.tenant_id``
    :returns: a ``boto3`` EC2 service resource
    """
    project_id = request.user.tenant_id
    # Consistency fix: siblings (s3_client, upload_to_s3, export_instance)
    # call utils.get_api_keys; the bare get_api_keys name was an outlier.
    aws_access_key_id, aws_secret_access_key, region_name = utils.get_api_keys(
        project_id)
    session = boto3.session.Session(
        aws_access_key_id=aws_access_key_id,
        aws_secret_access_key=aws_secret_access_key,
        region_name=region_name)
    return session.resource("ec2")
def s3_client(request):
    """Build a boto3 S3 client using the tenant's stored API credentials.

    :param request: request object carrying ``user.tenant_id``
    :returns: a ``boto3`` S3 client for the tenant's region
    """
    tenant = request.user.tenant_id
    access_key, secret_key, region = utils.get_api_keys(tenant)
    # Intermediate names instead of keyword-mirroring locals; same session
    # construction and client type as the original.
    session = boto3.session.Session(
        aws_access_key_id=access_key,
        aws_secret_access_key=secret_key,
        region_name=region)
    return session.client("s3")
def upload_to_s3(request, target_file, object_name):
    """Upload a file to the tenant's S3 bucket, creating the bucket first
    if it does not already exist.

    :param request: request object carrying ``user.tenant_id``
    :param target_file: path of the local file to upload
    :param object_name: key under which the object is stored
    :returns: result of ``s3.upload_object``
    """
    tenant = request.user.tenant_id
    _, _, region = utils.get_api_keys(tenant)[0], \
        utils.get_api_keys(tenant)[1], utils.get_api_keys(tenant)[2]
    # Provision the per-tenant bucket on first use.
    existing = s3.list_buckets(request)
    if tenant not in existing:
        s3.create_bucket(request, tenant, region)
        s3.grant_bucket_acl(request, tenant)
    return s3.upload_object(request, target_file, object_name)