def _generate_docker_file_to_obtain_access_token(self):
    """Generate a Dockerfile that obtains a fresh gcloud access token.

    Copies the Google credentials into the instance's deploy directory and
    writes ``<deploy_dir>/Dockerfile.access_token``. Building that image runs
    ``gcloud beta auth application-default print-access-token`` and stores the
    token in ``/src/access_token.txt`` inside the image.
    """
    fmlogging.debug("Generating Docker file that will give new access token.")
    if self.service_obj:
        utils.update_status(
            self.service_obj.get_status_file_location(),
            "GENERATING Google ARTIFACTS for MySQL service")
    deploy_dir = ("{instance_dir}/{instance_name}").format(
        instance_dir=self.instance_prov_workdir,
        instance_name=self.instance_name)
    utils.copy_google_creds(constants.GOOGLE_CREDS_PATH, deploy_dir)
    # NOTE(review): a sed command (cmd_1) used to be built here and passed to
    # .format(cmd_1=cmd_1), but the Dockerfile template contained no {cmd_1}
    # placeholder, so it was never emitted -- the format call was a no-op.
    # It appears it was meant to blank out a stale access_token in the
    # credentials file before requesting a new one; confirm intent before
    # reinstating it. Removing the dead code does not change the output file.
    df = (
        "FROM lmecld/clis:gcloud \n"
        "COPY . /src \n"
        "COPY google-creds/gcloud /root/.config/gcloud \n"
        "WORKDIR /root/.config/gcloud \n"
        "RUN token=`/google-cloud-sdk/bin/gcloud beta auth application-default print-access-token` && \ \n"
        " echo $token > /src/access_token.txt")
    # Context manager guarantees the handle is closed even if write() raises.
    with open(deploy_dir + "/Dockerfile.access_token", "w") as fp:
        fp.write(df)
def deploy(self, deploy_type, deploy_name):
    """Deploy service instances or the application container on AWS.

    :param deploy_type: ``'service'`` provisions the RDS service instances
        declared in the task definition; any other value deploys the app.
    :param deploy_name: name of the deployment target (not used directly
        in this implementation, kept for interface compatibility).
    :returns: the first provisioned service IP for ``'service'``; the
        deployed application's IP address otherwise.
    """
    if deploy_type == 'service':
        fmlogging.debug("AWS deployer called for deploying RDS instance")
        service_ip_list = []
        for serv in self.task_def.service_data:
            serv_handler = self.services[serv['service']['type']]
            # Invoke public interface
            utils.update_status(self.service_obj.get_status_file_location(),
                                constants.DEPLOYING_SERVICE_INSTANCE)
            if self.app_obj:
                self.app_obj.update_app_status(constants.DEPLOYING_SERVICE_INSTANCE)
            service_ip = serv_handler.provision_and_setup()
            service_ip_list.append(service_ip)
            utils.update_status(self.service_obj.get_status_file_location(),
                                constants.SERVICE_INSTANCE_DEPLOYMENT_COMPLETE)
            if self.app_obj:
                self.app_obj.update_app_status(constants.SERVICE_INSTANCE_DEPLOYMENT_COMPLETE)
        # Only the last provisioned IP is persisted here -- see TODO below.
        utils.save_service_instance_ip(self.service_obj.get_status_file_location(),
                                       service_ip)
        # TODO(devkulkarni): Add support for returning multiple service IPs
        return service_ip_list[0]
    else:
        fmlogging.debug("AWS deployer called for app %s" % self.task_def.app_data['app_name'])
        app_obj = app.App(self.task_def.app_data)
        app_obj.update_app_status(constants.DEPLOYING_APP)
        app_ip_addr = self._deploy_app_container(app_obj)
        app_obj.update_app_status(constants.APP_DEPLOYMENT_COMPLETE)
        app_obj.update_app_ip(app_ip_addr)
        fmlogging.debug("AWS deployment complete.")
        fmlogging.debug("Removing temporary containers created to assist in the deployment.")
        self._cleanup(app_obj)
        # BUG FIX: previously this branch fell through and returned None;
        # the Local deployer variants return the IP, so callers expect it.
        return app_ip_addr
def build(self, build_type, build_name):
    """Build provisioning artifacts for services, or the app container.

    :param build_type: ``'service'`` builds instance artifacts for every
        declared service; ``'app'`` builds the application container.
    :param build_name: name of the build target (logged for apps).
    """
    if build_type == 'service':
        fmlogging.debug("AWS builder called for service")
        status_file = self.service_obj.get_status_file_location()
        for service_entry in self.task_def.service_data:
            handler = self.services[service_entry['service']['type']]
            utils.update_status(
                status_file,
                "BUILDING_ARTIFACTS_FOR_PROVISIONING_SERVICE_INSTANCE")
            # Invoke public interface
            handler.build_instance_artifacts()
    elif build_type == 'app':
        app_name = self.task_def.app_data['app_name']
        fmlogging.debug("Local builder called for app %s" % app_name)
        application = app.App(self.task_def.app_data)
        application.update_app_status(constants.BUILDING_APP)
        self._build_app_container(application)
def deploy(self, deploy_type, deploy_name):
    """Deploy a service instance or the application container locally.

    :param deploy_type: ``'service'`` provisions the named service;
        ``'app'`` deploys the application container.
    :param deploy_name: key into ``self.services`` for service deploys.
    :returns: the service instance IP for ``'service'``; the app's IP
        address (possibly falsy on failure) for ``'app'``.
    """
    if deploy_type == 'service':
        fmlogging.debug("Local deployer called for service %s" % deploy_name)
        # FIX: the DEPLOYING_SERVICE_INSTANCE status was written twice
        # back-to-back; a single update is sufficient.
        utils.update_status(self.service_obj.get_status_file_location(),
                            constants.DEPLOYING_SERVICE_INSTANCE)
        serv_handler = self.services[deploy_name]
        # Invoke public interface
        service_ip = serv_handler.provision_and_setup()
        utils.update_status(self.service_obj.get_status_file_location(),
                            constants.SERVICE_INSTANCE_DEPLOYMENT_COMPLETE)
        utils.save_service_instance_ip(
            self.service_obj.get_status_file_location(), service_ip)
        # TODO(devkulkarni): Add support for returning multiple service IPs
        return service_ip
    elif deploy_type == 'app':
        fmlogging.debug("Local deployer called for app %s" % self.task_def.app_data['app_name'])
        app_obj = app.App(self.task_def.app_data)
        app_obj.update_app_status(constants.DEPLOYING_APP)
        ip_addr = self._deploy_app_container(app_obj)
        if ip_addr:
            app_obj.update_app_status(constants.APP_DEPLOYMENT_COMPLETE)
            # FIX: record the IP only on success; previously a falsy
            # ip_addr was written even when deployment failed, which is
            # inconsistent with the sibling deployer implementation.
            app_obj.update_app_ip(ip_addr)
        else:
            app_obj.update_app_status(constants.DEPLOYMENT_ERROR)
        self._cleanup()
        return ip_addr
def generate(self, generate_type, service_ip_dict, service_info):
    """Generate AWS deployment artifacts for services or the application.

    :param generate_type: ``'service'`` generates artifacts for every
        declared service instance; any other value targets the app.
    :param service_ip_dict: mapping of service kind to deployed IP.
    :param service_info: raw service definitions from the task definition.
    :returns: 0 for the app path; None for the service path.

    NOTE(review): this body was reconstructed from whitespace-mangled
    source; the for-loop over services is placed at the 'service'-branch
    level (independent of app_data presence) -- confirm against history.
    """
    if generate_type == 'service':
        fmlogging.debug("AWS generator called for service")
        self.service_obj = service.Service(self.task_def.service_data[0])
        # deploy_dir = self.service_obj.get_service_prov_work_location()
        # Copy aws-creds to the service deploy directory
        cp_cmd = ("cp -r {aws_creds_path} {deploy_dir}/.").format(
            aws_creds_path=AWS_CREDS_PATH, deploy_dir=self.deploy_dir)
        fmlogging.debug("Copying aws-creds directory..")
        fmlogging.debug(cp_cmd)
        os.system(cp_cmd)
        if self.task_def.app_data:
            app_obj = app.App(self.task_def.app_data)
            app_obj.update_app_status(
                "GENERATING AWS ARTIFACTS for RDS instance")
        for serv in self.task_def.service_data:
            handler = self.services[serv['service']['type']]
            utils.update_status(
                self.service_obj.get_status_file_location(),
                "GENERATING_ARTIFACTS_FOR_PROVISIONING_SERVICE_INSTANCE")
            # Invoke public interface
            handler.generate_instance_artifacts()
    else:
        fmlogging.debug("AWS generator called for app %s" % self.task_def.app_data['app_name'])
        app_obj = app.App(self.task_def.app_data)
        app_obj.update_app_status("GENERATING AWS ARTIFACTS")
        if self.app_type == 'python':
            self._generate_for_python_app(app_obj, service_ip_dict, service_info)
        else:
            print("Application of type %s not supported." % self.app_type)
        return 0
def deploy(self, deploy_type, deploy_name):
    """Deploy a service instance or the application container locally.

    On macOS the container IP is not directly reachable, so the host
    address recorded in ``docker_host.txt`` (next to this module) is
    substituted while keeping the container's published port.

    :param deploy_type: ``'service'`` provisions the named service;
        ``'app'`` deploys the application container.
    :param deploy_name: key into ``self.services`` for service deploys.
    :returns: the service instance IP for ``'service'``; the app's IP
        address (possibly falsy on failure) for ``'app'``.
    """
    if deploy_type == 'service':
        fmlogging.debug("Local deployer called for service %s" % deploy_name)
        # FIX: the DEPLOYING_SERVICE_INSTANCE status was written twice
        # back-to-back; a single update is sufficient.
        utils.update_status(self.service_obj.get_status_file_location(),
                            constants.DEPLOYING_SERVICE_INSTANCE)
        serv_handler = self.services[deploy_name]
        # Invoke public interface
        service_ip = serv_handler.provision_and_setup()
        utils.update_status(self.service_obj.get_status_file_location(),
                            constants.SERVICE_INSTANCE_DEPLOYMENT_COMPLETE)
        utils.save_service_instance_ip(
            self.service_obj.get_status_file_location(), service_ip)
        # TODO(devkulkarni): Add support for returning multiple service IPs
        return service_ip
    elif deploy_type == 'app':
        fmlogging.debug("Local deployer called for app %s" % self.task_def.app_data['app_name'])
        app_obj = app.App(self.task_def.app_data)
        app_obj.update_app_status(constants.DEPLOYING_APP)
        ip_addr = self._deploy_app_container(app_obj)
        if ip_addr:
            app_obj.update_app_status(constants.APP_DEPLOYMENT_COMPLETE)
            import platform
            if platform.system() == 'Darwin':
                # Replace the container address with the Docker host's,
                # preserving the application's published port.
                ip_addr_parts = ip_addr.split(":")
                app_port = ip_addr_parts[1].strip()
                docker_host_fp = os.path.dirname(
                    sys.modules[__name__].__file__)
                # FIX: the file handle was previously leaked (never closed).
                with open(docker_host_fp + "/docker_host.txt", "r") as fp:
                    line = fp.readline()
                parts = line.split("=")
                ip_addr = parts[1].strip() + ":" + app_port
            app_obj.update_app_ip(ip_addr)
        else:
            app_obj.update_app_status(constants.DEPLOYMENT_ERROR)
        self._cleanup()
        return ip_addr
def run(self):
    """Drive the full lifecycle for this task: delete, secure, or deploy.

    For the default (deploy) path, a two-step protocol is followed:
    first every service is generated/built/deployed and its IP collected,
    then the application is generated/built/deployed with those IPs.
    """
    fmlogging.debug("Starting build/deploy for %s" % self.name)
    if self.action == "delete":
        fmlogging.debug("Manager -- delete")
        gen.Generator(self.task_def).generate_for_delete(self.info)
        bld.Builder(self.task_def).build_for_delete(self.info)
        dep.Deployer(self.task_def).deploy_for_delete(self.info)
    elif self.action == "secure":
        fmlogging.debug("Manager -- secure")
        gen.Generator(self.task_def).generate_to_secure(self.info)
        bld.Builder(self.task_def).build_to_secure(self.info)
        dep.Deployer(self.task_def).deploy_to_secure(self.info)
    else:
        if self.task_def.app_data:
            app_obj = app.App(self.task_def.app_data)
            app_obj.update_app_status("name::" + self.name)
            app_obj.update_app_status("cloud::" + self.task_def.cloud_data['type'])
            # NOTE(review): app_cont_name is assigned but not used in this
            # method -- possibly consumed elsewhere or dead; confirm.
            app_cont_name = app_obj.get_cont_name()
        # Two-step protocol
        # Step 1: For each service build and deploy. Collect the IP address of deployed service
        # Step 2: Generate, build, deploy application. Pass the IP addresses of the services
        # Step 1:
        service_ip_addresses = {}
        services = self.task_def.service_data
        cloud = self.task_def.cloud_data['type']
        for serv in services:
            service_obj = service.Service(serv)
            service_name = service_obj.get_service_name()
            service_kind = service_obj.get_service_type()
            utils.update_status(service_obj.get_status_file_location(),
                                "name::" + service_name)
            utils.update_status(service_obj.get_status_file_location(),
                                "cloud::" + cloud)
            try:
                # NOTE(review): generate() is invoked once per service but
                # receives the full services list each time -- presumably
                # regenerates all service artifacts per iteration; verify.
                gen.Generator(self.task_def).generate('service',
                                                      service_ip_addresses,
                                                      services)
                bld.Builder(self.task_def).build(build_type='service',
                                                 build_name=service_name)
                serv_ip_addr = dep.Deployer(self.task_def).deploy(
                    deploy_type='service', deploy_name=service_kind)
                fmlogging.debug("IP Address of the service:%s" % serv_ip_addr)
                service_ip_addresses[service_kind] = serv_ip_addr
            except Exception as e:
                # Log and propagate so the caller can mark the task failed.
                fmlogging.error(e)
                raise e
        # Step 2:
        # - Generate, build, deploy app
        if self.task_def.app_data:
            # Allow time for service container to be deployed and started
            time.sleep(5)
            try:
                gen.Generator(self.task_def).generate('app',
                                                      service_ip_addresses,
                                                      services)
                bld.Builder(self.task_def).build(build_type='app',
                                                 build_name=self.task_def.app_data['app_name'])
                result = dep.Deployer(self.task_def).deploy(
                    deploy_type='app',
                    deploy_name=self.task_def.app_data['app_name'])
                fmlogging.debug("Result:%s" % result)
            except Exception as e:
                # Log and propagate so the caller can mark the task failed.
                fmlogging.error(e)
                raise e