def deploy(self, deploy_type, deploy_name):
    """Deploy RDS service instances or the application container on AWS.

    :param deploy_type: 'service' provisions the RDS instances declared in
        the task definition; any other value deploys the app container.
    :param deploy_name: name of the entity being deployed (not used here).
    :returns: IP of the first provisioned service instance for the
        'service' path; None for the app path.
    """
    if deploy_type == 'service':
        fmlogging.debug("AWS deployer called for deploying RDS instance")
        deployed_ips = []
        for serv in self.task_def.service_data:
            handler = self.services[serv['service']['type']]
            # Invoke public interface
            utils.update_status(self.service_obj.get_status_file_location(),
                                constants.DEPLOYING_SERVICE_INSTANCE)
            if self.app_obj:
                self.app_obj.update_app_status(
                    constants.DEPLOYING_SERVICE_INSTANCE)
            instance_ip = handler.provision_and_setup()
            deployed_ips.append(instance_ip)
            utils.update_status(self.service_obj.get_status_file_location(),
                                constants.SERVICE_INSTANCE_DEPLOYMENT_COMPLETE)
            if self.app_obj:
                self.app_obj.update_app_status(
                    constants.SERVICE_INSTANCE_DEPLOYMENT_COMPLETE)
            utils.save_service_instance_ip(
                self.service_obj.get_status_file_location(), instance_ip)
        # TODO(devkulkarni): Add support for returning multiple service IPs
        return deployed_ips[0]
    else:
        fmlogging.debug("AWS deployer called for app %s" %
                        self.task_def.app_data['app_name'])
        application = app.App(self.task_def.app_data)
        application.update_app_status(constants.DEPLOYING_APP)
        app_ip_addr = self._deploy_app_container(application)
        application.update_app_status(constants.APP_DEPLOYMENT_COMPLETE)
        application.update_app_ip(app_ip_addr)
        fmlogging.debug("AWS deployment complete.")
        fmlogging.debug("Removing temporary containers created to assist in the deployment.")
        self._cleanup(application)
def deploy(self, deploy_type, deploy_name):
    """Deploy a service instance or the application container locally.

    :param deploy_type: 'service' or 'app'.
    :param deploy_name: for 'service', the key into self.services; for
        'app', the app name (used for logging only).
    :returns: the service instance IP for 'service'; the app container IP
        (possibly falsy on failure) for 'app'; None otherwise.
    """
    if deploy_type == 'service':
        fmlogging.debug("Local deployer called for service %s" % deploy_name)
        serv_handler = self.services[deploy_name]
        # Fix: the original wrote DEPLOYING_SERVICE_INSTANCE twice in a row;
        # a single status update is sufficient.
        utils.update_status(self.service_obj.get_status_file_location(),
                            constants.DEPLOYING_SERVICE_INSTANCE)
        # Invoke public interface
        service_ip = serv_handler.provision_and_setup()
        utils.update_status(self.service_obj.get_status_file_location(),
                            constants.SERVICE_INSTANCE_DEPLOYMENT_COMPLETE)
        utils.save_service_instance_ip(
            self.service_obj.get_status_file_location(), service_ip)
        # TODO(devkulkarni): Add support for returning multiple service IPs
        return service_ip
    elif deploy_type == 'app':
        fmlogging.debug("Local deployer called for app %s" %
                        self.task_def.app_data['app_name'])
        app_obj = app.App(self.task_def.app_data)
        app_obj.update_app_status(constants.DEPLOYING_APP)
        ip_addr = self._deploy_app_container(app_obj)
        if ip_addr:
            app_obj.update_app_status(constants.APP_DEPLOYMENT_COMPLETE)
            # Fix: record the IP only on success; the original also wrote a
            # falsy ip_addr on the error path.
            app_obj.update_app_ip(ip_addr)
        else:
            app_obj.update_app_status(constants.DEPLOYMENT_ERROR)
        self._cleanup()
        return ip_addr
def error_update(self):
    """Mark the app's status as DEPLOYMENT_ERROR when app data exists.

    Removed the unused locals (app_name, location) and the dead
    commented-out cleanup calls that referenced them.
    """
    if self.task_def.app_data:
        app_obj = app.App(self.task_def.app_data)
        app_obj.update_app_status(constants.DEPLOYMENT_ERROR)
def build(self, build_type, build_name):
    """Build artifacts locally for a service or an application container.

    :param build_type: 'service' or 'app'.
    :param build_name: name of the entity being built (logged for services).
    :returns: 0 in all cases.
    """
    if build_type == 'service':
        fmlogging.debug("Local builder called for service %s" % build_name)
        self._build_service_container()
    elif build_type == 'app':
        fmlogging.debug("Local builder called for app %s" %
                        self.task_def.app_data['app_name'])
        application = app.App(self.task_def.app_data)
        application.update_app_status(constants.BUILDING_APP)
        self._build_app_container(application)
    return 0
def generate(self, generate_type, service_ip_dict):
    """Generate local-deployment artifacts for a service or an app.

    :param generate_type: 'service' or 'app'.
    :param service_ip_dict: mapping of service type -> IP, passed through
        to the Python-app generator.
    :returns: 0 in all cases.
    """
    if generate_type == 'service':
        self._generate_for_service()
    elif generate_type == 'app':
        application = app.App(self.task_def.app_data)
        application.update_app_status("GENERATING artifacts for local deployment")
        fmlogging.debug("Local generator called for app %s" %
                        self.task_def.app_data['app_name'])
        if self.app_type == 'python':
            self._generate_for_python_app(service_ip_dict)
        else:
            print("Application of type %s not supported." % self.app_type)
    return 0
def generate(self, build_type, service_ip_dict, service_info):
    """Generate Google Cloud artifacts for services or the application.

    :param build_type: 'service' generates Cloud SQL instance artifacts;
        anything else generates app artifacts.
    :param service_ip_dict: mapping of service type -> IP, forwarded to the
        Python-app generator.
    :param service_info: service metadata forwarded to the app generator.
    :returns: 0 in all cases.
    """
    if build_type == 'service':
        fmlogging.debug("Google generator called for service")
        if self.task_def.app_data:
            application = app.App(self.task_def.app_data)
            application.update_app_status(
                "GENERATING Google ARTIFACTS for Cloud SQL instance")
        for serv in self.task_def.service_data:
            handler = self.services[serv['service']['type']]
            # Invoke public interface
            handler.generate_instance_artifacts()
    else:
        fmlogging.debug("Google generator called for app %s" %
                        self.task_def.app_data['app_name'])
        application = app.App(self.task_def.app_data)
        # Sanity check for google
        self._sanity_check(application)
        application.update_app_status("GENERATING Google ARTIFACTS for App")
        if self.app_type == 'python':
            self._generate_for_python_app(application, service_ip_dict,
                                          service_info)
        else:
            print("Application of type %s not supported." % self.app_type)
    return 0
def generate(self, generate_type, service_ip_dict, service_info):
    """Generate AWS artifacts for RDS service instances or the application.

    :param generate_type: 'service' generates RDS provisioning artifacts;
        anything else generates app artifacts.
    :param service_ip_dict: mapping of service type -> IP, forwarded to the
        Python-app generator.
    :param service_info: service metadata forwarded to the app generator.
    :returns: 0 in all cases.
    """
    if generate_type == 'service':
        fmlogging.debug("AWS generator called for service")
        self.service_obj = service.Service(self.task_def.service_data[0])
        # Copy aws-creds to the service deploy directory
        cp_cmd = ("cp -r {aws_creds_path} {deploy_dir}/.").format(
            aws_creds_path=AWS_CREDS_PATH,
            deploy_dir=self.deploy_dir)
        fmlogging.debug("Copying aws-creds directory..")
        fmlogging.debug(cp_cmd)
        # NOTE(review): os.system on an interpolated path; acceptable for
        # trusted local paths, but subprocess.run with an argument list
        # would be safer if either path could contain shell metacharacters.
        os.system(cp_cmd)
        if self.task_def.app_data:
            application = app.App(self.task_def.app_data)
            application.update_app_status(
                "GENERATING AWS ARTIFACTS for RDS instance")
        for serv in self.task_def.service_data:
            handler = self.services[serv['service']['type']]
            utils.update_status(
                self.service_obj.get_status_file_location(),
                "GENERATING_ARTIFACTS_FOR_PROVISIONING_SERVICE_INSTANCE")
            # Invoke public interface
            handler.generate_instance_artifacts()
    else:
        fmlogging.debug("AWS generator called for app %s" %
                        self.task_def.app_data['app_name'])
        application = app.App(self.task_def.app_data)
        application.update_app_status("GENERATING AWS ARTIFACTS")
        if self.app_type == 'python':
            self._generate_for_python_app(application, service_ip_dict,
                                          service_info)
        else:
            print("Application of type %s not supported." % self.app_type)
    return 0
def build(self, build_type, build_name):
    """Build AWS artifacts for service instances or the app container.

    :param build_type: 'service' or 'app'.
    :param build_name: name of the entity being built (not used here).
    """
    if build_type == 'service':
        fmlogging.debug("AWS builder called for service")
        for serv in self.task_def.service_data:
            serv_handler = self.services[serv['service']['type']]
            utils.update_status(
                self.service_obj.get_status_file_location(),
                "BUILDING_ARTIFACTS_FOR_PROVISIONING_SERVICE_INSTANCE")
            # Invoke public interface
            serv_handler.build_instance_artifacts()
    elif build_type == 'app':
        # Fix: the message previously said "Local builder" — a copy-paste
        # error; this is the AWS builder.
        fmlogging.debug("AWS builder called for app %s" %
                        self.task_def.app_data['app_name'])
        app_obj = app.App(self.task_def.app_data)
        app_obj.update_app_status(constants.BUILDING_APP)
        self._build_app_container(app_obj)
def build(self, build_type, build_name):
    """Build Google Cloud artifacts for service instances or the app.

    :param build_type: 'service' or 'app'; other values are logged and
        ignored.
    :param build_name: name of the entity being built (not used here).
    :raises Exception: re-raises any failure from the app container builds.
    """
    if build_type == 'service':
        fmlogging.debug("Google builder called for service")
        for serv in self.task_def.service_data:
            serv_handler = self.services[serv['service']['type']]
            # Invoke public interface
            serv_handler.build_instance_artifacts()
    elif build_type == 'app':
        fmlogging.debug("Google builder called for app %s" %
                        self.task_def.app_data['app_name'])
        app_obj = app.App(self.task_def.app_data)
        try:
            self._build_app_container(app_obj)
            self._build_first_time_container(app_obj)
        except Exception as e:
            fmlogging.error(e)
            # Fix: bare raise preserves the original traceback cleanly,
            # whereas `raise e` re-raises from the current frame.
            raise
    else:
        fmlogging.debug("Build type %s not supported." % build_type)
def __init__(self, task_def):
    """Set up the AWS deployer from a task definition.

    Populates app metadata when app_data is present, registers the MySQL
    service handler when a mysql service is declared, and opens the local
    Docker client connection.
    """
    self.task_def = task_def
    self.services = {}
    # Empty string acts as the "no app" sentinel; truth-tested elsewhere.
    self.app_obj = ''
    if self.task_def.app_data:
        self.app_obj = app.App(self.task_def.app_data)
        self.app_dir = task_def.app_data['app_location']
        self.app_name = task_def.app_data['app_name']
    if task_def.service_data:
        self.service_obj = service.Service(task_def.service_data[0])
        if self.service_obj.get_service_type() == 'mysql':
            self.services['mysql'] = awsh.MySQLServiceHandler(self.task_def)
    self.docker_handler = docker_lib.DockerLib()
    self.docker_client = Client(base_url='unix://var/run/docker.sock',
                                version='1.18')
def deploy(self, deploy_type, deploy_name):
    """Deploy a service instance or the application container locally,
    rewriting the container IP with the Docker host address on macOS.

    :param deploy_type: 'service' or 'app'.
    :param deploy_name: for 'service', the key into self.services; for
        'app', the app name (used for logging only).
    :returns: the service instance IP for 'service'; the (possibly
        host-rewritten) app address for 'app'; None otherwise.
    """
    if deploy_type == 'service':
        fmlogging.debug("Local deployer called for service %s" % deploy_name)
        serv_handler = self.services[deploy_name]
        # Fix: the original wrote DEPLOYING_SERVICE_INSTANCE twice in a row;
        # a single status update is sufficient.
        utils.update_status(self.service_obj.get_status_file_location(),
                            constants.DEPLOYING_SERVICE_INSTANCE)
        # Invoke public interface
        service_ip = serv_handler.provision_and_setup()
        utils.update_status(self.service_obj.get_status_file_location(),
                            constants.SERVICE_INSTANCE_DEPLOYMENT_COMPLETE)
        utils.save_service_instance_ip(
            self.service_obj.get_status_file_location(), service_ip)
        # TODO(devkulkarni): Add support for returning multiple service IPs
        return service_ip
    elif deploy_type == 'app':
        fmlogging.debug("Local deployer called for app %s" %
                        self.task_def.app_data['app_name'])
        app_obj = app.App(self.task_def.app_data)
        app_obj.update_app_status(constants.DEPLOYING_APP)
        ip_addr = self._deploy_app_container(app_obj)
        if ip_addr:
            app_obj.update_app_status(constants.APP_DEPLOYMENT_COMPLETE)
            import platform
            if platform.system() == 'Darwin':
                # On macOS the container IP is not directly reachable;
                # substitute the Docker host IP recorded in docker_host.txt,
                # keeping the container's port.
                ip_addr_parts = ip_addr.split(":")
                app_port = ip_addr_parts[1].strip()
                docker_host_fp = os.path.dirname(
                    sys.modules[__name__].__file__)
                # Fix: use a context manager; the original leaked the file
                # handle (open without close).
                with open(docker_host_fp + "/docker_host.txt", "r") as fp:
                    line = fp.readline()
                parts = line.split("=")
                ip_addr = parts[1].strip() + ":" + app_port
            app_obj.update_app_ip(ip_addr)
        else:
            app_obj.update_app_status(constants.DEPLOYMENT_ERROR)
        self._cleanup()
        return ip_addr
def __init__(self, task_def):
    """Set up the AWS generator state from a task definition.

    Service data establishes the instance name/version/workdir and the
    MySQL handler; when app data is also present it overrides the
    provisioning workdir and fills in the app metadata.
    """
    self.task_def = task_def
    self.instance_name = ''
    self.instance_version = ''
    self.app_name = ''
    self.app_version = ''
    self.services = {}
    self.service_handler = ''
    self.instance_prov_workdir = ''
    self.app_variables = ''
    # Set values using service_data first
    if task_def.service_data:
        self.service_obj = service.Service(task_def.service_data[0])
        self.instance_prov_workdir = (
            self.service_obj.get_service_prov_work_location())
        self.instance_name = self.service_obj.get_service_name()
        self.instance_version = self.service_obj.get_service_version()
        if self.service_obj.get_service_type() == 'mysql':
            self.services['mysql'] = awsh.MySQLServiceHandler(self.task_def)
    # If app_data is present overwrite the previously set values
    if self.task_def.app_data:
        app_data = task_def.app_data
        self.app_type = app_data['app_type']
        self.app_dir = app_data['app_location']
        self.app_name = app_data['app_name']
        self.app_version = app_data['app_version']
        self.instance_prov_workdir = (
            app_data['app_location'] + "/" + app_data['app_name'])
        self.entry_point = app.App(app_data).get_entrypoint_file_name()
        if 'app_variables' in app_data:
            self.app_variables = app_data['app_variables']
    self.deploy_dir = self.instance_prov_workdir
    self.docker_handler = docker_lib.DockerLib()
def __init__(self, task_def):
    """Set up the Google handler state from a task definition.

    Derives the names of the helper containers (access-token and
    create-db) from the app name/version, and registers the MySQL service
    handler when a mysql service is declared.
    """
    self.task_def = task_def
    if task_def.app_data:
        app_data = task_def.app_data
        self.app_dir = app_data['app_location']
        self.app_name = app_data['app_name']
        self.app_version = app_data['app_version']
        # Helper-container names are made unique per app name + version.
        self.access_token_cont_name = ("google-access-token-cont-" +
                                       self.app_name + "-" +
                                       self.app_version)
        self.create_db_cont_name = ("google-create-db-" + self.app_name +
                                    "-" + self.app_version)
        self.app_obj = app.App(self.task_def.app_data)
    self.services = {}
    if task_def.service_data:
        self.service_obj = service.Service(task_def.service_data[0])
        if self.service_obj.get_service_type() == 'mysql':
            self.services['mysql'] = gh.MySQLServiceHandler(self.task_def)
    self.docker_handler = docker_lib.DockerLib()
def __init__(self, task_def):
    """Set up the Google generator state from a task definition.

    Fix: self.app_variables now defaults to '' (matching the AWS
    generator's __init__); previously it was only assigned when app_data
    contained an 'app_variables' key, leaving the attribute undefined
    otherwise and risking an AttributeError in later use.
    """
    self.task_def = task_def
    self.instance_name = ''
    self.instance_version = ''
    self.instance_prov_workdir = ''
    self.app_variables = ''
    if task_def.app_data:
        self.app_type = task_def.app_data['app_type']
        self.app_dir = task_def.app_data['app_location']
        self.app_name = task_def.app_data['app_name']
        self.entry_point = app.App(
            task_def.app_data).get_entrypoint_file_name()
        if 'app_variables' in task_def.app_data:
            self.app_variables = task_def.app_data['app_variables']
    self.services = {}
    if task_def.service_data:
        self.service_obj = service.Service(task_def.service_data[0])
        self.service_details = ''
        if self.service_obj.get_service_type() == 'mysql':
            self.services['mysql'] = gh.MySQLServiceHandler(self.task_def)
    self.docker_handler = docker_lib.DockerLib()
def run(self):
    """Drive the full generate/build/deploy pipeline for this task.

    Three actions are handled: 'delete', 'secure', and the default
    build-and-deploy flow. The default flow is a two-step protocol:
      1. build and deploy each declared service, collecting its IP;
      2. generate, build, and deploy the application, passing the
         collected service IPs.

    :raises Exception: re-raises any failure from the per-service or app
        generate/build/deploy steps (after logging it).
    """
    fmlogging.debug("Starting build/deploy for %s" % self.name)
    if self.action == "delete":
        fmlogging.debug("Manager -- delete")
        gen.Generator(self.task_def).generate_for_delete(self.info)
        bld.Builder(self.task_def).build_for_delete(self.info)
        dep.Deployer(self.task_def).deploy_for_delete(self.info)
        return
    if self.action == "secure":
        fmlogging.debug("Manager -- secure")
        gen.Generator(self.task_def).generate_to_secure(self.info)
        bld.Builder(self.task_def).build_to_secure(self.info)
        dep.Deployer(self.task_def).deploy_to_secure(self.info)
        return

    if self.task_def.app_data:
        app_obj = app.App(self.task_def.app_data)
        app_obj.update_app_status("name::" + self.name)
        app_obj.update_app_status("cloud::" +
                                  self.task_def.cloud_data['type'])
        # NOTE(review): app_cont_name is never used afterwards — kept in
        # case get_cont_name() has side effects; confirm and drop.
        app_cont_name = app_obj.get_cont_name()

    # Two-step protocol
    # Step 1: For each service build and deploy. Collect the IP address
    #         of the deployed service.
    service_ip_addresses = {}
    services = self.task_def.service_data
    cloud = self.task_def.cloud_data['type']
    for serv in services:
        service_obj = service.Service(serv)
        service_name = service_obj.get_service_name()
        service_kind = service_obj.get_service_type()
        utils.update_status(service_obj.get_status_file_location(),
                            "name::" + service_name)
        utils.update_status(service_obj.get_status_file_location(),
                            "cloud::" + cloud)
        try:
            # NOTE(review): generate() receives the full service list on
            # every iteration — confirm this is intended rather than
            # regenerating all service artifacts per service.
            gen.Generator(self.task_def).generate('service',
                                                  service_ip_addresses,
                                                  services)
            bld.Builder(self.task_def).build(build_type='service',
                                             build_name=service_name)
            serv_ip_addr = dep.Deployer(self.task_def).deploy(
                deploy_type='service', deploy_name=service_kind)
            fmlogging.debug("IP Address of the service:%s" % serv_ip_addr)
            service_ip_addresses[service_kind] = serv_ip_addr
        except Exception as e:
            fmlogging.error(e)
            # Fix: bare raise preserves the original traceback cleanly
            # (the original used `raise e`).
            raise

    # Step 2: Generate, build, deploy the application, passing the IP
    #         addresses of the services.
    if self.task_def.app_data:
        # Allow time for service container to be deployed and started
        time.sleep(5)
        try:
            gen.Generator(self.task_def).generate('app',
                                                  service_ip_addresses,
                                                  services)
            bld.Builder(self.task_def).build(
                build_type='app',
                build_name=self.task_def.app_data['app_name'])
            result = dep.Deployer(self.task_def).deploy(
                deploy_type='app',
                deploy_name=self.task_def.app_data['app_name'])
            fmlogging.debug("Result:%s" % result)
        except Exception as e:
            fmlogging.error(e)
            raise