def _set_conf_from_file(self, config_file=None, environment=False):
    """Load the JSON configuration from *config_file* into this object.

    :param str config_file: Path to a JSON config file. When falsy, the
        default path returned by ``self._get_config_file()`` is used.
    :param environment: Forwarded unchanged to ``_set_conf_from_dict``.
    :raises ArdyNoFileError: If the resolved path is not an existing file.
    """
    # Fall back to the project's default config location when none is given.
    path = config_file or self._get_config_file()
    if not os.path.isfile(path):
        raise ArdyNoFileError("File {} not exist".format(path))
    logger.debug("Loading configuration from file {}".format(path))
    with open(path) as data_file:
        loaded = json.load(data_file)
    self._set_conf_from_dict(config_dict=loaded, environment=environment)
def pip_install_to_target(self, path, requirements="", local_package=None):
    """Collect the packages to deploy and install them into *path*.

    :param str path: Directory the gathered pip packages are installed into.
    :param str requirements: Name of a requirements file relative to
        ``self.get_src_path()``. When set, only the packages listed there
        are installed; when empty, no packages are gathered (the pip-freeze
        path below is intentionally disabled).
    :param local_package: A path — or list/tuple of paths — to local
        packages that must be included in the deploy as well (e.g. packages
        not available on PyPI). ``None`` adds nothing.
    """
    packages = []
    if requirements:
        requirements_path = os.path.join(self.get_src_path(), requirements)
        logger.debug('Gathering packages from requirements: {}'.format(
            requirements_path))
        if os.path.isfile(requirements_path):
            # One requirement spec per line, exactly as written in the file.
            packages.extend(self.read(requirements_path).splitlines())
        else:
            logger.debug(
                'No requirements file in {}'.format(requirements_path))
    else:
        logger.debug('Gathering pip packages')
        # Freezing the active virtualenv is disabled on purpose:
        # packages.extend(pip.operations.freeze.freeze())
    if local_package is not None:
        # Accept a single path as well as a list/tuple of paths.
        extra = local_package if isinstance(local_package, (list, tuple)) \
            else [local_package]
        packages.extend(extra)
    self._install_packages(path, packages)
def set_environment(self, environment=False):
    """Remember the deployment environment selected for this run.

    :param environment: Environment identifier (e.g. a stage name); the
        default ``False`` means "no specific environment".
    """
    self.environment = environment
    logger.debug("Setting environment {}".format(environment))
def __init__(self, *args, **kwargs):
    """Initialise the mixin and build its configuration object.

    All positional and keyword arguments are forwarded verbatim to
    ``GlobalConfig``; the result is stored on ``self.config``.
    """
    super(ConfigMixin, self).__init__()
    message = "[{}] loading config...".format(self.__class__, )
    logger.debug(message)
    self.config = GlobalConfig(*args, **kwargs)
def deploy(self):
    """Upload code to AWS Lambda.

    To use this method, first, must set the zip file with code with
    `self.set_artefact(code=code)`. Check all lambdas in our config file or
    the functions passed in command line and exist in our config file.

    If the function is upload correctly, update/create versions, alias and
    triggers.

    :return: True
    """
    # Names of the lambdas actually created/updated, for the summary log.
    lambdas_deployed = []
    for lambda_funcion in self.config.get_lambdas():
        # Deploy everything when no explicit selection was given, otherwise
        # only functions whose origin name was passed on the command line.
        start_deploy = not len(self.lambdas_to_deploy) or \
            lambda_funcion["FunctionNameOrigin"] in self.lambdas_to_deploy
        if start_deploy:
            lambdas_deployed.append(lambda_funcion["FunctionName"])
            conf = lambda_funcion.get_deploy_conf()
            # A truthy response means the function already exists remotely.
            response = self.remote_get_lambda(**conf)
            if response:
                remote_conf = response["Configuration"]
                # TODO: Diferences sometimes not return all values, check it!
                logger.info("Diferences:")
                # NOTE(review): the asymmetric defaults (False vs True) make a
                # key missing from either side always count as a difference —
                # presumably intentional, confirm before changing.
                diffkeys = [k for k in remote_conf
                            if conf.get(k, False) != remote_conf.get(k, True)
                            and k not in ['Code', ]]
                for k in diffkeys:
                    logger.info((k, ':', conf.get(k, ""), '->',
                                 remote_conf.get(k, "")))
                logger.info("START to update funcion {}".format(conf["FunctionName"]))
                # Configuration and code are pushed as two separate calls.
                self.remote_update_conf_lambada(**conf)
                result = self.remote_update_code_lambada(**conf)
                logger.debug("Funcion {} updated {}".format(conf["FunctionName"], result))
            else:
                logger.info("START to create funcion {}".format(lambda_funcion["FunctionName"]))
                result = self.remote_create_lambada(**conf)
                logger.debug("Funcion {} created {}".format(conf["FunctionName"], result))
            # Versions/alias/triggers are only touched when the create/update
            # call above reported success.
            if self.is_client_result_ok(result):
                # Check and publish version
                version = "LATEST"
                if self.config["deploy"].get("use_version", False):
                    logger.info("Publish new version of {} with conf {}".format(
                        lambda_funcion["FunctionName"],
                        json.dumps(conf, indent=4, sort_keys=True)
                    ))
                    # NOTE(review): `result` is rebound here — and again below
                    # for the alias — so later reads of `result` see whichever
                    # remote call ran last.
                    result = self.remote_publish_version(**conf)
                    version = result["Version"]
                    logger.info("Published version {}: {}".format(
                        version, json.dumps(result, indent=4, sort_keys=True)
                    ))
                # Check and publish alias
                if self.config["deploy"].get("use_alias", False):
                    alias_conf = {
                        "FunctionName": conf["FunctionName"],
                        "Description": conf["Description"],
                        "FunctionVersion": version,
                    }
                    # The alias is named after the environment when one is
                    # set, otherwise after the function itself.
                    if self.config.get_environment():
                        alias_conf.update({"Name": self.config.get_environment()})
                    else:
                        alias_conf.update({"Name": conf["FunctionName"]})
                    logger.info("Update alias of {} with conf {}".format(
                        lambda_funcion["FunctionName"],
                        json.dumps(alias_conf, indent=4, sort_keys=True)
                    ))
                    result = self.remote_update_alias(**alias_conf)
                    logger.info("Updated alias {}: {}".format(
                        conf["FunctionName"],
                        json.dumps(result, indent=4, sort_keys=True)
                    ))
                # Check and publish triggers
                logger.info("Updating Triggers for fuction {}".format(lambda_funcion["FunctionName"]))
                if lambda_funcion.get("triggers", False):
                    for trigger in lambda_funcion["triggers"].keys():
                        # NOTE(review): "FunctionArn" is read from the last
                        # rebinding of `result` (update/create, publish_version
                        # or update_alias) — confirm every one of those
                        # responses includes that key.
                        trigger_object = get_trigger(trigger, lambda_funcion,
                                                     result["FunctionArn"])
                        trigger_object.put()
    if lambdas_deployed:
        logger.info("Deploy finished. Created/updated lambdas {}".format(
            ", ".join(lambdas_deployed)))
    else:
        logger.info("No lambdas found to deploy")
    # TODO: check errors to return correct value
    return True