def _install_packages(self, path, packages):
    """Install all listed packages into the target directory via pip.

    Ignores any entry that refers to Python itself or to the deploy
    tooling (``ardy``), since those are only needed for deploying and
    not for running the code. Index options (``-i``) and comment lines
    (``#``) are skipped as well.

    :param str path: Path to copy installed pip packages to.
    :param list packages: Requirement lines as ``bytes`` (e.g. the
        output of ``pip freeze``) to be installed via pip.
    """
    # Prefixes of requirement lines that must not be installed into
    # the artefact: pip options, comments, and deploy-only packages.
    blacklist = tuple(entry.encode() for entry in ("-i", "#", "Python==", "ardy=="))

    def _filter_blacklist(package):
        # str.startswith accepts a tuple of prefixes: one C-level call
        # instead of a chain of per-entry comparisons.
        return not package.startswith(blacklist)

    for package in filter(_filter_blacklist, packages):
        package = str(package, "utf-8")
        # Editable requirements ("-e <path>") are installed as plain paths.
        if package.startswith('-e '):
            package = package.replace('-e ', '')
        logger.info('Installing {package}'.format(package=package))
        # NOTE(review): pip.main was removed from pip's public API in
        # pip 10; consider subprocess.run([sys.executable, "-m", "pip", ...]).
        pip.main(
            ['install', package, '-t', path, '--ignore-installed', '-q'])
def put(self, *args, **kwargs):
    """Deploy CloudWatch Event rules for every configured trigger and
    register this lambda as their target.

    For each trigger configuration: puts/updates the rule, grants
    ``events.amazonaws.com`` permission to invoke the function (only
    when the policy statement does not already exist), then registers
    the lambda as the rule target.
    """
    triggers_conf = self.get_triggers()
    for trigger_conf in triggers_conf:
        rule_conf = self.get_deploy_conf(trigger_conf)
        logger.info(
            "START to deploy CloudWatch Event triggers for rule {} with conf: {}"
            .format(trigger_conf['Name'], json.dumps(rule_conf, indent=4, sort_keys=True)))
        # Reuse rule_conf here; the original rebuilt it with a second
        # get_deploy_conf() call.
        self.client.put_rule(**rule_conf)
        StatementId = "{}-{}".format(self.lambda_conf["FunctionName"], trigger_conf['Name'])
        if not self.lambda_exist_policy(self.lambda_conf["FunctionName"], StatementId):
            self.awslambda.add_permission(
                Action='lambda:InvokeFunction',
                FunctionName=self.lambda_function_arn,
                Principal='events.amazonaws.com',
                StatementId=StatementId)
        target_conf = {
            'Id': self.lambda_conf["FunctionName"],
            'Arn': self.lambda_function_arn,
            'Input': trigger_conf.get("Input", ""),
        }
        logger.info("Put target CloudWatch {} with conf: {}".format(
            trigger_conf['Name'], json.dumps(target_conf, indent=4, sort_keys=True)))
        self.client.put_targets(Rule=trigger_conf['Name'], Targets=[target_conf, ])
def build_artefact(self, src_project=None):
    """Build the deploy artefact and make it available to AWS Lambda.

    Steps:
    * Build the zip artefact from the project sources.
    * Either read the file locally or upload it to S3, depending on
      ``config["deploy"]["deploy_method"]`` ("FILE" or "S3").

    :param src_project: str. Name of the folder or path of the project
        where our code lives.
    :return: dict. ``Code`` argument for the Lambda API: either
        ``{'S3Bucket', 'S3Key'}`` or ``{'ZipFile'}``.
    :raises Exception: if deploy_method is neither "S3" nor "FILE".
    """
    path_to_zip_file = self.build.run(src_project or self.config.get_projectdir())
    self.set_artefact_path(path_to_zip_file)
    deploy_method = self.config["deploy"]["deploy_method"]
    if deploy_method == "S3":
        deploy_bucket = self.config["deploy"]["deploy_bucket"]
        bucket = self.awss3.Bucket(deploy_bucket)
        try:
            self.awss3.meta.client.head_bucket(Bucket=deploy_bucket)
        except ClientError as e:
            error_code = e.response['Error']['Code']
            if error_code in ("404", "NoSuchBucket"):
                region = self.config.get("aws_credentials", {}).get("region", None)
                logger.info(
                    "Bucket not exist. Creating new one with name {} in region {}".format(deploy_bucket, region))
                bucket_conf = {}
                if region:
                    # LocationConstraint is only needed outside the default region.
                    bucket_conf = {"CreateBucketConfiguration": {'LocationConstraint': region}}
                bucket.wait_until_not_exists()
                bucket.create(**bucket_conf)
                bucket.wait_until_exists()
            else:
                # TODO: handle other errors there. Deliberately best-effort:
                # a non-404 head_bucket failure falls through to put_object.
                pass
        # basename instead of manual split on os.path.sep.
        s3_keyfile = os.path.basename(self.config["deploy"]["deploy_file"])
        bucket.put_object(
            Key=s3_keyfile,
            Body=self.build.read(self.config["deploy"]["deploy_file"])
        )
        code = {'S3Bucket': deploy_bucket, 'S3Key': s3_keyfile, }
    elif deploy_method == "FILE":
        code = {'ZipFile': self.build.read(self.config["deploy"]["deploy_file"])}
    else:
        raise Exception("No deploy_method in config")
    return code
def put(self, *args, **kwargs):
    """Subscribe this lambda to every configured SNS topic and grant
    ``sns.amazonaws.com`` permission to invoke it.

    A permission statement is only added when it does not already exist
    for the (function, topic) pair.
    """
    for topic_conf in self.get_triggers():
        topic_arn = topic_conf["TopicArn"]
        logger.info("START to deploy SNS triggers for toppic {}".format(topic_arn))
        self.client.subscribe(
            TopicArn=topic_arn,
            Protocol='lambda',
            Endpoint=self.lambda_function_arn
        )
        topic_name = topic_arn.split(":")[-1]
        StatementId = "{}-{}".format(self.lambda_conf["FunctionName"], topic_name)
        if self.lambda_exist_policy(self.lambda_conf["FunctionName"], StatementId):
            continue
        self.awslambda.add_permission(
            Action='lambda:InvokeFunction',
            FunctionName=self.lambda_function_arn,
            Principal='sns.amazonaws.com',
            # SourceArn='arn:aws:s3:::{}/*'.format(trigger_conf['bucket_name']),
            StatementId=StatementId
        )
def _run_local_lambda(self, lambda_config):
    """Execute one lambda handler locally with a mock context and log
    its result and execution time.

    :param dict lambda_config: lambda conf entry; reads "FunctionName",
        "Handler" and "Timeout".
    """
    prev_folder = os.getcwd()
    os.chdir(self.config.get_projectdir())
    sys.path.append(self.config.get_projectdir())
    try:
        lambda_name = lambda_config["FunctionName"]
        lambda_handler = self.import_function(lambda_config["Handler"])

        # Run and time the handler.
        start = time.time()
        results = lambda_handler({}, MockContext(lambda_name))
        end = time.time()
    finally:
        # Restore the working directory even if the handler raises;
        # the original left the process chdir'd into the project on error.
        os.chdir(prev_folder)

    # Print results
    logger.info("{0}".format(results))
    logger.info("\nexecution time: {:.8f}s\nfunction execution "
                "timeout: {:2}s".format(end - start, lambda_config["Timeout"]))
def remote_update_alias(self, **kwargs):
    """Update a lambda alias, creating it if it does not exist yet.

    :param kwargs: alias configuration; reads "Name", "FunctionName"
        and "FunctionVersion", passed through to update_alias /
        create_alias.
    :return: the AWS API response of the update or create call.
    :raises ClientError: for any error other than
        ResourceNotFoundException.
    """
    conf = kwargs
    try:
        logger.info("Update alias {} for function {}"
                    " with version {}".format(conf["Name"], conf["FunctionName"], conf["FunctionVersion"]))
        response = self.awslambda.update_alias(**conf)
    except ClientError as e:
        if e.response['Error']['Code'] == "ResourceNotFoundException":
            logger.info("Alias {} not exist for function {}. "
                        "Creating new one with version {}".format(conf["Name"], conf["FunctionName"],
                                                                  conf["FunctionVersion"]))
            response = self.awslambda.create_alias(**conf)
        else:
            # Re-raise unknown errors: the original swallowed them and
            # then crashed with UnboundLocalError on ``response``.
            raise
    return response
def deploy(self):
    """Upload code to AWS Lambda.

    To use this method, first, must set the zip file with code with
    `self.set_artefact(code=code)`. Check all lambdas in our config file
    or the functions passed in command line and exist in our config file.
    If the function is upload correctly, update/create versions, alias
    and triggers.

    :return: True
    """
    lambdas_deployed = []
    for lambda_funcion in self.config.get_lambdas():
        # Deploy everything when no explicit function list was given,
        # otherwise only the functions named on the command line.
        start_deploy = not len(self.lambdas_to_deploy) or \
            lambda_funcion["FunctionNameOrigin"] in self.lambdas_to_deploy
        if start_deploy:
            lambdas_deployed.append(lambda_funcion["FunctionName"])
            conf = lambda_funcion.get_deploy_conf()
            response = self.remote_get_lambda(**conf)
            if response:
                # Function already exists remotely: log config drift,
                # then update configuration and code.
                remote_conf = response["Configuration"]
                # TODO: Diferences sometimes not return all values, check it!
                logger.info("Diferences:")
                # Keys whose local and remote values differ (Code excluded).
                # NOTE(review): the asymmetric defaults (False vs True) make a
                # key missing on either side always count as a difference.
                diffkeys = [k for k in remote_conf
                            if conf.get(k, False) != remote_conf.get(k, True) and k not in ['Code', ]]
                for k in diffkeys:
                    logger.info((k, ':', conf.get(k, ""), '->', remote_conf.get(k, "")))
                logger.info("START to update funcion {}".format(conf["FunctionName"]))
                self.remote_update_conf_lambada(**conf)
                result = self.remote_update_code_lambada(**conf)
                logger.debug("Funcion {} updated {}".format(conf["FunctionName"], result))
            else:
                # Function does not exist yet: create it.
                logger.info("START to create funcion {}".format(lambda_funcion["FunctionName"]))
                result = self.remote_create_lambada(**conf)
                logger.debug("Funcion {} created {}".format(conf["FunctionName"], result))
            if self.is_client_result_ok(result):
                # Check and publish version
                # NOTE(review): "LATEST" looks like it should be AWS's
                # "$LATEST" pseudo-version — confirm downstream handling.
                version = "LATEST"
                if self.config["deploy"].get("use_version", False):
                    logger.info("Publish new version of {} with conf {}".format(
                        lambda_funcion["FunctionName"],
                        json.dumps(conf, indent=4, sort_keys=True)
                    ))
                    result = self.remote_publish_version(**conf)
                    version = result["Version"]
                    logger.info("Published version {}: {}".format(
                        version,
                        json.dumps(result, indent=4, sort_keys=True)
                    ))
                # Check and publish alias
                if self.config["deploy"].get("use_alias", False):
                    alias_conf = {
                        "FunctionName": conf["FunctionName"],
                        "Description": conf["Description"],
                        "FunctionVersion": version,
                    }
                    # Alias name defaults to the environment name when one
                    # is configured, otherwise to the function name itself.
                    if self.config.get_environment():
                        alias_conf.update({"Name": self.config.get_environment()})
                    else:
                        alias_conf.update({"Name": conf["FunctionName"]})
                    logger.info("Update alias of {} with conf {}".format(
                        lambda_funcion["FunctionName"],
                        json.dumps(alias_conf, indent=4, sort_keys=True)
                    ))
                    result = self.remote_update_alias(**alias_conf)
                    logger.info("Updated alias {}: {}".format(
                        conf["FunctionName"],
                        json.dumps(result, indent=4, sort_keys=True)
                    ))
                # Check and publish triggers
                logger.info("Updating Triggers for fuction {}".format(lambda_funcion["FunctionName"]))
                if lambda_funcion.get("triggers", False):
                    for trigger in lambda_funcion["triggers"].keys():
                        trigger_object = get_trigger(trigger, lambda_funcion, result["FunctionArn"])
                        trigger_object.put()
    if lambdas_deployed:
        logger.info("Deploy finished. Created/updated lambdas {}".format(", ".join(lambdas_deployed)))
    else:
        logger.info("No lambdas found to deploy")
    # TODO: check errors to return correct value
    return True