class AzureDataShipperEventRuleLambdaPermission(LambdaPermission):
    """Lambda permission allowing CloudWatch Events (events.amazonaws.com)
    to invoke the submit-job Lambda on behalf of the Azure data-shipper rule.
    """
    statement_id = "AllowExecutionFromAzureDataShipper"
    action = "lambda:InvokeFunction"
    # The shared submit-job Lambda that launches batch jobs
    function_name = SubmitJobLambdaFunction.get_output_attr('function_name')
    principal = "events.amazonaws.com"
    # Restrict the permission to events originating from the data-shipper rule
    source_arn = AzureDataShipperEventRule.get_output_attr('arn')
    # Provision this resource only when Azure support is enabled
    PROCESS = need_to_enable_azure()
def get_rule_engine_cloudwatch_rules_var():
    """
    Read cloudwatch rule details from the json file and build dict with required details

    Returns:
        required_rules (list): List of dict of rule details used to generate terraform variable file
    """
    rules_file = "resources/lambda_rule_engine/files/rule_engine_cloudwatch_rules.json"
    with open(rules_file, "r") as fp:
        data = fp.read()

    # Point the bundled rules at the role actually created by this installation
    data = data.replace(
        "role/pacman_ro", "role/" + BaseRole.get_input_attr('name'))
    rules = json.loads(data)

    azure_enabled = need_to_enable_azure()  # loop-invariant, hoisted
    required_rules = []
    for index, rule in enumerate(rules):
        # Skip Azure-specific rules when Azure support is not enabled
        if rule['assetGroup'] == "azure" and not azure_enabled:
            continue
        # Stagger rule schedules: minute offset cycles through 5..24
        minute = index % 20 + 5
        required_rules.append({
            'ruleId': rule['ruleUUID'],
            'ruleParams': rule['ruleParams'],
            'schedule': "cron(%s */6 * * ? *)" % minute,
        })

    return required_rules
def prepare_azure_tenants_credentias(self):
    """Build the '##'-delimited Azure tenant credential string.

    Reads ``AZURE_TENANTS`` from settings and renders each tenant as
    ``tenant:<id>,clientId:<id>,secretId:<id>``, joining entries with '##'.

    Returns:
        str: Joined credential string; empty when Azure support is disabled
             or no tenants are configured.
    """
    tenants = Settings.get('AZURE_TENANTS', [])
    if not need_to_enable_azure():
        return ""
    # str.join replaces the original manual reset-or-append accumulator
    return "##".join(
        "tenant:%s,clientId:%s,secretId:%s" % (
            tenant['tenantId'], tenant['clientId'], tenant['secretId'])
        for tenant in tenants)
class AzureDataShipperCloudWatchEventTarget(CloudWatchEventTargetResource):
    """CloudWatch Events target wiring the Azure data-shipper rule to the
    submit-job Lambda, carrying the batch-job payload for the shipper job.
    """
    rule = AzureDataShipperEventRule.get_output_attr('name')
    arn = SubmitJobLambdaFunction.get_output_attr('arn')
    target_id = 'AzureDataShipperTarget'  # Unique identifier
    # Static JSON payload delivered to the Lambda: describes the jar-based
    # batch job that ships Azure inventory data from S3 into Elasticsearch
    target_input = json.dumps({
        'jobName': "data-shipper-azure",
        'jobUuid': "data-shipper-azure",
        'jobType': "jar",
        'jobDesc': "Ship Azure Data from S3 to PacBot ES",
        'environmentVariables': [
            {'name': "CONFIG_URL", 'value': ApplicationLoadBalancer.get_api_base_url() + "/config/batch,azure-discovery/prd/latest"},
        ],
        'params': [
            {'encrypt': False, 'key': "package_hint", 'value': "com.tmobile.cso.pacman"},
            # NOTE(review): base64-encoded config credentials — presumably "user:pacman"; verify
            {'encrypt': False, 'key': "config_creds", 'value': "dXNlcjpwYWNtYW4="},
            {'encrypt': False, 'key': "datasource", 'value': "azure"},
            {'encrypt': False, 'key': "s3.data", 'value': "azure-inventory"}
        ]
    })
    # Provision this resource only when Azure support is enabled
    PROCESS = need_to_enable_azure()
class AzureDataCollectorCloudWatchEventTarget(CloudWatchEventTargetResource):
    """CloudWatch Events target wiring the Azure data-collector rule to the
    submit-job Lambda, carrying the batch-job payload for the discovery job.
    """
    rule = AzureDataCollectorEventRule.get_output_attr('name')
    arn = SubmitJobLambdaFunction.get_output_attr('arn')
    target_id = 'AzureDataCollectorTarget'  # Unique identifier
    # Static JSON payload delivered to the Lambda: describes the jar-based
    # batch job that collects Azure inventory data and uploads it to S3
    target_input = json.dumps({
        'jobName': "pacbot-azure-discovery",
        'jobUuid': "pacbot-azure-discovery",
        'jobType': "jar",
        'jobDesc': "Collects azure data and upload to S3",
        'environmentVariables': [
            {'name': "CONFIG_URL", 'value': ApplicationLoadBalancer.get_api_base_url() + "/config/batch,azure-discovery/prd/latest"},
        ],
        'params': [
            {'encrypt': False, 'key': "package_hint", 'value': "com.tmobile.pacbot"},
            {'encrypt': False, 'key': "file.path", 'value': "/home/ec2-user/azure-data"},
            # NOTE(review): base64-encoded config credentials — presumably "user:pacman"; verify
            {'encrypt': False, 'key': "config_creds", 'value': "dXNlcjpwYWNtYW4="},
            # Tenant credential string assembled from AZURE_TENANTS settings
            {'encrypt': False, 'key': "tenants", 'value': get_azure_tenants()}
        ]
    })
    # Provision this resource only when Azure support is enabled
    PROCESS = need_to_enable_azure()
class AzureDataShipperEventRule(CloudWatchEventRuleResource):
    """CloudWatch rule that triggers the Azure data-shipper job every
    6 hours at minute 11 (offset from the collector's minute 10).
    """
    name = "data-shipper-azure"
    schedule_expression = "cron(11 */6 * * ? *)"
    # The submit-job Lambda and the ES domain policy must exist before
    # the shipper can push data into Elasticsearch
    DEPENDS_ON = [SubmitJobLambdaFunction, ESDomainPolicy]
    # Provision this resource only when Azure support is enabled
    PROCESS = need_to_enable_azure()
class AzureDataCollectorEventRule(CloudWatchEventRuleResource):
    """CloudWatch rule that triggers the Azure discovery (data-collector)
    job every 6 hours at minute 10, ahead of the shipper's minute 11.
    """
    name = "azure-discovery"
    schedule_expression = "cron(10 */6 * * ? *)"
    # The submit-job Lambda must exist before the rule can target it
    DEPENDS_ON = [SubmitJobLambdaFunction]
    # Provision this resource only when Azure support is enabled
    PROCESS = need_to_enable_azure()