Example #1
File: function.py  Project: vicgud/pacbot
class SubmitJobLambdaFunction(LambdaFunctionResource):
    function_name = "datacollector"
    role = LambdaRole.get_output_attr('arn')
    handler = BATCH_JOB_FILE_NAME + ".lambda_handler"
    runtime = "python2.7"
    s3_bucket = BucketStorage.get_output_attr('bucket')
    s3_key = UploadLambdaSubmitJobZipFile.get_output_attr('id')
    environment = {
        'variables': {
            'JOB_QUEUE': BatchJobsQueue.get_input_attr('name'),
            'JOB_DEFINITION': SubmitAndRuleEngineJobDefinition.get_output_attr('arn'),
            'CONFIG_URL': ApplicationLoadBalancer.get_api_base_url() + "/config/batch,inventory/prd/latest",
            'CONFIG_CREDENTIALS': "dXNlcjpwYWNtYW4=",
            'CONFIG_SERVICE_URL': ApplicationLoadBalancer.get_http_url() + "/api/config/rule/prd/latest"
        }
    }

    DEPENDS_ON = [SubmitAndRuleEngineJobDefinition, BatchJobsQueue]
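
The CONFIG_CREDENTIALS value above is not an opaque token; it is a base64-encoded "user:password" pair used for HTTP basic auth against the config service. A quick standalone check, plain Python with no project code required:

import base64

# Decoding the literal from the environment block above.
print(base64.b64decode("dXNlcjpwYWNtYW4=").decode())  # prints: user:pacman
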
Example #2
    def get_provisioners(self):
        pacbot_build_script = os.path.join(get_terraform_scripts_dir(),
                                           'build_pacbot.py')
        upload_dir = self._create_dir_to_store_build_ap()

        local_execs = [{
            'local-exec': {
                'command': pacbot_build_script,
                'environment': {
                    'PROVIDER_FILE': get_terraform_provider_file(),
                    'APPLICATION_DOMAIN': ApplicationLoadBalancer.get_pacbot_domain_url(),
                    'PACBOT_CODE_DIR': Settings.PACBOT_CODE_DIR,
                    'DIST_FILES_UPLOAD_DIR': upload_dir,
                    'LOG_DIR': Settings.LOG_DIR,
                    'S3_BUCKET': BucketStorage.get_output_attr('bucket'),
                    'S3_KEY_PREFIX': Settings.RESOURCE_NAME_PREFIX
                },
                'interpreter': [Settings.PYTHON_INTERPRETER]
            }
        }]

        return local_execs
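
The dicts returned by get_provisioners() mirror the arguments of Terraform's "local-exec" provisioner (command, environment, interpreter). A minimal, hypothetical sketch of embedding such a list in a null_resource's Terraform JSON; the resource name, paths, and values here are placeholders, not project values:

import json

local_execs = [{
    'local-exec': {
        'command': '/path/to/build_pacbot.py',   # placeholder path
        'environment': {'LOG_DIR': '/tmp/log'},  # placeholder env vars
        'interpreter': ['python3'],
    }
}]

terraform_json = {
    "resource": {
        "null_resource": {
            "pacbot_build": {"provisioner": local_execs}  # hypothetical resource name
        }
    }
}
print(json.dumps(terraform_json, indent=2))
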
Example #3
class DataCollectorCloudWatchEventTarget(CloudWatchEventTargetResource):
    rule = DataCollectorEventRule.get_output_attr('name')
    arn = SubmitJobLambdaFunction.get_output_attr('arn')
    target_id = 'DataCollectorTarget'  # Unique identifier
    target_input = json.dumps({
        'jobName': "AWS-Data-Collector",
        'jobUuid': "pacman-aws-inventory-jar-with-dependencies",
        'jobType': "jar",
        'jobDesc': "AWS-Data-Collection",
        'environmentVariables': [
            {'name': "REDSHIFT_INFO", 'value': RedshiftCluster.get_redshift_info()},
            {'name': "REDSHIFT_URL", 'value': RedshiftCluster.get_redshift_url()}
        ],
        'params': [
            {'encrypt': False, 'key': "package_hint", 'value': "com.tmobile.cso.pacman"},
            {'encrypt': False, 'key': "accountinfo", 'value': AwsAccount.get_output_attr('account_id')},
            {'encrypt': False, 'key': "base-account", 'value': AwsAccount.get_output_attr('account_id')},
            {'encrypt': False, 'key': "discovery-role", 'value': BaseRole.get_output_attr('name')},
            {'encrypt': False, 'key': "s3", 'value': BucketStorage.get_output_attr('bucket')},
            {'encrypt': False, 'key': "s3-data", 'value': "inventory"},  # TODO: need to be changed with s3obj class
            {'encrypt': False, 'key': "s3-processed", 'value': "backup"},
            {'encrypt': False, 'key': "s3-role", 'value': BaseRole.get_output_attr('name')},
            {'encrypt': False, 'key': "s3-region", 'value': AwsRegion.get_output_attr('name')},
            {'encrypt': False, 'key': "file-path", 'value': "/home/ec2-user/data"},
            {'encrypt': False, 'key': "base-region", 'value': AwsRegion.get_output_attr('name')}
        ]
    })
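
The target_input string is delivered verbatim as the Lambda invocation event, since a CloudWatch Events target's input overrides the matched event. A hypothetical handler sketch, not the project's actual lambda_handler, showing how those fields arrive on the other side:

def lambda_handler(event, context):
    # event is the parsed JSON from target_input above
    job_name = event['jobName']
    params = {p['key']: p['value'] for p in event.get('params', [])}
    print("submitting %s with %d parameters" % (job_name, len(params)))
    return {'jobName': job_name}

print(lambda_handler({'jobName': "AWS-Data-Collector", 'params': []}, None))
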
Example #4
class RuleEngineLambdaFunction(LambdaFunctionResource):
    function_name = "ruleengine"
    role = LambdaRole.get_output_attr('arn')
    handler = RULE_ENGINE_JOB_FILE_NAME + ".lambda_handler"
    runtime = "python2.7"
    s3_bucket = BucketStorage.get_output_attr('bucket')
    s3_key = UploadLambdaRuleEngineZipFile.get_output_attr('id')
    environment = {
        'variables': {
            'JOB_QUEUE': RuleEngineJobQueue.get_input_attr('name'),
            'JOB_DEFINITION': SubmitAndRuleEngineJobDefinition.get_input_attr('name')
        }
    }

    DEPENDS_ON = [SubmitAndRuleEngineJobDefinition, RuleEngineJobQueue]
Example #5
class SubmitJobLambdaFunction(LambdaFunctionResource):
    function_name = "datacollector"
    role = LambdaRole.get_output_attr('arn')
    handler = BATCH_JOB_FILE_NAME + ".lambda_handler"
    runtime = "python2.7"
    s3_bucket = BucketStorage.get_output_attr('bucket')
    s3_key = UploadLambdaSubmitJobZipFile.get_output_attr('id')
    environment = {
        'variables': {
            'JOB_QUEUE': BatchJobsQueue.get_input_attr('name'),
            'JOB_DEFINITION': SubmitAndRuleEngineJobDefinition.get_input_attr('name')
        }
    }

    DEPENDS_ON = [SubmitAndRuleEngineJobDefinition, BatchJobsQueue]
Example #6
    def add_vaues_for_bucket_placeholder(self):
        input_sh_file = os.path.join(self.dest_dir, self.docker_dir,
                                     "entrypoint.sh.tpl")
        output_sh_file = os.path.join(self.dest_dir, self.docker_dir,
                                      "entrypoint.sh")
        write_lines = []

        # The base path does not depend on the current line, so compute it once.
        s3_bucket_base_path = os.path.join(
            BucketStorage.get_input_attr('bucket'),
            Settings.RESOURCE_NAME_PREFIX)

        with open(input_sh_file, "r") as input_file:
            for line in input_file.readlines():
                write_lines.append(
                    line.replace('{{s3-bucket-base-path}}',
                                 s3_bucket_base_path))

        with open(output_sh_file, "w") as output_file:
            output_file.writelines(write_lines)
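
A standalone sketch of the substitution performed above, using a made-up template line; the bucket name and prefix are placeholders, not project values:

template_line = 'aws s3 cp s3://{{s3-bucket-base-path}}/app.jar .\n'
s3_bucket_base_path = 'my-pacbot-bucket/pacbot'
print(template_line.replace('{{s3-bucket-base-path}}', s3_bucket_base_path))
# -> aws s3 cp s3://my-pacbot-bucket/pacbot/app.jar .
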
Example #7
    def post_terraform_apply(self):
        archive_type = "zip"
        s3_client = s3.get_s3_client(Settings.AWS_AUTH_CRED)

        zip_file_name = Settings.RESOURCE_NAME_PREFIX + "-terraform-installer-backup"
        zip_file_abs_path = os.path.join(Settings.BASE_APP_DIR, zip_file_name)
        dir_to_archive = Settings.DATA_DIR
        SysLog().write_debug_log("Started Archiving Terraform Directory")
        shutil.make_archive(zip_file_abs_path, archive_type, dir_to_archive)
        SysLog().write_debug_log("Completed Archiving")

        bucket_name = BucketStorage.get_input_attr('bucket')
        zip_file_name = zip_file_name + ".zip"
        zip_file_abs_path = zip_file_abs_path + ".zip"
        SysLog().write_debug_log("Started Uploading Archived Terraform(Zip File: %s) into S3 Bucket(Name: %s)" % (zip_file_abs_path, bucket_name))
        s3_client.upload_file(
            zip_file_abs_path,
            bucket_name,
            zip_file_name)

        os.remove(zip_file_abs_path)
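
One detail worth noting in post_terraform_apply: shutil.make_archive appends the format's extension itself, which is why ".zip" is added to zip_file_name and zip_file_abs_path only after the archive has been created. A self-contained illustration using a temporary directory in place of Settings.DATA_DIR:

import os
import shutil
import tempfile

src_dir = tempfile.mkdtemp()  # stand-in for Settings.DATA_DIR
base = os.path.join(tempfile.gettempdir(), "terraform-installer-backup")
archive_path = shutil.make_archive(base, "zip", src_dir)
assert archive_path == base + ".zip"
print(archive_path)  # .../terraform-installer-backup.zip
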
Example #8
class UploadLambdaSubmitJobZipFile(S3BucketObject):
    bucket = BucketStorage.get_output_attr('bucket')
    key = Settings.RESOURCE_NAME_PREFIX + "/" + BATCH_JOB_FILE_NAME + ".zip"
    source = os.path.join(get_terraform_scripts_and_files_dir(),
                          BATCH_JOB_FILE_NAME + ".zip")
Example #9
 def get_admin_container_env_vars(self):
     return [{
         'name': "JAR_FILE",
         'value': "pacman-api-admin.jar"
     }, {
         'name': "CONFIG_PASSWORD",
         'value': self.CONFIG_PASSWORD
     }, {
         'name': "CONFIG_SERVER_URL",
         'value': self.CONFIG_SERVER_URL
     }, {
         'name': "ES_CLUSTER_NAME",
         'value': self.ES_CLUSTER_NAME
     }, {
         'name': "ES_HEIMDALL_HOST_NAME",
         'value': self.ES_HEIMDALL_HOST_NAME
     }, {
         'name': "ES_HEIMDALL_PORT",
         'value': self.ES_HEIMDALL_PORT
     }, {
         'name': "ES_HOST_NAME",
         'value': self.ES_HOST_NAME
     }, {
         'name': "ES_PORT",
         'value': self.ES_PORT
     }, {
         'name': "LOGGING_ES_HOST_NAME",
         'value': self.LOGGING_ES_HOST_NAME
     }, {
         'name': "LOGGING_ES_PORT",
         'value': self.LOGGING_ES_PORT
     }, {
         'name': "PACMAN_HOST_NAME",
         'value': self.PACMAN_HOST_NAME
     }, {
         'name': "RDS_PASSWORD",
         'value': self.RDS_PASSWORD
     }, {
         'name': "RDS_URL",
         'value': self.RDS_URL
     }, {
         'name': "RDS_USERNAME",
         'value': self.RDS_USERNAME
     }, {
         'name': "ES_UPDATE_HOST",
         'value': self.ES_UPDATE_HOST
     }, {
         'name': "ES_UPDATE_PORT",
         'value': self.ES_UPDATE_PORT
     }, {
         'name': "ES_UPDATE_CLUSTER_NAME",
         'value': self.ES_UPDATE_CLUSTER_NAME
     }, {
         'name': "SECURITY_USERNAME",
         'value': "admin"
     }, {
         'name': "SECURITY_PASSWORD",
         'value': "admin@123"
     }, {
         'name': "ACCESS_KEY",
         'value': "test_key_1"
     }, {
         'name': "SECRET_KEY",
         'value': "test_key_2"
     }, {
         'name': "DOMAIN_URL",
         'value': ApplicationLoadBalancer.get_api_server_url('admin')
     }, {
         'name': "ADMIN_SERVER",
         'value': "http://localhost/pacmonitor"
     }, {
         'name': "ROLE_ARN",
         'value': ECSRole.get_output_attr('arn')
     }, {
         'name': "JOB_FUNCTION_NAME",
         'value': SubmitJobLambdaFunction.get_input_attr('function_name')
     }, {
         'name': "JOB_FUNCTION_ARN",
         'value': SubmitJobLambdaFunction.get_output_attr('arn')
     }, {
         'name': "JOB_LAMBDA_REGION",
         'value': AwsRegion.get_output_attr('name')
     }, {
         'name': "JOB_BUCKET_REGION",
         'value': AwsRegion.get_output_attr('name')
     }, {
         'name': "RULE_FUNCTION_NAME",
         'value': RuleEngineLambdaFunction.get_input_attr('function_name')
     }, {
         'name': "RULE_FUNCTION_ARN",
         'value': RuleEngineLambdaFunction.get_output_attr('arn')
     }, {
         'name': "RULE_BUCKET_REGION",
         'value': AwsRegion.get_output_attr('name')
     }, {
         'name': "RULE_LAMBDA_REGION",
         'value': AwsRegion.get_output_attr('name')
     }, {
         'name': "RULE_JOB_BUCKET_NAME",
         'value': BucketStorage.get_output_attr('bucket')
     }]
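
The name/value pairs returned above follow the ECS task-definition "environment" schema, so they can be embedded directly in a container definition. A hypothetical, minimal illustration; the container name and image are placeholders, not project values:

import json

admin_env_vars = [
    {'name': "JAR_FILE", 'value': "pacman-api-admin.jar"},  # one sample entry from the list above
]
container_definition = {
    "name": "admin",                              # placeholder container name
    "image": "example/pacbot-api-admin:latest",   # placeholder image
    "essential": True,
    "environment": admin_env_vars,
}
print(json.dumps([container_definition], indent=2))
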
Example #10
    def get_provisioners(self):
        script = os.path.join(get_terraform_scripts_dir(), 'sql_replace_placeholder.py')
        db_user_name = MySQLDatabase.get_input_attr('username')
        db_password = MySQLDatabase.get_input_attr('password')
        db_host = MySQLDatabase.get_output_attr('endpoint')
        local_execs = [
            {
                'local-exec': {
                    'command': script,
                    'environment': {
                        'SQL_FILE_PATH': self.dest_file,
                        'ENV_region': AwsRegion.get_output_attr('name'),
                        'ENV_account': AwsAccount.get_output_attr('account_id'),
                        'ENV_eshost': ESDomain.get_http_url(),
                        'ENV_esport': ESDomain.get_es_port(),
                        'ENV_LOGGING_ES_HOST_NAME': ESDomain.get_output_attr('endpoint'),
                        'ENV_LOGGING_ES_PORT': str(ESDomain.get_es_port()),
                        'ENV_ES_HOST_NAME': ESDomain.get_output_attr('endpoint'),
                        'ENV_ES_PORT': str(ESDomain.get_es_port()),
                        'ENV_ES_CLUSTER_NAME': ESDomain.get_input_attr('domain_name'),
                        'ENV_ES_PORT_ADMIN': str(ESDomain.get_es_port()),
                        'ENV_ES_HEIMDALL_HOST_NAME': ESDomain.get_output_attr('endpoint'),
                        'ENV_ES_HEIMDALL_PORT': str(ESDomain.get_es_port()),
                        'ENV_ES_HEIMDALL_CLUSTER_NAME': ESDomain.get_input_attr('domain_name'),
                        'ENV_ES_HEIMDALL_PORT_ADMIN': str(ESDomain.get_es_port()),
                        'ENV_ES_UPDATE_HOST': ESDomain.get_output_attr('endpoint'),
                        'ENV_ES_UPDATE_PORT': str(ESDomain.get_es_port()),
                        'ENV_ES_UPDATE_CLUSTER_NAME': ESDomain.get_input_attr('domain_name'),
                        'ENV_PACMAN_HOST_NAME': ApplicationLoadBalancer.get_http_url(),
                        'ENV_RDS_URL': MySQLDatabase.get_rds_db_url(),
                        'ENV_RDS_USERNAME': MySQLDatabase.get_input_attr('username'),
                        'ENV_RDS_PASSWORD': MySQLDatabase.get_input_attr('password'),
                        'ENV_JOB_BUCKET_REGION': AwsRegion.get_output_attr('name'),
                        'ENV_RULE_JOB_BUCKET_NAME': BucketStorage.get_output_attr('bucket'),
                        'ENV_JOB_LAMBDA_REGION': AwsRegion.get_output_attr('name'),
                        'ENV_JOB_FUNCTION_NAME': SubmitJobLambdaFunction.get_input_attr('function_name'),
                        'ENV_JOB_FUNCTION_ARN': SubmitJobLambdaFunction.get_output_attr('arn'),
                        'ENV_RULE_BUCKET_REGION': AwsRegion.get_output_attr('name'),
                        'ENV_RULE_JOB_BUCKET_NAME': BucketStorage.get_output_attr('bucket'),  # NOTE: duplicate key; the same entry appears a few lines above
                        'ENV_RULE_LAMBDA_REGION': AwsRegion.get_output_attr('name'),
                        'ENV_RULE_FUNCTION_NAME': RuleEngineLambdaFunction.get_input_attr('function_name'),
                        'ENV_RULE_FUNCTION_ARN': RuleEngineLambdaFunction.get_output_attr('arn'),
                        'ENV_CLOUD_INSIGHTS_TOKEN_URL': "http://localhost",
                        'ENV_CLOUD_INSIGHTS_COST_URL': "http://localhost",
                        'ENV_SVC_CORP_USER_ID': "testid",
                        'ENV_SVC_CORP_PASSWORD': "******",
                        'ENV_CERTIFICATE_FEATURE_ENABLED': "false",
                        'ENV_PATCHING_FEATURE_ENABLED': "false",
                        'ENV_VULNERABILITY_FEATURE_ENABLED': str(Settings.get('ENABLE_VULNERABILITY_FEATURE', False)).lower(),
                        'ENV_MAIL_SERVER': Settings.MAIL_SERVER,
                        'ENV_PACMAN_S3': "pacman-email-templates",
                        'ENV_DATA_IN_DIR': "inventory",
                        'ENV_DATA_BKP_DIR': "backup",
                        'ENV_PAC_ROLE': BaseRole.get_input_attr('name'),
                        'ENV_BASE_REGION': AwsRegion.get_output_attr('name'),
                        'ENV_DATA_IN_S3': BucketStorage.get_output_attr('bucket'),
                        'ENV_BASE_ACCOUNT': AwsAccount.get_output_attr('account_id'),
                        'ENV_PAC_RO_ROLE': BaseRole.get_input_attr('name'),
                        'ENV_MAIL_SERVER_PORT': Settings.MAIL_SERVER_PORT,
                        'ENV_MAIL_PROTOCOL': Settings.MAIL_PROTOCOL,
                        'ENV_MAIL_SERVER_USER': Settings.MAIL_SERVER_USER,
                        'ENV_MAIL_SERVER_PWD': Settings.MAIL_SERVER_PWD,
                        'ENV_MAIL_SMTP_AUTH': Settings.MAIL_SMTP_AUTH,
                        'ENV_MAIL_SMTP_SSL_ENABLE': Settings.MAIL_SMTP_SSL_ENABLE,
                        'ENV_MAIL_SMTP_SSL_TEST_CONNECTION': Settings.MAIL_SMTP_SSL_TEST_CONNECTION,
                        'ENV_PACMAN_LOGIN_USER_NAME': "*****@*****.**",
                        'ENV_PACMAN_LOGIN_PASSWORD': "******",
                        'ENV_CONFIG_CREDENTIALS': "dXNlcjpwYWNtYW4=",
                        'ENV_CONFIG_SERVICE_URL': ApplicationLoadBalancer.get_http_url() + "/api/config/rule/prd/latest",
                        'ENV_PACBOT_AUTOFIX_RESOURCEOWNER_FALLBACK_MAILID': Settings.get('USER_EMAIL_ID', ""),
                        'ENV_QUALYS_INFO': Settings.get('QUALYS_INFO', ""),
                        'ENV_QUALYS_API_URL': Settings.get('QUALYS_API_URL', "")
                    },
                    'interpreter': [Settings.PYTHON_INTERPRETER]
                }
            }
        ]

        return local_execs
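
The ENV_* variables above are consumed by sql_replace_placeholder.py, which is not shown in these examples. As a loose, hypothetical sketch only (the real script's placeholder syntax may differ), such a script could substitute tokens of the form $ENV_NAME in the SQL file pointed to by SQL_FILE_PATH:

import os

def replace_placeholders(sql_path):
    # Hypothetical: replace every $ENV_* token with the value exported
    # by the local-exec provisioner's environment block above.
    with open(sql_path) as f:
        sql = f.read()
    for key, value in os.environ.items():
        if key.startswith('ENV_'):
            sql = sql.replace('$' + key, value)
    with open(sql_path, 'w') as f:
        f.write(sql)

if __name__ == '__main__':
    replace_placeholders(os.environ['SQL_FILE_PATH'])
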
Example #11
class UploadLambdaRuleEngineZipFile(S3BucketObject):
    bucket = BucketStorage.get_output_attr('bucket')
    key = Settings.RESOURCE_NAME_PREFIX + "/" + RULE_ENGINE_JOB_FILE_NAME + ".zip"
    source = os.path.join(
        get_terraform_scripts_and_files_dir(),
        RULE_ENGINE_JOB_FILE_NAME + ".zip")