def main_setup(args):
    """Provision the main project via Deployment Manager.

    Steps (each idempotent — skipped when the resource already exists):
    API activation, cloud-function source bucket, then the application
    infrastructure (cloud functions and topics).

    Args (argparse namespace):
        project: target GCP project id; must already exist.
        cf_bucket_name: optional name for the CF source bucket.
        region: deployment region.
        organization_id, api_key: packaged into the function zips.

    Exits with status 4 when the project id cannot be found.
    """
    project = args.project

    def deploy(dm_name, template, properties):
        # Single wrapper for the repeated DM "deployments create" call.
        run_command([
            'gcloud', 'deployment-manager', 'deployments', 'create',
            dm_name, '--template', template,
            '--properties', ",".join(properties),
            '--project', project,
        ])

    # Project validation: bail out early on an unknown project.
    print('project validation')
    if not project_exists(project):
        # Fixed grammar in the error message ("does not exists").
        print('The project_id passed does not exist')
        sys.exit(4)

    apis_dm_name = '-'.join(['apis', args.project])
    if not deployment_exists(project, apis_dm_name):
        deploy(apis_dm_name,
               os.path.join(helpers.BASE_DIR, 'dm', 'apis.py'),
               ['concurrent_api_activation:' + str(False)])

    # CF Bucket — holds the zipped cloud function sources.
    cf_bucket_name = args.cf_bucket_name or 'bucket-cf-' + args.project
    print('cloud function bucket creation.')
    if bucket_status(cf_bucket_name) == 'NotFound':
        # Timestamped deployment name so re-runs never collide.
        deploy('-'.join(['bucket', project,
                         datetime.utcnow().strftime('%Y%m%d%H%M%S')]),
               os.path.join(helpers.BASE_DIR, 'dm', 'bucket.py'),
               ['region:' + scape_to_os(args.region),
                'bucketname:' + cf_bucket_name])

    # Application (cloud functions and topics).
    print('application creation.')
    infra_dm_name = '-'.join(['infra', args.project])
    if not deployment_exists(project, infra_dm_name):
        # Upload the function sources before the DM deploy references them.
        zip_and_store_cf('transformer', 'transformer.zip',
                         'gs://' + cf_bucket_name,
                         args.organization_id, args.api_key)
        zip_and_store_cf('logger', 'logger.zip', 'gs://' + cf_bucket_name,
                         args.organization_id, args.api_key)
        deploy(infra_dm_name,
               os.path.join(helpers.BASE_DIR, 'dm', 'infra.py'),
               ['region:' + scape_to_os(args.region),
                'cfbucket:' + cf_bucket_name])
def main_connector(args):
    """Provision the connector project: partner and CF buckets, the
    connector infrastructure deployment, a GAE app (which activates
    Datastore), and the partner-bucket upload notification.

    Reads from the argparse namespace: quiet, organization_id,
    connector_project, connector_bucket, cf_bucket, region, gae_region,
    connector_sa_file.
    """
    # Optionally let the operator pick a translation mapper and stamp the
    # organization id into it.
    if not args.quiet:
        mapper_file = choose_translation_mapper()
        update_mapper_file_org_id(args.organization_id, mapper_file)

    connector_project_id = args.connector_project
    bucket_template = os.path.join(helpers.BASE_DIR, 'connector', 'dm',
                                   'bucket.py')

    def make_bucket(deployment_name, bucket_name):
        # Create one bucket through Deployment Manager.
        run_command([
            'gcloud', 'deployment-manager', 'deployments', 'create',
            deployment_name,
            '--template', bucket_template,
            '--properties', ",".join([
                'region:' + scape_to_os(args.region),
                'bucketname:' + bucket_name,
            ]),
            '--project', connector_project_id,
        ])

    # Bucket the partner drops files into.
    print('connector - partner bucket creation.')
    partner_bucket_name = args.connector_bucket
    if bucket_status(partner_bucket_name) == "NotFound":
        make_bucket('-'.join([
            'bucket-for-partner',
            datetime.utcnow().strftime('%Y%m%d%H%M%S')
        ]), partner_bucket_name)

    # Bucket holding the zipped cloud function sources.
    print('connector - cloud function bucket creation.')
    cf_bucket_name = args.cf_bucket
    if bucket_status(cf_bucket_name) == "NotFound":
        make_bucket('-'.join([
            'bucket-for-cf',
            datetime.utcnow().strftime('%Y%m%d%H%M%S')
        ]), cf_bucket_name)

    # Connector application: upload function zips first, then deploy the
    # infrastructure that wires them together.
    print('connector - connector application creation.')
    infra_dm_name = 'infra-for-partner'
    if not deployment_exists(connector_project_id, infra_dm_name):
        target = 'gs://' + cf_bucket_name
        for fn in ('forwardfilelink', 'flushbuffer', 'configuration'):
            zip_and_store_cf(fn, fn + '.zip', target)
        zip_and_store_cf('translation', 'translation.zip', target,
                         translation_sa=args.connector_sa_file)
        zip_and_store_cf('cleanup', 'cleanup.zip', target)
        infra_template = os.path.join(helpers.BASE_DIR, 'connector', 'dm',
                                      'writeFindingsConnectorInfra.py')
        run_command([
            'gcloud', 'deployment-manager', 'deployments', 'create',
            infra_dm_name,
            '--template', infra_template,
            '--properties', ",".join([
                'region:' + scape_to_os(args.region),
                'cfbucket:' + cf_bucket_name,
            ]),
            '--project', connector_project_id,
        ])

    # A GAE app is required so Datastore gets activated in the project.
    if not gae_exists(connector_project_id):
        print('Enable Google App Engine.')
        run_command([
            'gcloud', 'app', 'create', '--region', args.gae_region,
            '--project', connector_project_id
        ])
    if not gae_service_exist(connector_project_id, 'default'):
        print('Deploy blank GAE app to activate Datastore.')
        run_command([
            'gcloud', 'app', 'deploy',
            os.path.join(helpers.BASE_DIR, 'connector', 'gae_app',
                         'app.yaml'),
            '--quiet', '--project', connector_project_id
        ])

    # Publish to the forwardfilelink topic whenever a partner file lands.
    print('connector - connector application turn on bucket notifications.')
    if not bucket_notification_exists(partner_bucket_name):
        run_command([
            'gsutil', 'notification', 'create', '-e', 'OBJECT_FINALIZE',
            '-t',
            'projects/' + connector_project_id + '/topics/forwardfilelink',
            '-f', 'json', 'gs://' + partner_bucket_name
        ])
def main_notifier_commands(args):
    """Provision the notifier using direct gcloud/gsutil/mvn commands:
    CF source bucket, the notifyDefaultHttp cloud function, Pub/Sub
    wiring, then the GAE pubsub-cron and API services.

    Reads from the argparse namespace: notifier_project, notifier_bucket,
    region, notifier_appengine_version, notifier_skip_pubsub,
    notifier_hash_mecanism.
    """
    project = args.notifier_project
    notifier_path = helpers.BASE_DIR

    # Bucket for the zipped cloud function source.
    logger.step('Creating buckets to store cloud functions...')
    cf_bucket_name = args.notifier_bucket or bucket_name()
    if bucket_status(cf_bucket_name) == 'NotFound':
        logger.info('Bucket not found, creating...')
        run_command([
            'gsutil', 'mb', '-p', project, '-c', 'REGIONAL', '-l',
            args.region, 'gs://{}/'.format(cf_bucket_name)
        ])
    else:
        logger.warn('Bucket already exists, skipping creation')

    # Cloud function: generate its config, upload the zip, then deploy.
    logger.step('Deploying cloud function...')
    appengine_version = args.notifier_appengine_version
    function_name = appengine_version + '_notifyDefaultHttp'
    logger.info('Generating application config file...')
    generate_config_json(notifier_path, appengine_version)
    logger.info('Uploading cloud function to bucket...')
    upload_cf(function_name, notifier_path, cf_bucket_name)
    logger.info('Deploying cloud function...')
    run_command([
        'gcloud', 'functions', 'deploy', function_name, '--project',
        project, '--source',
        'gs://{}/{}.zip'.format(cf_bucket_name, function_name),
        '--region', args.region, '--trigger-http', '--runtime', 'nodejs6',
        '--timeout', '180', '--memory', '256MB'
    ])

    # Pub/Sub topic plus a push subscription aimed at the GAE pubsub
    # service.
    logger.step('Creating Pub/Sub topics and subscriptions...')
    if args.notifier_skip_pubsub:
        logger.warn(
            'Skipping creation since notifier_skip_pubsub parameter is present'
        )
    else:
        topic_name = appengine_version + '-test'
        create_topic(project, topic_name)
        subscription_name = appengine_version + '-noti-subscription'
        endpoint_url = ('https://' + appengine_version + '-pubsub-dot-'
                        + project
                        + '.appspot.com/_ah/push-handlers/receive_message')
        create_subscription(project, subscription_name, endpoint_url,
                            topic_name)

    # GAE: build all modules, then deploy dispatcher and API services.
    logger.step('Deploying GAE application...')
    logger.info('Testing and Building application projects...')
    run_command([
        'mvn', '-f',
        os.path.join(notifier_path, 'notifier', 'app', 'pom.xml'),
        'clean', 'install', '-DskipTests=true'
    ])

    logger.info('Deploying pubsub cron service...')
    gcloud_path = get_sdk_path()
    run_command([
        'mvn', '-f',
        os.path.join(notifier_path, 'notifier', 'app', 'dispatcher',
                     'pom.xml'),
        'appengine:deploy', 'appengine:deployQueue', 'appengine:deployCron',
        '-DskipTests=true',
        '-Dapp.deploy.project=' + project,
        '-Dapp.deploy.promote=False',
        '-Dapp.deploy.stopPreviousVersion=False',
        '-Dapp.deploy.version=' + appengine_version + '-pubsub',
        '-Dnotification.namespace=' + appengine_version,
        '-Dhash.mechanism=' + args.notifier_hash_mecanism,
        '-DcloudSdkPath=' + gcloud_path
    ])

    # Generates openapi.json for the endpoints deploy below.
    logger.info('Deploying application...')
    run_command([
        'mvn', '-f',
        os.path.join(notifier_path, 'notifier', 'app', 'api', 'pom.xml'),
        'exec:java', '-DskipTests', '-DGetOpenApiDoc',
        '-Dendpoints.service.prefix=' + appengine_version + '-api',
        '-Dendpoints.project.id=' + project
    ])

    logger.info('Creating GAE API endpoint...')
    run_command([
        'gcloud', 'endpoints', 'services', 'deploy', 'openapi.json',
        '--project', project
    ])

    logger.info('Deploying API endpoint...')
    run_command([
        'mvn', '-f',
        os.path.join(notifier_path, 'notifier', 'app', 'api', 'pom.xml'),
        'appengine:deploy', '-DskipTests=true',
        '-Dapp.deploy.project=' + project,
        '-Dapp.deploy.promote=False',
        '-Dapp.deploy.stopPreviousVersion=False',
        '-Dapp.deploy.version=' + appengine_version + '-api',
        '-Dnotification.namespace=' + appengine_version,
        '-DcloudSdkPath=' + gcloud_path,
        '-Dendpoints.service.prefix=' + appengine_version + '-api',
        '-Dendpoints.project.id=' + project
    ])
def main_notifier(args):
    """Provision the notifier via Deployment Manager: CF bucket and
    notifier infrastructure, then build and deploy the GAE services
    (pubsub cron dispatcher and API endpoint) with Maven.

    Reads from the argparse namespace: notifier_project, notifier_bucket,
    region, notifier_appengine_version, notifier_skip_pubsub,
    notifier_hash_mecanism.
    """
    notifier_path = helpers.BASE_DIR
    project = args.notifier_project
    appengine_version = args.notifier_appengine_version

    # Bucket for the zipped cloud function source.
    print('notifier - cloud function bucket creation.')
    cf_bucket = args.notifier_bucket or bucket_name()
    if bucket_status(cf_bucket) == 'NotFound':
        bucket_template = os.path.join(helpers.BASE_DIR, 'notifier', 'dm',
                                       'bucket.py')
        run_command([
            'gcloud', 'deployment-manager', 'deployments', 'create',
            '-'.join(['bucket-cf', project]),
            '--template', bucket_template,
            '--properties', ",".join([
                'region:' + scape_to_os(args.region),
                'bucketname:' + cf_bucket,
            ]),
            '--project', project
        ])

    print('notifier - notifier application creation.')
    infra_dm_name = '-'.join(['infra', project])
    if not deployment_exists(project, infra_dm_name):
        # Configure and upload the CF before the deployment references it.
        function_name = appengine_version + '_notifyDefaultHttp'
        generate_config_json(notifier_path, appengine_version)
        upload_cf(function_name, notifier_path, cf_bucket)

        # Infra deploy.
        infra_template = os.path.join(helpers.BASE_DIR, 'notifier', 'dm',
                                      'notifierInfra.py')
        endpoint_url = ('https://' + appengine_version + '-pubsub-dot-'
                        + project
                        + '.appspot.com/_ah/push-handlers/receive_message')
        run_command([
            'gcloud', 'deployment-manager', 'deployments', 'create',
            infra_dm_name,
            '--template', infra_template,
            '--properties', ",".join([
                'topic_name:' + appengine_version + '-test',
                'subscriber_name:' + appengine_version + '-noti-subscription',
                'endpoint_url:' + endpoint_url,
                'function_name:' + function_name,
                'cf_bucket:' + cf_bucket,
                'region:' + scape_to_os(args.region),
                'skip_pubsub:' + str(args.notifier_skip_pubsub)
            ]),
            '--project', project
        ])

    # Build and run tests of every project.
    run_command([
        'mvn', '-f',
        os.path.join(notifier_path, 'notifier', 'app', 'pom.xml'),
        'clean', 'install', '-DskipTests=true'
    ])

    # Pubsub deploy.
    print('notifier - deploy pubsub service')
    gcloud_path = get_sdk_path()
    run_command([
        'mvn', '-f',
        os.path.join(notifier_path, 'notifier', 'app', 'dispatcher',
                     'pom.xml'),
        'appengine:deploy', 'appengine:deployQueue', 'appengine:deployCron',
        '-DskipTests=true',
        '-Dapp.deploy.project=' + project,
        '-Dapp.deploy.promote=False',
        '-Dapp.deploy.stopPreviousVersion=False',
        '-Dapp.deploy.version=' + appengine_version + '-pubsub',
        '-Dnotification.namespace=' + appengine_version,
        '-Dhash.mechanism=' + args.notifier_hash_mecanism,
        '-DcloudSdkPath=' + gcloud_path
    ])

    # Api deploy: generate the OpenAPI document, register it, then deploy.
    run_command([
        'mvn', '-f',
        os.path.join(notifier_path, 'notifier', 'app', 'api', 'pom.xml'),
        'exec:java', '-DskipTests', '-DGetOpenApiDoc',
        '-Dendpoints.service.prefix=' + appengine_version + '-api',
        '-Dendpoints.project.id=' + project
    ])
    print('notifier - deploy api endpoints')
    run_command([
        'gcloud', 'endpoints', 'services', 'deploy', 'openapi.json',
        '--project', project
    ])
    run_command([
        'mvn', '-f',
        os.path.join(notifier_path, 'notifier', 'app', 'api', 'pom.xml'),
        'appengine:deploy', '-DskipTests=true',
        '-Dapp.deploy.project=' + project,
        '-Dapp.deploy.promote=False',
        '-Dapp.deploy.stopPreviousVersion=False',
        '-Dapp.deploy.version=' + appengine_version + '-api',
        '-Dnotification.namespace=' + appengine_version,
        '-DcloudSdkPath=' + gcloud_path,
        '-Dendpoints.service.prefix=' + appengine_version + '-api',
        '-Dendpoints.project.id=' + project
    ])
def main_setup_commands(args):
    """Provision the main project using direct gcloud/gsutil commands:
    CF source bucket, the entrypoint/redirect Pub/Sub topics, and the
    transformer and logger cloud functions.

    Reads from the argparse namespace: project, cf_bucket_name, region,
    organization_id, api_key.
    """
    project = args.project

    # Bucket that stores the zipped cloud function sources.
    logger.step('Creating buckets to store cloud functions...')
    cf_bucket_name = args.cf_bucket_name or 'bucket-cf-' + args.project
    if bucket_status(cf_bucket_name) == 'NotFound':
        logger.info('Bucket not found, creating...')
        run_command([
            'gsutil', 'mb', '-p', project, '-c', 'REGIONAL', '-l',
            args.region, 'gs://{}/'.format(cf_bucket_name)
        ])
    else:
        logger.warn('Bucket already exists, skipping creation')

    # Pub/Sub topics the functions are triggered from.
    logger.step('Creating Pub/Sub topic...')
    for topic in ('entrypoint', 'redirect'):
        if topic_exists(project, topic):
            logger.warn(
                '{} topic already exists, skipping creation'.format(topic))
        else:
            logger.info('Creating {} topic'.format(topic))
            run_command([
                'gcloud', 'pubsub', 'topics', 'create', topic,
                '--project', project
            ])

    # Cloud functions: (name, entry point, trigger topic). Each is zipped,
    # uploaded to the bucket, then deployed from that source.
    logger.step('Deploying Cloud Function...')
    for fn_name, entry_point, topic in (
            ('transformer', 'transform', 'entrypoint'),
            ('logger', 'log', 'redirect')):
        logger.info(
            'Creating zip and uploading {} function...'.format(fn_name))
        zip_and_store_cf(fn_name, fn_name + '.zip',
                         'gs://{}'.format(cf_bucket_name),
                         args.organization_id, args.api_key)
        logger.info('Deploying {} Cloud Function...'.format(fn_name))
        run_command([
            'gcloud', 'functions', 'deploy', fn_name, '--project', project,
            '--source', 'gs://{}/{}.zip'.format(cf_bucket_name, fn_name),
            '--entry-point', entry_point, '--trigger-topic', topic,
            '--region', args.region, '--timeout', '180', '--runtime',
            'nodejs6', '--retry'
        ])