def scp_connect(dest_name, dest_uri, dest_client):
    """Call a remote endpoint through the SAP CP destination service.

    :param dest_name: name of the destination to look up
    :param dest_uri: path to request on the destination's URL
    :param dest_client: sap-client number; falls back to the destination's
        configured sap-client when falsy
    :return: the requests.Response from the destination call
    """
    ######################################################################
    ############### Step 1: Read the environment variables ###############
    ######################################################################
    env = AppEnv()
    uaa_service = env.get_service(name='uaa_service')
    dest_service = env.get_service(name='destination_service')
    sUaaCredentials = dest_service.credentials[
        "clientid"] + ':' + dest_service.credentials["clientsecret"]
    #######################################################################
    ##### Step 2: Request a JWT token to access the destination service ###
    #######################################################################
    # BUG FIX: base64.b64encode() takes and returns *bytes* on Python 3,
    # so encode the credential string first and decode the result back to
    # str before concatenating into the header value.
    headers = {
        'Authorization': 'Basic ' +
        base64.b64encode(sUaaCredentials.encode('utf-8')).decode('ascii'),
        'content-type': 'application/x-www-form-urlencoded'
    }
    form = [('client_id', dest_service.credentials["clientid"]),
            ('grant_type', 'client_credentials')]
    r = requests.post(uaa_service.credentials["url"] + '/oauth/token',
                      data=form,
                      headers=headers)
    #######################################################################
    ###### Step 3: Search your destination in the destination service #####
    #######################################################################
    token = r.json()["access_token"]
    headers = {'Authorization': 'Bearer ' + token}
    r = requests.get(dest_service.credentials["uri"] +
                     '/destination-configuration/v1/destinations/' + dest_name,
                     headers=headers)
    #######################################################################
    ############### Step 4: Access the destination securely ###############
    #######################################################################
    destination = r.json()
    token = destination["authTokens"][0]
    headers = {
        'Authorization': token["type"] + ' ' + token["value"],
        'Accept': 'application/json'
    }
    if dest_client:
        dest_client = '?sap-client=' + dest_client
    else:
        # Read sap-client from Destinations configuration
        dest_client = '?sap-client=' + \
            destination["destinationConfiguration"]["sap-client"]
    r = requests.get(destination["destinationConfiguration"]["URL"] +
                     dest_uri + dest_client,
                     headers=headers)
    return r
def cloudFoundryfyConfig(config: FlaskConfig):
    """Optionally adapt the Flask configuration from the CF environment.

    Logs every bound service; when running on Cloud Foundry (cookiecutter
    option use_reldb enabled) also wires SQLALCHEMY_DATABASE_URI to the
    bound p-mysql service.

    Returns the AppEnv instance when running on CF, otherwise None.
    """
    cfenv = AppEnv()
    if len(cfenv.app) > 0:
        logger.info("app %s %d services: %s", cfenv.name, len(cfenv.services),
                    cfenv.app)
        for service in cfenv.services:
            logger.info("bound service '%s': %s", service.name, service.env)
        {% if cookiecutter.use_reldb.startswith('y') -%}
        # Cookiecutter template section: rendered only when a relational DB
        # is requested for the generated project.
        vcapdb = cfenv.get_service(label='p-mysql')
        if vcapdb:
            logger.info("%s", vcapdb)
            # Build the SQLAlchemy URI directly from the service credentials.
            config['SQLALCHEMY_DATABASE_URI'] = 'mysql://{username}:{password}@{hostname}:{port}/{name}'.format(
                **vcapdb.credentials)
            logger.info("MySQL Service %s konfiguriert",
                        vcapdb.credentials['hostname'])
        elif 'SQLALCHEMY_DATABASE_URI' not in config:
            logger.critical("Kein Datenbank-Service gebunden!")
        {%- endif -%}
    else:
        # Not running on Cloud Foundry.
        cfenv = None
    return cfenv
def get_connection_string():
    """Assemble an ODBC connection string for the bound 'mssql-service'."""
    creds = AppEnv().get_service(name='mssql-service').credentials
    segments = (
        'DRIVER={ODBC Driver 17 for SQL Server}',
        'SERVER=tcp:' + creds['server'],
        'DATABASE=' + creds['database'],
        'UID=' + creds['username'],
        'PWD=' + creds['password'],
    )
    return ';'.join(segments)
def credhub_secret(key):
    '''
    Read the VCAP_SERVICES env variable and look up *key* in the bound
    "credhub" service (e.g. the "demo-certificate" value).

    :return: parsed credhub value as a dict
    '''
    service = AppEnv().get_service(label="credhub")
    return service.get_url(key)
from cfenv import AppEnv
from hdbcli import dbapi
from cf_logging import flask_logging
from sap import xssec
from flask import *
from flask_socketio import SocketIO
from flask_socketio import send, emit, Namespace

# create instance of flask app (wrapped with Socket.IO support)
app = Flask(__name__)
socketio = SocketIO(app)
# NOTE(review): relies on `os` being imported elsewhere in the file — confirm.
app_port = int(os.environ.get('PORT', 3000))

# connection with services bound in the CF environment
env = AppEnv()
hana = env.get_service(name='spatial-db')
uaa_service = env.get_service(name='myuaa').credentials

# logging (SAP cf_logging integration for Flask)
# NOTE(review): relies on `logging` being imported elsewhere — confirm.
flask_logging.init(app, logging.INFO)
logger = logging.getLogger('route.logger')


# used to establish connection with HANA DB
def connectDB(serviceName):
    """Open a hdbcli connection to the named bound HANA service.

    CURRENTSCHEMA selects the schema configured in the service credentials.
    """
    service = env.get_service(name=serviceName)
    conn = dbapi.connect(address=service.credentials['host'],
                         port=int(service.credentials['port']),
                         user=service.credentials['user'],
                         password=service.credentials['password'],
                         CURRENTSCHEMA=service.credentials['schema'])
    return conn
from cfenv import AppEnv import os import requests import base64 import json #create an app using flask lib and also get the port info for later use app = Flask(__name__) app.config["DEBUG"] = True cf_port = os.getenv("PORT") ###################################################################### ############### Step 1: Read the environment variables ############### ###################################################################### env = AppEnv() #read all the xsuaa service key values from the env variables uaa_service = env.get_service(name='xsuaa-demo') #read all the connectivity service key values from the env variables conn_service = env.get_service(name='connectivity-demo-lite') # read the client ID and secret for the connectivity service conn_sUaaCredentials = conn_service.credentials[ "clientid"] + ':' + conn_service.credentials["clientsecret"] # read the On premise proxy host and on premise proxy port for the connectivity service proxy_url = conn_service.credentials[ "onpremise_proxy_host"] + ':' + conn_service.credentials[ "onpremise_proxy_port"] ###################################################################### ##### Step 2: Request a JWT token to access the connectivity service## ###################################################################### #create authorization with basic authentication using connectivity credentials as base64 format headers = { 'Authorization':
import os
import re
import cv2
import numpy as np
import imutils
from cfenv import AppEnv

env = AppEnv()

# Status topic: use the bound service when on Cloud Foundry, otherwise
# fall back to a local Kafka broker and fixed topic name.
statusService = env.get_service(name='status-topic')
if statusService is None:
    statusKafka = "localhost:9092"
    statusTopic = "opencv-kafka-demo-status"
else:
    statusKafka = statusService.credentials.get("hostname")
    statusTopic = statusService.credentials.get("topicName")

# Inbound raw-images topic: fuzzy-match any bound service containing "raw".
inImagesService = env.get_service(name=re.compile('raw'))
if inImagesService is None:
    # Local mode requires an explicit device id to build the topic name.
    if not os.environ.get('DEVICE_ID'):
        print("Must provide $DEVICE_ID when not running on Cloud Foundry")
        exit(1)
    inImagesKafka = "localhost:9092"
    inImagesTopic = "opencv-kafka-demo-raw-" + os.environ['DEVICE_ID']
else:
    inImagesKafka = inImagesService.credentials.get("hostname")
    inImagesTopic = inImagesService.credentials.get("topicName")
    print("Found inbound Cloud Foundry service", inImagesService.name)

# Outbound topic for edge-detected images (fuzzy match on the name).
outImagesService = env.get_service(name=re.compile('edgedetector'))
from sap import xssec from cfenv import AppEnv from hdbcli import dbapi import statistics as s from flask import request from flask import abort from sap.cf_logging import flask_logging app = Flask(__name__) env = AppEnv() flask_logging.init(app, logging.INFO) port = int(os.environ.get('PORT', 3000)) hana = env.get_service(label='hana') uaa_service = env.get_service(label='xsuaa').credentials @app.route('/') def domath(): logger = logging.getLogger('route.logger') logger.info('Someone accessed us') average = 0 jokecount = 0 if 'authorization' not in request.headers: abort(403) access_token = request.headers.get('authorization')[7:] security_context = xssec.create_security_context(access_token, uaa_service)
}, 'DAP': { 'AGENCY': 'DOJ', 'SUBAGENCY': 'ATF', }, } DATABASES = {'default': dj_database_url.config()} env = AppEnv() HTTP_AUTH_USER = env.get_credential('HTTP_AUTH_USER') HTTP_AUTH_PASSWORD = env.get_credential('HTTP_AUTH_PASSWORD') ALLOWED_HOSTS = ['localhost'] + env.uris # Service name may well change in the future. Fuzzy match elastic_service = env.get_service(name=re.compile('search')) if elastic_service: HAYSTACK_CONNECTIONS['default'] = { 'ENGINE': 'haystack.backends.elasticsearch_backend.ElasticsearchSearchEngine', 'URL': elastic_service.credentials['uri'], 'INDEX_NAME': 'eregs', } try: from local_settings import * # noqa except ImportError: pass
import os
# BUG FIX: `import urllib` alone does not import the `urllib.parse`
# submodule on Python 3; importing it explicitly is required.
import urllib.parse

from cfenv import AppEnv

rabbit_env = ""
if os.getenv("PROFILE", "CLOUD") == "LOCAL":
    # Running in a local or dev environment: build the AMQP URL from
    # individual env vars (bitnami-container defaults).
    rabbit_env = "amqp://{}:{}@{}:{}/{}?heartbeat=30".format(
        os.getenv("RABBITMQ_USERNAME", "user"),
        os.getenv("RABBITMQ_PASSWORD", "bitnami"),
        os.getenv("RABBITMQ_SERVER", "localhost"),
        os.getenv("RABBITMQ_PORT", "5672"),
        urllib.parse.quote_plus(os.getenv("RABBITMQ_VHOST", "/")),
    )
else:
    # Running on Cloud Foundry: take the AMQP URI from the bound service.
    cfenv = AppEnv()
    rabbit_env = cfenv.get_service(name="rabbitmq-service").get_url("uri")
    rabbit_env = rabbit_env + "?heartbeat=30"

# BUG FIX: the guard was `if __name__ == "main":`, which never matches;
# the canonical value is "__main__". `cfenv` only exists in the CLOUD
# branch, so guard the demo accesses accordingly.
if __name__ == "__main__":
    if os.getenv("PROFILE", "CLOUD") != "LOCAL":
        cfenv.name  # 'test-app'
        cfenv.port  # 5000
    # rabbitmq = cfenv.get_service(label='rabbitmq')
    # rabbitmq.credentials  # {'uri': '...', 'password': '******'}
    # rabbitmq.get_url(host='hostname', password='******', port='port')  # redis://pass:host
import os
from cfenv import AppEnv

# Append CKAN configuration derived from the Cloud Foundry environment to
# the development.ini next to this script. (Python 2: `print` statement.)
env = AppEnv()
with open(os.path.dirname(os.path.realpath(__file__)) + "/development.ini",
          "a") as config:
    print env.uris
    config.write('\n')
    # The first bound route becomes the public site URL.
    config.write('ckan.site_url = https://' + env.uris[0] + '\n')
    postgres = env.get_service(label='postgres')
    config.write("sqlalchemy.url = " + postgres.credentials['uri'] + '\n')
    # config.write( "ckan.datastore.write_url = "+postgres.credentials['uri'])
    # echo 'ckan.datastore.write_url = $DATABASE_URL' > development.ini
    # ckan.datastore.read_url
    # Build a redis URL from the bound redis32 service credentials.
    redis = env.get_service(label='redis32').credentials
    redis_url = "redis://:{}@{}:{}/0".format(redis['password'], redis['host'],
                                             redis['port'])
    config.write('ckan.redis.url = ' + redis_url + '\n')
    config.write('\n')
def __init__(self, storeName = 'TestStoreName'):
    # Set up the Cloud Foundry environment for this store instance.
    # NOTE(review): `storeName` is unused in the visible body, and the
    # locals below are never assigned to self — confirm the remainder of
    # __init__ elsewhere in the file.
    env = AppEnv()
    # Local default port 9099 when $PORT is not provided by the platform.
    port = int(os.getenv("PORT", 9099))
    hana = env.get_service(label='hana')
are identical once we have a cursor. ''' if env.name is None: DB_PATH = os.path.expanduser("~/.KarmaBoi/databases/") DB_NAME = 'karmadb' PEOPLE_TABLE = ''' CREATE TABLE IF NOT EXISTS people(id SERIAL PRIMARY KEY, name TEXT, karma INTEGER, shame INTEGER);''' ALSO_TABLE = ''' CREATE TABLE IF NOT EXISTS isalso(id SERIAL PRIMARY KEY, name TEXT, also TEXT); ''' else: try: db_env = env.get_service( label=SERVICE_LABLE) # probably can bind any db by adjusting lable db_creds = db_env.credentials db_uri = db_creds.get('uri') except Exception as e: logger.critical( 'not able to generate db_env - ensure db is bound and lable is correct' ) logger.exception(e) raise PEOPLE_TABLE = ''' CREATE TABLE IF NOT EXISTS people(id SERIAL PRIMARY KEY, name TEXT, karma INTEGER, shame INTEGER); ''' # currently, specific to postgres ALSO_TABLE = ''' CREATE TABLE IF NOT EXISTS isalso(id SERIAL PRIMARY KEY, name TEXT, also TEXT);
'GTM_SITE_ID': '', 'GA_SITE_ID': 'UA-48605964-38', }, 'DAP': { 'AGENCY': 'DOJ', 'SUBAGENCY': 'ATF', }, } env = AppEnv() HTTP_AUTH_USER = env.get_credential('HTTP_AUTH_USER') HTTP_AUTH_PASSWORD = env.get_credential('HTTP_AUTH_PASSWORD') ALLOWED_HOSTS = ['localhost'] + env.uris # Service name may well change in the future. Fuzzy match elastic_service = env.get_service(name=re.compile('search')) if elastic_service: HAYSTACK_CONNECTIONS['default'] = { 'ENGINE': ('haystack.backends.elasticsearch_backend.' 'ElasticsearchSearchEngine'), 'URL': elastic_service.credentials['uri'], 'INDEX_NAME': 'eregs', } try: from local_settings import * # noqa except ImportError: pass
#!/usr/bin/env python from importlib import import_module import os from cfenv import AppEnv env = AppEnv() statusService = env.get_service(name='status-topic') if statusService is None: statusKafka = "localhost:9092" statusTopic = "opencv-kafka-demo-status" else: statusKafka = statusService.credentials.get("hostname") statusTopic = statusService.credentials.get("topicName") import json from kafka import KafkaProducer statusProducer = KafkaProducer( value_serializer=lambda v: json.dumps(v).encode('utf-8'), bootstrap_servers=statusKafka) statusProducer.send(statusTopic, { "status": "starting", "client": "imagesfromopencv", "language": "python" }) imagesService = env.get_service(name='raw-images-topic') if imagesService is None: imagesKafka = "localhost:9092" imagesTopic = "opencv-kafka-demo-raw-images" else:
'class': 'logging.StreamHandler', 'stream': 'ext://sys.stdout', 'formatter': 'default' } }, 'root': { 'level': 'INFO', 'handlers': ['wsgi'] } }) assert 'VCAP_SERVICES' in os.environ, "no VCAP_SERVICES environment variable set" app = Flask(__name__) env = AppEnv() vcap = env.get_service(label='aws-sqs-queue') AWS_REGION = vcap.credentials['aws_region'] AWS_ACCESS_KEY_ID = vcap.credentials['aws_access_key_id'] AWS_SECRET_ACCESS_KEY = vcap.credentials['aws_secret_access_key'] AWS_PRIMARY_QUEUE = vcap.credentials['primary_queue_url'] AWS_SECONDARY_QUEUE = vcap.credentials['secondary_queue_url'] sqs = boto3.client('sqs', region_name=AWS_REGION, aws_access_key_id=AWS_ACCESS_KEY_ID, aws_secret_access_key=AWS_SECRET_ACCESS_KEY) @app.route("/") def index(): app.logger.info("Index")
from flask import Flask, render_template, request, redirect
from werkzeug.utils import secure_filename
import os
import boto3
from cfenv import AppEnv

# Inspect the Cloud Foundry environment and the bound S3 bucket service.
# (Python 2: `print` statements.)
env = AppEnv()
print env.name
print env.port
s3bucket = env.get_service(name='s3bucket')
print s3bucket.credentials

app = Flask(__name__)
#app.config.from_object("config.config")
#from .helpers import *

# File extensions accepted for upload.
ALLOWED_EXTENSIONS = set(['txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'])


@app.route("/")
def index():
    # Render the landing page.
    # print "S3_BUCKET: " + app.config["S3_BUCKET"]
    # print "S3_ACCESS_KEY: " + app.config["S3_ACCESS_KEY"]
    # print "S3_SECRET_KEY: " + app.config["S3_SECRET_KEY"]
    # print "S3_LOCATION: " + app.config["S3_LOCATION"]
    # print s3
    return render_template("index.html")
from .base import *
import os
import re
import dj_database_url
from cfenv import AppEnv

# Prefer the Cloud Foundry bound database URI over the DATABASE_URL env var.
database_url = os.getenv("DATABASE_URL")
env = AppEnv()
cf_foia_db = env.get_service(name=re.compile('foia-db'))
if cf_foia_db:
    database_url = cf_foia_db.credentials['uri']

# See env.example for an explanation of these settings.
SECRET_KEY = os.getenv("FOIA_SECRET_SESSION_KEY")
DATABASES = {'default': dj_database_url.parse(database_url)}
SHOW_WEBFORM = (os.getenv("FOIA_SHOW_WEBFORM") == "true")
ANALYTICS_ID = os.getenv("FOIA_ANALYTICS_ID")
from cfenv import AppEnv
from flask import request, abort
from sap import xssec

from . import main

env = AppEnv()
uaa_service = env.get_service(
    name="{{ cookiecutter.uaa_service_instance_name }}"
).credentials


@main.route("/", methods=["GET"])
def main_route():
    """Return the JWT properties of an authorized caller.

    Aborts with 403 when the Authorization header is missing or the token
    lacks a required scope.
    """
    # check if authorization information is provided
    if "authorization" not in request.headers:
        abort(403)

    # Strip the "Bearer " prefix (7 chars) and validate the token.
    access_token = request.headers.get("authorization")[7:]
    security_context = xssec.create_security_context(access_token, uaa_service)

    # BUG FIX: the original assigned check_scope("openid") and then
    # immediately overwrote the result with check_scope("$XSAPPNAME.user"),
    # so the "openid" scope was never enforced. Require both, as the two
    # consecutive assignments evidently intended.
    is_authorized = (
        security_context.check_scope("openid")
        and security_context.check_scope("$XSAPPNAME.user")
    )
    if not is_authorized:
        abort(403)

    return security_context._properties
# Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.10/howto/static-files/ STATIC_URL = '/static/' STATIC_ROOT = os.environ.get('TMPDIR', '.') + '/static/' # File storage DEFAULT_FILE_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage' # https://django-storages.readthedocs.io/en/latest/backends/amazon-S3.html # In addition to using django-storages, we're maintaining parity between the # local docker-compose environment and the cloud.gov environments; see the # environment section of the prod-api service in docker-compose.yml for where # these values are coming from for local development. s3service = env.get_service(label="s3") AWS_ACCESS_KEY_ID = s3service.credentials.get("access_key_id") AWS_SECRET_ACCESS_KEY = s3service.credentials.get("secret_access_key") AWS_STORAGE_BUCKET_NAME = s3service.credentials.get("bucket") AWS_AUTO_CREATE_BUCKET = True if "endpoint" in s3service.credentials: # For local development, we must override the endpoint, and region is # irrelevant. AWS_S3_ENDPOINT_URL = s3service.credentials["endpoint"] else: # On cloud.gov, we need region and we want django-storages to infer the # correct URL for us rather than setting an endpoint ourselves. AWS_S3_REGION_NAME = s3service.credentials["region"] AWS_S3_OBJECT_PARAMETERS = { 'ContentDisposition': 'attachment', # Browsers should download files }
#from sap.cf_logging import flask_logging # #https://help.sap.com/viewer/0eec0d68141541d1b07893a39944924e/2.0.03/en-US/d12c86af7cb442d1b9f8520e2aba7758.html from hdbcli import dbapi app = Flask(__name__) env = AppEnv() # Get port from environment variable or choose 9099 as local default # If you are testing locally (i.e. not with xs or cf deployments, # Be sure to pull all the python modules locally # with pip using XS_PYTHON unzipped to /tmp # mkdir -p local # pip install -t local -r requirements.txt -f /tmp port = int(os.getenv("PORT", 9099)) hanass = env.get_service(name='HEADLESS_SS') ss_conn = "" chrusr = "" chrpwd = "" loggedin = False def attach(port, host): try: import pydevd pydevd.stoptrace() #I.e.: disconnect if already connected # pydevd.DebugInfoHolder.DEBUG_RECORD_SOCKET_READS = True # pydevd.DebugInfoHolder.DEBUG_TRACE_BREAKPOINTS = 3 # pydevd.DebugInfoHolder.DEBUG_TRACE_LEVEL = 3 pydevd.settrace( port=port,
from pymongo import MongoClient
from cfenv import AppEnv

#TODO: Make this more robust and/or dynamic for other DB types
# Try to pull the CF environment variables for mlab; off-CF the lookup
# fails and conn_str stays undefined (handled in get_db below).
try:
    env = AppEnv()
    mlab = env.get_service(label='mlab')
    conn_str = mlab.credentials['uri']
# FIX: was a bare `except:`, which also swallows SystemExit and
# KeyboardInterrupt; catch Exception instead.
except Exception:
    pass


def get_db():
    """Return the locations collection.

    When running on CF, use the bound mlab connection string; otherwise
    fall back to a local MongoDB instance for testing.
    """
    #TODO: Remove local testing for production environment
    try:
        # A NameError here (conn_str never set above) also routes us to
        # the local fallback.
        db = MongoClient(conn_str + '?retryWrites=false').get_database()
    # FIX: was a bare `except:` — narrowed to Exception.
    except Exception:
        client = MongoClient('localhost', 27017)
        db = client.location
    #TODO: Refactor to accept dynamic DB locations. Maybe a class to abstract
    # getting DB tables, etc...
    return db.locations
# File-based cache kept in the platform temp directory.
CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache',
        'LOCATION': os.getenv('TMPDIR', '/tmp'),
        'TIMEOUT': 1440,
        'OPTIONS': {
            'MAX_ENTRIES': 1000
        },
    }
}

# Static-asset S3 bucket: env-var defaults, overridden by the bound
# Cloud Foundry bucket service when present.
AWS_STORAGE_BUCKET_NAME = os.getenv('FOIA_S3_STATIC_BUCKET_NAME')
AWS_REGION = ''
env = AppEnv()
cf_s3_bucket = env.get_service(name=re.compile('foia-public-bucket'))
if cf_s3_bucket:
    AWS_STORAGE_BUCKET_NAME = cf_s3_bucket.credentials['bucket']
    # Leading '-' so the region slots into the 's3%s.amazonaws.com' host.
    AWS_REGION = '-%s' % cf_s3_bucket.credentials['region']
    AWS_S3_REGION_NAME = cf_s3_bucket.credentials['region']
    AWS_ACCESS_KEY_ID = cf_s3_bucket.credentials['access_key_id']
    AWS_SECRET_ACCESS_KEY = cf_s3_bucket.credentials['secret_access_key']
AWS_S3_CUSTOM_DOMAIN = 's3%s.amazonaws.com/%s' % (AWS_REGION,
                                                  AWS_STORAGE_BUCKET_NAME)
STATIC_URL = 'https://%s/' % AWS_S3_CUSTOM_DOMAIN
STATICFILES_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage'
DEFAULT_FILE_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage'

# Don't add complex authentication related query parameters for requests
AWS_QUERYSTRING_AUTH = False
import os
from cfenv import AppEnv

# Append CKAN configuration derived from the Cloud Foundry environment to
# the development.ini next to this script. (Python 2: `print` statement.)
env = AppEnv()
with open(
        os.path.dirname(os.path.realpath(__file__)) + "/development.ini",
        "a") as config:
    print env.uris
    config.write('\n')
    # The first bound route becomes the public site URL.
    config.write('ckan.site_url = https://' + env.uris[0] + '\n')
    postgres = env.get_service(label='postgres')
    config.write("sqlalchemy.url = " + postgres.credentials['uri'] + '\n')
    # config.write( "ckan.datastore.write_url = "+postgres.credentials['uri'])
    # echo 'ckan.datastore.write_url = $DATABASE_URL' > development.ini
    # ckan.datastore.read_url
    # Build a redis URL from the bound redis32 service credentials.
    redis = env.get_service(label='redis32').credentials
    redis_url = "redis://:{}@{}:{}/0".format(redis['password'], redis['host'],
                                             redis['port'])
    config.write('ckan.redis.url = ' + redis_url + '\n')
    config.write('\n')
import os
from flask import Flask
from database.db import initialize_db
from database.s3 import initialize_s3
from flask_restful import Api
from resources.routes import initialize_routes
from cfenv import AppEnv

app = Flask(__name__)
api = Api(app)

env = AppEnv()
mongodb = env.get_service(label='mongodbent')
s3_service = env.get_service(label='dynstrg')

# BUG FIX: s3_creds was used below without ever being initialized, which
# raised NameError at import time. Initialize it before populating.
# NOTE(review): assumes the 'dynstrg' service is bound; if it can be
# absent, guard on `s3_service is not None` here too.
s3_creds = {}
s3_creds['accessHost'] = 'https://{}'.format(
    s3_service.credentials['accessHost'])

# Fall back to a local MongoDB when the service is not bound.
db_uri = mongodb.credentials[
    "database_uri"] if mongodb is not None else 'mongodb://localhost/versusvirus'
app.config['MONGODB_SETTINGS'] = {'host': db_uri}
app.config['S3'] = s3_creds

port = int(os.getenv('PORT', '3000'))

initialize_db(app)
initialize_s3(app)
initialize_routes(api)

app.run(host='0.0.0.0', port=port)
# app server from flask import Flask app = Flask(__name__) import os port = int(os.environ.get('PORT', 3000)) # Cloud Foundry from cfenv import AppEnv env = AppEnv() hana_service = 'hana' hana = env.get_service(label=hana_service) # SAP HANA Client from hdbcli import dbapi # SQL statement sql = "select * from SYS.M_DATABASE" # home page @app.route('/') def hello(): return "Hello World" @app.route('/hana') def db(): if hana is None:
import logging from flask import Flask from cfenv import AppEnv from flask import request from flask import abort from sap import xssec from hdbcli import dbapi from cf_logging import flask_logging app = Flask(__name__) env = AppEnv() flask_logging.init(app, logging.INFO) port = int(os.environ.get('PORT', 3000)) hana = env.get_service(label='hana') uaa_service = env.get_service(name='myuaa').credentials @app.route('/') def hello(): logger = logging.getLogger('route.logger') logger.info('Someone accessed us') if 'authorization' not in request.headers: abort(403) access_token = request.headers.get('authorization')[7:] security_context = xssec.create_security_context(access_token, uaa_service) isAuthorized = security_context.check_scope('openid') if not isAuthorized: abort(403)
""" WSGI config for ticketer project. It exposes the WSGI callable as a module-level variable named ``application``. For more information on this file, see https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/ """ from cfenv import AppEnv import newrelic.agent import os from django.core.wsgi import get_wsgi_application env = AppEnv() # Initialize New Relic monitoring if on Cloud Foundry ekip_creds = env.get_service(name='ekip-newrelic') if ekip_creds is not None: new_relic_license = ekip_creds.credentials['NEW_RELIC_LICENSE_KEY'] new_relic_app_name = os.environ.get('NEW_RELIC_APP_NAME') if new_relic_license and new_relic_app_name: new_relic_settings = newrelic.agent.global_settings() new_relic_settings.license_key = new_relic_license new_relic_settings.app_name = new_relic_app_name print('Initializing New Relic monitoring') newrelic.agent.initialize() os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.production") application = get_wsgi_application()
"django.contrib.auth.context_processors.auth", ], }, }, ] WSGI_APPLICATION = "idemia.wsgi.application" # The VCAP_APPLICATION environment variable is set by cloud.gov and # populated with service information needed to connect to the database. VCAP_ENV_VAR = "VCAP_APPLICATION" if VCAP_ENV_VAR in os.environ: # Deployment to Cloud.gov -- Set DB to RDS ENV = AppEnv() RDS_VARS = ENV.get_service(label="aws-rds") DB_INFO = RDS_VARS.credentials DB_DICT = { "ENGINE": "django.db.backends.postgresql", "NAME": DB_INFO["db_name"], "USER": DB_INFO["username"], "PASSWORD": DB_INFO["password"], "HOST": DB_INFO["host"], "PORT": DB_INFO["port"], } else: # Local development -- use local DB info # See README for setting up postgres container DB_DICT = { "ENGINE": "django.db.backends.postgresql",
# Database from DATABASE_URL (dj-database-url convention).
DATABASES = {'default': dj_database_url.config()}

# Route host names from the Cloud Foundry application metadata.
vcap_app = json.loads(os.environ.get('VCAP_APPLICATION', '{}'))
ALLOWED_HOSTS = ['localhost'] + vcap_app.get('application_uris', [])

# Elasticsearch (haystack) from the bound swarm service, when present.
vcap_services = json.loads(os.environ.get('VCAP_SERVICES', '{}'))
es_config = vcap_services.get('elasticsearch-swarm-1.7.1', [])
if es_config:
    HAYSTACK_CONNECTIONS['default'] = {
        'ENGINE':
        'haystack.backends.elasticsearch_backend.ElasticsearchSearchEngine',
        'URL': es_config[0]['credentials']['uri'],
        'INDEX_NAME': 'eregs',
    }

# Celery broker / cache from the bound redis service.
# NOTE(review): `env` is defined earlier in this settings file — confirm.
redis = env.get_service(label='redis28-swarm')
if redis:
    # safe because: the password str is telling get_url what key to use when
    # looking up a value
    url = redis.get_url(  # nosec
        host='hostname', password='******', port='port')
    BROKER_URL = 'redis://{}'.format(url)
    CACHES['regs_gov_cache']['LOCATION'] = BROKER_URL
    CACHES['regs_gov_cache']['BACKEND'] = 'django_redis.cache.RedisCache'

# S3 credentials for attachment storage.
s3 = env.get_service(label='s3')
if s3:
    ATTACHMENT_ACCESS_KEY_ID = s3.credentials.get('access_key_id')
    ATTACHMENT_SECRET_ACCESS_KEY = s3.credentials.get('secret_access_key')
    ATTACHMENT_BUCKET = s3.credentials.get('bucket')
} vcap_app = json.loads(os.environ.get('VCAP_APPLICATION', '{}')) ALLOWED_HOSTS = ['localhost'] + vcap_app.get('application_uris', []) vcap_services = json.loads(os.environ.get('VCAP_SERVICES', '{}')) es_config = vcap_services.get('elasticsearch-swarm-1.7.1', []) if es_config: HAYSTACK_CONNECTIONS['default'] = { 'ENGINE': 'haystack.backends.elasticsearch_backend.ElasticsearchSearchEngine', 'URL': es_config[0]['credentials']['uri'], 'INDEX_NAME': 'eregs', } redis = env.get_service(label='redis28-swarm') if redis: # safe because: the password str is telling get_url what key to use when # looking up a value url = redis.get_url( # nosec host='hostname', password='******', port='port') BROKER_URL = 'redis://{}'.format(url) CACHES['regs_gov_cache']['LOCATION'] = BROKER_URL CACHES['regs_gov_cache']['BACKEND'] = 'django_redis.cache.RedisCache' s3 = env.get_service(label='s3') if s3: ATTACHMENT_ACCESS_KEY_ID = s3.credentials.get('access_key_id') ATTACHMENT_SECRET_ACCESS_KEY = s3.credentials.get('secret_access_key') ATTACHMENT_BUCKET = s3.credentials.get('bucket')
# BUG FIX: `os` was used throughout (os.getenv) but never imported in
# this module; import it explicitly.
import os
import json
import requests
import flask
from flask import Flask, globals, Response, request, g
from bert import run_classifier, tokenization, optimization
import numpy as np
from sapjwt import jwtValidation
from sap import xssec
from functools import wraps
from cfenv import AppEnv

app = Flask(__name__)
env = AppEnv()
uaaCredentials = env.get_service(label='xsuaa').credentials

# Model / server configuration from the environment.
MAX_SEQ_LENGTH = 128
LABELS_LIST = str(os.getenv('LABELS', '')).replace(" ", "").split(",")
VOCAB_FILE_PATH = str(os.getenv('VOCAB_FILE_PATH', ''))
MODEL_NAME = str(os.getenv('MODEL_NAME', ''))
MODEL_SERVER_HOST = str(os.getenv('MODEL_SERVER_HOST', ''))
# BUG FIX: int('') raises ValueError when MODEL_SERVER_PORT is unset;
# default to '0' (0 signals "not configured") instead of ''.
MODEL_SERVER_PORT = int(os.getenv('MODEL_SERVER_PORT', '0'))
ROOT_CERT = str(os.getenv('ROOT_CERT', '')).replace('\\n', '\n')


@app.before_request
def before_request():
    # Expose the UAA credentials on the per-request application context.
    g._uaaCredentials = uaaCredentials
from cfenv import AppEnv
import os

# SQLAlchemy configuration sourced from the bound AWS RDS PostgreSQL service.
env = AppEnv()
kavskdb = env.get_service(label='aws-rds-postgresql')
basedir = os.path.abspath(os.path.dirname(__file__))

SQLALCHEMY_ECHO = False
SQLALCHEMY_TRACK_MODIFICATIONS = True
# NOTE(review): raises AttributeError when the service is not bound
# (kavskdb is None) — confirm this module only loads on CF.
SQLALCHEMY_DATABASE_URI = kavskdb.credentials['uri']
#!/usr/bin/env python from importlib import import_module import os from flask import Flask, render_template, Response app = Flask(__name__) from cfenv import AppEnv env = AppEnv() statusService = env.get_service(name='status-topic') if statusService is None: statusKafka = "localhost:9092" statusTopic = "opencv-kafka-demo-status" else: statusKafka = statusService.credentials.get("hostname") statusTopic = statusService.credentials.get("topicName") import json from kafka import KafkaProducer statusProducer = KafkaProducer( value_serializer=lambda v: json.dumps(v).encode('utf-8'), bootstrap_servers=statusKafka) statusProducer.send(statusTopic, { "status": "starting", "client": "imagewatcher", "language": "python" }) # import camera driver if os.environ.get('CAMERA'):
from hdbcli import dbapi
import platform

# NOTE(review): Flask, AppEnv and os are imported elsewhere in the file.
app = Flask(__name__)
env = AppEnv()

# Get port from environment variable or choose 9099 as local default
# If you are testing locally (i.e. not with xs or cf deployments,
# Be sure to pull all the python modules locally
# with pip using XS_PYTHON unzipped to /tmp
#   mkdir -p local
#   pip install -t local -r requirements.txt -f /tmp
port = int(os.getenv("PORT", 9099))
hana = env.get_service(label='hana')


# This module's Flask webserver will respond to these three routes (URL paths)
# If there is no path then just return Hello World and this module's instance number
# Requests passed through the app-router will never hit this route.
@app.route('/')
def hello_world():
    # CF_INSTANCE_INDEX identifies which app instance served the request.
    output = '<strong>Hello World! I am instance ' + str(
        os.getenv("CF_INSTANCE_INDEX", 0)) + '</strong> Try these links.</br>\n'
    output += '<a href="/env">/env</a><br />\n'
    return output
from .base import *

DEBUG = False
TEMPLATE_DEBUG = False

ALLOWED_HOSTS = [
    '.everykidinapark.gov',  # Allow domain and subdomains
    'kids-prod.18f.gov',
    'ekip-prod.app.cloud.gov',  # Internal URL for production instance
    'kids.18f.gov',
    'ekip-staging.app.cloud.gov',  # Allow staging URL
    'kids-dev.18f.gov',
    'ekip-dev.app.cloud.gov'  # Allow development URL
]

# Prefer the Cloud Foundry bound database over the DATABASE_URL env var.
database_url = os.getenv('DATABASE_URL')
env = AppEnv()
cf_db = env.get_service(name=re.compile('ekip-db'))
if cf_db:
    database_url = cf_db.credentials['uri']

DATABASES = {}
DATABASES['default'] = dj_database_url.parse(database_url)
DATABASES['default']['CONN_MAX_AGE'] = 60

# Static-asset S3 settings: env-var defaults, overridden by the bound
# bucket service when present.
AWS_S3_REGION_NAME = os.getenv('EKIP_AWS_REGION')
AWS_ACCESS_KEY_ID = os.getenv('EKIP_AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.getenv('EKIP_AWS_SECRET_ACCESS_KEY')
AWS_STORAGE_BUCKET_NAME = os.getenv('EKIP_STATIC_BUCKET_NAME')
cf_s3 = env.get_service(name=re.compile('ekip-s3'))
if cf_s3:
    AWS_STORAGE_BUCKET_NAME = cf_s3.credentials['bucket']
    AWS_S3_REGION_NAME = cf_s3.credentials['region']
    AWS_ACCESS_KEY_ID = cf_s3.credentials['access_key_id']