def test_credentials_missing_relationship_index_throws(self):
        """Requesting an out-of-range index on a relationship raises KeyError."""
        config = Config(self.mockEnvironmentDeploy)
        with self.assertRaises(KeyError):
            config.credentials('database', 3)
    def test_credentials_missing_relationship_throws(self):
        """Requesting a relationship that is not defined raises KeyError."""
        config = Config(self.mockEnvironmentDeploy)
        with self.assertRaises(KeyError):
            config.credentials('does-not-exist')
def usage_example():
    """Demonstrate a round trip to MongoDB using Platform.sh credentials."""
    # Read the Platform.sh environment variables through a Config object.
    # You can alternatively use os.environ yourself.
    config = Config()

    # 'database' is the conventional relationship name for an application's
    # primary data store; here it points at a MongoDB service.
    credentials = config.credentials('database')

    try:
        # Build a MongoDB connection URI from the raw credentials plus the
        # pymongo-formatted host portion.
        host_part = config.formatted_credentials('database', 'pymongo')
        uri = '{0}://{1}:{2}@{3}'.format(credentials['scheme'],
                                         credentials['username'],
                                         credentials['password'],
                                         host_part)

        client = MongoClient(uri)
        collection = client.main.starwars

        # Insert a document, read it back by id, then clean up after ourselves.
        record = {"name": "Rey", "occupation": "Jedi"}
        record_id = collection.insert_one(record).inserted_id
        document = collection.find_one({"_id": record_id})
        collection.drop()

        return 'Found {0} ({1})<br />'.format(document['name'],
                                              document['occupation'])

    except Exception as e:
        return e
def usage_example():
    """Demonstrate writing and reading a value through a Memcached service."""
    # Read the Platform.sh environment variables through a Config object.
    # You can alternatively use os.environ yourself.
    config = Config()

    # Credentials for the 'memcached' relationship.
    credentials = config.credentials('memcached')

    try:
        # Connect to the Memcached server.
        address = (credentials['host'], credentials['port'])
        memcached = pymemcache.Client(address)
        memcached.set('Memcached::OPT_BINARY_PROTOCOL', True)

        key, value = "Deploy_day", "Friday"

        # Write the value, then read it back.
        memcached.set(key, value)
        stored = memcached.get(key)

        return 'Found value <strong>{0}</strong> for key <strong>{1}</strong>.'.format(
            stored.decode("utf-8"), key)

    except Exception as e:
        return e
# Example #5
def usage_example():
    """Demonstrate writing and reading a value through a Redis service."""
    # Read the Platform.sh environment variables through a Config object.
    # You can alternatively use os.environ yourself.
    config = Config()

    # Credentials for the 'redis' relationship.
    credentials = config.credentials('redis')

    try:
        client = Redis(credentials['host'], credentials['port'])

        key, value = "Deploy day", "Friday"

        # Write the value, then read it back.
        client.set(key, value)
        stored = client.get(key)

        return 'Found value <strong>{0}</strong> for key <strong>{1}</strong>.'.format(
            stored.decode("utf-8"), key)

    except Exception as e:
        return e
    def test_credentials_existing_relationship_returns(self):
        """An existing relationship yields its credential dictionary."""
        config = Config(self.mockEnvironmentDeploy)
        creds = config.credentials('database')
        self.assertEqual('mysql', creds['scheme'])
        self.assertEqual('mysql:10.2', creds['type'])
    def test_credentials_work_in_local(self):
        """Credentials still resolve when runtime-only variables are absent."""
        env = self.mockEnvironmentDeploy
        # Strip the variables that mark a full Platform.sh runtime environment.
        for name in ('PLATFORM_APPLICATION', 'PLATFORM_ENVIRONMENT',
                     'PLATFORM_BRANCH'):
            env.pop(name, None)

        creds = Config(env).credentials('database')
        self.assertEqual('mysql', creds['scheme'])
        self.assertEqual('mysql:10.2', creds['type'])
def usage_example():
    """Demonstrate publishing and consuming a message through RabbitMQ."""
    # Read the Platform.sh environment variables through a Config object.
    # You can alternatively use os.environ yourself.
    config = Config()

    # Credentials for the 'rabbitmq' relationship.
    credentials = config.credentials('rabbitmq')

    try:
        # Open a connection to the RabbitMQ server.
        auth = pika.PlainCredentials(credentials['username'],
                                     credentials['password'])
        params = pika.ConnectionParameters(credentials['host'],
                                           credentials['port'],
                                           credentials=auth)
        connection = pika.BlockingConnection(params)
        channel = connection.channel()

        # Make sure the recipient queue exists before using it.
        channel.queue_declare(queue='deploy_days')

        # Publish a message onto the queue.
        channel.basic_publish(exchange='',
                              routing_key='deploy_days',
                              body='Friday!')

        # Handler invoked once per delivered message.
        def on_message(ch, method, properties, body):
            print(" [x] Received {}".format(body))

        # Register the handler against the 'deploy_days' queue.
        channel.basic_consume('deploy_days', on_message, auto_ack=False)

        # start_consuming() blocks waiting for an item from the queue, so it
        # stays commented out in this demo script.
        # print(' [*] Waiting for messages. To exit press CTRL+C')
        # channel.start_consuming()

        connection.close()

        return " [x] Sent 'Friday!'<br/>"

    except Exception as e:
        return e
# Example #9
def usage_example():
    """Demonstrate producing and consuming messages through a Kafka service."""
    # Read the Platform.sh environment variables through a Config object.
    # You can alternatively use os.environ yourself.
    config = Config()
    # Credentials for the 'kafka' relationship.
    credentials = config.credentials('kafka')

    try:
        bootstrap = '{}:{}'.format(credentials['host'], credentials['port'])

        # Produce ten JSON-encoded messages.
        producer = KafkaProducer(
            bootstrap_servers=[bootstrap],
            value_serializer=lambda payload: dumps(payload).encode('utf-8'))
        for i in range(10):
            producer.send('numtest', value={'number': i})

        # Consume them back, starting from the beginning of the topic.
        consumer = KafkaConsumer(bootstrap_servers=[bootstrap],
                                 auto_offset_reset='earliest')
        consumer.subscribe(['numtest'])

        # Pull exactly ten messages so this demo never blocks indefinitely.
        output = ''
        for _ in range(10):
            message = next(consumer)
            output += str(loads(
                message.value.decode('UTF-8'))["number"]) + ', '

        # A real implementation would iterate the consumer directly instead:
        # for message in consumer:
        #     output += loads(message.value.decode('UTF-8'))["number"]

        return output

    except Exception as e:
        return e
# Example #10
def usage_example():
    """Demonstrate adding, selecting and deleting a document in Solr."""
    # Read the Platform.sh environment variables through a Config object.
    # You can alternatively use os.environ yourself.
    config = Config()

    # Raw credentials for the 'solr' relationship (the formatted URL below is
    # what pysolr actually consumes).
    credentials = config.credentials('solr')

    try:
        endpoint = config.formatted_credentials('solr', 'pysolr')

        # Client pointed at the Solr core for this relationship.
        client = pysolr.Solr(endpoint)

        message = ''
        doc_1 = {"id": 123, "name": "Valentina Tereshkova"}

        # Add one document and commit it; the Solr XML response carries the
        # status code in its first nested element.
        add_response = client.add([doc_1])
        client.commit()
        message += 'Adding one document. Status (0 is success): {0} <br />'.format(
            et.fromstring(add_response)[0][0].text)

        # Select everything; exactly one hit is expected.
        query = client.search('*:*')
        message += '\nSelecting documents (1 expected): {0} <br />'.format(
            str(query.hits))

        # Delete the document again and commit.
        delete_response = client.delete(doc_1['id'])
        client.commit()
        message += '\nDeleting one document. Status (0 is success): {0}'.format(
            et.fromstring(delete_response)[0][0].text)

        return message

    except Exception as e:
        return e
# Example #11
STATIC_URL = '/static/'
# Fallback STATIC_ROOT for local testing outside Platform.sh.
STATIC_ROOT = os.path.join(BASE_DIR, 'static')

# Overlay Platform.sh-provided settings from the environment when present.
config = Config()
if config.is_valid_platform():

    app_dir = config.appDir
    if app_dir:
        STATIC_ROOT = os.path.join(app_dir, 'static')
    entropy = config.projectEntropy
    if entropy:
        SECRET_KEY = entropy

    # Relationship credentials only resolve at runtime, not during the build.
    if not config.in_build():
        creds = config.credentials('database')
        DATABASES = {
            'default': {
                'ENGINE': 'django.db.backends.postgresql',
                'NAME': creds['path'],
                'USER': creds['username'],
                'PASSWORD': creds['password'],
                'HOST': creds['host'],
                'PORT': creds['port'],
            },
            'sqlite': {
                'ENGINE': 'django.db.backends.sqlite3',
                'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
            },
        }
# Example #12
def usage_example():
    """Demonstrate creating, populating, querying and dropping a PostgreSQL table.

    Returns an HTML table of the inserted rows, or the raised exception on
    failure.
    """
    # Create a new Config object to ease reading the Platform.sh environment
    # variables. You can alternatively use os.environ yourself.
    config = Config()

    # The 'postgresql' relationship holds the credentials for the primary SQL
    # database of this application.
    database = config.credentials('postgresql')

    try:
        # Connect to the database.
        conn_params = {
            'host': database['host'],
            'port': database['port'],
            'dbname': database['path'],
            'user': database['username'],
            'password': database['password']
        }

        conn = psycopg2.connect(**conn_params)

        # Open a cursor to perform database operations.
        cur = conn.cursor()

        # Start from a clean slate in case a previous run left the table behind.
        cur.execute("DROP TABLE IF EXISTS People")

        # Creating a table.
        sql = '''
                CREATE TABLE IF NOT EXISTS People (
                id SERIAL PRIMARY KEY,
                name VARCHAR(30) NOT NULL,
                city VARCHAR(30) NOT NULL
                )
                '''

        cur.execute(sql)

        # Insert data.
        sql = '''
                INSERT INTO People (name, city) VALUES
                ('Neil Armstrong', 'Moon'),
                ('Buzz Aldrin', 'Glen Ridge'),
                ('Sally Ride', 'La Jolla');
                '''

        cur.execute(sql)

        # Show table.
        sql = '''SELECT * FROM People'''
        cur.execute(sql)
        result = cur.fetchall()

        table = '''<table>
<thead>
<tr><th>Name</th><th>City</th></tr>
</thead>
<tbody>'''

        if result:
            for record in result:
                # BUG FIX: rows previously ended with a stray '<tr>' instead
                # of the closing '</tr>', producing malformed HTML.
                table += '''<tr><td>{0}</td><td>{1}</td></tr>\n'''.format(
                    record[1], record[2])
            table += '''</tbody>\n</table>\n'''

        # Drop table
        sql = "DROP TABLE People"
        cur.execute(sql)

        # Close communication with the database
        cur.close()
        conn.close()

        return table

    except Exception as e:
        return e
except AttributeError:
    port = 0

# Bind address for the local server; `port` is set by the try/except that
# precedes this fragment (falls back to 0 on AttributeError).
HOST, PORT = "127.0.0.1", port

# PostgreSQL database connection string (data source name)
# including both the credentials and the server address.
SQLALCHEMY_DATABASE_URI = config.formatted_credentials("database",
                                                       "postgresql_dsn")

# Redis cache configuration without Sentinel support.
# This requires the no-sentinel branch of the PyBossa
# repository in order to work.
REDIS_SENTINEL = []
REDIS_CACHE_ENABLED = True
# NOTE(review): credentials("cache") is queried twice here; assumed to be a
# side-effect-free lookup — confirm against the Config implementation.
REDIS_HOST = config.credentials("cache")["host"]
REDIS_PORT = config.credentials("cache")["port"]
REDIS_KEYPREFIX = "pybossa_cache"
REDIS_MASTER = "mymaster"
REDIS_DB = 0

# Session secrets, automatically derived from the default
# platform.sh entropy, which is created during the
# first deployment and doesn't change over time.
ITSDANGEROUSKEY = config["PROJECT_ENTROPY"]
SECRET_KEY = config["PROJECT_ENTROPY"]
SECRET = config["PROJECT_ENTROPY"]

# File upload support, enabled for project thumbnails.
# You may need to provision more capacity in
# .platform/services.yaml for other uses.
# Example #14
def usage_example():
    """Demonstrate the shape of InfluxDB data points for a Platform.sh service.

    No InfluxDB round trip is actually performed; the function only reads the
    service credentials, builds a sample payload and reports success. On any
    failure it returns the traceback text and the exception type.
    """
    # Create a new Config object to ease reading the Platform.sh environment
    # variables. You can alternatively use os.environ yourself.
    config = Config()

    # Get the credentials to connect to the InfluxDB service.
    credentials = config.credentials('influxdb')

    try:
        # Sample measurements that a real client would write via
        # InfluxDBClient(host=credentials['host'], port=credentials['port']).
        # (The previous revision carried the full write/query/drop flow as
        # commented-out code; it has been removed as dead code.)
        points = [{
            "deploy_time": 0.64,
            "time": 1546556400,
            "fields": {
                "host": "server01",
                "region": "us-west"
            },
            "additional": {
                "cpucount": 10
            }
        }, {
            "deploy_time": 0.84,
            "time": 1547161200,
            "fields": {
                "host": "server01",
                "region": "us-west"
            },
            "additional": {
                "cpucount": 10
            }
        }]

        return 'success'

    except Exception as e:
        return traceback.format_exc(), sys.exc_info()[0]
# Example #15
def usage_example():
    """Demonstrate creating, populating, querying and dropping a MySQL table.

    Returns an HTML table of the inserted rows, or the raised exception on
    failure.
    """
    # Create a new Config object to ease reading the Platform.sh environment
    # variables. You can alternatively use os.environ yourself.
    config = Config()

    # The 'database' relationship is generally the name of the primary SQL
    # database of an application.
    credentials = config.credentials('database')

    try:
        # Connect to the database. If using some other abstraction layer you
        # would inject the values from `credentials` into whatever your
        # abstraction layer asks for.
        conn = pymysql.connect(host=credentials['host'],
                               port=credentials['port'],
                               database=credentials['path'],
                               user=credentials['username'],
                               password=credentials['password'])

        sql = '''
                CREATE TABLE People (
                id SERIAL PRIMARY KEY,
                name VARCHAR(30) NOT NULL,
                city VARCHAR(30) NOT NULL
                )
                '''

        cur = conn.cursor()
        cur.execute(sql)

        sql = '''
                INSERT INTO People (name, city) VALUES
                ('Neil Armstrong', 'Moon'),
                ('Buzz Aldrin', 'Glen Ridge'),
                ('Sally Ride', 'La Jolla');
                '''

        cur.execute(sql)

        # Show table.
        sql = '''SELECT * FROM People'''
        cur.execute(sql)
        result = cur.fetchall()

        table = '''<table>
<thead>
<tr><th>Name</th><th>City</th></tr>
</thead>
<tbody>'''

        if result:
            for record in result:
                # BUG FIX: close each row with '</tr>' (previously a stray
                # '<tr>', producing malformed HTML).
                table += '''<tr><td>{0}</td><td>{1}</td></tr>\n'''.format(
                    record[1], record[2])
            table += '''</tbody>\n</table>\n'''

        # Drop table
        sql = '''DROP TABLE People'''
        cur.execute(sql)

        # Close communication with the database
        cur.close()
        conn.close()

        return table

    except Exception as e:
        return e
def usage_example():
    """Demonstrate indexing, searching and deleting documents in Elasticsearch.

    Returns an HTML table of the search hits, or the raised exception on
    failure.
    """
    # Create a new Config object to ease reading the Platform.sh environment
    # variables. You can alternatively use os.environ yourself.
    config = Config()

    # Get the credentials to connect to the Elasticsearch service.
    credentials = config.credentials('elasticsearch')

    try:
        # The Elasticsearch library lets you connect to multiple hosts.
        # On Platform.sh Standard there is only a single host so just register that.
        hosts = {
            "scheme": credentials['scheme'],
            "host": credentials['host'],
            "port": credentials['port']
        }

        # Create an Elasticsearch client object.
        client = elasticsearch.Elasticsearch([hosts])

        # Index a few documents
        es_index = 'my_index'
        es_type = 'People'

        params = {
            "index": es_index,
            "type": es_type,
            "body": {"name": ''}
        }

        names = ['Ada Lovelace', 'Alonzo Church', 'Barbara Liskov']

        # Map each name to its indexing response so the documents can be
        # deleted by id afterwards.
        ids = {}

        for name in names:
            params['body']['name'] = name
            ids[name] = client.index(index=params["index"], doc_type=params["type"], body=params['body'])

        # Force just-added items to be indexed.
        client.indices.refresh(index=es_index)

        # Search for documents.
        result = client.search(index=es_index, body={
            'query': {
                'match': {
                    'name': 'Barbara Liskov'
                }
            }
        })

        table = '''<table>
<thead>
<tr><th>ID</th><th>Name</th></tr>
</thead>
<tbody>'''

        if result['hits']['hits']:
            for record in result['hits']['hits']:
                # BUG FIX: close each row with '</tr>' (previously a stray
                # '<tr>', producing malformed HTML).
                table += '''<tr><td>{0}</td><td>{1}</td></tr>\n'''.format(record['_id'], record['_source']['name'])
            table += '''</tbody>\n</table>\n'''

        # Delete documents.
        params = {
            "index": es_index,
            "type": es_type,
        }

        for name in names:
            client.delete(index=params['index'], doc_type=params['type'], id=ids[name]['_id'])

        return table

    except Exception as e:
        return e