works_index)

        log_import(integration)

    # Import S3 configuration.
    s3_conf = Configuration.integration('S3')
    if s3_conf:
        # The S3 access key and secret key become the integration's
        # username and password; pop() removes them so that only
        # bucket settings remain in s3_conf for the loop below.
        username = s3_conf.pop('access_key', None)
        password = s3_conf.pop('secret_key', None)

        integration = EI(protocol=EI.S3, goal=EI.STORAGE_GOAL)
        _db.add(integration)
        integration.username = username
        integration.password = password

        S3_SETTINGS = [
            S3Uploader.BOOK_COVERS_BUCKET_KEY,
            S3Uploader.OA_CONTENT_BUCKET_KEY,
        ]
        # Any remaining keys should be known bucket settings.
        for k, v in s3_conf.items():
            if k not in S3_SETTINGS:
                log.warning('No ExternalIntegration goal for "%s" S3 bucket' % k)
                continue
            integration.setting(unicode(k)).value = unicode(v)

        log_import(integration)

finally:
    _db.commit()
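
The excerpt above relies on names defined in the elided top of the script: the S3Uploader bucket-key constants and a log_import() helper that is called after each integration is created. The real log_import() is not shown here; a minimal sketch consistent with how it is used, assuming it only reports what was migrated, might be:

def log_import(integration):
    # Hypothetical sketch; the actual helper is defined in the part of
    # the script not shown. Reports the integration just created.
    log.info('CREATED: %r integration with goal %r' % (
        integration.protocol, integration.goal))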
Example 2
"""Move log details from the Configuration file into the
database as ExternalIntegrations
"""

import os
import sys
import logging
from nose.tools import set_trace

bin_dir = os.path.split(__file__)[0]
package_dir = os.path.join(bin_dir, "..")
sys.path.append(os.path.abspath(package_dir))

from config import Configuration
from model import (
    ExternalIntegration as EI,
    production_session,
)

_db = production_session()
log = logging.getLogger(name="Log configuration import")
loggly_conf = Configuration.integration(u'loggly')

if loggly_conf:
    integration = EI(goal=EI.LOGGING_GOAL, protocol=EI.LOGGLY)
    _db.add(integration)
    integration.url = loggly_conf.get(
        'url', 'https://logs-01.loggly.com/inputs/%(token)s/tag/python/')
    # The Loggly token is stored as the integration's password.
    integration.password = loggly_conf.get('token')
_db.commit()
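
Once the migration has run, the result can be spot-checked straight from the database. The loop below is only a sketch: it assumes that _db (returned by production_session()) behaves like a standard SQLAlchemy session and that goal, protocol and url are ordinary mapped attributes of ExternalIntegration, as the code above suggests.

# Hypothetical verification sketch under the assumptions stated above.
for integration in _db.query(EI).filter(EI.goal == EI.LOGGING_GOAL):
    log.info('Migrated %s integration: url=%s' % (
        integration.protocol, integration.url))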