def teardown(self):
    # Close the session.
    self._db.close()

    # Roll back all database changes that happened during this
    # test, whether in the session that was just closed or some
    # other session.
    self.transaction.rollback()

    # Remove any database objects cached in the model classes but
    # associated with the now-rolled-back session.
    Collection.reset_cache()
    ConfigurationSetting.reset_cache()
    DataSource.reset_cache()
    DeliveryMechanism.reset_cache()
    ExternalIntegration.reset_cache()
    Genre.reset_cache()
    Library.reset_cache()

    # Also roll back any record of those changes in the
    # Configuration instance.
    for key in [
        Configuration.SITE_CONFIGURATION_LAST_UPDATE,
        Configuration.LAST_CHECKED_FOR_SITE_CONFIGURATION_UPDATE
    ]:
        if key in Configuration.instance:
            del Configuration.instance[key]

    if self.search_mock:
        self.search_mock.stop()
def from_configuration(cls, _db, testing=False):
    """Return the logging policy as configured in the database.

    :param _db: A database connection. If None, the default
        logging policy will be used.

    :param testing: A boolean indicating whether a unit test is
        happening right now. If True, the database configuration
        will be ignored in favor of a known test-friendly policy.
        (It's okay to pass in False during a test *of this method*.)

    :return: A 3-tuple (internal_log_level, database_log_level,
        handlers). `internal_log_level` is the log level to be used
        for most log messages. `database_log_level` is the log level
        to be applied to the loggers for the database connector and
        other verbose third-party libraries. `handlers` is a list of
        Handler objects that will be associated with the top-level
        logger.
    """
    # Establish defaults, in case the database is not initialized or
    # it is initialized but logging is not configured.
    (internal_log_level, internal_log_format, database_log_level,
     message_template) = cls._defaults(testing)

    handlers = []

    from model import ExternalIntegration
    if _db and not testing:
        goal = ExternalIntegration.LOGGING_GOAL
        internal = ExternalIntegration.lookup(
            _db, ExternalIntegration.INTERNAL_LOGGING, goal
        )
        loggly = ExternalIntegration.lookup(
            _db, ExternalIntegration.LOGGLY, goal
        )
        if internal:
            internal_log_level = internal.setting(
                cls.LOG_LEVEL).setdefault(internal_log_level)
            internal_log_format = internal.setting(
                cls.LOG_FORMAT).setdefault(internal_log_format)
            database_log_level = internal.setting(
                cls.DATABASE_LOG_LEVEL).setdefault(database_log_level)
            message_template = internal.setting(
                cls.LOG_MESSAGE_TEMPLATE).setdefault(message_template)
        if loggly:
            handlers.append(cls.loggly_handler(loggly))

    # handlers is either empty or it contains a Loggly handler.
    # Let's also add a handler that logs to standard error.
    handlers.append(logging.StreamHandler())
    for handler in handlers:
        cls.set_formatter(handler, internal_log_format, message_template)

    return internal_log_level, database_log_level, handlers
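# Usage sketch (not from the source): one plausible way to wire the
# 3-tuple returned by from_configuration() above into the standard
# logging module. The class name `LogConfiguration`, the `_db`
# variable, and the "sqlalchemy.engine" logger name are illustrative
# assumptions.
import logging

internal_log_level, database_log_level, handlers = (
    LogConfiguration.from_configuration(_db))
root = logging.getLogger()
root.setLevel(internal_log_level)
for handler in handlers:
    root.addHandler(handler)
# The quieter level goes to the verbose database connector.
logging.getLogger("sqlalchemy.engine").setLevel(database_log_level)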
def from_configuration(cls, _db, testing=False):
    from model import (
        ExternalIntegration,
        ConfigurationSetting,
    )
    (internal_log_level, internal_log_format, database_log_level,
     message_template) = cls._defaults(testing)
    app_name = cls.DEFAULT_APP_NAME

    if _db and not testing:
        goal = ExternalIntegration.LOGGING_GOAL
        internal = ExternalIntegration.lookup(
            _db, ExternalIntegration.INTERNAL_LOGGING, goal
        )

        if internal:
            internal_log_format = (
                internal.setting(cls.LOG_FORMAT).value
                or internal_log_format
            )
            message_template = (
                internal.setting(cls.LOG_MESSAGE_TEMPLATE).value
                or message_template
            )

        internal_log_level = (
            ConfigurationSetting.sitewide(
                _db, Configuration.LOG_LEVEL).value
            or internal_log_level
        )
        database_log_level = (
            ConfigurationSetting.sitewide(
                _db, Configuration.DATABASE_LOG_LEVEL).value
            or database_log_level
        )
        app_name = (
            ConfigurationSetting.sitewide(
                _db, Configuration.LOG_APP_NAME).value
            or app_name
        )

    handler = logging.StreamHandler()
    cls.set_formatter(handler, internal_log_format, message_template, app_name)
    return (handler, internal_log_level, database_log_level)
def _external_integration(self, protocol, goal=None, settings=None,
                          libraries=None, **kwargs):
    integration = None
    if not libraries:
        integration, ignore = get_one_or_create(
            self._db, ExternalIntegration, protocol=protocol, goal=goal
        )
    else:
        if not isinstance(libraries, list):
            libraries = [libraries]

        # Try to find an existing integration for one of the given
        # libraries.
        for library in libraries:
            integration = ExternalIntegration.lookup(
                self._db, protocol, goal, library=library
            )
            if integration:
                break

        if not integration:
            # Otherwise, create a brand new integration specifically
            # for the library.
            integration = ExternalIntegration(
                protocol=protocol, goal=goal,
            )
            integration.libraries.extend(libraries)
            self._db.add(integration)

    for attr, value in kwargs.items():
        setattr(integration, attr, value)

    settings = settings or dict()
    for key, value in settings.items():
        integration.set_setting(key, value)

    return integration
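# Usage sketch (not from the source): creating a sitewide test
# integration with one setting via the helper above. The URL and the
# "token" setting key are placeholder values; `self` stands for a
# test fixture that defines _external_integration.
integration = self._external_integration(
    ExternalIntegration.LOGGLY,
    goal=ExternalIntegration.LOGGING_GOAL,
    url="https://example.com/inputs/",
    settings=dict(token="a-fake-token"),
)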
def __init__(self, _db, url=None, works_index=None):
    self.log = logging.getLogger("External search index")
    self.works_index = None
    self.works_alias = None
    integration = None

    if not _db:
        raise CannotLoadConfiguration(
            "Cannot load Elasticsearch configuration without a database.",
        )

    if not url or not works_index:
        integration = ExternalIntegration.lookup(
            _db, ExternalIntegration.ELASTICSEARCH,
            goal=ExternalIntegration.SEARCH_GOAL
        )
        if not integration:
            raise CannotLoadConfiguration(
                "No Elasticsearch integration configured."
            )
        url = url or integration.url
        if not works_index:
            setting = integration.setting(self.WORKS_INDEX_KEY)
            works_index = setting.value_or_default(
                self.DEFAULT_WORKS_INDEX
            )

    if not url:
        raise CannotLoadConfiguration(
            "No URL configured to Elasticsearch server."
        )

    if not ExternalSearchIndex.__client:
        use_ssl = url.startswith('https://')
        self.log.info(
            "Connecting to index %s in Elasticsearch cluster at %s",
            works_index, url
        )
        ExternalSearchIndex.__client = Elasticsearch(
            url, use_ssl=use_ssl, timeout=20, maxsize=25
        )

    self.indices = self.__client.indices
    self.search = self.__client.search
    self.index = self.__client.index
    self.delete = self.__client.delete
    self.exists = self.__client.exists

    # Sets self.works_index and self.works_alias values.
    # Document upload runs against the works_index.
    # Search queries run against works_alias.
    if works_index and integration:
        self.set_works_index_and_alias(works_index)
        self.update_integration_settings(integration)

    def bulk(docs, **kwargs):
        return elasticsearch_bulk(self.__client, docs, **kwargs)
    self.bulk = bulk
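# Usage sketch (not from the source): the constructor above exposes
# thin wrappers around the shared Elasticsearch client, so a caller
# might run a query like this. The query body is a generic
# elasticsearch-py example, not taken from this codebase.
search_index = ExternalSearchIndex(_db)
results = search_index.search(
    index=search_index.works_alias,
    body={"query": {"match": {"title": "moby dick"}}},
)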
def from_config(cls, library):
    _db = Session.object_session(library)
    integration = ExternalIntegration.lookup(
        _db, ExternalIntegration.MARC_EXPORT,
        ExternalIntegration.CATALOG_GOAL, library=library
    )
    if not integration:
        raise CannotLoadConfiguration(
            "No MARC export service is configured for this library"
        )
    return cls(_db, library, integration)
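# Usage sketch (not from the source): from_config() is the expected
# entry point given a Library object; the class name MARCExporter is
# an assumption here.
exporter = MARCExporter.from_config(library)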
def get_storage_settings(cls, _db):
    integrations = ExternalIntegration.for_goal(
        _db, ExternalIntegration.STORAGE_GOAL
    )
    cls.SETTING['options'] = [cls.DEFAULT_MIRROR_INTEGRATION]
    for integration in integrations:
        # Only add an integration to choose from if it has a
        # MARC File Bucket field in its settings. (Note that the
        # single-element unpacking below raises ValueError if the
        # integration does not have exactly one "marc_bucket" setting.)
        [configuration_setting] = [
            s for s in integration.settings if s.key == "marc_bucket"
        ]
        if configuration_setting.value:
            cls.SETTING['options'].append(
                dict(key=str(integration.id), label=integration.name)
            )
    return cls.SETTING
def for_collection(cls, collection, purpose):
    """Create a MirrorUploader for the given Collection.

    :param collection: Use the mirror configuration for this Collection.

    :param purpose: Use the purpose of the mirror configuration.

    :return: A MirrorUploader, or None if the Collection has no
        mirror integration.
    """
    from model import ExternalIntegration
    try:
        from model import Session
        _db = Session.object_session(collection)
        integration = ExternalIntegration.for_collection_and_purpose(
            _db, collection, purpose
        )
    except CannotLoadConfiguration as e:
        return None
def from_configuration(cls, _db, testing=False):
    settings = None
    cloudwatch = None
    app_name = cls.DEFAULT_APP_NAME

    if _db and not testing:
        goal = ExternalIntegration.LOGGING_GOAL
        settings = ExternalIntegration.lookup(
            _db, ExternalIntegration.CLOUDWATCH, goal
        )
        app_name = ConfigurationSetting.sitewide(
            _db, Configuration.LOG_APP_NAME).value or app_name

    if settings:
        cloudwatch = cls.get_handler(settings, testing)
        cls.set_formatter(cloudwatch, app_name)

    return cloudwatch
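# Usage sketch (not from the source): attaching the CloudWatch handler,
# if one was configured, to the root logger. The class name
# CloudwatchLogs is an assumption.
handler = CloudwatchLogs.from_configuration(_db)
if handler:
    logging.getLogger().addHandler(handler)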
def from_configuration(cls, _db, testing=False):
    loggly = None
    from model import (
        ExternalIntegration,
        ConfigurationSetting,
    )
    app_name = cls.DEFAULT_APP_NAME

    if _db and not testing:
        goal = ExternalIntegration.LOGGING_GOAL
        loggly = ExternalIntegration.lookup(
            _db, ExternalIntegration.LOGGLY, goal
        )
        app_name = ConfigurationSetting.sitewide(
            _db, Configuration.LOG_APP_NAME).value or app_name

    if loggly:
        loggly = Loggly.loggly_handler(loggly)
        cls.set_formatter(loggly, app_name)

    return loggly
def test_run(self):
    cmd_args = [
        "--vendor-id=LIBR",
        "--node-value=abc12",
        "--delegate=http://server1/AdobeAuth/",
        "--delegate=http://server2/AdobeAuth/",
    ]
    script = ConfigureVendorIDScript(self._db)
    script.do_run(self._db, cmd_args=cmd_args)

    # The ExternalIntegration is properly configured.
    integration = ExternalIntegration.lookup(
        self._db, ExternalIntegration.ADOBE_VENDOR_ID,
        ExternalIntegration.DRM_GOAL
    )
    eq_("LIBR", integration.setting(Configuration.ADOBE_VENDOR_ID).value)
    eq_(
        "abc12",
        integration.setting(Configuration.ADOBE_VENDOR_ID_NODE_VALUE).value
    )
    eq_(
        ["http://server1/AdobeAuth/", "http://server2/AdobeAuth/"],
        integration.setting(
            Configuration.ADOBE_VENDOR_ID_DELEGATE_URL).json_value
    )

    # The script won't run if --node-value or --delegate have obviously
    # wrong values.
    cmd_args = [
        "--vendor-id=LIBR",
        "--node-value=not a hex number",
    ]
    assert_raises_regexp(
        ValueError, "invalid literal for int",
        script.do_run, self._db, cmd_args=cmd_args
    )

    cmd_args = [
        "--vendor-id=LIBR",
        "--node-value=abce",
        "--delegate=http://random-site/",
    ]
    assert_raises_regexp(
        ValueError, "Invalid delegate: http://random-site/",
        script.do_run, self._db, cmd_args=cmd_args
    )
def vendor_id(cls, _db):
    """Look up the Adobe Vendor ID configuration for this registry.

    :return: a 3-tuple (vendor ID, node value, [delegates])
    """
    from model import ExternalIntegration
    integration = ExternalIntegration.lookup(
        _db, ExternalIntegration.ADOBE_VENDOR_ID,
        ExternalIntegration.DRM_GOAL
    )
    if not integration:
        return None, None, []

    setting = integration.setting(cls.ADOBE_VENDOR_ID_DELEGATE_URL)
    delegates = []
    try:
        delegates = setting.json_value or []
    except ValueError as e:
        cls.log.warn("Invalid Adobe Vendor ID delegates configured.")
def from_configuration(cls, _db, testing=False):
    (internal_log_format, message_template) = cls._defaults(testing)
    app_name = cls.DEFAULT_APP_NAME

    if _db and not testing:
        goal = ExternalIntegration.LOGGING_GOAL
        internal = ExternalIntegration.lookup(
            _db, ExternalIntegration.INTERNAL_LOGGING, goal
        )
        if internal:
            internal_log_format = (
                internal.setting(cls.LOG_FORMAT).value
                or internal_log_format
            )
            message_template = (
                internal.setting(cls.LOG_MESSAGE_TEMPLATE).value
                or message_template
            )
        app_name = ConfigurationSetting.sitewide(
            _db, Configuration.LOG_APP_NAME).value or app_name

    handler = logging.StreamHandler()
    cls.set_formatter(
        handler, log_format=internal_log_format,
        message_template=message_template, app_name=app_name
    )
    return handler
log = logging.getLogger(name="Core configuration import")

def log_import(integration_or_setting):
    log.info("CREATED: %r" % integration_or_setting)

try:
    Configuration.load()
    _db = production_session()

    # Import CDN configuration.
    cdn_conf = Configuration.integration(u'CDN')
    if cdn_conf and isinstance(cdn_conf, dict):
        for k, v in cdn_conf.items():
            cdn = EI(protocol=EI.CDN, goal=EI.CDN_GOAL)
            _db.add(cdn)
            cdn.url = unicode(v)
            cdn.setting(Configuration.CDN_MIRRORED_DOMAIN_KEY).value = unicode(k)
            log_import(cdn)

    # Import Elasticsearch configuration.
    elasticsearch_conf = Configuration.integration(u'Elasticsearch')
    if elasticsearch_conf:
        url = elasticsearch_conf.get('url')
        works_index = elasticsearch_conf.get(ExternalSearchIndex.WORKS_INDEX_KEY)
        integration = EI(protocol=EI.ELASTICSEARCH, goal=EI.SEARCH_GOAL)
        _db.add(integration)
        if url:
            integration.url = url
"""Move log details from the Configuration file into the database as ExternalIntegrations """ import os import sys import logging from nose.tools import set_trace bin_dir = os.path.split(__file__)[0] package_dir = os.path.join(bin_dir, "..") sys.path.append(os.path.abspath(package_dir)) from config import Configuration from model import ( ExternalIntegration as EI, production_session, ) _db = production_session() log = logging.getLogger(name="Log configuration import") loggly_conf = Configuration.integration(u'loggly') if loggly_conf: integration = EI(goal=EI.LOGGING_GOAL, protocol=EI.LOGGLY) _db.add(integration) integration.url = loggly_conf.get( 'url', 'https://logs-01.loggly.com/inputs/%(token)s/tag/python/') integration.password = loggly_conf.get('token') _db.commit()
import os
import sys
import logging
from nose.tools import set_trace

bin_dir = os.path.split(__file__)[0]
package_dir = os.path.join(bin_dir, "..")
sys.path.append(os.path.abspath(package_dir))

from model import (
    production_session,
    ExternalIntegration as EI,
)

_db = production_session()

try:
    integration = EI.lookup(_db, EI.METADATA_WRANGLER, EI.METADATA_GOAL)
    if integration:
        for setting in integration.settings:
            if setting.key == 'username':
                # A username (or client_id) is no longer required.
                _db.delete(setting)
            if setting.key == 'password':
                # The password (previously client_secret) must be reset to
                # register for a shared_secret.
                setting.value = None
    _db.commit()
    _db.close()
except Exception:
    _db.close()
    raise
def search_integration(cls, _db):
    """Look up the ExternalIntegration for ElasticSearch."""
    return ExternalIntegration.lookup(
        _db, ExternalIntegration.ELASTICSEARCH,
        goal=ExternalIntegration.SEARCH_GOAL
    )
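# Usage sketch (not from the source): callers can use the lookup above
# to fail fast when search is not configured. Attribution of the
# classmethod to ExternalSearchIndex is an assumption.
integration = ExternalSearchIndex.search_integration(_db)
if integration is None:
    raise CannotLoadConfiguration("No Elasticsearch integration configured.")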