def cached_app(config=None, testing=False):
    """Return the cached WSGI application, building it on first use.

    The app is created once, wrapped in a DispatcherMiddleware mounted at
    the path component of the configured webserver base_url, and memoized
    in the module-level ``app`` global.
    """
    global app
    if app:
        return app
    # Mount point is the path part of webserver/base_url; root mounts at "".
    mount_path = urlparse(conf.get('webserver', 'base_url'))[2]
    if mount_path in ("", "/"):
        mount_path = ""
    app = create_app()
    app = DispatcherMiddleware(root_app, {mount_path: app})
    return app
def get_ldap_connection(dn=None, password=None):
    """Open and bind an LDAP connection using the [ldap] config section.

    If a ``cacert`` option is configured, the connection is made over SSL
    with certificate validation; otherwise a plain connection is used.

    :param dn: distinguished name to bind as (None for anonymous)
    :param password: password for the bind
    :return: a bound ldap3 Connection
    :raises Exception: when the bind is rejected by the server
    """
    tls_configuration = None
    use_ssl = False
    try:
        cacert = conf.get("ldap", "cacert")
        tls_configuration = Tls(validate=ssl.CERT_REQUIRED, ca_certs_file=cacert)
        use_ssl = True
    except Exception:
        # Fix: was a bare ``except:`` which also swallowed SystemExit/
        # KeyboardInterrupt. "cacert" is an optional setting; a missing key
        # just means we connect without SSL.
        log.debug("No ldap/cacert configured; connecting without SSL")

    server = Server(conf.get("ldap", "uri"), use_ssl, tls_configuration)
    conn = Connection(server, native(dn), native(password))

    if not conn.bind():
        log.error("Cannot bind to ldap server: %s ", conn.last_error)
        raise Exception("Cannot bind to ldap server")

    return conn
def connect(self):
    """Open a Neo4j driver session, tracking consecutive failures.

    Gives up with DatabookException after more than 10 failed attempts.
    On success resets ``self.attempts`` and sets ``self.connected``; on
    ServiceUnavailable increments ``self.attempts`` and re-raises.

    :return: a new neo4j driver session
    :raises DatabookException: when the retry budget is exhausted
    :raises ServiceUnavailable: when the server cannot be reached
    """
    neo4j_url = conf.get('graphdb', 'neo4j_url')
    neo4j_user = conf.get('graphdb', 'neo4j_user')
    neo4j_pass = conf.get('graphdb', 'neo4j_pass')
    if self.attempts > 10:
        raise DatabookException("Attempted to connect > 10 times")
    try:
        # Fix: the previous log line included the plaintext password,
        # leaking credentials into log files. Log only url and user.
        log.info("Connecting %s as %s", neo4j_url, neo4j_user)
        self.driver = GraphDatabase.driver(
            neo4j_url, auth=basic_auth(neo4j_user, neo4j_pass))
        session = self.driver.session()
        self.connected = True
        self.attempts = 0
        return session
    except ServiceUnavailable:
        log.error("Neo4j is not available")
        self.connected = False
        self.attempts += 1
        raise
def try_login(username, password):
    """Authenticate *username*/*password* against LDAP.

    First binds with the configured service account to locate the user's
    DN, then attempts a second bind as that DN with the supplied password.

    :return: a DefaultUser for the authenticated username
    :raises Exception: when the user is not found, the LDAP structure
        cannot be parsed, or the credential bind fails
    """
    conn = get_ldap_connection(conf.get("ldap", "bind_user"),
                               conf.get("ldap", "bind_password"))

    search_filter = "(&({0})({1}={2}))".format(
        conf.get("ldap", "user_filter"),
        conf.get("ldap", "user_name_attr"),
        username)

    # Fix: removed an unused ``search_scopes`` mapping that was never read.
    # Default to LEVEL; allow SUBTREE (e.g. for Active Directory) via config.
    search_scope = LEVEL
    if conf.has_option("ldap", "search_scope"):
        search_scope = SUBTREE if conf.get(
            "ldap", "search_scope") == "SUBTREE" else LEVEL

    # todo: BASE or ONELEVEL?
    res = conn.search(native(conf.get("ldap", "basedn")),
                      native(search_filter),
                      search_scope=native(search_scope))

    # todo: use list or result?
    if not res:
        log.info("Cannot find user %s", username)
        raise Exception("Invalid username or password")

    entry = conn.response[0]
    conn.unbind()

    if 'dn' not in entry:
        # The search filter for the user did not return any values, so an
        # invalid user was used for credentials.
        raise Exception("Invalid username or password")

    try:
        # Re-bind as the located user to verify the supplied password.
        conn = get_ldap_connection(entry['dn'], password)
    except KeyError as e:
        log.error("""
        Unable to parse LDAP structure. If you're using Active Directory
        and not specifying an OU, you must set search_scope=SUBTREE in airflow.cfg.
        %s
        """ % traceback.format_exc())
        raise Exception(
            "Could not parse LDAP structure. Try setting search_scope in airflow.cfg, or check logs"
        )

    if not conn:
        log.info("Password incorrect for user %s", username)
        raise Exception("Invalid username or password")

    return DefaultUser(username)
def configure_logging():
    """Initialise logging from the user's config or the packaged default.

    Looks up ``core/logging_config_class``; when set, imports that dotted
    path and requires it to resolve to a dict. Otherwise falls back to
    DEFAULT_LOGGING_CONFIG. The resulting dict is fed to dictConfig.

    :return: the logging configuration dict that was applied
    :raises ImportError: when the custom config path cannot be loaded
    :raises ValueError: when the applied config is structurally invalid
    """
    logging_class_path = ''
    try:
        # Prepare the classpath so we are sure that the config folder
        # is on the python classpath and it is reachable
        prepare_classpath()
        logging_class_path = conf.get('core', 'logging_config_class')
    except DatabookConfigException:
        log.debug('Could not find key logging_config_class in config')

    if logging_class_path:
        try:
            logging_config = import_string(logging_class_path)

            # Fix: was ``assert isinstance(...)`` — asserts are stripped
            # under ``python -O``, so validate with an explicit raise.
            if not isinstance(logging_config, dict):
                raise ValueError('Logging config must be a dict')

            log.info(
                'Successfully imported user-defined logging config from %s',
                logging_class_path)
        except Exception as err:
            # Import default logging configurations.
            raise ImportError(
                'Unable to load custom logging from {} due to {}'.format(
                    logging_class_path, err))
    else:
        from databook.config_templates.databook_local_settings import (
            DEFAULT_LOGGING_CONFIG as logging_config)
        log.debug(
            'Unable to load custom logging, using default config instead')

    try:
        # Try to init logging
        dictConfig(logging_config)
    except ValueError as e:
        log.warning(
            'Unable to load the config, contains a configuration error.')
        # When there is an error in the config, escalate the exception
        # otherwise Databook would silently fall back on the default config
        raise e

    return logging_config
from elasticsearch import Elasticsearch

from databook import configuration as conf

base_url = conf.get('elasticsearch', 'base_url')
es = Elasticsearch([base_url])


def search_elastic(searchterm, doc_type=None, page_start=0, size=20):
    """Run a paged full-text match against the "dataportal-node" index.

    :param searchterm: text matched against the document "name" field
    :param doc_type: optional document type to restrict the search to
    :param page_start: offset of the first hit to return
    :param size: maximum number of hits to return
    :return: the raw Elasticsearch search response
    """
    match_clause = {"match": {"name": searchterm}}
    bool_query = {
        "must": [match_clause]
        # "must_not": [{"match": {"description": "beta"}}],
        # "filter": [{"term": {"category": "search"}}]
    }
    body = {
        "from": page_start,
        "size": size,
        "query": {"bool": bool_query},
    }
    if doc_type is None:
        return es.search(index="dataportal-node", body=body)
    return es.search(index="dataportal-node", doc_type=doc_type, body=body)
# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os from databook import configuration as conf # TODO: Logging format and level should be configured # in this file instead of from databook.cfg. LOG_LEVEL = conf.get('core', 'LOGGING_LEVEL').upper() LOG_FORMAT = conf.get('core', 'LOG_FORMAT') BASE_LOG_FOLDER = conf.get('core', 'BASE_LOG_FOLDER') DEFAULT_LOGGING_CONFIG = { 'version': 1, 'disable_existing_loggers': False, 'formatters': { 'databook': { 'format': LOG_FORMAT, }, }, 'handlers': { 'console': {
def prepare_classpath():
    """Make the user's config directory importable.

    Appends ``<databook_home>/config`` (with ``~`` expanded) to sys.path
    unless it is already present.
    """
    cfg_dir = os.path.expanduser(
        os.path.join(conf.get('core', 'databook_home'), 'config'))
    if cfg_dir not in sys.path:
        sys.path.append(cfg_dir)
def configure_vars():
    """Resolve the module-level DATABOOK_HOME from configuration.

    Reads ``core/DATABOOK_HOME`` and expands a leading ``~`` to the
    user's home directory.
    """
    global DATABOOK_HOME
    raw_home = conf.get('core', 'DATABOOK_HOME')
    DATABOOK_HOME = os.path.expanduser(raw_home)
HEADER = """\ ____ __ __ __ / __ \____ _/ /_____ _/ /_ ____ ____ / /__ / / / / __ `/ __/ __ `/ __ \/ __ \/ __ \/ //_/ / /_/ / /_/ / /_/ /_/ / /_/ / /_/ / /_/ / ,< /_____/\__,_/\__/\__,_/_.___/\____/\____/_/|_| """ BASE_LOG_URL = '/admin/databook/log' LOGGING_LEVEL = logging.INFO # the prefix to append to gunicorn worker processes after init GUNICORN_WORKER_READY_PREFIX = "[ready] " LOG_FORMAT = conf.get('core', 'log_format') SIMPLE_LOG_FORMAT = conf.get('core', 'simple_log_format') DATABOOK_HOME = None def prepare_classpath(): config_path = os.path.join(conf.get('core', 'databook_home'), 'config') config_path = os.path.expanduser(config_path) if config_path not in sys.path: sys.path.append(config_path) def configure_logging(): logging_class_path = ''