each=ConfigSection( help="Information about a single HDFS cluster", members=dict( # Deprecated NN_HOST=Config("namenode_host", help="Host/IP for name node"), NN_THRIFT_PORT=Config("thrift_port", help="Thrift port for name node", default=10090, type=int), NN_HDFS_PORT=Config("hdfs_port", help="Hadoop IPC port for the name node", default=8020, type=int), # End deprecation FS_DEFAULTFS=Config( "fs_defaultfs", help="The equivalent of fs.defaultFS (aka fs.default.name)", default="hdfs://localhost:8020"), LOGICAL_NAME=Config("logical_name", default="", type=str, help=_t('NameNode logical name.')), WEBHDFS_URL=Config( "webhdfs_url", help="The URL to WebHDFS/HttpFS service. Defaults to " + "the WebHDFS URL on the NameNode.", type=str, default=None), NN_KERBEROS_PRINCIPAL=Config( "nn_kerberos_principal", help="Kerberos principal for NameNode", default="hdfs", type=str), DN_KERBEROS_PRINCIPAL=Config( "dn_kerberos_principal", help="Kerberos principal for DataNode", default="hdfs", type=str), SECURITY_ENABLED=Config( "security_enabled", help="Is running with Kerberos authentication", default=False, type=coerce_bool), TEMP_DIR=Config("temp_dir", help="HDFS directory for temporary files", default='/tmp', type=str), HADOOP_HDFS_HOME=Config( key="hadoop_hdfs_home", default=os.environ.get("HADOOP_HDFS_HOME", "/usr/lib/hadoop-hdfs"), help= ("Path to Hadoop HDFS home - HADOOP_HOME or HADOOP_HDFS_HOME in " + "hadoop parlance. For tarball installations, it is the root of " + "the untarred directory. For packages, " + "it is /usr/lib/hadoop-hdfs." + "Defaults to the environment varible HADOOP_BIN when set, " + "or '/usr/bin/hadoop'."), ), HADOOP_BIN=Config( key="hadoop_bin", default=os.environ.get("HADOOP_BIN", "/usr/bin/hadoop"), help= ("Path to your Hadoop launcher script. E.g. /usr/bin/hadoop. 
" + "Defaults to the environment varible HADOOP_BIN when set, " + "or '/usr/bin/hadoop'.")), HADOOP_CONF_DIR=Config( key="hadoop_conf_dir", default=os.environ.get( "HADOOP_CONF_DIR", "/etc/hadoop/conf"), help= ("Directory to pass to hadoop_bin (from Hadoop configuration) " + "as the --config flag. Defaults to the environment variable " + "HADOOP_CONF_DIR when set, or '/etc/hadoop/conf'.")), )))
# SMTP settings used by Hue to deliver email notifications.
SMTP = ConfigSection(
  key='smtp',
  help=_('Configuration options for connecting to an external SMTP server.'),
  members=dict(
    HOST=Config(
      key="host",
      help=_("The SMTP server for email notification delivery."),
      type=str,
      default="localhost"),
    PORT=Config(
      key="port",
      help=_("The SMTP server port."),
      type=int,
      default=25),
    USER=Config(
      key="user",
      help=_("The username for the SMTP host."),
      type=str,
      default=""),
    PASSWORD=Config(
      key="password",
      help=_("The password for the SMTP user."),
      type=str,
      private=True,  # keep the credential out of dumped configuration
      default=""),
    USE_TLS=Config(
      key="tls",
      help=_("Whether to use a TLS (secure) connection when talking to the SMTP server."),
      type=coerce_bool,
      default=False),
    DEFAULT_FROM=Config(
      key="default_from_email",
      help=_("Default email address to use for various automated notifications from Hue."),
      type=str,
      # Computed at read time so it can reflect the running host.
      dynamic_default=default_from_email),
  )
)
def get_default_abfs_fs(): return ABFS_CLUSTERS['default'].FS_DEFAULTFS.get() ADLS_CLUSTERS = UnspecifiedConfigSection( "adls_clusters", help="One entry for each ADLS cluster", each=ConfigSection( help="Information about a single ADLS cluster", members=dict( FS_DEFAULTFS=Config( "fs_defaultfs", help="adl://<account_name>.azuredatalakestore.net", type=str, default=None), WEBHDFS_URL=Config( "webhdfs_url", help="https://<account_name>.azuredatalakestore.net/webhdfs/v1", type=str, default=None), ))) AZURE_ACCOUNTS = UnspecifiedConfigSection( "azure_accounts", help="One entry for each Azure account", each=ConfigSection( help="Information about a single azure account", members=dict( CLIENT_ID=Config( key="client_id",
each=ConfigSection( help='Information about single AWS account', members=dict( ACCESS_KEY_ID=Config(key='access_key_id', type=str, private=True, dynamic_default=get_default_access_key_id), ACCESS_KEY_ID_SCRIPT=Config( key='access_key_id_script', default=None, private=True, type=coerce_password_from_script, help=_( "Execute this script to produce the AWS access key ID.")), SECRET_ACCESS_KEY=Config(key='secret_access_key', type=str, private=True, dynamic_default=get_default_secret_key), SECRET_ACCESS_KEY_SCRIPT=Config( key='secret_access_key_script', default=None, private=True, type=coerce_password_from_script, help=_( "Execute this script to produce the AWS secret access key." )), SECURITY_TOKEN=Config( key='security_token', type=str, private=True, ), ALLOW_ENVIRONMENT_CREDENTIALS=Config( help= 'Allow to use environment sources of credentials (environment variables, EC2 profile).', key='allow_environment_credentials', default=True, type=coerce_bool), REGION=Config(key='region', default='us-east-1', type=str))))
each=ConfigSection( help=_('Information about single AWS account'), members=dict( ACCESS_KEY_ID=Config(key='access_key_id', type=str, private=True, dynamic_default=get_default_access_key_id), ACCESS_KEY_ID_SCRIPT=Config( key='access_key_id_script', default=None, private=True, type=coerce_password_from_script, help=_( "Execute this script to produce the AWS access key ID.")), SECRET_ACCESS_KEY=Config(key='secret_access_key', type=str, private=True, dynamic_default=get_default_secret_key), SECRET_ACCESS_KEY_SCRIPT=Config( key='secret_access_key_script', default=None, private=True, type=coerce_password_from_script, help=_( "Execute this script to produce the AWS secret access key." )), SECURITY_TOKEN=Config( key='security_token', type=str, private=True, ), ALLOW_ENVIRONMENT_CREDENTIALS=Config( help= _('Allow to use environment sources of credentials (environment variables, EC2 profile).' ), key='allow_environment_credentials', default=True, type=coerce_bool), REGION=Config(key='region', default='us-east-1', type=str), HOST=Config(help=_('Alternate address for the S3 endpoint.'), key='host', default=None, type=str), PROXY_ADDRESS=Config( help=_('Proxy address to use for the S3 connection.'), key='proxy_address', default=None, type=str), PROXY_PORT=Config( help=_('Proxy port to use for the S3 connection.'), key='proxy_port', default=8080, type=int), PROXY_USER=Config( help=_('Proxy user to use for the S3 connection.'), key='proxy_user', default=None, type=str), PROXY_PASS=Config( help=_('Proxy password to use for the S3 connection.'), key='proxy_pass', default=None, type=str), CALLING_FORMAT=Config(key='calling_format', default=DEFAULT_CALLING_FORMAT, type=str), IS_SECURE=Config(key='is_secure', default=True, type=coerce_bool))))
default="beeswax") QUERY_SERVERS = UnspecifiedConfigSection( "query_servers", help=_("One entry for each Query Server that can execute some queries."), each=ConfigSection( help=_("Information about a single Query Server"), members=dict( SERVER_HOST = Config( key="server_host", help=_("Host where the Query Server Thrift daemon is running."), private=True, default="localhost"), SERVER_PORT = Config( key="server_port", help=_("Configure the port the Query Server Thrift server."), default=8002, type=int), SUPPORT_DDL = Config( key='support_ddl', default=True, type=coerce_bool, help=_('If DDL queries are supported (e.g. DROP can be sent directly to this server).')) ) ) ) # Deprecated! To remove in Hue 3 # Multiple sections are now available in QUERY_SERVERS BEESWAX_SERVER_HOST = Config( key="beeswax_server_host",
each=ConfigSection( help="Information about a single HDFS cluster", members=dict( FS_DEFAULTFS=Config( "fs_defaultfs", help="The equivalent of fs.defaultFS (aka fs.default.name)", default="hdfs://localhost:8020"), LOGICAL_NAME=Config("logical_name", default="", type=str, help=_t('NameNode logical name.')), WEBHDFS_URL=Config( "webhdfs_url", help="The URL to WebHDFS/HttpFS service. Defaults to " + "the WebHDFS URL on the NameNode.", type=str, default="http://localhost:50070/webhdfs/v1"), NN_KERBEROS_PRINCIPAL=Config( "nn_kerberos_principal", help="Kerberos principal for NameNode", # Unused default="hdfs", type=str), DN_KERBEROS_PRINCIPAL=Config( "dn_kerberos_principal", help="Kerberos principal for DataNode", # Unused default="hdfs", type=str), SECURITY_ENABLED=Config( "security_enabled", help="Is running with Kerberos authentication", default=False, type=coerce_bool), SSL_CERT_CA_VERIFY=Config( "ssl_cert_ca_verify", help= "In secure mode (HTTPS), if SSL certificates from YARN Rest APIs have to be verified against certificate authority", dynamic_default=default_ssl_validate, type=coerce_bool), TEMP_DIR=Config("temp_dir", help="HDFS directory for temporary files", default='/tmp', type=str), HADOOP_CONF_DIR=Config( key="hadoop_conf_dir", default=os.environ.get( "HADOOP_CONF_DIR", "/etc/hadoop/conf"), help= ("Directory of the Hadoop configuration) Defaults to the environment variable " + "HADOOP_CONF_DIR when set, or '/etc/hadoop/conf'.")))))
each=ConfigSection( help=_t("RDBMS server configuration."), members=dict( NICE_NAME=Config( key='nice_name', help=_t('Nice name of server.'), type=str, default=None, ), NAME=Config( key='name', help=_t( 'Database name or path to database file. If provided, then choosing other databases will not be permitted.' ), type=str, default='', ), ENGINE=Config( key='engine', help=_t( 'Database engine, such as postgresql_psycopg2, mysql, or sqlite3.' ), type=coerce_database, default='django.db.backends.sqlite3', ), USER=Config( key='user', help=_t('Database username.'), type=str, default='', ), PASSWORD=Config( key='password', help=_t('Database password.'), type=str, default='', ), PASSWORD_SCRIPT=Config( key='password_script', help=_t( 'Execute this script to produce the database password. This will be used when `password` is not set.' ), type=coerce_password_from_script, default=None, ), HOST=Config( key='host', help=_t('Database host.'), type=str, default='', ), PORT=Config( key='port', help=_t('Database port.'), type=int, default=0, ), OPTIONS=Config( key='options', help=_t( 'Database options to send to the server when connecting.'), type=coerce_json_dict, default='{}'))))
each=ConfigSection( help="Information about a single HDFS cluster", members=dict( NN_HOST=Config("namenode_host", help="IP for name node"), NN_THRIFT_PORT=Config("thrift_port", help="Thrift port for name node", default=10090, type=int), NN_HDFS_PORT=Config("hdfs_port", help="Hadoop IPC port for the name node", default=8020, type=int), NN_HTTP_PORT=Config("http_port", help="Hadoop HTTP port for the name node", default=50070, type=int), NN_KERBEROS_PRINCIPAL=Config( "nn_kerberos_principal", help="Kerberos principal for NameNode", default="hdfs", type=str), DN_KERBEROS_PRINCIPAL=Config( "dn_kerberos_principal", help="Kerberos principal for DataNode", default="hdfs", type=str), SECURITY_ENABLED=Config( "security_enabled", help="Is running with Kerberos authentication", default=False, type=coerce_bool), TEMP_DIR=Config("temp_dir", help="HDFS directory for temporary files", default='/tmp', type=str), )))
# limitations under the License.

"""
Configuration options for the Shell UI.

This file specifies the structure that the shell configuration should follow.
See conf/hue.ini to configure which shells are available.
"""

from desktop.lib.conf import Config, ConfigSection, UnspecifiedConfigSection

import shell.utils as utils

# One subsection per shell type exposed in the UI; each entry names the shell,
# the command used to launch it, and any extra environment variables to set.
SHELL_TYPES = UnspecifiedConfigSection(
  key='shelltypes',
  each=ConfigSection(
    members=dict(
      nice_name=Config(key='nice_name', required=True),
      command=Config(key='command', required=True),
      help_doc=Config(key='help', required=False),
      environment=UnspecifiedConfigSection(
        key='environment',
        each=ConfigSection(
          members=dict(
            value=Config(key='value', required=True),
            doc=Config(key='doc', required=False)))))))

# Size (in characters) of the per-shell output buffer kept server-side.
SHELL_BUFFER_AMOUNT = Config(
  key="shell_buffer_amount",
  help="Configure the number of output characters buffered for each shell",
  default=524288,
  type=int)

# Idle shells are closed after this many seconds.
SHELL_TIMEOUT = Config(
  key="shell_timeout",
  help="Number of seconds to keep shells open for users",
  default=600,
  type=int)
else: return value CLUSTERS = UnspecifiedConfigSection( "clusters", help="One entry for each Zookeeper cluster", each=ConfigSection( help="Information about a single Zookeeper cluster", members=dict( HOST_PORTS=Config( "host_ports", help= "Zookeeper ensemble. Comma separated list of Host/Port, e.g. localhost:2181,localhost:2182,localhost:2183", default="localhost:2181", type=coerce_string, ), REST_URL=Config( "rest_url", help="The URL of the REST contrib service.", default="http://localhost:9998", type=str, ), PRINCIPAL_NAME=Config( "principal_name", help="Name of Kerberos principal when using security", default="zookeeper", type=str, ), )))
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
Configuration options for the reverse sparksql application.
"""

from django.utils.translation import ugettext_lazy as _

from desktop.lib.conf import Config, ConfigSection

# Connection settings for the Spark SQL data service backing this app.
# Help texts fixed: they previously read "Oauth 1.0 authentication" /
# "Consumer key of the application" — stale copy/paste from an OAuth conf
# that mis-described what these options actually configure.
SPARK_SERVICE = ConfigSection(
  key='spark-service',
  help=_('Configuration options for the Spark SQL data service.'),
  members=dict(
    SPARK_SQL_URL=Config(
      key="spark_sql_url",
      help=_("Base URL of the Spark SQL data service."),
      type=str,
      default="http://localhost:8080/di-data-service/"
    )
  )
)
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
Configuration options for the reverse monitor application.
"""

from django.utils.translation import ugettext_lazy as _

from desktop.lib.conf import Config, ConfigSection

# Connection settings for the DI data service backing the monitor app.
# Help texts fixed: they previously read "Oauth 1.0 authentication" /
# "Consumer key of the application" — stale copy/paste from an OAuth conf
# that mis-described what these options actually configure.
DATA_SERVICE = ConfigSection(
  key='di-service',
  help=_('Configuration options for the DI data service.'),
  members=dict(DI_DATA_SERVICE_URL=Config(
    key="di_data_service_url",
    help=_("Base URL of the DI data service."),
    type=str,
    default="http://localhost:8080/di-data-service/")))
LOG = logging.getLogger(__name__)


def has_kafka():
  """Whether the Kafka integration is switched on in the configuration."""
  return KAFKA.IS_ENABLED.get()


def has_kafka_api():
  """Whether a Kafka REST API endpoint has been configured."""
  return bool(KAFKA.API_URL.get())


# [kafka] section: endpoints for the Kafka ecosystem services Hue talks to.
KAFKA = ConfigSection(
  key='kafka',
  help=_t("""Configuration options for Kafka API integration"""),
  members=dict(
    IS_ENABLED=Config(
      key="is_enabled",
      help=_t("Enable the Kafka integration."),
      type=coerce_bool,
      default=False),
    API_URL=Config(
      key='api_url',
      help=_t('URL of Kafka REST API.'),
      default=None),
    KSQL_API_URL=Config(
      key='ksql_api_url',
      help=_t('URL of ksqlDB API.'),
      default='http://localhost:8088'),
    SCHEMA_REGISTRY_API_URL=Config(
      key='schema_registry_api_url',
      help=_t('URL of Schema Registry API.'),
      default='http://localhost:8081'),
  ))
each=ConfigSection( help="Information about a single HDFS cluster", members=dict( FS_DEFAULTFS=Config( "fs_defaultfs", help="The equivalent of fs.defaultFS (aka fs.default.name)", default="hdfs://localhost:8020"), LOGICAL_NAME=Config("logical_name", default="", type=str, help=_t('NameNode logical name.')), WEBHDFS_URL=Config( "webhdfs_url", help="The URL to WebHDFS/HttpFS service. Defaults to " + "the WebHDFS URL on the NameNode.", type=str, default="http://localhost:50070/webhdfs/v1"), NN_KERBEROS_PRINCIPAL=Config( "nn_kerberos_principal", help="Kerberos principal for NameNode", default="hdfs", type=str), DN_KERBEROS_PRINCIPAL=Config( "dn_kerberos_principal", help="Kerberos principal for DataNode", default="hdfs", type=str), SECURITY_ENABLED=Config( "security_enabled", help="Is running with Kerberos authentication", default=False, type=coerce_bool), TEMP_DIR=Config("temp_dir", help="HDFS directory for temporary files", default='/tmp', type=str), )))
else: from django.utils.translation import ugettext_lazy as _t LOG = logging.getLogger(__name__) def has_kafka(): return KAFKA.IS_ENABLED.get() def has_kafka_api(): return bool(KAFKA.API_URL.get()) KAFKA = ConfigSection( key='kafka', help=_t("""Configuration options for Kafka API integration"""), members=dict( IS_ENABLED=Config(key="is_enabled", help=_t("Enable the Kafka integration."), type=coerce_bool, default=False), # Deprecated API_URL=Config(key='api_url', help=_t('Base URL of Kafka REST API.'), default=None), KSQL_API_URL=Config(key='ksql_api_url', help=_t('Base URL of ksqlDB API.'), default='http://127.0.0.1:8088'), ))
# [ssl] section: TLS material and validation behavior for this server.
SSL = ConfigSection(
  key='ssl',
  help=_t('SSL configuration for the server.'),
  members=dict(
    ENABLED=Config(
      key="enabled",
      help=_t("SSL communication enabled for this server."),
      type=coerce_bool,
      default=False),
    CACERTS=Config(
      key="cacerts",
      help=_t("Path to Certificate Authority certificates."),
      type=str,
      # Resolved lazily so the platform default bundle can be picked up.
      dynamic_default=default_ssl_cacerts,
    ),
    KEY=Config(
      key="key",
      help=_t("Path to the private key file, e.g. /etc/hue/key.pem"),
      type=str,
      default=None),
    CERT=Config(
      key="cert",
      help=_t("Path to the public certificate file, e.g. /etc/hue/cert.pem"),
      type=str,
      default=None),
    VALIDATE=Config(
      key="validate",
      help=_t("Choose whether Hue should validate certificates received from the server."),
      type=coerce_bool,
      dynamic_default=default_ssl_validate,
    )))
import logging

from django.utils.translation import ugettext_lazy as _t

from desktop.lib.conf import Config, ConfigSection, coerce_bool

LOG = logging.getLogger(__name__)


def has_kafka():
  """Whether the Kafka integration is switched on in the configuration."""
  return KAFKA.IS_ENABLED.get()


def has_kafka_api():
  """Whether a Kafka REST API endpoint has been configured."""
  return bool(KAFKA.API_URL.get())


# [kafka] section: toggles and endpoint for the Kafka management features.
KAFKA = ConfigSection(
  key='kafka',
  help=_t("""Configuration options for Kafka management"""),
  members=dict(
    IS_ENABLED=Config(
      key="is_enabled",
      help=_t("Enable the Kafka integration."),
      type=coerce_bool,
      default=False),
    API_URL=Config(
      key='api_url',
      help=_t('Base URL of Kafka REST API.'),
      default=None),
  ))
# [password_policy] section: optional strength requirements for user passwords.
PASSWORD_POLICY = ConfigSection(
  key="password_policy",
  help=_("Configuration options for user password policy"),
  members=dict(
    IS_ENABLED=Config(
      key="is_enabled",
      help=_("Enable user password policy."),
      type=coerce_bool,
      default=False),
    PWD_RULE=Config(
      key="pwd_regex",
      help=_("The regular expression of password rule. The default rule requires that "
             "a password must be at least 8 characters long, and must contain both "
             "uppercase and lowercase letters, at least one number, and at least one "
             "special character."),
      type=str,
      # Raw string: \d and \W are invalid escape sequences in a plain literal
      # (SyntaxWarning since Python 3.12). The resulting value is unchanged.
      default=r"^(?=.*?[A-Z])(?=(.*[a-z]){1,})(?=(.*[\d]){1,})(?=(.*[\W_]){1,}).{8,}$"),
    PWD_HINT=Config(
      key="pwd_hint",
      help=_("Message about the password rule defined in pwd_regex"),
      type=str,
      default="The password must be at least 8 characters long, and must contain both "
              "uppercase and lowercase letters, at least one number, and at least "
              "one special character."),
    PWD_ERROR_MESSAGE=Config(
      key="pwd_error_message",
      help=_("The error message displayed if the provided password does not "
             "meet the enhanced password rule"),
      type=str,
      default="The password must be at least 8 characters long, and must contain both "
              "uppercase and lowercase letters, at least one number, and at least "
              "one special character.")
  )
)
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from desktop.lib.conf import Config, UnspecifiedConfigSection, ConfigSection, validate_path, coerce_bool

# One subsection per Zookeeper ensemble Hue can talk to.
CLUSTERS = UnspecifiedConfigSection(
  "clusters",
  help="One entry for each Zookeeper cluster",
  each=ConfigSection(
    help="Information about a single Zookeeper cluster",
    members=dict(
      HOST_PORTS=Config(
        "host_ports",
        help="Zookeeper ensemble. Comma separated list of Host/Port, e.g. localhost:2181,localhost:2182,localhost:2183",
        default="localhost:2181",
      ),
      REST_URL=Config(
        "rest_url",
        help="The URL of the REST contrib service.",
        default="http://localhost:9998",
      ),
    )))
# [optimizer] section: NavOpt/Optimizer API connection (Altus key-pair auth
# variant) plus workload-upload toggles.
# NOTE(review): relies on helpers defined elsewhere in this file
# (get_optimizer_password_script, get_security_default, DEFAULT_CLUSTER_ID) —
# presumably resolved at import time; DEFAULT_CLUSTER_ID.get() is evaluated
# once when the module loads, not lazily.
OPTIMIZER = ConfigSection( key='optimizer', help=_t("""Configuration options for Optimizer API"""), members=dict( HOSTNAME=Config( key='hostname', help=_t('Hostname to Optimizer API or compatible service.'), default='navoptapi.us-west-1.optimizer.altus.cloudera.com'), AUTH_KEY_ID=Config(key="auth_key_id", help=_t("The name of the key of the service."), private=False, default=None), AUTH_KEY_SECRET=Config( key="auth_key_secret", help=_t( "The private part of the key associated with the auth_key."), private=True, dynamic_default=get_optimizer_password_script), AUTH_KEY_SECRET_SCRIPT=Config( key="auth_key_secret_script", help=_t( "Execute this script to produce the auth_key secret. This will be used when `auth_key_secret` is not set." ), private=True, type=coerce_password_from_script, default=None), TENANT_ID=Config( key="tenant_id", help=_t( "The name of the workload where queries are uploaded and optimizations are calculated from. Automatically guessed from auth_key and cluster_id if not specified." ), private=True, default=None), CLUSTER_ID=Config( key="cluster_id", help=_t( "The name of the cluster used to determine the tenant id when this one is not specified. Defaults to the cluster Id or 'default'." ), private=True, default=DEFAULT_CLUSTER_ID.get()), APPLY_SENTRY_PERMISSIONS=Config( key="apply_sentry_permissions", help=_t( "Perform Sentry privilege filtering. Default to true automatically if the cluster is secure." ), dynamic_default=get_security_default, type=coerce_bool), CACHEABLE_TTL=Config( key='cacheable_ttl', type=int, help=_t( 'The cache TTL in milliseconds for the assist/autocomplete/etc calls. Set to 0 to disable the cache.' ), default=10 * 24 * 60 * 60 * 1000), AUTO_UPLOAD_QUERIES=Config( key="auto_upload_queries", help=_t( "Automatically upload queries after their execution in order to improve recommendations."
), default=True, type=coerce_bool), AUTO_UPLOAD_DDL=Config( key="auto_upload_ddl", help=_t( "Automatically upload queried tables DDL in order to improve recommendations." ), default=True, type=coerce_bool), AUTO_UPLOAD_STATS=Config( key="auto_upload_stats", help=_t( "Automatically upload queried tables and columns stats in order to improve recommendations." ), default=False, type=coerce_bool), QUERY_HISTORY_UPLOAD_LIMIT=Config( key="query_history_upload_limit", help=_t( "Allow admins to upload the last N executed queries in the quick start wizard. Use 0 to disable." ), default=10000, type=int), ))
# [optimizer] section: Optimizer API connection (product/email credential
# variant) plus the TTL for caching assist/autocomplete responses.
# NOTE(review): *_SCRIPT members let deployments produce secrets via an
# external command instead of storing them in the ini; get_auth_username is
# defined elsewhere in this file.
OPTIMIZER = ConfigSection( key='optimizer', help=_t("""Configuration options for Optimizer API"""), members=dict( CACHEABLE_TTL=Config( key='cacheable_ttl', type=int, help=_t( 'The cache TTL in milliseconds for the assist/autocomplete/etc calls. Set to 0 to disable the cache.' ), default=432000000), API_URL=Config( key='api_url', help=_t( 'Base URL to Optimizer API (e.g. - https://alpha.optimizer.cloudera.com/)' ), default=None), PRODUCT_NAME=Config( key="product_name", help=_t( "The name of the product or group which will have API access to the emails associated with it." ), private=True, default=None), PRODUCT_SECRET=Config( key="product_secret", help=_t("A secret passphrase associated with the productName."), private=True, default=None), PRODUCT_SECRET_SCRIPT=Config( key="product_secret_script", help=_t( "Execute this script to produce the product secret. This will be used when `product_secret` is not set." ), private=True, type=coerce_password_from_script, default=None), PRODUCT_AUTH_SECRET=Config( key="product_auth_secret", help=_t("A secret passphrase associated with the productName."), private=True, default=None), PRODUCT_AUTH_SECRET_SCRIPT=Config( key="product_auth_secret_script", help=_t( "Execute this script to produce the product secret. This will be used when `product_secret` is not set." ), private=True, type=coerce_password_from_script, default=None), EMAIL=Config( key="email", help=_t( "The email of the Optimizer account you want to associate with the Product." ), private=True, dynamic_default=get_auth_username), EMAIL_PASSWORD=Config( key="email_password", help=_t( "The password associated with the Optimizer account you to associate with the Product." ), private=True, default=None), EMAIL_PASSWORD_SCRIPT=Config( key="password_script", help=_t( "Execute this script to produce the email password. This will be used when `email_password` is not set."
), private=True, type=coerce_password_from_script, default=None), SSL_CERT_CA_VERIFY=Config( key="ssl_cert_ca_verify", help=_t( "In secure mode (HTTPS), if Optimizer SSL certificates have to be verified against certificate authority" ), dynamic_default=default_ssl_validate, type=coerce_bool), ))
# cf. admin wizard too INTERPRETERS = UnspecifiedConfigSection( "interpreters", help="One entry for each type of snippet.", each=ConfigSection( help=_t( "Define the name and how to connect and execute the language."), members=dict( NAME=Config( "name", help=_t("The name of the snippet."), default="SQL", type=str, ), INTERFACE=Config( "interface", help= "The backend connection to use to communicate with the server.", default="hiveserver2", type=str, ), OPTIONS=Config( key='options', help=_t('Specific options for connecting to the server.'), type=coerce_json_dict, default='{}')))) INTERPRETERS_SHOWN_ON_WHEEL = Config( key="interpreters_shown_on_wheel", help=_t( "Comma separated list of interpreters that should be shown on the wheel. "
each=ConfigSection( help="Information about a single Zookeeper cluster", members=dict( ZK_HOST_PORTS=Config( "zk_host_ports", help="Zookeeper ensemble. Comma separated list of Host/Port, e.g. localhost:2181,localhost:2182,localhost:2183", default="localhost:2181", type=coerce_string, ), ZK_REST_URL=Config( "zk_rest_url", help="The URL of the REST contrib service.", default="http://localhost:9998", type=str, ), BROKERS_PATH=Config( "brokers_path", help="Path to brokers info in Zookeeper Znode hierarchy, e.g. /brokers/ids", default="/brokers/ids", type=str, ), CONSUMERS_PATH=Config( "consumers_path", help="Path to consumers info in Zookeeper Znode hierarchy, e.g. /consumers", default="/consumers", type=str, ), TOPICS_PATH=Config( "topics_path", help="Path to topics info in Zookeeper Znode hierarchy, e.g. /brokers/topics", default="/brokers/topics", type=str, ), ) )
# [optimizer] section: NavOpt/Optimizer API connection, workload-upload
# toggles, and editor-predict settings.
OPTIMIZER = ConfigSection(
  key='optimizer',
  help=_t("""Configuration options for Optimizer API"""),
  members=dict(
    MODE=Config(
      key='mode',
      help=_t('Mode of optimization: off, local, api.'),
      default='off'
    ),
    INTERFACE=Config(
      key='interface',
      help=_t('Type of optimizer connector to use, e.g. optimizer, navopt, dummy.'),
      default='navopt'
    ),
    HOSTNAME=Config(
      key='hostname',
      help=_t('Hostname of Optimizer API service.'),
      default='navoptapi.us-west-1.optimizer.altus.cloudera.com'
    ),
    AUTH_KEY_ID=Config(
      key="auth_key_id",
      help=_t("The name of the key of the service."),
      private=False,
      default=None
    ),
    AUTH_KEY_SECRET=Config(
      key="auth_key_secret",
      help=_t("The private part of the key associated with the auth_key."),
      private=True,
      dynamic_default=get_optimizer_password_script
    ),
    AUTH_KEY_SECRET_SCRIPT=Config(
      key="auth_key_secret_script",
      help=_t("Execute this script to produce the auth_key secret. This will be used when `auth_key_secret` is not set."),
      private=True,
      type=coerce_password_from_script,
      default=None
    ),
    TENANT_ID=Config(
      key="tenant_id",
      help=_t("The name of the workload where queries are uploaded and optimizations are calculated from. "
              "Automatically guessed from auth_key and cluster_id if not specified."),
      private=True,
      default=None
    ),
    CLUSTER_ID=Config(
      key="cluster_id",
      help=_t("The name of the cluster used to determine the tenant id when this one is not specified. "
              "Defaults to the cluster Id or 'default'."),
      private=True,
      # Evaluated once at module import time, not lazily.
      default=DEFAULT_CLUSTER_ID.get()
    ),
    APPLY_SENTRY_PERMISSIONS=Config(
      key="apply_sentry_permissions",
      help=_t("Perform Sentry privilege filtering. Default to true automatically if the cluster is secure."),
      dynamic_default=get_security_default,
      type=coerce_bool
    ),
    CACHEABLE_TTL=Config(
      key='cacheable_ttl',
      type=int,
      # Fix: this help string literal was broken by a raw line break inside
      # the single quotes (a syntax error); rejoined into one literal.
      help=_t('The cache TTL in milliseconds for the assist/autocomplete/etc calls. Set to 0 to disable the cache.'),
      default=10 * 24 * 60 * 60 * 1000
    ),
    AUTO_UPLOAD_QUERIES=Config(
      key="auto_upload_queries",
      help=_t("Automatically upload queries after their execution in order to improve recommendations."),
      default=True,
      type=coerce_bool
    ),
    AUTO_UPLOAD_DDL=Config(
      key="auto_upload_ddl",
      help=_t("Automatically upload queried tables DDL in order to improve recommendations."),
      default=True,
      type=coerce_bool
    ),
    AUTO_UPLOAD_STATS=Config(
      key="auto_upload_stats",
      help=_t("Automatically upload queried tables and columns stats in order to improve recommendations."),
      default=False,
      type=coerce_bool
    ),
    ENABLE_PREDICT=Config(
      key="enable_predict",
      help=_t("Enables the predict API for editor typeahead."),
      default=False,
      type=coerce_bool
    ),
    QUERY_HISTORY_UPLOAD_LIMIT=Config(
      key="query_history_upload_limit",
      help=_t("Allow admins to upload the last N executed queries in the quick start wizard. Use 0 to disable."),
      default=10000,
      type=int
    ),
  )
)
'analytics': settings.get('sql') and settings['sql'].get('analytics'), 'nesting': settings.get('sql') and settings['sql'].get('nesting'), } for interpreter in get_ordered_interpreters(user) if interpreter['interface'] in ('hiveserver2', 'jdbc', 'rdbms')] return engines ENGINES = UnspecifiedConfigSection( "engines", help="One entry for each type of snippet.", each=ConfigSection(help=_t( "Name of the interface to use as query engine for the dashboard, e.g. solr, sql." ), members=dict( ANALYTICS=Config( "analytics", help=_t("Support analytics facets or not."), default=False, type=coerce_bool, ), NESTING=Config( "nesting", help=_t("Support nested documents or not."), default=False, type=coerce_bool, ), )))
# [optimizer] section: Optimizer API connection (product/email credential
# variant) including a mock-data switch for development.
# NOTE(review): PRODUCT_SECRET and PRODUCT_AUTH_SECRET both fall back to
# get_auth_password (defined elsewhere in this file); the *_SCRIPT members let
# deployments produce secrets via an external command instead of the ini.
OPTIMIZER = ConfigSection( key='optimizer', help=_t("""Configuration options for Optimizer API"""), members=dict( API_URL=Config( key='api_url', help= _t('Base URL to Optimizer API (e.g. - https://alpha.optimizer.cloudera.com/)' ), default=None), PRODUCT_NAME=Config( key="product_name", help=_t( "The name of the product or group which will have API access to the emails associated with it." ), private=True, default=None), PRODUCT_SECRET=Config( key="product_secret", help=_t("A secret passphrase associated with the productName."), private=True, dynamic_default=get_auth_password), PRODUCT_SECRET_SCRIPT=Config( key="product_secret_script", help=_t( "Execute this script to produce the product secret. This will be used when `product_secret` is not set." ), private=True, type=coerce_password_from_script, default=None), PRODUCT_AUTH_SECRET=Config( key="product_auth_secret", help=_t("A secret passphrase associated with the productName."), private=True, dynamic_default=get_auth_password), PRODUCT_AUTH_SECRET_SCRIPT=Config( key="product_auth_secret_script", help=_t( "Execute this script to produce the product secret. This will be used when `product_secret` is not set." ), private=True, type=coerce_password_from_script, default=None), EMAIL=Config( key="email", help=_t( "The email of the Optimizer account you want to associate with the Product." ), private=True, dynamic_default=get_auth_username), EMAIL_PASSWORD=Config( key="email_password", help=_t( "The password associated with the Optimizer account you to associate with the Product." ), private=True, dynamic_default=get_auth_password), EMAIL_PASSWORD_SCRIPT=Config( key="password_script", help=_t( "Execute this script to produce the email password. This will be used when `email_password` is not set."
), private=True, type=coerce_password_from_script, default=None), SSL_CERT_CA_VERIFY=Config( key="ssl_cert_ca_verify", help=_t( "In secure mode (HTTPS), if Optimizer SSL certificates have to be verified against certificate authority" ), dynamic_default=default_ssl_validate, type=coerce_bool), MOCKING=Config(key="mocking", help=_t("Use mock data"), default=False, type=coerce_bool)))
default="100MB") HUE_ENV = Config( key = "hue_env", help = "The environment of HUE: DEV/TEST/PROD", private = True, type = ucase, default = "DEV") DATABASES = UnspecifiedConfigSection( key = "databases", help = "The databases are supported for exporting", each = ConfigSection(members=dict( DEFAULT_SCHEMA = Config(key="default_schema", help = "The default schema of the databse for exporting", private = True), DEFAULT_TABLESPACE = Config(key="default_tablespace", help = "The default tablespace of the databse for exporting", private = True))) ) EXPORT_DBS = UnspecifiedConfigSection( key = "export_dbs", help = "The databases for exporting in different environments", each = ConfigSection(members=dict( DBS = Config(key="dbs", help = "Comma separated database names", private = True)))) def to_uppercase(conf): if isinstance(conf, dict): return dict([ (k.upper(), to_uppercase(v)) for (k,v) in conf.items() ])
INTERPRETERS = UnspecifiedConfigSection( "interpreters", help="One entry for each type of snippet.", each=ConfigSection( help=_t( "Define the name and how to connect and execute the language."), members=dict( NAME=Config( "name", help=_t("The name of the snippet."), default="SQL", type=str, ), INTERFACE=Config( "interface", help= "The backend connection to use to communicate with the server.", default="hiveserver2", type=str, ), OPTIONS=Config( key='options', help=_t('Specific options for connecting to the server.'), type=coerce_json_dict, default='{}')))) INTERPRETERS_SHOWN_ON_WHEEL = Config( key="interpreters_shown_on_wheel", help=_t( "Comma separated list of interpreters that should be shown on the wheel. "
def get_default_adls_fs(): return ADLS_CLUSTERS['default'].FS_DEFAULTFS.get() ADLS_CLUSTERS = UnspecifiedConfigSection( "adls_clusters", help="One entry for each ADLS cluster", each=ConfigSection( help="Information about a single ADLS cluster", members=dict( FS_DEFAULTFS=Config( "fs_defaultfs", help="adl://<account_name>.azuredatalakestore.net", type=str, default=None), WEBHDFS_URL=Config( "webhdfs_url", help="https://<account_name>.azuredatalakestore.net/webhdfs/v1", type=str, default=None), ))) AZURE_ACCOUNTS = UnspecifiedConfigSection( "azure_accounts", help="One entry for each Azure account", each=ConfigSection(help="Information about a single azure account", members=dict(CLIENT_ID=Config("client_id", help="", default=None), CLIENT_SECRET=Config("client_secret",