def _get_rbac_service(self):
    """Get RBAC service host and port info from environment.

    Returns a dict keyed by the module-level PROTOCOL/HOST/PORT/PATH
    constants, with each value read from the environment and falling back
    to a local-development default.
    """
    # (key, environment variable, development default)
    settings = (
        (PROTOCOL, "RBAC_SERVICE_PROTOCOL", "http"),
        (HOST, "RBAC_SERVICE_HOST", "localhost"),
        (PORT, "RBAC_SERVICE_PORT", "8111"),
        (PATH, "RBAC_SERVICE_PATH", "/r/insights/platform/rbac/v1/access/"),
    )
    return {key: ENVIRONMENT.get_value(var, default=fallback) for key, var, fallback in settings}
def _get_rbac_service(self):  # pylint: disable=no-self-use
    """Get RBAC service host and port info from environment.

    Each setting is read from the environment with a local-development
    default; the result is keyed by the module-level connection constants.
    """
    connection = {}
    connection[PROTOCOL] = ENVIRONMENT.get_value('RBAC_SERVICE_PROTOCOL', default='http')
    connection[HOST] = ENVIRONMENT.get_value('RBAC_SERVICE_HOST', default='localhost')
    connection[PORT] = ENVIRONMENT.get_value('RBAC_SERVICE_PORT', default='8111')
    connection[PATH] = ENVIRONMENT.get_value('RBAC_SERVICE_PATH',
                                             default='/r/insights/platform/rbac/v1/access/')
    return connection
def create_service_admin(self, service_email):  # pylint: disable=R0201
    """Create the Service Admin."""
    # Imported at call time (not module scope) so the auth model is only
    # loaded once Django app setup has completed.
    # noqa: E402 pylint: disable=C0413
    from django.contrib.auth.models import User

    username = ENVIRONMENT.get_value('SERVICE_ADMIN_USER', default='admin')
    password = ENVIRONMENT.get_value('SERVICE_ADMIN_PASSWORD', default='pass')
    User.objects.create_superuser(username, service_email, password)
    logger.info('Created Service Admin: %s.', service_email)
def create_service_admin(apps, schema_editor):
    """Create the Service Admin (data-migration entry point).

    Looks the user up by username only and supplies the remaining fields
    via ``defaults``. The original passed ``make_password(...)`` inside the
    lookup itself; since each call salts a fresh hash, that lookup could
    never match an existing row, so re-running the migration would attempt
    a duplicate insert instead of a no-op "get".
    """
    User = apps.get_model('api', 'User')
    service_email = ENVIRONMENT.get_value('SERVICE_ADMIN_EMAIL', default='*****@*****.**')
    service_user = ENVIRONMENT.get_value('SERVICE_ADMIN_USER', default='admin')
    service_pass = ENVIRONMENT.get_value('SERVICE_ADMIN_PASSWORD', default='pass')
    User.objects.get_or_create(
        username=service_user,
        defaults={
            'email': service_email,
            'password': make_password(service_pass),
            'is_superuser': True,
            'is_staff': True,
        },
    )
def test_check_service_admin_exists(self):
    """Test the check and proceed of the service admin."""
    # Start from a clean slate, then create the admin up front so the
    # check below takes the "already present" path.
    User.objects.all().delete()
    email = ENVIRONMENT.get_value('SERVICE_ADMIN_EMAIL', default='*****@*****.**')
    username = ENVIRONMENT.get_value('SERVICE_ADMIN_USER', default='admin')
    password = ENVIRONMENT.get_value('SERVICE_ADMIN_PASSWORD', default='pass')
    User.objects.create_superuser(username, email, password)
    self.assertTrue(User.objects.filter(email=email).count() == 1)
    api_config = apps.get_app_config('api')
    api_config.check_and_create_service_admin()
    # The admin must still exist afterwards.
    self.assertTrue(User.objects.filter(email=email).count() != 0)
def __init__(self):
    """Establish RBAC connection information."""
    conn = self._get_rbac_service()
    # Unpack the connection dict onto the instance.
    self.protocol, self.host, self.port, self.path = (
        conn.get(PROTOCOL),
        conn.get(HOST),
        conn.get(PORT),
        conn.get(PATH),
    )
    # Cache TTL is stored as a string in the environment; coerce to int.
    self.cache_ttl = int(ENVIRONMENT.get_value('RBAC_CACHE_TTL', default='30'))
def test_check_service_admin(self):
    """Test the check and create of service admin."""
    # Wipe all users so the check takes the "create" path.
    User.objects.all().delete()
    email = ENVIRONMENT.get_value('SERVICE_ADMIN_EMAIL', default='*****@*****.**')
    self.assertTrue(User.objects.filter(email=email).count() == 0)
    api_config = apps.get_app_config('api')
    api_config.check_and_create_service_admin()
    # The admin should have been created by the check.
    self.assertTrue(User.objects.filter(email=email).count() != 0)
def test_create_service_admin(self):
    """Test the creation of the service admin."""
    service_email = ENVIRONMENT.get_value('SERVICE_ADMIN_EMAIL', default='*****@*****.**')
    # An admin user is created using migrations; wipe it before testing
    # creation. Delete on the queryset directly: the original called
    # .first().delete(), which raises AttributeError (NoneType) whenever
    # no matching user exists. Queryset .delete() is a safe no-op then.
    User.objects.filter(email=service_email).delete()
    api_config = apps.get_app_config('api')
    api_config.create_service_admin(service_email)
    self.assertTrue(User.objects.filter(email=service_email).count() != 0)
def check_and_create_service_admin(self):  # pylint: disable=R0201
    """Check for the service admin and create it if necessary."""
    # Imported at call time so the auth model is only loaded once Django
    # app setup has completed.
    # noqa: E402 pylint: disable=C0413
    from django.contrib.auth.models import User

    service_email = ENVIRONMENT.get_value('SERVICE_ADMIN_EMAIL', default='*****@*****.**')
    # exists() lets the database stop at the first matching row instead of
    # counting all of them (the original used .count() == 0).
    if not User.objects.filter(email=service_email).exists():
        self.create_service_admin(service_email)
    else:
        logger.info('Service Admin: %s.', service_email)
class Config: """Configuration for service.""" # SOURCES_TOPIC = ENVIRONMENT.get_value("SOURCES_KAFKA_TOPIC", default="platform.sources.event-stream") SOURCES_TOPIC = CONFIGURATOR.get_kafka_topic( "platform.sources.event-stream") SOURCES_KAFKA_HOST = CONFIGURATOR.get_kafka_broker_host() SOURCES_KAFKA_PORT = CONFIGURATOR.get_kafka_broker_port() SOURCES_KAFKA_ADDRESS = f"{SOURCES_KAFKA_HOST}:{SOURCES_KAFKA_PORT}" SOURCES_API_HOST = CONFIGURATOR.get_endpoint_host("sources-api", "svc", "localhost") SOURCES_API_PORT = CONFIGURATOR.get_endpoint_port("sources-api", "svc", "3000") SOURCES_API_URL = f"http://{SOURCES_API_HOST}:{SOURCES_API_PORT}" SOURCES_API_PREFIX = ENVIRONMENT.get_value("SOURCES_API_PREFIX", default="/api/v1.0") SOURCES_INTERNAL_API_PREFIX = ENVIRONMENT.get_value( "SOURCES_INTERNAL_API_PREFIX", default="/internal/v1.0") SOURCES_FAKE_HEADER = ENVIRONMENT.get_value( "SOURCES_FAKE_HEADER", default= ("eyJpZGVudGl0eSI6IHsiYWNjb3VudF9udW1iZXIiOiAiMTIzNDUiLCAidXNlciI6IHsiaXNfb3J" "nX2FkbWluIjogImZhbHNlIiwgInVzZXJuYW1lIjogInNvdXJjZXMiLCAiZW1haWwiOiAic291cm" "Nlc0Bzb3VyY2VzLmlvIn0sICJpbnRlcm5hbCI6IHsib3JnX2lkIjogIjU0MzIxIn19fQ==" ), ) SOURCES_FAKE_CLUSTER_HEADER = ENVIRONMENT.get_value( "SOURCES_FAKE_CLUSTER_HEADER", default= ("eyJpZGVudGl0eSI6IHsiYWNjb3VudF9udW1iZXIiOiAiMTIzNDUiLCAiYXV0aF90eXBlIjogInVoYy1" "hdXRoIiwgInR5cGUiOiAiU3lzdGVtIiwgInN5c3RlbSI6IHsiY2x1c3Rlcl9pZCI6ICIwYmIyOTEzNS1k" "NmQxLTQ3OGItYjViNi02YmQxMjljYjZkNWQifSwgImludGVybmFsIjogeyJvcmdfaWQiOiAiNTQzMjEifX19" ), ) RETRY_SECONDS = ENVIRONMENT.int("RETRY_SECONDS", default=10)
# Interval between scheduled scans for new report data.
REPORT_CHECK_INTERVAL = datetime.timedelta(
    minutes=ENVIRONMENT.int("SCHEDULE_CHECK_INTERVAL", default=60))
# NOTE(review): .seconds yields only the seconds component (0-86399); for
# intervals of 24h or more the full length needs .total_seconds() — TODO
# confirm intended behavior for large SCHEDULE_CHECK_INTERVAL values.
CHECK_REPORT_UPDATES_DEF = {
    "task": "masu.celery.tasks.check_report_updates",
    "schedule": REPORT_CHECK_INTERVAL.seconds,
    "args": [],
}
app.conf.beat_schedule["check-report-updates"] = CHECK_REPORT_UPDATES_DEF

# Specify the day of the month for removal of expired report data.
REMOVE_EXPIRED_REPORT_DATA_ON_DAY = ENVIRONMENT.int(
    "REMOVE_EXPIRED_REPORT_DATA_ON_DAY", default=1)

# Specify the time of the day for removal of expired report data.
REMOVE_EXPIRED_REPORT_UTC_TIME = ENVIRONMENT.get_value(
    "REMOVE_EXPIRED_REPORT_UTC_TIME", default="00:00")

# A day value of 0 disables the expired-data removal schedule entirely.
if REMOVE_EXPIRED_REPORT_DATA_ON_DAY != 0:
    CLEANING_DAY = REMOVE_EXPIRED_REPORT_DATA_ON_DAY
    CLEANING_TIME = REMOVE_EXPIRED_REPORT_UTC_TIME
    # CLEANING_TIME is an "HH:MM" string.
    HOUR, MINUTE = CLEANING_TIME.split(":")
    REMOVE_EXPIRED_DATA_DEF = {
        "task": "masu.celery.tasks.remove_expired_data",
        "schedule": crontab(hour=int(HOUR), minute=int(MINUTE), day_of_month=CLEANING_DAY),
        "args": [],
    }
    app.conf.beat_schedule["remove-expired-data"] = REMOVE_EXPIRED_DATA_DEF
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.
#
"""Utility for emailing users on creation and password reset."""
from django.core.mail import send_mail
from django.template.loader import render_to_string

from koku.env import ENVIRONMENT

SUBJECT = 'Welcome to Hybrid Cloud Cost Management'
SENDER = ENVIRONMENT.get_value('EMAIL_SENDER', default='*****@*****.**')
APP_DOMAIN = ENVIRONMENT.get_value('APP_DOMAIN', default='project-koku.com')
APP_NAMESPACE = ENVIRONMENT.get_value('APP_NAMESPACE', default='home')
DEFAULT_LOGIN = f'http://koku-ui-{APP_NAMESPACE}.{APP_DOMAIN}'
LOGIN_LINK = ENVIRONMENT.get_value('LOGIN_LINK', default=DEFAULT_LOGIN)


def new_user_login_email(username, email, uuid, token):
    """Send an email with a login link for new users.

    The uuid and token arguments are accepted but not used in the login
    message body.
    """
    context = {'username': username, 'login_link': LOGIN_LINK}
    send_mail(
        SUBJECT,
        render_to_string('welcome.txt', context),
        SENDER,
        [email],
        html_message=render_to_string('welcome.html', context),
    )
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.
#
"""Utility for emailing users on creation and password reset."""
from django.core.mail import send_mail
from django.template.loader import render_to_string

from koku.env import ENVIRONMENT

SUBJECT = 'Welcome to Hybrid Cost Management'
SENDER = ENVIRONMENT.get_value('EMAIL_SENDER', default='*****@*****.**')
DEFAULT_RESET = 'https://koku-ui.project-koku.com/password-reset.html'
RESET_LINK = ENVIRONMENT.get_value('PASSWORD_RESET_LINK', default=DEFAULT_RESET)


def new_user_reset_email(username, email, uuid, token):
    """Send an email with a password reset link for new users."""
    # NOTE(review): the reset message renders the 'welcome' templates —
    # confirm a dedicated reset template was not intended here.
    link = RESET_LINK + '?uuid=' + uuid + '&token=' + token
    context = {'username': username, 'reset_link': link}
    send_mail(
        SUBJECT,
        render_to_string('welcome.txt', context),
        SENDER,
        [email],
        html_message=render_to_string('welcome.html', context),
    )
# Toggle to enable/disable scheduled checks for new reports. if ENVIRONMENT.bool("SCHEDULE_REPORT_CHECKS", default=False): # The interval to scan for new reports. REPORT_CHECK_INTERVAL = datetime.timedelta( minutes=int(os.getenv("SCHEDULE_CHECK_INTERVAL", "60"))) CHECK_REPORT_UPDATES_DEF = { "task": "masu.celery.tasks.check_report_updates", "schedule": REPORT_CHECK_INTERVAL.seconds, "args": [], } app.conf.beat_schedule["check-report-updates"] = CHECK_REPORT_UPDATES_DEF # Specify the day of the month for removal of expired report data. REMOVE_EXPIRED_REPORT_DATA_ON_DAY = int( ENVIRONMENT.get_value("REMOVE_EXPIRED_REPORT_DATA_ON_DAY", default="1")) # Specify the time of the day for removal of expired report data. REMOVE_EXPIRED_REPORT_UTC_TIME = ENVIRONMENT.get_value( "REMOVE_EXPIRED_REPORT_UTC_TIME", default="00:00") if REMOVE_EXPIRED_REPORT_DATA_ON_DAY != 0: CLEANING_DAY = REMOVE_EXPIRED_REPORT_DATA_ON_DAY CLEANING_TIME = REMOVE_EXPIRED_REPORT_UTC_TIME HOUR, MINUTE = CLEANING_TIME.split(":") REMOVE_EXPIRED_DATA_DEF = { "task": "masu.celery.tasks.remove_expired_data", "schedule": crontab(hour=int(HOUR), minute=int(MINUTE), day_of_month=CLEANING_DAY),
class Config:
    """Configuration for app."""

    # Development mode toggle.
    DEBUG = ENVIRONMENT.bool("DEVELOPMENT", default=False)

    # Set method for retrieving CUR accounts. 'db' or 'network'
    ACCOUNT_ACCESS_TYPE = ENVIRONMENT.get_value(
        "ACCOUNT_ACCESS_TYPE", default=DEFAULT_ACCOUNT_ACCCESS_TYPE)

    # Data directory for processing incoming data. This is the OCP PVC mount point.
    PVC_DIR = ENVIRONMENT.get_value("PVC_DIR", default=DEFAULT_PVC_DIR)

    # File retention time for cleaning out the volume (in seconds);
    # defaults to 1 day.
    VOLUME_FILE_RETENTION = ENVIRONMENT.int(
        "VOLUME_FILE_RETENTION", default=DEFAULT_VOLUME_FILE_RETENTION)

    # OCP intermediate report storage
    INSIGHTS_LOCAL_REPORT_DIR = f"{PVC_DIR}/insights_local"

    # Processing intermediate report storage
    TMP_DIR = f"{PVC_DIR}/processing"

    # S3 path root for warehoused data
    WAREHOUSE_PATH = "data"
    CSV_DATA_TYPE = "csv"
    PARQUET_DATA_TYPE = "parquet"

    # Number of report rows handled per processing batch.
    REPORT_PROCESSING_BATCH_SIZE = ENVIRONMENT.int(
        "REPORT_PROCESSING_BATCH_SIZE", default=DEFAULT_REPORT_PROCESSING_BATCH_SIZE)

    # Timestamp formats as they appear in each provider's report files.
    AWS_DATETIME_STR_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
    OCP_DATETIME_STR_FORMAT = "%Y-%m-%d %H:%M:%S +0000 UTC"
    AZURE_DATETIME_STR_FORMAT = "%Y-%m-%d"

    # Override the service's current date time. Format: "%Y-%m-%d %H:%M:%S"
    MASU_DATE_OVERRIDE = ENVIRONMENT.get_value(
        "DATE_OVERRIDE", default=DEFAULT_MASU_DATE_OVERRIDE)

    # Retention policy for the number of months of report data to keep.
    MASU_RETAIN_NUM_MONTHS = settings.RETAIN_NUM_MONTHS
    MASU_RETAIN_NUM_MONTHS_LINE_ITEM_ONLY = ENVIRONMENT.int(
        "RETAIN_NUM_MONTHS", default=DEFAULT_MASU_RETAIN_NUM_MONTHS_LINE_ITEM_ONLY)

    # TODO: Remove this if/when reporting model files are owned by masu
    # The decimal precision of our database Numeric columns
    REPORTING_DECIMAL_PRECISION = 9

    # Specify the number of months (bills) to ingest
    INITIAL_INGEST_NUM_MONTHS = ENVIRONMENT.int(
        "INITIAL_INGEST_NUM_MONTHS", default=DEFAULT_INITIAL_INGEST_NUM_MONTHS)

    # Override the initial ingest requirement to allow INITIAL_INGEST_NUM_MONTHS
    INGEST_OVERRIDE = ENVIRONMENT.bool(
        "INITIAL_INGEST_OVERRIDE", default=DEFAULT_INGEST_OVERRIDE)

    # Trino enablement
    TRINO_ENABLED = ENVIRONMENT.bool(
        "ENABLE_PARQUET_PROCESSING", default=DEFAULT_ENABLE_PARQUET_PROCESSING)

    # Insights Kafka broker location and topics.
    INSIGHTS_KAFKA_HOST = CONFIGURATOR.get_kafka_broker_host()
    INSIGHTS_KAFKA_PORT = CONFIGURATOR.get_kafka_broker_port()
    INSIGHTS_KAFKA_ADDRESS = f"{INSIGHTS_KAFKA_HOST}:{INSIGHTS_KAFKA_PORT}"
    HCCM_TOPIC = CONFIGURATOR.get_kafka_topic("platform.upload.hccm")
    VALIDATION_TOPIC = CONFIGURATOR.get_kafka_topic(
        "platform.upload.validation")

    # Flag to signal whether or not to connect to upload service
    KAFKA_CONNECT = ENVIRONMENT.bool("KAFKA_CONNECT", default=DEFAULT_KAFKA_CONNECT)

    # Retry and delete-cycle tuning knobs.
    RETRY_SECONDS = ENVIRONMENT.int("RETRY_SECONDS", default=DEFAULT_RETRY_SECONDS)
    DEL_RECORD_LIMIT = ENVIRONMENT.int("DELETE_CYCLE_RECORD_LIMIT", default=DEFAULT_DEL_RECORD_LIMIT)
    MAX_ITERATIONS = ENVIRONMENT.int("DELETE_CYCLE_MAX_RETRY", default=DEFAULT_MAX_ITERATIONS)