def test_airflow_home_default(self):
    """AIRFLOW_HOME unset: get_airflow_home() falls back to the expanded ~/airflow."""
    with env_vars(AIRFLOW_HOME=None):
        expected = configuration.expand_env_var('~/airflow')
        self.assertEqual(configuration.get_airflow_home(), expected)
def test_airflow_home_override(self):
    """An explicit AIRFLOW_HOME value is returned verbatim by get_airflow_home()."""
    with env_vars(AIRFLOW_HOME='/path/to/airflow'):
        actual = configuration.get_airflow_home()
        self.assertEqual(actual, '/path/to/airflow')
def create_airflow_rest_connection():
    """Bootstrap mini-BRS auth artifacts on first run.

    Idempotent on the existence of the 'application' user. When absent, it:
      * creates an 'application' password-auth user with a random password,
      * creates a 'rest' Connection carrying those same credentials,
      * creates an 'admin' superuser whose credentials are read from the
        [core] username/password entries of airflow.cfg, then scrubs those
        entries from the config file.
    """
    from airflow.contrib.auth.backends.password_auth import PasswordUser
    import base64
    import os

    session = settings.Session()
    exists = session.query(models.User).filter(
        models.User.username == 'application').scalar()

    if exists is None:
        LoggingMixin().log.info("creating 'application' user for mini-BRS...")

        # create 'application' user
        # urlsafe_b64encode returns bytes; decode so the stored password is
        # the bare token (the original str(...) stored a "b'...'" repr).
        random_key = base64.urlsafe_b64encode(os.urandom(32)).decode('ascii')

        user = PasswordUser(models.User())
        user.username = '******'
        user.email = '*****@*****.**'
        user.password = random_key
        session.add(user)
        session.commit()
        session.close()

        # create 'application' airflow connection (same credentials so the
        # REST client can authenticate as the user created above)
        rest = Connection(
            conn_id='rest',
            login='******',
            password=random_key
        )

        session = settings.Session()
        session.add(rest)
        session.commit()
        session.close()

        # create 'admin' user from bootstrap credentials stashed in airflow.cfg
        config_parser = configuration.AirflowConfigParser()
        config_parser.read(
            configuration.get_airflow_config(
                configuration.get_airflow_home()
            )
        )
        u = config_parser.get(
            section='core',
            key='username'
        )
        p = config_parser.get(
            section='core',
            key='password'
        )

        user = PasswordUser(models.User())
        user.username = u
        user.email = '*****@*****.**'
        user.password = p
        user.superuser = True

        session = settings.Session()
        session.add(user)
        session.commit()
        session.close()

        # remove the bootstrap credentials from airflow.cfg once consumed
        config_parser.remove_option(
            section='core',
            option='username'
        )
        config_parser.remove_option(
            section='core',
            option='password'
        )

        # 'with' guarantees the handle is closed even if write() raises
        # (the original leaked the handle on error and shadowed a builtin name)
        config_path = configuration.get_airflow_config(
            configuration.get_airflow_home())
        with open(config_path, 'w') as cfg_file:
            config_parser.write(cfg_file)
def test_airflow_home_default(self):
    """Default AIRFLOW_HOME resolves to the expanded ~/airflow path."""
    with env_vars(AIRFLOW_HOME=None):
        home = configuration.get_airflow_home()
        self.assertEqual(home, configuration.expand_env_var('~/airflow'))
def create_dags():
    """Generate DAG files from Jinja templates and prune stale generated DAGs.

    Renders one main DAG per configured table and one recovery DAG per
    (table, execution_date) pair in ``r_config``, writing the results under
    ``$AIRFLOW_HOME/dags/generated``. Any previously generated DAG file not
    regenerated in this pass is deleted from disk, removed via the Airflow
    experimental REST API, and dropped from the creation-date bookkeeping.

    Side effects: mutates module globals ``dag_creation_dates``, ``new_dags``
    and ``email_notify_required``; reads/writes the Airflow Variable
    'dag_creation_dates'.

    Raises:
        ConfigVariableNotFoundException: when an AirflowException occurs
            (e.g. a required Variable is missing).
    """
    global dag_creation_dates
    global new_dags
    global email_notify_required
    new_dags = []
    # persisted map of table -> first-seen start date, stored as an Airflow Variable
    dag_creation_dates = json.loads(Variable.get(key='dag_creation_dates'))
    email_notify_required = is_email_notification_required()
    try:
        for table in config.get('tables'):
            with open(configuration.get_airflow_home() + '/dags/templates/main.py.jinja2') as file_:
                template = Template(file_.read())
            if dag_creation_dates.get(table) is not None:
                # reuse the recorded start date so reruns keep a stable schedule
                start_date = dag_creation_dates.get(table)
            else:
                start_date = get_start_date(config.get('start_date'))
                dag_creation_dates[table] = str(start_date)
            output = template.render(
                data={
                    'dag_id': table,
                    'frequency': config.get('frequency'),
                    'storage_type': storage_type,
                    'start_date': start_date,
                    'email_required': email_notify_required
                }
            )
            # spaces in table names are not valid in filenames/dag ids
            with open(configuration.get_airflow_home() + '/dags/generated/dag_' + '{}'.format(table).replace(' ', '_') + '.py', 'w') as f:
                f.write(output)
            new_dags.append('dag_' + '{}'.format(table).replace(' ', '_') + '.py')
        # recovery DAGs: one per (table, execution_date) pair requested in r_config
        if len(r_config) != 0:
            for table in r_config:
                for exec_date in r_config.get(table):
                    # ISO-ish form, truncated to seconds: 'YYYY-MM-DDTHH:MM:SS'
                    execution_date = str(exec_date).replace(' ', 'T')[0:19]
                    with open(configuration.get_airflow_home() + '/dags/templates/recovery_template.py.jinja2') as file_:
                        template = Template(file_.read())
                    output = template.render(
                        data={'dag_id': table, 'frequency': config.get('frequency'), 'storage_type': storage_type, 'execution_date': execution_date})
                    with open(configuration.get_airflow_home() + '/dags/generated/r_dag_' + '{}_{}'.format(
                            table, execution_date).replace(' ', '_') + '.py', 'w') as f:
                        f.write(output)
                    # execution_date already has ' ' -> 'T' applied, so e == execution_date
                    e = '{}'.format(execution_date).replace(' ', 'T')
                    new_dags.append('r_dag_' + '{}_{}'.format(table, e).replace(' ', '_') + '.py')
        # prune DAGs recorded in the metadata DB whose files were not regenerated above
        md_dag_ids = settings.Session.query(Dags.dag_id, Dags.fileloc).all()
        for record in md_dag_ids:
            (d_id, loc) = record
            filename = loc[str(loc).rfind('/') + 1:]
            # never delete the generator/cleanup DAGs themselves
            if filename == 'dag_generator.py' or filename == 'dag_cleanup.py':
                continue
            if filename not in new_dags:
                try:
                    if os.path.exists(str(loc)):
                        os.remove(str(loc))
                    else:
                        LoggingMixin().log.warning("{} file doesn't exists !".format(filename))
                    # delete the DAG via the local experimental REST API,
                    # authenticating with the 'rest' connection credentials
                    # (NOTE(review): 'rest' is presumably a module-level
                    # Connection — confirm it is defined before this runs)
                    requests.delete(
                        url="http://{}:8080/api/experimental/dags/{}".format(
                            socket.gethostbyname(socket.gethostname()),
                            str(d_id)
                        ),
                        auth=(rest.login, rest.password)
                    )
                    dag_creation_dates.pop(d_id)
                except Exception as e:
                    # best-effort cleanup: log and continue with the next DAG
                    LoggingMixin().log.error(str(e))
        # persist the (possibly updated) start-date bookkeeping
        Variable.set(key='dag_creation_dates', value=json.dumps(dag_creation_dates))
    except AirflowException:
        raise ConfigVariableNotFoundException()
def test_airflow_home_override(self):
    """An AIRFLOW_HOME environment override is returned unchanged."""
    override = '/path/to/airflow'
    with unittest.mock.patch.dict('os.environ', AIRFLOW_HOME=override):
        self.assertEqual(get_airflow_home(), override)
def test_airflow_home_default(self):
    """With AIRFLOW_HOME absent from the environment, the default is ~/airflow."""
    with unittest.mock.patch.dict('os.environ'):
        # pop with a default is a no-op when the key is missing
        os.environ.pop('AIRFLOW_HOME', None)
        self.assertEqual(get_airflow_home(), expand_env_var('~/airflow'))
def test_airflow_home_override(self):
    """get_airflow_home() honors the AIRFLOW_HOME set via env_vars."""
    with env_vars(AIRFLOW_HOME='/path/to/airflow'):
        home = configuration.get_airflow_home()
        self.assertEqual(home, '/path/to/airflow')
def dagbag(self):
    """Return a DagBag loaded from the generated-DAGs folder under AIRFLOW_HOME."""
    generated_dir = configuration.get_airflow_home() + '/dags/generated'
    return DagBag(dag_folder=generated_dir)