def execute(self, context):
    """Prepare the Airflow environment for a pipeline run.

    Waits until a previous run has cleared its Redis keys, then points the
    Airflow connection at the database selected by ``self.mode`` and pushes
    each entry of ``self.set_variable_keys`` into Airflow Variables.

    Args:
        context: Airflow task context (unused here, required by the
            operator interface).

    Raises:
        ValueError: if ``self.mode`` is neither ``'local'`` nor
            ``'redshift'``.
    """
    # check if table is created
    # if not, create them
    logger.info('Setting up operator')
    with redis_session() as r:
        # TODO: find other way to handle data race (round-robin?)
        # Any key in Redis means a previous run is still in flight;
        # poll every 5 minutes until the keyspace is empty.
        while r.keys('*'):
            logger.info('Not finished previous run, wait for 300 seconds.')
            time.sleep(300)
    start = time.perf_counter()
    if self.mode == 'local':
        _setup_local()
        db_cfg = Config.DATABASE
        conn_id = os.getenv('AIRFLOW_POSTGRES_CONN_ID')
    elif self.mode == 'redshift':
        db_cfg = Config.AWS['REDSHIFT']
        conn_id = os.getenv('AIRFLOW_REDSHIFT_CONN_ID')
    else:
        # Fail fast with a clear message instead of letting the
        # ConnectionConfig call below crash with an UnboundLocalError
        # on db_cfg / conn_id.
        raise ValueError(
            f'Unsupported mode: {self.mode!r} '
            "(expected 'local' or 'redshift')")
    cfg = ConnectionConfig(conn_id=conn_id,
                           host=db_cfg['HOST'],
                           login=db_cfg['USERNAME'],
                           password=db_cfg['PASSWORD'],
                           schema=db_cfg['DB_NAME'],
                           port=db_cfg['PORT'])
    update_connection(cfg)
    for key, val in self.set_variable_keys.items():
        logger.info(f'Setting key="{key}" to Airflow Variable')
        # Reuse an existing Variable row if present; otherwise create one.
        variable = get_variable(key=key)
        if variable is None:
            variable = Variable(key=key)
        variable.set_val(value=val)
        # NOTE(review): one session per variable keeps each upsert
        # independent; hoisting the session out of the loop would batch
        # them into a single commit — confirm before changing.
        with create_session() as sess:
            sess.add(variable)
    end = time.perf_counter()
    logger.info(
        f'Process Time [{self.__class__.__name__}]: {end-start:.3f} sec.')