def build_task_run_info(self):
    """Build a ``TaskRunEnvInfo`` snapshot of the current execution environment.

    Collects a fresh env uid, the installed databand version, the user's
    code version/user/command line, a machine description (optionally
    including the container image), the project root and a heartbeat
    timestamp.

    Returns:
        TaskRunEnvInfo: populated environment-info record for this task run.
    """
    # Local import: keeps `dbnd` off the module-level import path
    # (presumably avoids a circular import — TODO confirm).
    import dbnd

    task_run_env_uid = get_uuid()
    logging.debug("Created new task run env with uid '%s'", task_run_env_uid)

    # Machine description comes from env vars; append the image tag only
    # when one is actually set.  Read ENV_DBND__ENV_IMAGE once instead of
    # twice (the original did one lookup for the test and another for the
    # format).
    machine = environ.get(ENV_DBND__ENV_MACHINE, "")
    env_image = environ.get(ENV_DBND__ENV_IMAGE)
    if env_image:
        machine += " image=%s" % env_image

    return TaskRunEnvInfo(
        uid=task_run_env_uid,
        databand_version=dbnd.__version__,
        user_code_version=self.source_version,
        user_code_committed=True,
        cmd_line=subprocess.list2cmdline(sys.argv),
        user=self.user or dbnd_getuser(),
        machine=machine,
        project_root=project_path(),
        # Truncate arbitrary user payload so the record stays bounded.
        user_data=safe_string(self.user_data, max_value_len=500),
        heartbeat=utcnow(),
    )
# targets use_connections = parameter( description= "use the airflow connection to connect to a cloud environment in databand targets (s3://.. , gcp://..)", default=True, )[bool] def __init__(self, *args, **kwargs): super(AirflowConfig, self).__init__(*args, **kwargs) if not self.optimize_airflow_db_access: self.disable_db_ping_on_connect = False self.disable_dag_concurrency_rules = False username = dbnd_getuser() default_args = { "owner": username, "depends_on_past": False, "start_date": datetime(2015, 6, 1), "email": ["*****@*****.**"], "email_on_failure": False, "email_on_retry": False, "retries": 0, "retry_delay": timedelta(minutes=5), # 'queue': 'bash_queue', # 'pool': 'backfill', # 'priority_weight': 10, # 'end_date': datetime(2016, 1, 1), }