def test_execution_date_serialize_deserialize(self):
    """Round-tripping a datetime through the label-safe encoding is lossless."""
    original = datetime.now()
    label_safe = AirflowKubernetesScheduler._datetime_to_label_safe_datestring(
        original)
    restored = AirflowKubernetesScheduler._label_safe_datestring_to_datetime(
        label_safe)
    # The decoded value must match the original exactly.
    self.assertEqual(original, restored)
def test_execution_date_serialize_deserialize(self):
    """Round-tripping a datetime through the label-safe encoding is lossless.

    Fix: ``assertEquals`` is a deprecated alias of ``assertEqual``
    (removed in Python 3.12); use the canonical name.
    """
    datetime_obj = datetime.now()
    serialized_datetime = \
        AirflowKubernetesScheduler._datetime_to_label_safe_datestring(
            datetime_obj)
    new_datetime_obj = AirflowKubernetesScheduler._label_safe_datestring_to_datetime(
        serialized_datetime)
    # The decoded value must match the original exactly.
    self.assertEqual(datetime_obj, new_datetime_obj)
def test_make_safe_label_value(self):
    """Values from ``_make_safe_label_value`` must be valid Kubernetes labels.

    Fix: the local variable ``id`` shadowed the ``id`` builtin; renamed
    to ``label_value``.
    """
    for dag_id, task_id in self._cases():
        safe_dag_id = AirflowKubernetesScheduler._make_safe_label_value(
            dag_id)
        self.assertTrue(self._is_safe_label_value(safe_dag_id))
        safe_task_id = AirflowKubernetesScheduler._make_safe_label_value(
            task_id)
        self.assertTrue(self._is_safe_label_value(safe_task_id))
    # A value already within the length limit is returned unchanged.
    label_value = "my_dag_id"
    self.assertEqual(
        label_value,
        AirflowKubernetesScheduler._make_safe_label_value(label_value))
    # An over-long value is truncated and suffixed with a stable hash.
    label_value = "my_dag_id_" + "a" * 64
    self.assertEqual(
        "my_dag_id_" + "a" * 43 + "-0ce114c45",
        AirflowKubernetesScheduler._make_safe_label_value(label_value))
def test_make_safe_label_value(self):
    """Values from ``_make_safe_label_value`` must be valid Kubernetes labels.

    Fix: the local variable ``id`` shadowed the ``id`` builtin; renamed
    to ``label_value``.
    """
    for dag_id, task_id in self._cases():
        safe_dag_id = AirflowKubernetesScheduler._make_safe_label_value(dag_id)
        self.assertTrue(self._is_safe_label_value(safe_dag_id))
        safe_task_id = AirflowKubernetesScheduler._make_safe_label_value(task_id)
        self.assertTrue(self._is_safe_label_value(safe_task_id))
    # A value already within the length limit is returned unchanged.
    label_value = "my_dag_id"
    self.assertEqual(
        label_value,
        AirflowKubernetesScheduler._make_safe_label_value(label_value)
    )
    # An over-long value is truncated and suffixed with a stable hash.
    label_value = "my_dag_id_" + "a" * 64
    self.assertEqual(
        "my_dag_id_" + "a" * 43 + "-0ce114c45",
        AirflowKubernetesScheduler._make_safe_label_value(label_value)
    )
def _set_parameters(self, ti, try_number=0):
    """Populate ``self.parameters`` with the task instance's identity fields.

    :param ti: task instance providing dag_id, task_id, try_number and
        execution_date
    :param try_number: explicit try number; falls back to ``ti.try_number``
        when falsy (the default 0)
    """
    params = self.parameters
    params['dag_id'] = ti.dag_id
    params['task_id'] = ti.task_id
    # An explicit non-zero try_number wins over the one on the task instance.
    params['try_number'] = str(try_number or ti.try_number)
    # Execution date is stored in the Kubernetes-label-safe string form.
    params['execution_date'] = \
        AirflowKubernetesScheduler._datetime_to_label_safe_datestring(
            ti.execution_date)
def create_sync_pod(self):
    """Build the Pod spec for the DAG-sync worker.

    The pod tars the DAG folder (minus __pycache__) and emits it
    base64-encoded on stdout.
    """
    namespace = configuration.conf.get("kubernetes", "namespace")
    pod_name = AirflowKubernetesScheduler._create_pod_id(
        "airflow-sync", "sync-worker")
    # Single shell command: archive the DAGs directory and base64 it.
    sync_command = [
        "sh",
        "-c",
        "tar -cz --exclude=__pycache__ -C $AIRFLOW_HOME/dags . | base64"
    ]
    return Pod(
        namespace=namespace,
        name=pod_name,
        image=self.kube_config.kube_image,
        image_pull_policy=self.kube_config.kube_image_pull_policy,
        cmds=sync_command,
        labels={'cluster': "airflow"},
        service_account_name=self.kube_config.worker_service_account_name,
        image_pull_secrets=self.kube_config.image_pull_secrets,
        envs={}
    )
def test_create_pod_id(self):
    """Every generated pod id must satisfy the pod-id validity check."""
    for dag_id, task_id in self._cases():
        generated = AirflowKubernetesScheduler._create_pod_id(dag_id, task_id)
        self.assertTrue(self._is_valid_pod_id(generated))
def test_create_pod_id(self):
    """Every generated pod id must satisfy the name validity check."""
    for dag_id, task_id in self._cases():
        generated = AirflowKubernetesScheduler._create_pod_id(
            dag_id, task_id)
        self.assertTrue(self._is_valid_name(generated))