Example #1
    def test_execution_date_serialize_deserialize(self):
        datetime_obj = datetime.now()
        serialized_datetime = \
            AirflowKubernetesScheduler._datetime_to_label_safe_datestring(
                datetime_obj)
        new_datetime_obj = AirflowKubernetesScheduler._label_safe_datestring_to_datetime(
            serialized_datetime)

        self.assertEqual(datetime_obj, new_datetime_obj)

    def test_make_safe_label_value(self):
        for dag_id, task_id in self._cases():
            safe_dag_id = AirflowKubernetesScheduler._make_safe_label_value(
                dag_id)
            self.assertTrue(self._is_safe_label_value(safe_dag_id))
            safe_task_id = AirflowKubernetesScheduler._make_safe_label_value(
                task_id)
            self.assertTrue(self._is_safe_label_value(safe_task_id))
            id = "my_dag_id"
            self.assertEqual(
                id, AirflowKubernetesScheduler._make_safe_label_value(id))
            id = "my_dag_id_" + "a" * 64
            self.assertEqual(
                "my_dag_id_" + "a" * 43 + "-0ce114c45",
                AirflowKubernetesScheduler._make_safe_label_value(id))
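
The _cases() and _is_safe_label_value() helpers are defined elsewhere in the test class and are not shown in this example. A minimal sketch of the label check, based only on the Kubernetes rules for label values (at most 63 characters; if non-empty, it must start and end with an alphanumeric character and may contain '-', '_' and '.' in between), might look like this:

import re

def _is_safe_label_value(self, value):
    # Kubernetes label values: <= 63 chars; empty, or alphanumeric at both
    # ends with alphanumerics, '-', '_' and '.' allowed in between.
    regex = r"^[A-Za-z0-9]([A-Za-z0-9_.-]*[A-Za-z0-9])?$"
    return len(value) <= 63 and (value == "" or re.match(regex, value) is not None)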
Example #3
def generate_pod_yaml(args):
    """Generates yaml files for each task in the DAG. Used for testing output of KubernetesExecutor"""

    from kubernetes.client.api_client import ApiClient

    from airflow.executors.kubernetes_executor import AirflowKubernetesScheduler, KubeConfig
    from airflow.kubernetes import pod_generator
    from airflow.kubernetes.pod_generator import PodGenerator
    from airflow.kubernetes.worker_configuration import WorkerConfiguration
    from airflow.settings import pod_mutation_hook

    execution_date = args.execution_date
    dag = get_dag(subdir=args.subdir, dag_id=args.dag_id)
    yaml_output_path = args.output_path
    kube_config = KubeConfig()
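    # Construct one pod spec per task, mirroring what the KubernetesExecutor would launch.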
    for task in dag.tasks:
        ti = TaskInstance(task, execution_date)
        pod = PodGenerator.construct_pod(
            dag_id=args.dag_id,
            task_id=ti.task_id,
            pod_id=AirflowKubernetesScheduler._create_pod_id(  # pylint: disable=W0212
                args.dag_id, ti.task_id),
            try_number=ti.try_number,
            kube_image=kube_config.kube_image,
            date=ti.execution_date,
            command=ti.command_as_list(),
            pod_override_object=PodGenerator.from_obj(ti.executor_config),
            worker_uuid="worker-config",
            namespace=kube_config.executor_namespace,
            base_worker_pod=WorkerConfiguration(
                kube_config=kube_config).as_pod())
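        # Apply the user-defined pod_mutation_hook before serializing the pod to YAML.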
        pod_mutation_hook(pod)
        api_client = ApiClient()
        date_string = pod_generator.datetime_to_label_safe_datestring(
            execution_date)
        yaml_file_name = f"{args.dag_id}_{ti.task_id}_{date_string}.yml"
        os.makedirs(os.path.dirname(yaml_output_path +
                                    "/airflow_yaml_output/"),
                    exist_ok=True)
        with open(yaml_output_path + "/airflow_yaml_output/" + yaml_file_name,
                  "w") as output:
            sanitized_pod = api_client.sanitize_for_serialization(pod)
            output.write(yaml.dump(sanitized_pod))
    print(
        f"YAML output can be found at {yaml_output_path}/airflow_yaml_output/")
Example #4
    def test_create_pod_id(self):
        for dag_id, task_id in self._cases():
            pod_name = AirflowKubernetesScheduler._create_pod_id(
                dag_id, task_id)
            self.assertTrue(self._is_valid_pod_id(pod_name))
Example #5
    def test_create_pod_id(self):
        for dag_id, task_id in self._cases():
            pod_name = PodGenerator.make_unique_pod_id(
                AirflowKubernetesScheduler._create_pod_id(dag_id, task_id))
            self.assertTrue(self._is_valid_pod_id(pod_name))
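
As in the previous examples, _is_valid_pod_id() is a helper from the surrounding test class. A minimal sketch, assuming it simply enforces the Kubernetes naming rules for pods (a DNS-1123 subdomain: at most 253 characters, lowercase alphanumerics, '-' and '.', starting and ending with an alphanumeric), could be:

import re

def _is_valid_pod_id(self, name):
    # Pod names must be valid DNS-1123 subdomains: 1-253 characters,
    # lowercase alphanumerics, '-' and '.', alphanumeric at both ends.
    regex = r"^[a-z0-9]([a-z0-9.-]*[a-z0-9])?$"
    return 0 < len(name) <= 253 and re.match(regex, name) is not None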