def test_extract_env_and_secrets(self):
    """Plain envs, env-type secrets, configmaps and runtime-info envs are
    all merged into the container's ``env`` / ``envFrom`` sections.

    The volume-type secret ('s2') must NOT appear in ``envFrom`` — only
    env-type secrets do.
    """
    test_secrets = [
        Secret('env', None, 's1'),
        Secret('volume', 'KEY2', 's2', 'key-2'),
        Secret('env', None, 's3')
    ]
    plain_envs = {
        'ENV1': 'val1',
        'ENV2': 'val2'
    }
    runtime_envs = [PodRuntimeInfoEnv("ENV3", "status.podIP")]
    pod = Pod(
        image='v3.14',
        envs=plain_envs,
        cmds=[],
        secrets=test_secrets,
        configmaps=['configmap_a', 'configmap_b'],
        pod_runtime_info_envs=runtime_envs)

    container = self.expected['spec']['containers'][0]
    container['env'] = [
        {'name': 'ENV1', 'value': 'val1'},
        {'name': 'ENV2', 'value': 'val2'},
        {'name': 'ENV3',
         'valueFrom': {'fieldRef': {'fieldPath': 'status.podIP'}}}
    ]
    # env-type secrets first, then configmaps (factory emission order).
    container['envFrom'] = (
        [{'secretRef': {'name': secret_name}} for secret_name in ('s1', 's3')]
        + [{'configMapRef': {'name': cm_name}}
           for cm_name in ('configmap_a', 'configmap_b')]
    )

    KubernetesRequestFactory.extract_env_and_secrets(pod, self.input_req)
    # Env ordering is not part of this test's contract, so normalize it.
    self.input_req['spec']['containers'][0]['env'].sort(key=lambda e: e['name'])
    self.assertEqual(self.input_req, self.expected)
# Beispiel #2
# 0
def handle_container_environment_variables(
        env_vars: List[V1EnvVar],
) -> Tuple[Dict[str, str], List[Secret], List[str], List[PodRuntimeInfoEnv]]:
    """Split raw kubernetes ``V1EnvVar`` entries into Airflow-native buckets.

    Returns a 4-tuple of:
      * plain ``name -> value`` environment variables,
      * env-deployed ``Secret`` objects built from ``secretKeyRef`` sources,
      * config-map names referenced via ``configMapKeyRef``,
      * ``PodRuntimeInfoEnv`` objects built from ``fieldRef`` sources.

    ``resourceFieldRef`` sources are intentionally skipped (not handled yet).
    Accepts ``None`` or an empty list and returns four empty collections.
    """
    plain_env_vars: Dict[str, str] = {}
    secrets: List[Secret] = []
    config_maps: List[str] = []
    runtime_env_vars: List[PodRuntimeInfoEnv] = []

    for env_var in env_vars or []:
        source = env_var.value_from
        if not source:
            # No valueFrom: a literal name/value pair.
            plain_env_vars[env_var.name] = env_var.value
        elif source.resource_field_ref:
            # not handled for now
            pass
        elif source.field_ref:
            runtime_env_vars.append(
                PodRuntimeInfoEnv(
                    field_path=source.field_ref.field_path,
                    name=env_var.name,
                )
            )
        elif source.config_map_key_ref:
            config_maps.append(source.config_map_key_ref.name)
        elif source.secret_key_ref:
            secret_ref = source.secret_key_ref
            secrets.append(
                Secret(
                    deploy_type="env",
                    deploy_target=env_var.name,
                    secret=secret_ref.name,
                    key=secret_ref.key,
                )
            )
        else:
            # valueFrom present but every supported ref is empty:
            # fall back to the literal value, as the original did.
            plain_env_vars[env_var.name] = env_var.value

    return plain_env_vars, secrets, config_maps, runtime_env_vars
# Beispiel #3
# 0
 def test_extract_env_and_secret_order(self):
     """Runtime-info envs must be emitted before plain envs in the
     container's ``env`` list (order is asserted, not normalized)."""
     pod = Pod(image='v3.14',
               envs={'ENV': 'val1'},
               cmds=[],
               pod_runtime_info_envs=[
                   PodRuntimeInfoEnv('RUNTIME_ENV', 'status.podIP')])
     self.expected['spec']['containers'][0]['env'] = [
         {'name': 'RUNTIME_ENV',
          'valueFrom': {'fieldRef': {'fieldPath': 'status.podIP'}}},
         {'name': 'ENV', 'value': 'val1'},
     ]
     KubernetesRequestFactory.extract_env_and_secrets(pod, self.input_req)
     self.assertEqual(self.input_req, self.expected)
# DAG identifier; also exported to the pod as SERVICE_NAME below.
DAG_NAME = "spark_batch_job_distributed_mode"
# Deployment environment name from the shell (None when the variable is unset).
ENV = os.environ.get("ENV")

# Container image the Spark example pods will run.
docker_image = "dcr.flix.tech/data/flux/k8s-spark-example:latest"

# Plain environment variables injected into the container.
envs = {
    "SERVICE_NAME": DAG_NAME,
    "CONTAINER_IMAGE": docker_image,
    "SPARK_DRIVER_PORT": "35000",
    "PY_FILES": "/workspace/dist/libs.zip,/workspace/dist/dependencies.zip",
    "PYTHON_FILE": "/workspace/python/pi.py",
}

# Downward-API envs exposing the pod's own namespace, name and IP to the
# container — presumably so the Spark driver can advertise its address to
# executors (TODO confirm against the job's entrypoint).
pod_runtime_info_envs = [
    PodRuntimeInfoEnv('MY_POD_NAMESPACE', 'metadata.namespace'),
    PodRuntimeInfoEnv('MY_POD_NAME', 'metadata.name'),
    PodRuntimeInfoEnv('MY_POD_IP', 'status.podIP')
]

args = {'owner': 'Airflow', 'start_date': airflow.utils.dates.days_ago(2)}
# Directory containing this DAG file; also correct when the DAG ships as a zip.
base_path = os.path.split(__file__)[0]

# Read a data file shipped alongside the DAG (helper name suggests it is
# packaged/zipped-DAG aware — verify read_packaged_file's contract).
plain_txt = read_packaged_file(f"{base_path}/plain_files/plain.txt")

# Runs daily at 00:30.
with DAG(dag_id=DAG_NAME, default_args=args,
         schedule_interval='30 0 * * *') as dag:
    # Use the zip binary, which is only found in this special docker image
    read_local_file = BashOperator(task_id='read_local_file',
                                   bash_command=f"echo {plain_txt}")