def test_port():
    """Smoke-test that KubernetesPodOperator accepts a Port and runs a trivial pod."""
    http_port = Port('http', 80)
    operator = KubernetesPodOperator(
        namespace='default',
        image="ubuntu:16.04",
        cmds=["bash", "-cx"],
        arguments=["echo 10"],
        labels={"foo": "bar"},
        name="test",
        task_id="task",
        ports=[http_port],
    )
    operator.execute(None)
""" Code that goes along with the Airflow located at: http://airflow.readthedocs.org/en/latest/tutorial.html """ from datetime import datetime, timedelta from airflow import DAG from airflow.contrib.kubernetes.pod import Port from airflow.contrib.operators.kubernetes_pod_operator import KubernetesPodOperator from airflow.operators.bash_operator import BashOperator from airflow.operators.docker_operator import DockerOperator port = Port('http', 80) env_vars = { "KUBERNETES_SERVICE_HOST": "10.0.0.1", "KUBERNETES_SERVICE_PORT": "443", "KUBERNETES_SERVICE_PORT_HTTPS": "443" } default_args = { "owner": "airflow", "depends_on_past": False, "start_date": datetime(2020, 6, 20), "email": ["*****@*****.**"], "email_on_failure": False, "email_on_retry": False, "retries": 1, "retry_delay": timedelta(minutes=1), # 'queue': 'bash_queue', # 'pool': 'backfill',
def convert_ports(container: V1Container) -> List[Port]:
    """Translate a Kubernetes V1Container's port list into Airflow Port objects.

    Handles a missing/None ``ports`` attribute by returning an empty list.
    """
    container_ports: List[V1ContainerPort] = container.ports
    converted: List[Port] = []
    for container_port in container_ports or []:
        converted.append(
            Port(name=container_port.name,
                 container_port=container_port.container_port)
        )
    return converted
"""DAG that launches a short-lived pod which sleeps for five seconds."""
from datetime import datetime, timedelta

from airflow import DAG  # FIX: DAG was used below but never imported in this chunk
from airflow.contrib.operators.kubernetes_pod_operator import KubernetesPodOperator
from airflow.contrib.kubernetes.pod import Resources
from airflow.contrib.kubernetes.pod import Port

default_args = {
    'owner': 'massi',
    'depends_on_past': False,
    'start_date': datetime(2019, 10, 26),
    'email': ['*****@*****.**'],
    'retries': 1,
    'retry_delay': timedelta(minutes=1),
}

dag = DAG(dag_id='test_kubernetes', default_args=default_args)

# FIX: Kubernetes port names must be lowercase IANA_SVC_NAME / RFC-1035
# labels; the original "Http" would be rejected by the API server.
port = Port("http", 8080)

pod_task = KubernetesPodOperator(
    namespace='dev',
    image="python:3.6",
    cmds=["python", "-c"],
    arguments=["from time import sleep; sleep(5); print('slept for 5 seconds')"],
    labels={"foo": "bar"},
    # FIX: dropped pointless f-prefixes (no placeholders); strings unchanged.
    name="sleeper-agent-start",
    task_id="sleeper-agent-start-task",
    get_logs=True,
    dag=dag,
    # NOTE(review): `solver_affinity` is not defined anywhere in this file's
    # visible scope — presumably imported/defined elsewhere; confirm before use.
    affinity=solver_affinity,
    resources=Resources(request_cpu='100m'),
    in_cluster=True,
)
from airflow import DAG from datetime import datetime, timedelta from pendulum import timezone from airflow.contrib.kubernetes.pod import Port from airflow.contrib.kubernetes.volume import Volume from airflow.contrib.kubernetes.volume_mount import VolumeMount from airflow.contrib.operators.kubernetes_pod_operator import KubernetesPodOperator from airflow.operators.bash_operator import BashOperator from airflow.operators.dummy_operator import DummyOperator from airflow.utils.dates import days_ago TZ = timezone("America/Chicago") MAILTO = ['*****@*****.**'] PORT = Port('http', 80) default_args = { 'owner': 'QW', 'depends_on_past': False, 'start_date': datetime(2020, 4, 17, 1, 00, 0, tzinfo=TZ), 'email': MAILTO, 'email_on_failure': False, 'email_on_retry': False, 'retries': 1, 'retry_delay': timedelta(minutes=1), 'task_concurrency': 1, 'max_active_runs': 1, 'pool': 'default_pool' # 'queue': 'bash_queue', # 'priority_weight': 10,