def _format_connections(conns: List[Connection], fmt: str) -> str:
    # For '.env' output, render each connection as a CONN_ID=<connection URI> line.
    if fmt == '.env':
        connections_env = ""
        for conn in conns:
            connections_env += f"{conn.conn_id}={conn.get_uri()}\n"
        return connections_env

    # For the other formats, build a conn_id -> fields mapping first.
    connections_dict = {}
    for conn in conns:
        connections_dict[conn.conn_id] = {
            'conn_type': conn.conn_type,
            'description': conn.description,
            'host': conn.host,
            'login': conn.login,
            'password': conn.password,
            'schema': conn.schema,
            'port': conn.port,
            'extra': conn.extra,
        }

    if fmt == '.yaml':
        return yaml.dump(connections_dict)

    if fmt == '.json':
        return json.dumps(connections_dict, indent=2)

    return json.dumps(connections_dict)
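A minimal usage sketch for the formatter above. The connection and its field values are made up, and the sketch assumes the module-level imports the original file relies on (`json`, `yaml`, `List`) are already in place:

```python
from airflow.models import Connection

# Hypothetical connection, used only to exercise the formatter above.
example_conn = Connection(
    conn_id="my_postgres",
    conn_type="postgres",
    host="localhost",
    login="airflow",
    password="hunter2",
    schema="airflow",
    port=5432,
)

print(_format_connections([example_conn], fmt=".env"))   # e.g. my_postgres=postgres://airflow:hunter2@localhost:5432/airflow
print(_format_connections([example_conn], fmt=".yaml"))  # YAML mapping keyed by conn_id
```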
Example #2
def _format_connections(conns: List[Connection], file_format: str,
                        serialization_format: str) -> str:
    # Pick the per-connection serializer: JSON dict or Airflow connection URI.
    if serialization_format == 'json':
        serializer_func = lambda x: json.dumps(_connection_to_dict(x))
    elif serialization_format == 'uri':
        serializer_func = Connection.get_uri
    else:
        raise SystemExit(
            f"Received unexpected value for `--serialization-format`: {serialization_format!r}"
        )
    if file_format == '.env':
        connections_env = ""
        for conn in conns:
            connections_env += f"{conn.conn_id}={serializer_func(conn)}\n"
        return connections_env

    connections_dict = {}
    for conn in conns:
        connections_dict[conn.conn_id] = _connection_to_dict(conn)

    if file_format == '.yaml':
        return yaml.dump(connections_dict)

    if file_format == '.json':
        return json.dumps(connections_dict, indent=2)

    return json.dumps(connections_dict)
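The helper `_connection_to_dict` is not shown in this excerpt. Judging by the first example, it presumably flattens a `Connection` into a plain dict of its fields; a minimal sketch under that assumption:

```python
def _connection_to_dict(conn: Connection) -> dict:
    """Sketch of the helper assumed above: flatten a Connection into a plain dict."""
    return {
        'conn_type': conn.conn_type,
        'description': conn.description,
        'host': conn.host,
        'login': conn.login,
        'password': conn.password,
        'schema': conn.schema,
        'port': conn.port,
        'extra': conn.extra,
    }
```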
Example #3
def generate_pod_yaml(args):
    """Generates yaml files for each task in the DAG. Used for testing output of KubernetesExecutor"""
    execution_date = args.execution_date
    dag = get_dag(subdir=args.subdir, dag_id=args.dag_id)
    yaml_output_path = args.output_path
    dr = DagRun(dag.dag_id, execution_date=execution_date)
    kube_config = KubeConfig()
    for task in dag.tasks:
        ti = TaskInstance(task, None)
        ti.dag_run = dr
        pod = PodGenerator.construct_pod(
            dag_id=args.dag_id,
            task_id=ti.task_id,
            pod_id=create_pod_id(args.dag_id, ti.task_id),
            try_number=ti.try_number,
            kube_image=kube_config.kube_image,
            date=ti.execution_date,
            args=ti.command_as_list(),
            pod_override_object=PodGenerator.from_obj(ti.executor_config),
            scheduler_job_id="worker-config",
            namespace=kube_config.executor_namespace,
            base_worker_pod=PodGenerator.deserialize_model_file(kube_config.pod_template_file),
        )
        pod_mutation_hook(pod)
        api_client = ApiClient()
        date_string = pod_generator.datetime_to_label_safe_datestring(execution_date)
        yaml_file_name = f"{args.dag_id}_{ti.task_id}_{date_string}.yml"
        os.makedirs(os.path.dirname(yaml_output_path + "/airflow_yaml_output/"), exist_ok=True)
        with open(yaml_output_path + "/airflow_yaml_output/" + yaml_file_name, "w") as output:
            sanitized_pod = api_client.sanitize_for_serialization(pod)
            output.write(yaml.dump(sanitized_pod))
    print(f"YAML output can be found at {yaml_output_path}/airflow_yaml_output/")
Example #4
    def dry_run(self) -> None:
        """
        Prints out the pod definition that would be created by this operator.
        Does not include labels specific to the task instance (since there isn't
        one in a dry_run) and excludes all empty elements.
        """
        pod = self.build_pod_request_obj()
        print(yaml.dump(_prune_dict(pod.to_dict(), mode='strict')))
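`_prune_dict` is an Airflow helper that strips empty entries from the pod dict before it is printed. The following is only an illustrative sketch of what "strict" pruning means here (recursively dropping `None` values, while a "truthy" mode would drop all falsy values); it is not the library's actual implementation:

```python
def _prune_dict_sketch(val, mode='strict'):
    """Illustrative sketch: recursively drop empty entries from dicts and lists."""
    def keep(v):
        return v is not None if mode == 'strict' else bool(v)

    if isinstance(val, dict):
        return {k: _prune_dict_sketch(v, mode) for k, v in val.items() if keep(v)}
    if isinstance(val, list):
        return [_prune_dict_sketch(v, mode) for v in val if keep(v)]
    return val

print(_prune_dict_sketch({"metadata": {"labels": None}, "spec": {"containers": []}}))
# -> {'metadata': {}, 'spec': {'containers': []}}
```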
Example #5
    def print_as_yaml(self, data: Dict):
        """Renders dict as yaml text representation"""
        yaml_content = yaml.dump(data)
        self.print(Syntax(yaml_content, "yaml", theme="ansi_dark"), soft_wrap=True)
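Outside of this console class, the same effect can be reproduced with `rich` directly; a small standalone sketch (the sample dict is made up):

```python
import yaml
from rich.console import Console
from rich.syntax import Syntax

data = {"conn_id": "my_postgres", "host": "localhost", "port": 5432}  # hypothetical sample
yaml_content = yaml.dump(data)
Console().print(Syntax(yaml_content, "yaml", theme="ansi_dark"), soft_wrap=True)
```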
Example #6
File: eks.py  Project: leahecole/airflow
    def generate_config_file(
        self,
        eks_cluster_name: str,
        pod_namespace: Optional[str],
        pod_username: Optional[str] = None,
        pod_context: Optional[str] = None,
    ) -> Generator[str, None, None]:
        """
        Writes the kubeconfig file given an EKS Cluster.

        :param eks_cluster_name: The name of the cluster to generate kubeconfig file for.
        :param pod_namespace: The namespace to run within kubernetes.
        :param pod_username: Deprecated; changing the value has no effect.
        :param pod_context: Deprecated; changing the value has no effect.
        """
        if pod_username:
            warnings.warn(
                "This pod_username parameter is deprecated, because changing the value does not make any "
                "visible changes to the user.",
                DeprecationWarning,
                stacklevel=2,
            )
        if pod_context:
            warnings.warn(
                "This pod_context parameter is deprecated, because changing the value does not make any "
                "visible changes to the user.",
                DeprecationWarning,
                stacklevel=2,
            )
        # Set up the client
        eks_client = self.conn

        # Get cluster details
        cluster = eks_client.describe_cluster(name=eks_cluster_name)
        cluster_cert = cluster["cluster"]["certificateAuthority"]["data"]
        cluster_ep = cluster["cluster"]["endpoint"]

        cluster_config = {
            "apiVersion": "v1",
            "kind": "Config",
            "clusters": [
                {
                    "cluster": {"server": cluster_ep, "certificate-authority-data": cluster_cert},
                    "name": eks_cluster_name,
                }
            ],
            "contexts": [
                {
                    "context": {
                        "cluster": eks_cluster_name,
                        "namespace": pod_namespace,
                        "user": _POD_USERNAME,
                    },
                    "name": _CONTEXT_NAME,
                }
            ],
            "current-context": _CONTEXT_NAME,
            "preferences": {},
            "users": [
                {
                    "name": _POD_USERNAME,
                    "user": {
                        "exec": {
                            "apiVersion": AUTHENTICATION_API_VERSION,
                            "command": sys.executable,
                            "args": [
                                "-m",
                                "airflow.providers.amazon.aws.utils.eks_get_token",
                                *(["--region-name", self.region_name] if self.region_name is not None else []),
                                *(["--aws-conn-id", self.aws_conn_id] if self.aws_conn_id is not None else []),
                                "--cluster-name",
                                eks_cluster_name,
                            ],
                            "env": [
                                {
                                    "name": "AIRFLOW__LOGGING__LOGGING_LEVEL",
                                    "value": "FATAL",
                                }
                            ],
                            "interactiveMode": "Never",
                        }
                    },
                }
            ],
        }
        config_text = yaml.dump(cluster_config, default_flow_style=False)

        with tempfile.NamedTemporaryFile(mode='w') as config_file:
            config_file.write(config_text)
            config_file.flush()
            yield config_file.name
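The method yields the name of a temporary kubeconfig file that disappears once the generator is closed, so callers typically consume it as a context manager (in the provider the method is wrapped with `contextlib.contextmanager`). A usage sketch; the hook class name, connection id, region, and cluster name are all assumptions:

```python
from airflow.providers.amazon.aws.hooks.eks import EksHook  # class name may differ across provider versions

hook = EksHook(aws_conn_id="aws_default", region_name="us-east-1")  # assumed connection and region
with hook.generate_config_file(
    eks_cluster_name="my-eks-cluster",  # hypothetical cluster
    pod_namespace="default",
) as config_path:
    # config_path points at a temporary kubeconfig usable by kubectl or kubernetes clients
    print(config_path)
```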