Example No. 1
import multiprocessing
import os
import platform
import sys
from typing import Any, Dict, Text

import rasa

# `TELEMETRY_CONTEXT` (a module-level cache), `model`, `in_continuous_integration`,
# `_hash_directory_path` and `_is_docker` are defined elsewhere in the same
# telemetry module.


def _default_context_fields() -> Dict[Text, Any]:
    """Return a dictionary that contains the default context values.

    Returns:
        A new context containing information about the runtime environment.
    """
    global TELEMETRY_CONTEXT

    if not TELEMETRY_CONTEXT:
        # Make sure to update the example in docs/docs/telemetry/telemetry.mdx
        # if you change / add context
        TELEMETRY_CONTEXT = {
            "os": {
                "name": platform.system(),
                "version": platform.release()
            },
            "ci": in_continuous_integration(),
            "project": model.project_fingerprint(),
            "directory": _hash_directory_path(os.getcwd()),
            "python": sys.version.split(" ")[0],
            "rasa_open_source": rasa.__version__,
            "cpu": multiprocessing.cpu_count(),
            "docker": _is_docker(),
        }

    # Avoid returning the cached dict directly, since a caller could modify it.
    # Usually we would use `lru_cache`, but that doesn't return a copy of the
    # dict and doesn't work on inner functions, so we roll our own caching.
    return TELEMETRY_CONTEXT.copy()
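The `.copy()` on the last line is what keeps the cached context safe: each caller gets its own top-level dict and can add fields without mutating the shared `TELEMETRY_CONTEXT`. A minimal usage sketch (the `metrics_id` key is a hypothetical caller-side addition, not part of the snippet above); note the copy is shallow, so nested values such as the "os" dict are still shared:

context = _default_context_fields()
context["metrics_id"] = "abc123"  # hypothetical caller-specific field

# The module-level cache is untouched, so a fresh call still returns only
# the default fields.
assert "metrics_id" not in _default_context_fields()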
Example No. 2
# Requires the same module-level imports and helper functions as Example No. 1;
# TensorFlow is imported lazily inside the function body.
def _default_context_fields() -> Dict[Text, Any]:
    """Return a dictionary that contains the default context values.

    Returns:
        A new context containing information about the runtime environment.
    """
    # Imported here (inside the function) so TensorFlow is only loaded when
    # this function is actually called.
    import tensorflow as tf

    return {
        "os": {"name": platform.system(), "version": platform.release()},
        "ci": in_continuous_integration(),
        "project": model.project_fingerprint(),
        "directory": _hash_directory_path(os.getcwd()),
        "python": sys.version.split(" ")[0],
        "rasa_open_source": rasa.__version__,
        "gpu": len(tf.config.list_physical_devices("GPU")),
        "cpu": multiprocessing.cpu_count(),
        "docker": _is_docker(),
    }
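Compared with Example No. 1, this variant drops the manual cache and adds a "gpu" field. `tf.config.list_physical_devices("GPU")` returns one `PhysicalDevice` entry per visible GPU (an empty list on a CPU-only machine), so its length is the device count. A standalone sketch of just that part, assuming TensorFlow 2.x is installed:

import multiprocessing

import tensorflow as tf

# One PhysicalDevice per visible GPU; an empty list if none are available.
gpus = tf.config.list_physical_devices("GPU")
print({"gpu": len(gpus), "cpu": multiprocessing.cpu_count()})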