Example #1
 def run(self) -> Tuple[Optional[DeepLearningConfig], Optional[Run]]:
     """
     The main entry point for training and testing models from the commandline. This chooses a model to train
     via a commandline argument, runs training or testing, and writes all required info to disk and logs.
     :return: A tuple of the model configuration (or lightning container) that was used for training,
     including any commandline overrides applied, and the AzureML Run object if the job was submitted to AzureML.
     """
     # Usually, when we set logging to DEBUG, we want diagnostics about the model
     # build itself, but not the tons of debug information that AzureML submissions create.
     logging_to_stdout(logging.INFO if is_local_rank_zero() else "ERROR")
     initialize_rpdb()
     user_agent.append(azure_util.INNEREYE_SDK_NAME, azure_util.INNEREYE_SDK_VERSION)
     self.parse_and_load_model()
     if self.lightning_container.perform_cross_validation:
         if self.model_config is None:
             raise NotImplementedError("Cross validation for LightningContainer models is not yet supported.")
         # force hyperdrive usage if performing cross validation
         self.azure_config.hyperdrive = True
     run_object: Optional[Run] = None
     if self.azure_config.azureml:
         run_object = self.submit_to_azureml()
     else:
         self.run_in_situ()
     if self.model_config is None:
         return self.lightning_container, run_object
     return self.model_config, run_object
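
A hypothetical sketch of consuming the tuple returned by run(), assuming the method above belongs to a runner object named runner; only the attribute and method names visible in the excerpt come from the source, everything else is illustrative.

config_or_container, azureml_run = runner.run()
if azureml_run is not None:
    # The job was submitted to AzureML; the Run object can be used for tracking.
    print(f"Submitted AzureML run: {azureml_run.id}")
else:
    print(f"Ran locally with configuration: {type(config_or_container).__name__}")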
Example #2
    def __append_telemetry(self):
        # Append this client's identifier to the AzureML user-agent string at most once;
        # silently skip if azureml-core is not installed.
        if not self.telemetry_set:
            self.telemetry_set = True
            try:
                from azureml._base_sdk_common.user_agent import append

                append("AzureMLCluster-DASK", "0.1")
            except ImportError:
                pass
Example #3
import logging
import threading
import dask
import pathlib

from distributed.deploy.cluster import Cluster
from distributed.utils import (
    LoopRunner,
    log_errors,
    format_bytes,
)
from tornado.ioloop import PeriodicCallback

logger = logging.getLogger(__name__)

try:
    from azureml._base_sdk_common.user_agent import append
    append('AzureMLCluster-DASK', '0.1')
except ImportError:
    pass


class AzureMLCluster(Cluster):
    """ Deploy a Dask cluster using Azure ML
    This creates a dask scheduler and workers on an Azure ML Compute Target.
    Parameters
    ----------
    workspace: azureml.core.Workspace (required)
        Azure ML Workspace - see https://aka.ms/azureml/workspace
    compute_target: azureml.core.ComputeTarget (required)
        Azure ML Compute Target - see https://aka.ms/azureml/computetarget
    environment_definition: azureml.core.Environment (required)
        Azure ML Environment - see https://aka.ms/azureml/environments
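
A hypothetical usage sketch for the AzureMLCluster class above, assuming only the three required parameters from its docstring; the workspace configuration, compute target name, and environment name are placeholders.

from azureml.core import Workspace, Environment
from azureml.core.compute import ComputeTarget
from dask.distributed import Client

ws = Workspace.from_config()                                # load the workspace from a local config.json
compute = ComputeTarget(workspace=ws, name="dask-compute")  # an existing compute target (placeholder name)
env = Environment.get(workspace=ws, name="dask-env")        # an environment with dask installed (placeholder name)

cluster = AzureMLCluster(
    workspace=ws,
    compute_target=compute,
    environment_definition=env,
)
client = Client(cluster)  # connect a Dask client to the scheduler the cluster starts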
Example #4
    "Model",
    "PrivateEndPoint",
    "PrivateEndPointConfig",
    "Run",
    "RunConfiguration",
    "ScriptRun",
    "ScriptRunConfig",
    "Webservice",
    "Workspace",
    "attach_legacy_compute_target",
    "get_run",
    "is_compute_target_prepared",
    "prepare_compute_target",
    "remove_legacy_compute_target",
    "LinkedService",
    "SynapseWorkspaceLinkedServiceConfiguration"
]

user_agent.append("azureml-sdk-core", __version__)
# Appending the Arcadia environment variable to user agent string to indicate request origin.
if _ArcadiaAuthentication._is_arcadia_environment():
    user_agent.append(_ArcadiaAuthentication._ARCADIA_ENVIRONMENT_VARIABLE_VALUE)

RUN_TYPE_PROVIDERS_ENTRYPOINT_KEY = "azureml_run_type_providers"
for entrypoint in pkg_resources.iter_entry_points(RUN_TYPE_PROVIDERS_ENTRYPOINT_KEY):
    try:
        Run.add_type_provider(entrypoint.name, entrypoint.load())
    except Exception as e:
        module_logger.warning("Failure while loading {}. Failed to load entrypoint {} with exception {}.".format(
            RUN_TYPE_PROVIDERS_ENTRYPOINT_KEY, entrypoint, e))
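
The loop above loads anything registered under the azureml_run_type_providers entry-point key. A hypothetical setup.py fragment showing how a separate package could register a run type provider that this loop would pick up; the package name, provider name, and factory path are placeholders.

from setuptools import setup

setup(
    name="my-azureml-extension",
    version="0.1",
    packages=["my_package"],
    entry_points={
        # The key must match RUN_TYPE_PROVIDERS_ENTRYPOINT_KEY consumed above.
        "azureml_run_type_providers": [
            "my_package.myrun = my_package.run_provider:MyRun._from_run_dto",
        ],
    },
)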