def run():
    """Launch one short training run for every parameter set suggested by the Comet Optimizer."""
    # torch.multiprocessing.freeze_support()
    opt = Optimizer(optimizer_config)

    for suggestion in opt.get_parameters():
        hparams = Namespace(**suggestion["parameters"])
        lightning_model = PyTorchLightningModel(hparams=hparams)

        # Same key is used for both the streaming and the REST APIs.
        logger = CometLogger(
            api_key=get_config("comet.api_key"),
            rest_api_key=get_config("comet.api_key"),
            optimizer_data=suggestion,
        )

        # Options kept for reference:
        #   early_stop_callback=True  (requires val_loss to be logged)
        #   num_processes=2, distributed_backend='ddp_cpu'
        trainer = Trainer(
            max_epochs=1,
            logger=[logger],
        )
        trainer.fit(lightning_model)
def __init__(
    self,
    upload_experiment,
    api_key,
    output_dir,
    force_reupload,
    mlflow_store_uri,
    answer,
    email,
):
    # type: (bool, str, str, bool, str, Optional[bool], str) -> None
    """Prepare an MLFlow -> Comet import session.

    Resolves the API key and workspace, opens the MLFlow (and, when
    supported, model-registry) stores, and collects the list of MLFlow
    experiments to process. The actual upload happens elsewhere.
    """
    self.answer = answer
    self.email = email
    self.config = get_config()
    # Display the start banner
    LOGGER.info(BANNER)
    # get_api_key
    self.api_key, self.token = self.get_api_key_or_login(api_key)
    # May not need this always?
    self.api_client = API(self.api_key, cache=False)
    self.workspace = self.config["comet.workspace"]
    # Get a workspace
    if not self.workspace:
        # No workspace configured: fall back to the account's default one.
        details = self.api_client.get_account_details()
        self.workspace = details["defaultWorkspaceName"]

    if output_dir is None:
        # No explicit output directory: stage converted data in a temp dir.
        output_dir = tempfile.mkdtemp()

    # MLFlow conversion
    self.store = _get_store(mlflow_store_uri)

    try:
        self.model_registry_store = get_model_registry_store(
            mlflow_store_uri)
    except UnsupportedModelRegistryStoreURIException:
        # The registry is optional; some store URIs don't support it.
        self.model_registry_store = None

    # Most of list_experiments returns a list anyway
    self.mlflow_experiments = list(self.store.list_experiments())
    self.len_experiments = len(
        self.mlflow_experiments)  # We start counting at 0

    # Running totals of what has been converted, filled in by the upload steps.
    self.summary = {
        "experiments": 0,
        "runs": 0,
        "tags": 0,
        "params": 0,
        "metrics": 0,
        "artifacts": 0,
    }

    self.upload_experiment = upload_experiment
    self.output_dir = output_dir
    self.force_reupload = force_reupload
    self.mlflow_store_uri = mlflow_store_uri
def __init__(
    self,
    api_key: Optional[str] = None,
    save_dir: Optional[str] = None,
    workspace: Optional[str] = None,
    project_name: Optional[str] = None,
    rest_api_key: Optional[str] = None,
    experiment_name: Optional[str] = None,
    experiment_key: Optional[str] = None,
    offline: bool = False,
    **kwargs,
):
    """Initialize the Comet logger.

    Mode selection: `api_key` alone -> online; `save_dir` alone -> offline;
    both -> `offline` flag decides; neither -> MisconfigurationException.
    Extra ``kwargs`` are forwarded to the underlying Comet experiment.

    Fix: ``self._kwargs`` was assigned twice (a dead duplicate at the end
    of the method); it is now assigned exactly once.
    """
    if not _COMET_AVAILABLE:
        raise ImportError(
            "You want to use `comet_ml` logger which is not installed yet,"
            " install it with `pip install comet-ml`.")
    super().__init__()
    self._experiment = None

    # Determine online or offline mode based on which arguments were passed to CometLogger
    api_key = api_key or get_api_key(None, get_config())

    if api_key is not None and save_dir is not None:
        self.mode = "offline" if offline else "online"
        self.api_key = api_key
        self._save_dir = save_dir
    elif api_key is not None:
        self.mode = "online"
        self.api_key = api_key
        self._save_dir = None
    elif save_dir is not None:
        self.mode = "offline"
        self._save_dir = save_dir
    else:
        # If neither api_key nor save_dir are passed as arguments, raise an exception
        raise MisconfigurationException(
            "CometLogger requires either api_key or save_dir during initialization."
        )

    log.info(f"CometLogger will be initialized in {self.mode} mode")

    self.workspace = workspace
    self._project_name = project_name
    self._experiment_key = experiment_key
    self._experiment_name = experiment_name
    self._kwargs = kwargs
    self._future_experiment_key = None

    if rest_api_key is not None:
        # Comet.ml rest API, used to determine version number
        self.rest_api_key = rest_api_key
        self.comet_api = API(self.rest_api_key)
    else:
        self.rest_api_key = None
        self.comet_api = None
def init(parsed_args, remaining):
    """Scaffold a new Comet example project from the cookiecutter recipe.

    Validates the requested language, then runs cookiecutter against the
    recipe repository (overridable via COMET_INIT_RECIPE_PATH), injecting
    the configured Comet API key into the template context. Exits with
    status 1 on any failure; `remaining` is unused but kept for the CLI
    dispatch signature.
    """
    try:
        from cookiecutter.main import cookiecutter
        from cookiecutter.exceptions import OutputDirExistsException
        from click.exceptions import Abort
    except ImportError:
        # Fix: the specifier must be quoted — an unquoted `>` is a shell
        # redirection that would silently create a file named "1.7.0".
        LOGGER.error(
            "Please install cookiecutter with `pip install 'cookiecutter>1.7.0'`"
        )
        sys.exit(1)

    valid_languages = ["python", "r"]
    if parsed_args.language.lower() in valid_languages:
        directory = parsed_args.language.lower()
    else:
        LOGGER.error("comet init currently only supports these languages: %s",
                     valid_languages)
        sys.exit(1)

    print("Building Comet example script from recipe...")
    print("=" * 50)
    print(
        "After initializing, please cd into your new project and run the script."
    )
    print()
    if not parsed_args.replay:
        print("Please supply values for the following items:")
        print()

    # The recipe location can be overridden for local development/testing.
    template = os.environ.get("COMET_INIT_RECIPE_PATH",
                              "https://github.com/comet-ml/comet-recipes.git")
    try:
        cookiecutter(
            template,
            replay=parsed_args.replay,
            overwrite_if_exists=parsed_args.force,
            output_dir=parsed_args.output,
            directory=directory,
            extra_context={"comet_api_key": get_config("comet.api_key")},
        )
    except OutputDirExistsException:
        print()
        LOGGER.error("directory already exists; use `comet init -f`")
        sys.exit(1)
    except Abort:
        # User hit Ctrl-C / EOF during the interactive prompts.
        print()
        LOGGER.error("comet init was aborted")
        sys.exit(1)
def optimize(parsed, subcommand_args):
    """Run a Comet Optimizer sweep, fanning the user script out over subprocesses.

    Creates (or attaches to) an Optimizer from a config file or a 32-char id,
    then launches ``parsed.parallel`` copies of ``parsed.PYTHON_SCRIPT`` with
    the optimizer id exported in the environment. Exits the process with 0 on
    success, 1 if any subprocess failed.
    """
    # If only one positional was given, it is the optimizer spec, not a script.
    if parsed.OPTIMIZER is None:
        parsed.OPTIMIZER = parsed.PYTHON_SCRIPT
        parsed.PYTHON_SCRIPT = None

    # Pass it on, in case it needs/wants it:
    subcommand_args += [parsed.OPTIMIZER]

    if parsed.trials is not None:
        subcommand_args += ["--trials", str(parsed.trials)]

    # Is the COMET_API_KEY available?
    config = get_config()
    api_key = config["comet.api_key"]
    if api_key is None:
        raise Exception(
            """Please set your API key: see https://www.comet.ml/docs/python-sdk/advanced/#python-configuration"""
        )

    # An optimizer spec is either a config file on disk or a 32-char optimizer id.
    if not (os.path.isfile(parsed.OPTIMIZER) or len(parsed.OPTIMIZER) == 32):
        raise Exception("Optimizer should be either file or id: '%s'" %
                        parsed.OPTIMIZER)

    # Create a new Optimizer, or use existing one:
    if parsed.PYTHON_SCRIPT is None:
        # Don't echo URL if PYTHON_SCRIPT isn't listed:
        opt = Optimizer(parsed.OPTIMIZER, trials=parsed.trials, verbose=0)
    else:
        if not os.path.isfile(parsed.PYTHON_SCRIPT):
            raise Exception("Python script file '%s' not found" %
                            parsed.PYTHON_SCRIPT)
        opt = Optimizer(parsed.OPTIMIZER, trials=parsed.trials)

    # Optionally dump the optimizer status to a file for inspection.
    if parsed.dump is not None:
        with open(parsed.dump, "w") as fp:
            fp.write(str(opt.status()))

    if parsed.PYTHON_SCRIPT is None:
        # Just print the optimizer_id
        print(opt.id)
        # And exit
        sys.exit(0)

    # Subprocesses inherit the optimizer id through the environment.
    environ = os.environ.copy()
    environ["COMET_OPTIMIZER_ID"] = opt.id

    COMET_EXECUTABLE = parsed.executable or sys.executable

    bootstrap_dir = os.path.dirname(comet_ml.bootstrap.__file__)

    # Prepend the bootstrap dir to a potentially existing PYTHON PATH, prepend
    # so we are sure that we are the first one to be executed and we cannot be
    # sure that other sitecustomize.py files would call us
    if "PYTHONPATH" in environ:
        if bootstrap_dir not in environ["PYTHONPATH"]:
            environ["PYTHONPATH"] = "%s:%s" % (bootstrap_dir,
                                               environ["PYTHONPATH"])
    else:
        environ["PYTHONPATH"] = bootstrap_dir

    command_line = [COMET_EXECUTABLE, parsed.PYTHON_SCRIPT] + subcommand_args

    subprocesses = []  # type: List[subprocess.Popen]
    for j in range(parsed.parallel):
        # Each worker gets its own process id so runs can be told apart.
        environ["COMET_OPTIMIZER_PROCESS_ID"] = str(j)
        sub = subprocess.Popen(command_line, env=environ)
        subprocesses.append(sub)

    exit_code = 0
    try:
        # Wait for all workers; any non-zero return code fails the whole sweep.
        for sub in subprocesses:
            sub.wait()
            if sub.returncode != 0:
                exit_code = 1
    except KeyboardInterrupt:
        # Ask nicely for subprocesses to exit
        for sub in subprocesses:
            # TODO: Check behavior on Windows
            sub.send_signal(signal.SIGINT)

        # Check that all subprocesses exit cleanly
        # Poll up to 60 times, one second apart, for every worker to finish.
        i = 0
        while i < 60:
            all_dead = True

            for sub in subprocesses:
                sub.poll()
                alive = sub.returncode is None
                all_dead = all_dead and not alive

            if all_dead:
                break

            i += 1
            time.sleep(1)

        # Timeout, hard-kill all the remaining subprocess
        if i >= 60:
            for sub in subprocesses:
                sub.poll()
                if sub.returncode is None:
                    sub.kill()

    # Final report: algorithm/status plus elapsed time when the server
    # provided both timestamps (milliseconds -> seconds).
    print()
    results = opt.status()
    for key in ["algorithm", "status"]:
        print(" ", "%s:" % key, results[key])
    if isinstance(results["endTime"], float) and isinstance(
            results["startTime"], float):
        print(
            " ",
            "time:",
            (results["endTime"] - results["startTime"]) / 1000,
            "seconds",
        )
    sys.exit(exit_code)
# NOTE(review): this is the interior of a larger function — `step`, `args`,
# `download_dir`, `post_processes`, `parse_tags`, `has_right_tags` and
# `config` are defined outside this view; verify against the full source.
assert step in {"last", "all"}
api = comet_ml.api.API()

# ---------------------------------------
# ----- Select exps based on tags -----
# ---------------------------------------
if not args.tags:
    # No tag filter: a single experiment id is required instead.
    assert args.exp_id
    exps = [api.get_experiment_by_id(args.exp_id)]
else:
    all_tags, keep_tags, remove_tags = parse_tags(args.tags)
    # Group downloads under a directory named after the sorted tag set.
    download_dir = download_dir / "&".join(sorted(all_tags))
    print("Selecting experiments with tags", all_tags)
    conf = dict(config.get_config())
    exps = api.get_experiments(
        workspace=conf.get("comet.workspace"),
        project_name=conf.get("comet.project_name") or "climategan",
    )
    # Keep only experiments matching the keep/remove tag constraints.
    exps = list(
        filter(lambda e: has_right_tags(e, keep_tags, remove_tags), exps))

if args.running:
    # Restrict to experiments that are currently alive on the server.
    exps = [e for e in exps if e.alive]

# -------------------------
# ----- Print setup -----
# -------------------------
print("Processing {} experiments in {} with post processes {}".format(
    len(exps), str(download_dir), post_processes))
def check(args, rest=None):
    """Diagnose connectivity to every Comet backend endpoint.

    Checks, in order: the main server, the Rest API, the Websocket server,
    the Optimizer server and the Predictor server, logging for each one the
    configured address, where it was configured, and whether the connection
    succeeded; ends with a summary table. `rest` is unused but kept for the
    CLI dispatch signature.

    Fixes: removed the no-op `websocket_url = websocket_url` assignment and
    made the summary line use %r like its siblings (identical output for
    booleans).
    """
    # Called via `comet upload EXP.zip`
    if args.debug:
        activate_debug()

    config = get_config()

    LOGGER.info("Comet Check")
    LOGGER.info("=" * 80)
    print("")
    LOGGER.info("Checking connectivity to server...")
    print("")

    # Clientlib
    server_address = sanitize_url(config["comet.url_override"])
    server_address_config_origin = config_source(
        config.get_config_origin("comet.url_override"))

    LOGGER.info("Configured server address %r", server_address)
    if server_address_config_origin:
        LOGGER.info("Server address was configured in %s",
                    server_address_config_origin)
    else:
        LOGGER.info("Server address is the default one")
    print("")
    server_connected = check_server_connection(server_address)
    print("")
    if server_connected:
        LOGGER.info("Server connection is ok")
    else:
        LOGGER.warning("Server connection is not ok")

    # Rest API
    LOGGER.info("=" * 80)
    LOGGER.info("Checking connectivity to Rest API...")
    LOGGER.info("=" * 80)

    root_url = sanitize_url(get_root_url(config["comet.url_override"]))
    rest_api_url = url_join(root_url, *["api/rest/", "v2" + "/"])

    LOGGER.info("Configured Rest API address %r", rest_api_url)
    # The Rest API address derives from the same config key as the server.
    if server_address_config_origin:
        LOGGER.info("Rest API address was configured in %s",
                    server_address_config_origin)
    else:
        LOGGER.info("Rest API address is the default one")
    print("")
    rest_api_connected = check_rest_api_connection(rest_api_url)
    print("")
    if rest_api_connected:
        LOGGER.info("REST API connection is ok")
    else:
        LOGGER.warning("REST API connection is not ok")

    # Websocket
    LOGGER.info("=" * 80)
    LOGGER.info("Checking connectivity to Websocket Server")
    LOGGER.info("=" * 80)

    websocket_url = config["comet.ws_url_override"]
    if websocket_url is None:
        # No explicit WS url: derive one from the server address.
        websocket_url = get_default_ws_url(server_address)
        LOGGER.warning(
            "No WS address configured on client side, fallbacking on default WS address %r, if that's incorrect set the WS url through the `comet.ws_url_override` config key",
            websocket_url,
        )
        websocket_url_config_origin = None
    else:
        websocket_url_config_origin = config_source(
            config.get_config_origin("comet.ws_url_override"))

    LOGGER.info(
        "Configured WS address %r",
        websocket_url,
    )
    if websocket_url_config_origin:
        LOGGER.info("WS address was configured in %s",
                    websocket_url_config_origin)
    print("")
    ws_connected = check_ws_connection(websocket_url, args.debug)
    print("")
    if ws_connected:
        LOGGER.info("Websocket connection is ok")
    else:
        LOGGER.warning("Websocket connection is not ok")

    # Optimizer
    LOGGER.info("=" * 80)
    LOGGER.info("Checking connectivity to Optimizer Server")
    LOGGER.info("=" * 80)

    optimizer_url = sanitize_url(config["comet.optimizer_url"])
    optimizer_url_config_origin = config_source(
        config.get_config_origin("comet.optimizer_url"))

    LOGGER.info(
        "Configured Optimizer address %r",
        optimizer_url,
    )
    if optimizer_url_config_origin:
        LOGGER.info("Optimizer address was configured in %s",
                    optimizer_url_config_origin)
    else:
        LOGGER.info("Optimizer address is the default one")
    print("")
    optimizer_connected = check_optimizer_connection(optimizer_url)
    print("")
    if optimizer_connected:
        LOGGER.info("Optimizer connection is ok")
    else:
        LOGGER.warning("Optimizer connection is not ok")

    # Predictor
    LOGGER.info("=" * 80)
    LOGGER.info("Checking connectivity to Predictor Server")
    LOGGER.info("=" * 80)

    predictor_url = sanitize_url(config["comet.predictor_url"])
    predictor_url_config_origin = config_source(
        config.get_config_origin("comet.predictor_url"))

    LOGGER.info(
        "Configured Predictor address %r",
        predictor_url,
    )
    if predictor_url_config_origin:
        LOGGER.info("Predictor address was configured in %s",
                    predictor_url_config_origin)
    else:
        LOGGER.info("Predictor address is the default one")
    print("")
    predictor_connected = check_predictor_connection(predictor_url)
    print("")
    if predictor_connected:
        LOGGER.info("Predictor connection is ok")
    else:
        LOGGER.warning("Predictor connection is not ok")

    # Summary
    print("")
    print("")
    LOGGER.info("Summary")
    LOGGER.info("-" * 80)
    LOGGER.info("Server connectivity\t\t\t%r", server_connected)
    LOGGER.info("Rest API connectivity\t\t%r", rest_api_connected)
    LOGGER.info("WS server connectivity\t\t%r", ws_connected)
    LOGGER.info("Optimizer server connectivity\t%r", optimizer_connected)
    LOGGER.info("Predictor server connectivity\t%r", predictor_connected)
""" config_path = Path(config_path) model = CenterTrack( model=model_type, config_path=config_path, epoch_size=epoch_size, batch_size=batch_size, n_workers=n_workers, img_scale=img_scale, label_scale=label_scale, grayscale=grayscale, ) print(f"# parameters: {sum(p.numel() for p in model.parameters()):,}") api_key = get_api_key(None, get_config()) project_name = "vsrl" exp_name = f"detector_{config_path.name.split('.')[0]}" if use_logger: logger = CometLogger( api_key, save_dir, project_name=project_name, experiment_name=exp_name, force_offline=force_offline, log_env_gpu=False, log_env_cpu=False, ) logger.log_hyperparams({ "img_scale": img_scale,