def train(user_conf):
    """Train a model from a user-supplied JSON configuration.

    NOTE(review): this module defines ``train`` twice; the later
    ``train(**args)`` definition shadows this one at import time, so this
    version looks stale — confirm before relying on it.

    Parameters
    ----------
    user_conf : dict
        Json dict (created with json.dumps) with the user's configuration
        parameters that will replace the defaults. Must be loaded with
        json.loads(). For example:
        user_conf={'num_classes': 'null', 'lr_step_decay': '0.1',
                   'lr_step_schedule': '[0.7, 0.9]',
                   'use_early_stopping': 'false'}

    Raises
    ------
    HTTPBadRequest
        If the merged configuration fails validation in config.check_conf.
    """
    CONF = config.CONF

    # Update the conf with the user input (every value arrives JSON-encoded)
    for group, val in sorted(CONF.items()):
        for g_key, g_val in sorted(val.items()):
            g_val['value'] = json.loads(user_conf[g_key])

    # Check the configuration
    try:
        config.check_conf(conf=CONF)
    except Exception as e:
        # FIX: the import block brings in HTTPBadRequest (aiohttp.web), not
        # BadRequest — raising the undefined name would replace the real
        # validation error with a NameError.
        raise HTTPBadRequest(reason=str(e)) from e

    CONF = config.conf_dict(conf=CONF)
    timestamp = datetime.now().strftime('%Y-%m-%d_%H%M%S')

    config.print_conf_table(CONF)
    K.clear_session()  # remove the model loaded for prediction
    train_fn(TIMESTAMP=timestamp, CONF=CONF)

    # Sync with NextCloud folders (if NextCloud is available); best-effort,
    # so a missing remote only logs instead of failing the training call.
    try:
        mount_nextcloud(paths.get_models_dir(), 'ncplants:/models')
    except Exception as e:
        print(e)
def train(**args):
    """Train an image classifier."""
    # Merge the caller's query arguments into the global configuration.
    update_with_query_conf(user_args=args)
    conf = config.conf_dict

    ts = datetime.now().strftime('%Y-%m-%d_%H%M%S')
    config.print_conf_table(conf)

    # Drop any model currently loaded for prediction before training starts.
    K.clear_session()
    train_fn(TIMESTAMP=ts, CONF=conf)

    # Sync with NextCloud folders (if NextCloud is available)
    try:
        mount_nextcloud(paths.get_models_dir(), 'ncplants:/models')
    except Exception as exc:
        print(exc)
import json
import re
import urllib.request
from collections import OrderedDict
from datetime import datetime

import requests
from aiohttp.web import HTTPBadRequest
from tensorflow.keras import backend as K
from webargs import fields

from speechclas import paths, utils, config, label_wav
from speechclas.data_utils import mount_nextcloud
from speechclas.train_runfile import train_fn

# Mount NextCloud folders (if NextCloud is available)
try:
    mount_nextcloud('ncplants:/data/dataset_files', paths.get_splits_dir())
    mount_nextcloud('ncplants:/data/images', paths.get_audio_dir())
    #mount_nextcloud('ncplants:/models', paths.get_models_dir())
except Exception as e:
    print(e)

# Empty model variables for inference (will be loaded the first time we perform inference)
loaded = False
graph, model, conf, class_names, class_info = None, None, None, None, None

# Additional parameters
allowed_extensions = set(['wav'])  # allow only certain file extensions
top_K = 5  # number of top classes predictions to return


def load_inference_model():