def __init__(self,
             tokenizer: Component,
             tracker: Tracker,
             network_parameters: Dict[str, Any],
             template_path: str,
             save_path: str,
             load_path: str = None,
             template_type: str = "DefaultTemplate",
             word_vocab: Component = None,
             bow_embedder: Component = None,
             embedder: Component = None,
             slot_filler: Component = None,
             intent_classifier: Component = None,
             database: Component = None,
             api_call_action: str = None,  # TODO: make it unrequired
             use_action_mask: bool = False,
             debug: bool = False,
             **kwargs):
    """Wire up the bot's NLU components, load the response templates
    and build the dialogue policy network.
    """
    super().__init__(load_path=load_path, save_path=save_path, **kwargs)

    # Components used to featurize and interpret user utterances.
    self.tokenizer = tokenizer
    self.tracker = tracker
    self.bow_embedder = bow_embedder
    self.embedder = embedder
    self.slot_filler = slot_filler
    self.intent_classifier = intent_classifier
    self.word_vocab = word_vocab

    # Behaviour flags.
    self.use_action_mask = use_action_mask
    self.debug = debug

    # Load response templates; each template corresponds to one system action.
    template_path = expand_path(template_path)
    template_type = getattr(templ, template_type)
    log.info("[loading templates from {}]".format(template_path))
    self.templates = templ.Templates(template_type).load(template_path)
    self.n_actions = len(self.templates)
    log.info("{} templates loaded".format(self.n_actions))

    self.database = database

    # Index of the special api-call action, when one is configured.
    self.api_call_id = (self.templates.actions.index(api_call_action)
                        if api_call_action is not None else None)

    # intent_classifier returns y_probas
    self.intents = (self.intent_classifier.get_main_component().classes
                    if callable(self.intent_classifier) else [])

    # The network persists under the same paths as the bot itself.
    network_parameters['load_path'] = load_path
    network_parameters['save_path'] = save_path
    self.network = self._init_network(network_parameters)
    self.reset()
def __init__(self, template_path, network_parameters, tokenizer, tracker,
             template_type: str = "BaseTemplate", database=None,
             api_call_action=None, bow_embedder=None, embedder=None,
             slot_filler=None, intent_classifier=None, use_action_mask=False,
             debug=False, save_path=None, word_vocab=None, vocabs=None,
             **kwargs):
    """Assemble the goal-oriented bot: NLU components, response templates,
    an optional database, and the dialogue policy network.

    Raises:
        KeyError: if ``kwargs`` does not contain ``'mode'`` (required by
            the parent constructor call below).
    """
    # NOTE(review): 'mode' is required here -- confirm every caller passes it.
    super().__init__(save_path=save_path, mode=kwargs['mode'])
    self.tokenizer = tokenizer
    self.tracker = tracker
    self.bow_embedder = bow_embedder
    self.embedder = embedder
    self.slot_filler = slot_filler
    self.intent_classifier = intent_classifier
    self.use_action_mask = use_action_mask
    self.debug = debug
    # Fall back to the legacy 'vocabs' mapping when no explicit word_vocab
    # is given; presumably a config always supplies one of the two -- a
    # TypeError here means neither was configured.
    self.word_vocab = word_vocab or vocabs['word_vocab']
    # Load response templates; each template corresponds to one system action.
    template_path = expand_path(template_path)
    template_type = getattr(templ, template_type)
    log.info("[loading templates from {}]".format(template_path))
    self.templates = templ.Templates(template_type).load(template_path)
    self.n_actions = len(self.templates)
    log.info("{} templates loaded".format(self.n_actions))
    self.database = database
    # BUGFIX: api_call_action defaults to None, but the original code called
    # templates.actions.index(api_call_action) unconditionally, raising
    # ValueError for every bot configured without an api-call action.
    self.api_call_id = None
    if api_call_action is not None:
        self.api_call_id = self.templates.actions.index(api_call_action)
    self.intents = []
    if callable(self.intent_classifier):
        # Probe the classifier once to discover the intent label set:
        # intent_classifier returns y_labels, y_probs, and the per-utterance
        # probability dict is keyed by intent name.
        self.intents = list(self.intent_classifier(["hi"])[1][0].keys())
    self.network = self._init_network(network_parameters)
    self.reset()
def __init__(self,
             tokenizer: Component,
             tracker: Tracker,
             template_path: str,
             save_path: str,
             hidden_size: int = 128,
             obs_size: int = None,
             action_size: int = None,
             dropout_rate: float = 0.,
             l2_reg_coef: float = 0.,
             dense_size: int = None,
             attention_mechanism: dict = None,
             network_parameters: Optional[Dict[str, Any]] = None,
             load_path: str = None,
             template_type: str = "DefaultTemplate",
             word_vocab: Component = None,
             bow_embedder: Component = None,
             embedder: Component = None,
             slot_filler: Component = None,
             intent_classifier: Component = None,
             database: Component = None,
             api_call_action: str = None,  # TODO: make it unrequired
             use_action_mask: bool = False,
             debug: bool = False,
             **kwargs):
    """Wire up the bot's NLU components, load the response templates
    and build the dialogue policy network.

    Explicit constructor arguments (hidden_size, dropout_rate, ...) give the
    network defaults; anything in ``network_parameters`` overrides them
    (legacy configuration style).
    """
    # BUGFIX: the default used to be a shared mutable dict ({}), and the
    # caller's dict was mutated in place by the pop() calls below.  Take a
    # private copy instead; the None default is backward-compatible.
    network_parameters = dict(network_parameters or {})
    if any(p in network_parameters for p in self.DEPRECATED):
        log.warning(f"parameters {self.DEPRECATED} are deprecated,"
                    " for learning rate schedule documentation see"
                    " deeppavlov.core.models.lr_scheduled_tf_model"
                    " or read github tutorial on super convergence.")
    if 'learning_rate' in network_parameters:
        # learning rate is handled by the (lr-scheduled) parent model now
        kwargs['learning_rate'] = network_parameters.pop('learning_rate')
    super().__init__(load_path=load_path, save_path=save_path, **kwargs)
    self.tokenizer = tokenizer
    self.tracker = tracker
    self.bow_embedder = bow_embedder
    self.embedder = embedder
    self.slot_filler = slot_filler
    self.intent_classifier = intent_classifier
    self.use_action_mask = use_action_mask
    self.debug = debug
    self.word_vocab = word_vocab
    # Load response templates; each template corresponds to one system action.
    template_path = expand_path(template_path)
    template_type = getattr(templ, template_type)
    log.info("[loading templates from {}]".format(template_path))
    self.templates = templ.Templates(template_type).load(template_path)
    self.n_actions = len(self.templates)
    log.info("{} templates loaded".format(self.n_actions))
    self.database = database
    # Index of the special api-call action, when one is configured.
    self.api_call_id = None
    if api_call_action is not None:
        self.api_call_id = self.templates.actions.index(api_call_action)
    self.intents = []
    if callable(self.intent_classifier):
        self.intents = self.intent_classifier.get_main_component().classes
    new_network_parameters = {
        'hidden_size': hidden_size,
        'action_size': action_size,
        'obs_size': obs_size,
        'dropout_rate': dropout_rate,
        'l2_reg_coef': l2_reg_coef,
        'dense_size': dense_size,
        'attn': attention_mechanism,
    }
    # Legacy key name: configs may still say 'attention_mechanism'.
    if 'attention_mechanism' in network_parameters:
        network_parameters['attn'] = network_parameters.pop('attention_mechanism')
    new_network_parameters.update(network_parameters)
    self._init_network(**new_network_parameters)
    self.reset()
def __init__(self,
             tokenizer: Component,
             tracker: FeaturizedTracker,
             template_path: str,
             save_path: str,
             hidden_size: int = 128,
             obs_size: int = None,
             action_size: int = None,
             dropout_rate: float = 0.,
             l2_reg_coef: float = 0.,
             dense_size: int = None,
             attention_mechanism: dict = None,
             network_parameters: Optional[Dict[str, Any]] = None,
             load_path: str = None,
             template_type: str = "DefaultTemplate",
             word_vocab: Component = None,
             bow_embedder: Component = None,
             embedder: Component = None,
             slot_filler: Component = None,
             intent_classifier: Component = None,
             database: Component = None,
             api_call_action: str = None,
             use_action_mask: bool = False,
             debug: bool = False,
             **kwargs) -> None:
    """Wire up the bot's NLU components, load the response templates,
    build per-user dialogue state trackers and the policy network.

    Explicit constructor arguments (hidden_size, dropout_rate, ...) give the
    network defaults; keys in ``network_parameters`` override them.
    """
    network_parameters = network_parameters or {}
    # Deprecated learning-rate-schedule keys: warn, and forward the plain
    # learning_rate to the parent model via kwargs.
    if any(p in network_parameters for p in self.DEPRECATED):
        log.warning(f"parameters {self.DEPRECATED} are deprecated,"
                    f" for learning rate schedule documentation see"
                    f" deeppavlov.core.models.lr_scheduled_tf_model"
                    f" or read a github tutorial on super convergence.")
    if 'learning_rate' in network_parameters:
        kwargs['learning_rate'] = network_parameters.pop('learning_rate')
    super().__init__(load_path=load_path, save_path=save_path, **kwargs)
    # Components used to featurize and interpret user utterances.
    self.tokenizer = tokenizer
    self.bow_embedder = bow_embedder
    self.embedder = embedder
    self.slot_filler = slot_filler
    self.intent_classifier = intent_classifier
    self.use_action_mask = use_action_mask
    self.debug = debug
    self.word_vocab = word_vocab
    # Load response templates; each template corresponds to one system action.
    template_path = expand_path(template_path)
    template_type = getattr(templ, template_type)
    log.info(f"[loading templates from {template_path}]")
    self.templates = templ.Templates(template_type).load(template_path)
    self.n_actions = len(self.templates)
    log.info(f"{self.n_actions} templates loaded.")
    # The passed-in tracker is kept as a prototype ("default") tracker; the
    # actual per-dialogue state lives in DialogueStateTracker instances.
    self.default_tracker = tracker
    self.dialogue_state_tracker = DialogueStateTracker(
        tracker.slot_names, self.n_actions, hidden_size, database)
    # NOTE: -1 sentinel here (sibling versions use None) -- downstream
    # comparisons must match this convention.
    self.api_call_id = -1
    if api_call_action is not None:
        self.api_call_id = self.templates.actions.index(api_call_action)
    self.intents = []
    if isinstance(self.intent_classifier, Chainer):
        self.intents = self.intent_classifier.get_main_component().classes
    new_network_parameters = {
        'hidden_size': hidden_size,
        'action_size': action_size,
        'obs_size': obs_size,
        'dropout_rate': dropout_rate,
        'l2_reg_coef': l2_reg_coef,
        'dense_size': dense_size,
        'attn': attention_mechanism
    }
    # Legacy key name: configs may still say 'attention_mechanism'.
    if 'attention_mechanism' in network_parameters:
        network_parameters['attn'] = network_parameters.pop(
            'attention_mechanism')
    new_network_parameters.update(network_parameters)
    self._init_network(**new_network_parameters)
    # One state tracker per concurrent user/dialogue.
    self.multiple_user_state_tracker = MultipleUserStateTracker()
    self.reset()
def __init__(self,
             tokenizer: Component,
             tracker: Tracker,
             template_path: str,
             save_path: str,
             hidden_size: int,
             obs_size: int = None,
             action_size: int = None,
             dropout_rate: float = 0.,
             l2_reg_coef: float = 0.,
             dense_size: int = None,
             attention_mechanism: dict = None,
             load_path: str = None,
             template_type: str = "DefaultTemplate",
             word_vocab: Component = None,
             bow_embedder: Component = None,
             embedder: Component = None,
             slot_filler: Component = None,
             intent_classifier: Component = None,
             database: Component = None,
             api_call_action: str = None,  # TODO: make it unrequired
             use_action_mask: bool = False,
             debug: bool = False,
             **kwargs):
    """Set up the bot's NLU components, load the action templates and
    initialize the policy network with the given architecture
    hyperparameters.
    """
    super().__init__(load_path=load_path, save_path=save_path, **kwargs)

    # Featurization pipeline.
    self.tokenizer = tokenizer
    self.tracker = tracker
    self.bow_embedder = bow_embedder
    self.embedder = embedder
    self.slot_filler = slot_filler
    self.intent_classifier = intent_classifier
    self.word_vocab = word_vocab

    self.use_action_mask = use_action_mask
    self.debug = debug

    # Each template corresponds to one system action.
    template_path = expand_path(template_path)
    template_type = getattr(templ, template_type)
    log.info("[loading templates from {}]".format(template_path))
    self.templates = templ.Templates(template_type).load(template_path)
    self.n_actions = len(self.templates)
    log.info("{} templates loaded".format(self.n_actions))

    self.database = database

    # Index of the special api-call action, when one is configured.
    self.api_call_id = (self.templates.actions.index(api_call_action)
                        if api_call_action is not None else None)

    self.intents = (self.intent_classifier.get_main_component().classes
                    if callable(self.intent_classifier) else [])

    # Gather the architecture hyperparameters and build the network.
    net_config = {
        'hidden_size': hidden_size,
        'action_size': action_size,
        'obs_size': obs_size,
        'dropout_rate': dropout_rate,
        'l2_reg_coef': l2_reg_coef,
        'dense_size': dense_size,
        'attn': attention_mechanism,
    }
    self._init_network(**net_config)
    self.reset()