def forward_request(self, model_info, inputs, outputs=None, options=None):
    """Translate a batch of inputs with the loaded model.

    Returns a list with one entry per input, each entry being a
    single-element list containing a TranslationOutput hypothesis.
    """
    source_lang = model_info["source"]
    target_lang = model_info["target"]
    hypotheses = []
    for translation in self.translate_batch(inputs, source_lang, target_lang):
        hypotheses.append([TranslationOutput(translation)])
    return hypotheses
def forward_request(self, batch_inputs, info, timeout=None):
    """Translate a batch through the credentialed translate_list helper.

    Each translation is wrapped in a single-hypothesis list of
    TranslationOutput objects, one entry per input.
    """
    translations = translate_list(
        self._credentials,
        batch_inputs,
        source_language=info['source'],
        target_language=info['target'])
    wrapped = []
    for translation in translations:
        wrapped.append([TranslationOutput(translation)])
    return wrapped
def forward_request(self, batch_inputs, info, timeout=None):
    """Translate a batch via translate_list using the stored HTTP client
    and API credentials.

    Returns one single-hypothesis list of TranslationOutput per input.
    """
    translations = translate_list(
        self.httpClient,
        self._appid,
        self._key,
        batch_inputs,
        source_language=info['source'],
        target_language=info['target'])
    return [[TranslationOutput(text)] for text in translations]
def forward_request(self, model_info, inputs, outputs=None, options=None):
    """Translate each input with the callable stored in model_info.

    When `outputs` is given, outputs[i] is passed as the target for
    inputs[i]; otherwise the target is None. Returns one
    single-hypothesis list of TranslationOutput per input.
    """
    translate_fn = model_info
    return [
        [TranslationOutput(
            translate_fn(source, outputs[i] if outputs is not None else None))]
        for i, source in enumerate(inputs)
    ]
def forward_request(self, batch_inputs, info, timeout=None):
    """Send a translation request to the remote HTTP endpoint.

    Args:
      batch_inputs: List of tokenized source sentences (lists of tokens).
      info: Dict containing at least the 'endpoint' URL to POST to.
      timeout: Optional request timeout in seconds.

    Returns:
      A list with one entry per input, each a list of TranslationOutput
      hypotheses, or None if the request timed out.
    """
    payload = [{'src': ' '.join(tokens)} for tokens in batch_inputs]
    try:
        batch_results = requests.post(
            info['endpoint'], json=payload, timeout=timeout).json()
    except requests.exceptions.Timeout as e:
        # Best-effort: a timeout is logged and reported as no result
        # rather than propagated to the caller.
        logger.error('%s', e)
        return None
    batch_outputs = []
    for hypotheses in batch_results:
        outputs = []
        for hyp in hypotheses:
            tokens = hyp['tgt'].split()
            # Length-normalize the cumulative score. Guard against an
            # empty hypothesis (empty/whitespace-only 'tgt'), which would
            # otherwise raise ZeroDivisionError.
            score = hyp['pred_score'] / max(len(tokens), 1)
            outputs.append(TranslationOutput(
                tokens, score=score, attention=hyp['attn']))
        batch_outputs.append(outputs)
    return batch_outputs
def forward_request(self, batch_inputs, info, timeout=None):
    """Translate a batch with this instance's translate_batch method.

    Returns one single-hypothesis list of TranslationOutput per input.
    """
    src_lang = info['source']
    tgt_lang = info['target']
    results = []
    for translation in self.translate_batch(batch_inputs, src_lang, tgt_lang):
        results.append([TranslationOutput(translation)])
    return results