Ejemplo n.º 1
0
def test_get_output_paths_no_version(d, m_patch):
    """When no version is given, the discovered version is used and both paths collapse to one."""
    expected = os.path.join(d.dir, d.proj, d.name, d.version)
    with patch('mead.utils.find_model_version') as version_patch:
        version_patch.return_value = d.version
        client, server = get_output_paths(d.dir, d.proj, d.name, None, False)
    assert client == expected
    assert client == server
Ejemplo n.º 2
0
    def run(self, basename, output_dir, project=None, name=None, model_version=None, **kwargs):
        """Export a TensorFlow model as a SavedModel bundle plus a client-side asset bundle.

        :param basename: Path to the checkpoint; resolved to the TF index file if unzipped.
        :param output_dir: Root directory for the exported artifacts.
        :param project: Optional project name used to build the output path.
        :param name: Optional model name used to build the output path.
        :param model_version: Optional explicit version; discovered if None.
        :param kwargs: `remote` (default True) selects split client/server layout.
        :returns: `(client_output, server_output)` directory paths.
        """
        basename = get_tf_index_from_unzipped(basename)

        with tf.Graph().as_default():
            config_proto = tf.ConfigProto(allow_soft_placement=True)
            with tf.Session(config=config_proto) as sess:
                sig_input, sig_output, sig_name, assets = self._create_rpc_call(sess, basename)
                client_output, server_output = get_output_paths(
                    output_dir,
                    project, name,
                    model_version,
                    kwargs.get('remote', True), make_server=False
                )
                logger.info('Saving vectorizers and vocabs to %s', client_output)
                # Lazy %-style args, consistent with the logging call above
                # (was eager %-interpolation).
                logger.info('Saving serialized model to %s', server_output)
                try:
                    builder = self._create_saved_model_builder(sess, server_output, sig_input, sig_output, sig_name)
                    create_bundle(builder, client_output, basename, assets)
                    logger.info('Successfully exported model to %s', output_dir)
                except AssertionError:
                    # model already exists; bare raise preserves the original traceback
                    raise
                except Exception:
                    # export process broke; log the full traceback through the logger
                    # instead of traceback.print_exc() so it lands in configured handlers.
                    logger.exception('Export failed')
                    # TODO(MB): we should remove the directory, if one has been saved already.
                    raise
        return client_output, server_output
Ejemplo n.º 3
0
def test_get_output_paths_old(d, m_patch):
    """Legacy layout (no project, no name): both paths are dir/version and identical."""
    expected = os.path.join(d.dir, d.version)
    client, server = get_output_paths(d.dir, None, None, d.version, False)
    assert client == expected
    assert server == expected
    assert client == server
Ejemplo n.º 4
0
    def run(self, basename, output_dir, project=None, name=None, model_version=None, **kwargs):
        """Export a TensorFlow model as a SavedModel bundle plus a client-side asset bundle.

        :param basename: Path to the checkpoint; resolved to the TF index file if unzipped.
        :param output_dir: Root directory for the exported artifacts.
        :param project: Optional project name used to build the output path.
        :param name: Optional model name used to build the output path.
        :param model_version: Optional explicit version; discovered if None.
        :param kwargs: `remote` (default True) selects split client/server layout.
        :returns: `(client_output, server_output)` directory paths.
        """
        basename = get_tf_index_from_unzipped(basename)

        with tf.Graph().as_default():
            config_proto = tf.ConfigProto(allow_soft_placement=True)
            with tf.Session(config=config_proto) as sess:
                sig_input, sig_output, sig_name, assets = self._create_rpc_call(sess, basename)
                client_output, server_output = get_output_paths(
                    output_dir,
                    project, name,
                    model_version,
                    kwargs.get('remote', True), make_server=False
                )
                logger.info('Saving vectorizers and vocabs to %s', client_output)
                # Lazy %-style args, consistent with the logging call above
                # (was eager %-interpolation).
                logger.info('Saving serialized model to %s', server_output)
                try:
                    builder = self._create_saved_model_builder(sess, server_output, sig_input, sig_output, sig_name)
                    create_bundle(builder, client_output, basename, assets)
                    logger.info('Successfully exported model to %s', output_dir)
                except AssertionError:
                    # model already exists; bare raise preserves the original traceback
                    raise
                except Exception:
                    # export process broke; log the full traceback through the logger
                    # instead of traceback.print_exc() so it lands in configured handlers.
                    logger.exception('Export failed')
                    # TODO(MB): we should remove the directory, if one has been saved already.
                    raise
        return client_output, server_output
Ejemplo n.º 5
0
    def run(self, basename, output_dir, project=None, name=None, model_version=None, **kwargs):
        """Trace a PyTorch model with TorchScript and export it alongside its metadata bundle."""
        logger.warning("Pytorch exporting is experimental and is not guaranteed to work for plugin models.")
        client_output, server_output = get_output_paths(
            output_dir,
            project, name,
            model_version,
            kwargs.get('remote', True),
        )
        logger.info("Saving vectorizers and vocabs to %s", client_output)
        logger.info("Saving serialized model to %s", server_output)

        model, vectorizers, model_name = self.load_model(basename)
        embed_order = monkey_patch_embeddings(model)
        fake_data, fake_lengths = create_fake_data(VECTORIZER_SHAPE_MAP, vectorizers, embed_order)
        meta = create_metadata(
            embed_order, ['output'],
            self.sig_name,
            model_name, model.lengths_key,
            exporter_type=self.preproc_type()
        )

        # Wrap the model and trace it so it can be serialized without Python.
        wrapped = self.wrapper(model)
        logger.info("Tracing Model.")
        traced_model = torch.jit.trace(wrapped, (fake_data, fake_lengths))
        traced_model.save(os.path.join(server_output, 'model.pt'))

        logger.info("Saving metadata.")
        save_to_bundle(client_output, basename, assets=meta)
        logger.info('Successfully exported model to %s', output_dir)
Ejemplo n.º 6
0
def test_get_output_paths_no_version_remote(d, m_patch):
    """Remote mode splits output under client/ and server/ using the discovered version."""
    with patch('mead.utils.find_model_version') as version_patch:
        version_patch.return_value = d.version
        client, server = get_output_paths(d.dir, d.proj, d.name, None, True)
    assert client == os.path.join(d.dir, "client", d.proj, d.name, d.version)
    assert server == os.path.join(d.dir, "server", d.proj, d.name, d.version)
Ejemplo n.º 7
0
    def run(self,
            basename,
            output_dir,
            project=None,
            name=None,
            model_version=None,
            **kwargs):
        """TorchScript-trace a PyTorch model and save it with its metadata bundle."""
        logger.warning(
            "Pytorch exporting is experimental and is not guaranteed to work for plugin models."
        )
        client_output, server_output = get_output_paths(
            output_dir,
            project,
            name,
            model_version,
            kwargs.get('remote', True),
        )
        logger.info("Saving vectorizers and vocabs to %s", client_output)
        logger.info("Saving serialized model to %s", server_output)

        model, vectorizers, model_name = self.load_model(basename)
        embedding_order = monkey_patch_embeddings(model)
        example_data, example_lengths = create_fake_data(
            VECTORIZER_SHAPE_MAP, vectorizers, embedding_order)
        meta = create_metadata(embedding_order, ['output'],
                               self.sig_name,
                               model_name,
                               model.lengths_key,
                               exporter_type=self.preproc_type())

        # Trace the wrapped model so it can run without the Python runtime.
        wrapped_model = self.wrapper(model)
        logger.info("Tracing Model.")
        traced = torch.jit.trace(wrapped_model, (example_data, example_lengths))
        traced.save(os.path.join(server_output, 'model.pt'))

        logger.info("Saving metadata.")
        save_to_bundle(client_output, basename, assets=meta)
        logger.info('Successfully exported model to %s', output_dir)
Ejemplo n.º 8
0
def test_get_output_paths_make_server_remote(d, m_patch):
    """Remote make_server creates both directories, client first, then server."""
    client_dir = os.path.join(d.dir, "client", d.proj, d.name, d.version)
    server_dir = os.path.join(d.dir, "server", d.proj, d.name, d.version)
    get_output_paths(d.dir, d.proj, d.name, d.version, True, True)
    assert m_patch.call_args_list == [call(client_dir), call(server_dir)]
Ejemplo n.º 9
0
def test_get_output_paths_no_make_server(d, m_patch):
    """Non-remote with make_server=False creates no directories at all."""
    get_output_paths(d.dir, d.proj, d.name, d.version, False, False)
    m_patch.assert_not_called()
Ejemplo n.º 10
0
def test_get_output_paths_make_server(d, m_patch):
    """Non-remote make_server creates exactly one combined output directory."""
    expected = os.path.join(d.dir, d.proj, d.name, d.version)
    get_output_paths(d.dir, d.proj, d.name, d.version, False, True)
    m_patch.assert_called_once_with(expected)
Ejemplo n.º 11
0
def test_get_output_paths_name_remote(d, m_patch):
    """Remote with only a name: client/ and server/ trees omit the project segment."""
    client, server = get_output_paths(d.dir, None, d.name, d.version, True)
    assert client == os.path.join(d.dir, "client", d.name, d.version)
    assert server == os.path.join(d.dir, "server", d.name, d.version)
Ejemplo n.º 12
0
def test_get_output_paths_project(d, m_patch):
    """Non-remote with only a project: a single shared dir/project/version path."""
    expected = os.path.join(d.dir, d.proj, d.version)
    client, server = get_output_paths(d.dir, d.proj, None, d.version, False)
    assert client == expected
    assert client == server
Ejemplo n.º 13
0
def test_get_output_paths_project_name_remote(d, m_patch):
    """Remote with project and name: full client/ and server/ trees."""
    client, server = get_output_paths(d.dir, d.proj, d.name, d.version, True)
    assert client == os.path.join(d.dir, 'client', d.proj, d.name, d.version)
    assert server == os.path.join(d.dir, 'server', d.proj, d.name, d.version)
Ejemplo n.º 14
0
def test_get_output_paths_old_remote(d, m_patch):
    """Legacy layout in remote mode: base name is kept under client/ and server/."""
    client, server = get_output_paths(d.dir, None, None, d.version, True)
    assert client == os.path.join(d.dir, 'client', d.base, d.version)
    assert server == os.path.join(d.dir, 'server', d.base, d.version)
Ejemplo n.º 15
0
    def _run(self,
             basename,
             output_dir,
             project=None,
             name=None,
             model_version=None,
             use_version=False,
             zip_results=True,
             remote=False,
             **kwargs):
        """Export a PyTorch model to ONNX plus a client-side metadata bundle.

        :param basename: Path to the saved model to load.
        :param output_dir: Root directory for exported artifacts.
        :param project: Optional project name used in the output path.
        :param name: Optional model name used in the output path.
        :param model_version: Optional explicit version; discovered if None.
        :param use_version: Whether to append the version to the output path.
        :param zip_results: Whether to zip the client bundle.
        :param remote: When True, split client/server layout and use the
            remote model file name expected by the serving stack.
        :returns: `(client_output, server_output)` directory paths.
        """
        client_output, server_output = get_output_paths(
            output_dir,
            project,
            name,
            model_version,
            remote,
            use_version=use_version)
        logger.info("Saving vectorizers and vocabs to %s", client_output)
        logger.info("Saving serialized model to %s", server_output)

        model, vectorizers, vocabs, model_name = self.load_model(basename)
        # Triton server wants to see a specific name

        model = self.apply_model_patches(model)

        data = self.create_example_input(vocabs, vectorizers)
        example_output = self.create_example_output(model)

        inputs = self.create_model_inputs(model)
        outputs = self.create_model_outputs(model)

        dynamics = self.create_dynamic_axes(model, vectorizers, inputs,
                                            outputs)

        meta = create_metadata(inputs, outputs, self.sig_name, model_name,
                               model.lengths_key)

        # Script the model unless this exporter traces instead.
        if not self.tracing:
            model = torch.jit.script(model)

        logger.info("Exporting Model.")
        logger.info("Model inputs: %s", inputs)
        logger.info("Model outputs: %s", outputs)

        onnx_model_name = REMOTE_MODEL_NAME if remote else model_name
        # Build the path with os.path.join for consistency with the rest of
        # the exporters (was an f-string with a hard-coded '/').
        onnx_path = os.path.join(server_output, f'{onnx_model_name}.onnx')

        torch.onnx.export(
            model,
            data,
            verbose=True,
            dynamic_axes=dynamics,
            f=onnx_path,
            input_names=inputs,
            output_names=outputs,
            opset_version=self.onnx_opset,
            example_outputs=example_output)

        logger.info("Saving metadata.")
        save_to_bundle(client_output,
                       basename,
                       assets=meta,
                       zip_results=zip_results)
        logger.info('Successfully exported model to %s', output_dir)
        return client_output, server_output
Ejemplo n.º 16
0
def test_get_output_paths_no_make_server_remote(d, m_patch):
    """Remote without make_server creates only the client directory."""
    expected = os.path.join(d.dir, "client", d.proj, d.name, d.version)
    get_output_paths(d.dir, d.proj, d.name, d.version, True, False)
    m_patch.assert_called_once_with(expected)
Ejemplo n.º 17
0
    def _run(self,
             basename,
             output_dir,
             project=None,
             name=None,
             model_version=None,
             use_version=False,
             zip_results=True,
             **kwargs):
        """Export a PyTorch model to ONNX plus a client-side metadata bundle.

        :param basename: Path to the saved model to load.
        :param output_dir: Root directory for exported artifacts.
        :param project: Optional project name used in the output path.
        :param name: Optional model name used in the output path.
        :param model_version: Optional explicit version; discovered if None.
        :param use_version: Whether to append the version to the output path.
        :param zip_results: Whether to zip the client bundle.
        :param kwargs: `remote` (default False) selects split client/server layout.
        :returns: `(client_output, server_output)` directory paths.
        """
        logger.warning(
            "Pytorch exporting is experimental and is not guaranteed to work for plugin models."
        )
        client_output, server_output = get_output_paths(
            output_dir,
            project,
            name,
            model_version,
            kwargs.get('remote', False),
            use_version=use_version)
        logger.info("Saving vectorizers and vocabs to %s", client_output)
        logger.info("Saving serialized model to %s", server_output)

        model, vectorizers, vocabs, model_name = self.load_model(basename)
        model = self.apply_model_patches(model)

        data = self.create_example_input(vocabs, vectorizers)
        example_output = self.create_example_output(model)

        inputs = self.create_model_inputs(model)
        outputs = self.create_model_outputs(model)

        dynamics = self.create_dynamic_axes(model, vectorizers, inputs,
                                            outputs)

        meta = create_metadata(inputs, outputs, self.sig_name, model_name,
                               model.lengths_key)

        # Script the model unless this exporter traces instead.
        if not self.tracing:
            model = torch.jit.script(model)

        logger.info("Exporting Model.")
        logger.info("Model inputs: %s", inputs)
        logger.info("Model outputs: %s", outputs)

        # Build the path with os.path.join for consistency with the rest of
        # the exporters (was an f-string with a hard-coded '/').
        onnx_path = os.path.join(server_output, f'{model_name}.onnx')
        torch.onnx.export(model,
                          data,
                          verbose=True,
                          dynamic_axes=dynamics,
                          f=onnx_path,
                          input_names=inputs,
                          output_names=outputs,
                          opset_version=self.onnx_opset,
                          example_outputs=example_output)

        logger.info("Saving metadata.")
        save_to_bundle(client_output,
                       basename,
                       assets=meta,
                       zip_results=zip_results)
        logger.info('Successfully exported model to %s', output_dir)
        return client_output, server_output
Ejemplo n.º 18
0
def test_get_output_paths_name(d, m_patch):
    """Non-remote with only a name: a single shared dir/name/version path."""
    expected = os.path.join(d.dir, d.name, d.version)
    client, server = get_output_paths(d.dir, None, d.name, d.version, False)
    assert client == expected
    assert client == server