Example #1
    def __params2args(self, argu_dict, params):
        arguments = ""
        for p in params:
            if argu_dict[p] is not None:
                if p not in self.none_params:
                    arguments += docker_config.arg(p, argu_dict[p])
        return arguments
Example #2
    def __params2args(self, argu_dict, params):
        """Convert function parameters into string of arguments for input JSON

        Args:
            argu_dict (dict): the dictionary of key and value for function parameters
            params (set): the paramater names of certain function 
        Returns:
            str: converted string for function parameters
        """
        arguments = ""
        for p in params:
            if argu_dict[p] is not None:
                if p not in self.none_params:  # no need to convert
                    arguments += docker_config.arg(p, argu_dict[p])
        return arguments
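
The helper above delegates formatting to docker_config.arg, which turns each parameter name and value into one command-line token pair. A minimal, self-contained sketch of the same idea follows, with a hypothetical stand-in for docker_config.arg and a plain function in place of the private method; the real OLive docker_config may format arguments differently.

# Hypothetical stand-in for docker_config.arg; the real formatter may differ.
def arg(name, value):
    return "--{} {} ".format(name, value)

def params2args(argu_dict, params, none_params=frozenset()):
    # Same logic as __params2args: skip unset values and names listed in none_params.
    arguments = ""
    for p in params:
        if argu_dict[p] is not None and p not in none_params:
            arguments += arg(p, argu_dict[p])
    return arguments

# params2args({'model': 'model.onnx', 'mode': None}, ['model', 'mode'])
# returns "--model model.onnx "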
Example #3
    def convert_model(self,
                      model_type=None,
                      output_onnx_path=None,
                      model="",
                      model_params=None,
                      model_input_shapes=None,
                      target_opset=None,
                      caffe_model_prototxt=None,
                      initial_types=None,
                      model_inputs_names=None,
                      model_outputs_names=None,
                      input_json=None,
                      convert_json=False,
                      windows=False):

        # detect whether we are running on Windows
        if os.name == 'nt':
            windows = True

        def mount_parameters(output_onnx_path, model, caffe_model_prototxt,
                             input_json):
            # --output_onnx_path
            if output_onnx_path is None or output_onnx_path == '':
                output_onnx_path = self.convert_path

            output_onnx_path = self.__join_with_mount(output_onnx_path)

            # --model
            model = self.__join_with_mount(model)
            # --caffe_model_prototxt
            if caffe_model_prototxt is not None:
                caffe_model_prototxt = self.__join_with_mount(
                    caffe_model_prototxt)

            if input_json is not None:
                input_json = self.__join_with_mount(input_json)
            return output_onnx_path, model, caffe_model_prototxt, input_json

        if model_type is None and input_json is None:
            raise RuntimeError('The model type to be converted needs to be provided.')

        img_name = (docker_config.CONTAINER_NAME +
                    docker_config.FUNC_NAME['onnx_converter'] + ':latest')

        # --input_json
        if input_json is not None:
            local_input_json = input_json

        output_onnx_path, model, caffe_model_prototxt, input_json = mount_parameters(
            output_onnx_path, model, caffe_model_prototxt, input_json)

        # --initial_types
        if initial_types is not None:
            if convert_json:
                initial_types = '[(\'' + initial_types[
                    0] + '\',' + initial_types[1] + ')]'
            else:
                initial_types = '"[(\'' + initial_types[
                    0] + '\',' + initial_types[1] + ')]"'

        # create test directory for output
        if self.convert_directory is not None:
            test_path = posixpath.join(self.path, self.convert_directory)
            if not os.path.exists(test_path):
                os.makedirs(test_path)

        json_filename = input_json

        parameters = self.convert_model.__code__.co_varnames[
            1:self.convert_model.__code__.co_argcount]
        arguments = self.__params2args(locals(), parameters)

        # convert the input parameters into a JSON file
        if convert_json:
            self.__convert_input_json(arguments, json_filename)
            arguments = docker_config.arg('input_json', input_json)
        # load parameters from the JSON file
        elif input_json is not None:
            with open(posixpath.join(self.path, local_input_json), 'r') as f:
                json_data = json.load(f)

                if 'output_onnx_path' in json_data and (
                        json_data['output_onnx_path'] is not None
                        and json_data['output_onnx_path'] != ''):
                    output_onnx_path = json_data['output_onnx_path']
                    self.convert_path = output_onnx_path
                    params = mount_parameters(output_onnx_path, model,
                                              caffe_model_prototxt, input_json)
                    output_onnx_path = params[0]
                if 'model' in json_data:
                    model = json_data['model']
                if 'caffe_model_prototxt' in json_data:
                    caffe_model_prototxt = json_data['caffe_model_prototxt']
                # convert to mount path
                _, model, caffe_model_prototxt, _ = mount_parameters(
                    output_onnx_path, model, caffe_model_prototxt, input_json)
                # write back to JSON
                json_data['output_onnx_path'] = output_onnx_path
                if 'model' in json_data:
                    json_data['model'] = model
                if 'caffe_model_prototxt' in json_data:
                    json_data['caffe_model_prototxt'] = caffe_model_prototxt

            with open(posixpath.join(self.path, local_input_json), 'w') as f:
                json.dump(json_data, f)

        stream = self.client.containers.run(
            image=img_name,
            command=arguments,
            volumes={self.path: {
                'bind': self.mount_path,
                'mode': 'rw'
            }},
            detach=True)
        if self.print_logs: self.__print_docker_logs(stream, windows)

        return output_onnx_path
Example #4
    def perf_tuning(self,
                    model=None,
                    result=None,
                    config=None,
                    mode=None,
                    execution_provider=None,
                    repeated_times=None,
                    duration_times=None,
                    threadpool_size=None,
                    num_threads=None,
                    top_n=None,
                    parallel=None,
                    runtime=True,
                    input_json=None,
                    convert_json=False,
                    windows=False):

        # on Windows, the nvidia runtime is not available
        if os.name == 'nt':
            runtime = False
            windows = True

        def mount_parameters(model, result, input_json):
            # --model
            if model is None:
                model = self.convert_path
            model = self.__join_with_mount(model)

            result = self.__join_with_mount(self.result)  # always rebuilt from self.result; the result argument is not used here

            # --input_json
            if input_json is not None:
                input_json = self.__join_with_mount(input_json)

            return model, result, input_json

        json_filename = input_json
        # --input_json
        if input_json is not None:
            local_input_json = input_json

        if result is not None:
            self.result = result

        model, result, input_json = mount_parameters(model, result, input_json)

        img_name = (docker_config.CONTAINER_NAME +
                    docker_config.FUNC_NAME['perf_tuning'] + ':latest')

        parameters = self.perf_tuning.__code__.co_varnames[
            1:self.perf_tuning.__code__.co_argcount]
        arguments = self.__params2args(locals(), parameters)

        # convert the input parameters into a JSON file
        if convert_json:
            self.__convert_input_json(arguments, json_filename)
            arguments = docker_config.arg('input_json', input_json)
        # load parameters from the JSON file
        elif input_json is not None:
            with open(posixpath.join(self.path, local_input_json)) as f:
                json_data = json.load(f)
                if 'result' in json_data:
                    result = json_data['result']
                    if result[:len(self.mount_path)] == self.mount_path:
                        self.result = str(result[len(self.mount_path) + 1:])
                    else:
                        self.result = result

                    params = mount_parameters(model, None, input_json)
                    result = params[1]
                if 'runtime' in json_data:
                    runtime = json_data['runtime']

                if 'model' in json_data:
                    model = json_data['model']

                # convert to mount path
                model, _, _ = mount_parameters(model, result, input_json)
                # write back to JSON
                json_data['result'] = result

                if 'model' in json_data:
                    json_data['model'] = model
            with open(posixpath.join(self.path, local_input_json), 'w') as f:
                json.dump(json_data, f)

        runtime = 'nvidia' if runtime else ''

        stream = self.client.containers.run(
            image=img_name,
            command=arguments,
            volumes={self.path: {
                'bind': self.mount_path,
                'mode': 'rw'
            }},
            runtime=runtime,
            detach=True)
        if self.print_logs: self.__print_docker_logs(stream, windows)

        return posixpath.join(self.path, self.result)
Example #5
    def perf_tuning(self,
                    model=None,
                    result=None,
                    config=None,
                    mode=None,
                    execution_provider=None,
                    repeated_times=None,
                    duration_times=None,
                    inter_op_num_threads=None,
                    intra_op_num_threads=None,
                    top_n=None,
                    parallel=None,
                    runtime=True,
                    input_json=None,
                    convert_json=False,
                    windows=False):
        """Parameters usage could reference: 
        https://github.com/microsoft/OLive/blob/master/notebook/onnx-pipeline.ipynb"""

        # on Windows, the nvidia runtime is not available
        if os.name == 'nt':
            runtime = False
            windows = True

        # prepend the mount path to local paths and fill in missing parameters with defaults
        def mount_parameters(model, result, input_json):
            # --model
            if model is None:
                model = self.convert_path
            model = self.__join_with_mount(model)

            result = self.__join_with_mount(self.result)  # always rebuilt from self.result; the result argument is not used here

            # --input_json
            if input_json is not None:
                input_json = self.__join_with_mount(input_json)

            return model, result, input_json

        json_filename = input_json
        # --input_json
        if input_json is not None:
            local_input_json = input_json

        if result is not None:
            self.result = result

        model, result, input_json = mount_parameters(model, result, input_json)

        img_name = (docker_config.CONTAINER_NAME +
                    docker_config.FUNC_NAME['perf_tuning'] + ':latest')

        parameters = self.perf_tuning.__code__.co_varnames[
            1:self.perf_tuning.__code__.co_argcount]
        arguments = self.__params2args(locals(), parameters)

        # convert the input parameters into a JSON file
        if convert_json:
            self.__convert_input_json(arguments, json_filename)
            arguments = docker_config.arg('input_json', input_json)
        # load parameters from the JSON file, fill in missing ones with defaults, and prepend the mount path
        elif input_json is not None:
            with open(posixpath.join(self.path, local_input_json)) as f:
                json_data = json.load(f)
                if 'result' in json_data:
                    result = json_data['result']
                    if result[:len(self.mount_path)] == self.mount_path:
                        self.result = str(result[len(self.mount_path) + 1:])
                    else:
                        self.result = result

                    params = mount_parameters(model, None, input_json)
                    result = params[1]
                if 'runtime' in json_data:
                    runtime = json_data['runtime']

                if 'model' in json_data:
                    model = json_data['model']

                # convert to mount path
                model, _, _ = mount_parameters(model, result, input_json)
                # write back to JSON
                json_data['result'] = result

                if 'model' in json_data:
                    json_data['model'] = model
            # after modifying the parameters, overwrite the original JSON file
            with open(posixpath.join(self.path, local_input_json), 'w') as f:
                json.dump(json_data, f)

        # use the nvidia runtime if enabled
        runtime = 'nvidia' if runtime else ''

        # run the Docker container with the generated arguments
        stream = self.client.containers.run(
            image=img_name,
            command=arguments,
            volumes={self.path: {
                'bind': self.mount_path,
                'mode': 'rw'
            }},
            runtime=runtime,
            detach=True)
        if self.print_logs: self.__print_docker_logs(stream, windows)

        return posixpath.join(self.path, self.result)
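
As the docstring notes, parameter usage for these methods is demonstrated in the linked OLive notebook. The sketch below is a minimal, hypothetical call sequence that assumes a pipeline object exposing the convert_model and perf_tuning methods shown above; the model path and input shape values are illustrative only.

# Hypothetical usage; actual parameter values depend on the model being converted.
onnx_path = pipeline.convert_model(model_type='pytorch',
                                   model='model.pth',
                                   model_input_shapes='[(1, 3, 224, 224)]')
result_dir = pipeline.perf_tuning(model=onnx_path, result='perf_results')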
Example #6
def main():
    main_parser = argparse.ArgumentParser(prog="cosmicpi", description="CosmicPi acquisition process", add_help=False)
    main_parser.add_argument("--config", help="Path to configuration file", default="/etc/cosmicpi.yaml")
    args, remaining_argv = main_parser.parse_known_args()

    # Merge the default config with the configuration file
    config = load_config(args.config)

    # Parse the command line for overrides
    parser = argparse.ArgumentParser(parents=[main_parser])
    parser.set_defaults(**config)

    parser.add_argument("-i", "--host",       **arg("broker.host",          "Message broker host"))
    parser.add_argument("-p", "--port",       **arg("broker.port",          "Message broker port", type=int))
    parser.add_argument("-a", "--username",   **arg("broker.username",      "Message broker username"))
    parser.add_argument("-b", "--password",   **arg("broker.password",      "Message broker password"))
    parser.add_argument("-n", "--no-publish", **arg("broker.enabled",       "Disable event publication"))
    parser.add_argument("-u", "--usb",        **arg("usb.device",           "USB device name"))
    parser.add_argument("-d", "--debug",      **arg("debug",                "Enable debug mode"))
    parser.add_argument("-o", "--log-config", **arg("logging.config",       "Path to logging configuration"))
    parser.add_argument("-l", "--no-log",     **arg("logging.enabled",      "Disable file logging"))
    parser.add_argument("-v", "--no-vib",     **arg("monitoring.vibration", "Disable vibration monitoring"))
    parser.add_argument("-w", "--no-weather", **arg("monitoring.weather",   "Disable weather monitoring"))
    parser.add_argument("-c", "--no-cosmics", **arg("monitoring.cosmics",   "Disable cosmic ray monitoring"))
    parser.add_argument("-k", "--patk",       **arg("patok",                "Server push notification token"))

    options = parser.parse_args()

    log_config = options.logging["config"]
    print("INFO: using logging configuration from %s" % log_config)
    logging.config.fileConfig(log_config, disable_existing_loggers=False)
    console = logging.getLogger(__name__)

    if options.debug:
        print_config(options)

    try:
        publisher = EventPublisher(options)
    except Exception as e:
        console.error("Exception: Can't connect to broker: %s" % e)
        sys.exit(1)

    try:
        usb = UsbHandler(options.usb['device'], 9600, 60)
        usb.open()
    except Exception as e:
        console.error("Exception: Can't open USB device: %s" % e)
        sys.exit(1)

    detector = Detector(usb, publisher, options)

    try:
        detector.start()
        command_handler = CommandHandler(detector, usb, options)
        command_handler.start()

        while True:
            time.sleep(1)

    except Exception as e:
        console.info("Exception: main: %s" % e)
        traceback.print_exc()

    finally:
        detector.stop()
        console.info("Quitting ...")
        time.sleep(1)
        usb.close()
        publisher.close()
        sys.exit(0)
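
The two-stage parse above is a standard argparse pattern: a minimal parser extracts --config, the configuration file is loaded, and parser.set_defaults(**config) seeds the full parser so that command-line flags override the file's values. Below is a self-contained sketch of the same pattern with a hypothetical load_config; the real cosmicpi helpers, including the arg(...) keyword builder, may differ.

import argparse
import os
import yaml

def load_config(path):
    # Hypothetical stand-in: read a YAML file of defaults, tolerating a missing file.
    if not os.path.exists(path):
        return {}
    with open(path) as f:
        return yaml.safe_load(f) or {}

main_parser = argparse.ArgumentParser(add_help=False)
main_parser.add_argument("--config", default="/etc/cosmicpi.yaml")
args, remaining_argv = main_parser.parse_known_args()

# Seed the full parser with the file's values; flags given on the command line win.
parser = argparse.ArgumentParser(parents=[main_parser])
parser.set_defaults(**load_config(args.config))
parser.add_argument("-d", "--debug", action="store_true", help="Enable debug mode")
options = parser.parse_args()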
Example #7
import os
import sys
import tensorflow as tf
from tensorflow import keras
import argparse
import models
import dataloader
from config import Config, Argument as arg

IMG_WIDTH = 320
IMG_HEIGHT = 180
D_STATE = 1

config = Config(
    dataset=arg(str, 'dataset', 'TFRecords file'),
    validation=arg(str, 'validation', 'TFRecords file'),
    model_name=arg(str, 'bc', 'Output directory'),
    batch_size=arg(int, 64, "Batch size"),
    epochs=arg(int, 50, 'How many epochs to train'),
    lr=arg(float, 1e-4, 'Learning rate'),
    shuffle_buffer=arg(int, 8000, 'Shuffle buffer size'),
    workers=arg(int, 4, 'How many threads to use for preprocessing'),
    augment=Config(flip_prob=arg(
        float, 0.2, 'Probability of flipping image in preprocessing'),
                   translate=arg(int, 25,
                                 'translation magnitude in preprocessing'),
                   min_contrast=arg(float, 0.9, 'Min contrast augmentation'),
                   max_contrast=arg(float, 1.1, 'Max contrast augmentation'),
                   min_saturation=arg(float, 0.9, 'Min saturation'),
                   max_saturation=arg(float, 1.1, 'Max saturation'),
                   hue=arg(float, 0.1, 'Hue max delta')))
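
Config and Argument (aliased to arg) come from a project-local config module: each Argument bundles a type, a default value, and a help string, and Config groups Arguments, optionally nested as with augment above. A minimal, hypothetical reconstruction of how such a declaration could be exposed as argparse flags is sketched below; the real config module may behave differently.

import argparse

class Argument:
    # Hypothetical: mirrors arg(type, default, help) as used in the declaration above.
    def __init__(self, type_, default, help_text=""):
        self.type = type_
        self.default = default
        self.help = help_text

class Config:
    def __init__(self, **entries):
        self._entries = entries

    def add_to_parser(self, parser, prefix=""):
        # Flatten nested Configs into underscore-joined flags, e.g. --augment_flip_prob.
        for name, entry in self._entries.items():
            key = prefix + name
            if isinstance(entry, Config):
                entry.add_to_parser(parser, key + "_")
            else:
                parser.add_argument("--" + key, type=entry.type,
                                    default=entry.default, help=entry.help)

# config.add_to_parser(argparse.ArgumentParser()) would expose flags such as
# --batch_size, --lr, and --augment_flip_prob.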