def test_create_instance_empty_request(self):
    """create_instance(None) must log an error instead of raising."""
    # Stub the kafka destination class so no broker connection is attempted.
    class MockKafkaDestination:
        def __init__(self, destination):
            pass
    Destination.destination_types['kafka'] = MockKafkaDestination
    log = logging.get_logger('DestinationTypes', is_static=True)
    with self.assertLogs(logger=log, level='DEBUG') as captured:
        Destination.create_instance(None)
    # The most recent record should be the "Error creating destination" entry.
    self.assertIn("Error", captured.output.pop())
 * SPDX-License-Identifier: BSD-3-Clause
'''
from modules.Pipeline import Pipeline  # pylint: disable=import-error
from modules.PipelineManager import PipelineManager  # pylint: disable=import-error
from common.utils import logging  # pylint: disable=import-error
import string
import shlex
import subprocess
import time
import copy
from threading import Thread
import shutil
import uuid

logger = logging.get_logger('FFmpegPipeline', is_static=True)

# Fail fast at import time: this backend is unusable without the ffmpeg binary.
if shutil.which('ffmpeg') is None:
    raise Exception("ffmpeg not installed")

class FFmpegPipeline(Pipeline):
    def __init__(self, id, config, models, request):
        """Create an (unstarted) ffmpeg-backed pipeline instance.

        :param id: numeric instance id assigned by PipelineManager
        :param config: pipeline config dict; must contain a 'template' key
        :param models: loaded model configurations (ModelManager.models)
        :param request: original creation request for this instance
        """
        self.config = config
        self.models = models
        self.template = config['template']
        self.id = id
        self._process = None            # subprocess handle, presumably set on start — TODO confirm
        self.start_time = None
        self.stop_time = None
        self._ffmpeg_launch_string = None
        # NOTE(review): this chunk appears truncated — the rest of the class
        # body is not visible here.
class PipelineManager:
    """Loads pipeline definitions from disk and manages pipeline instances.

    All state is class-level; the class acts as a static singleton.
    """

    # <= 0 means "no limit" on concurrently running pipelines.
    MAX_RUNNING_PIPELINES = -1
    running_pipelines = 0
    logger = logging.get_logger('PipelineManager', is_static=True)
    pipeline_types = {}        # pipeline type name -> pipeline class
    pipeline_instances = {}    # instance id -> pipeline object
    pipeline_state = {}
    pipeline_id = 0            # monotonically increasing instance id
    pipelines = None           # pipeline name -> version -> config dict
    pipeline_queue = deque()   # instance ids waiting to be started

    @staticmethod
    def load_config(pipeline_dir, max_running_pipelines):
        """Walk pipeline_dir (pipeline/version/*.json layout) and load configs.

        Configs missing 'type' or 'description' are skipped; configs whose
        'type' has no registered pipeline class are dropped with an error.
        """
        PipelineManager.pipeline_types = import_pipeline_types(PipelineManager.logger)
        PipelineManager.logger.info("Loading Pipelines from Config Path {path}".format(path=pipeline_dir))
        PipelineManager.MAX_RUNNING_PIPELINES = max_running_pipelines
        pipelines = {}
        for root, subdirs, files in os.walk(pipeline_dir):
            if os.path.abspath(root) == os.path.abspath(pipeline_dir):
                # Top level: each subdirectory is a pipeline name.
                for subdir in subdirs:
                    pipelines[subdir] = {}
            else:
                if len(files) == 0:
                    # Pipeline directory: each subdirectory is a version.
                    pipeline = os.path.basename(root)
                    pipelines[pipeline] = {}
                    for subdir in subdirs:
                        pipelines[pipeline][subdir] = {}
                else:
                    # Version directory: load json config file(s).
                    pipeline = os.path.basename(os.path.dirname(root))
                    version = os.path.basename(root)
                    for file in files:
                        path = os.path.join(root, file)
                        if path.endswith(".json"):
                            with open(path, 'r') as jsonfile:
                                config = json.load(jsonfile)
                                if ('type' not in config) or ('description' not in config):
                                    continue
                                if config['type'] in PipelineManager.pipeline_types:
                                    pipelines[pipeline][version] = config
                                else:
                                    del pipelines[pipeline][version]
                                    PipelineManager.logger.error(
                                        "Pipeline %s with type %s not supported" % (pipeline, config['type']))
        # Remove pipelines with no valid versions
        pipelines = dict([(model, versions) for model, versions in pipelines.items() if len(versions) > 0])
        PipelineManager.pipelines = pipelines
        PipelineManager.logger.info("Completed Loading Pipelines")

    @staticmethod
    def get_loaded_pipelines():
        """Return parameter summaries for every loaded pipeline/version."""
        results = []
        if PipelineManager.pipelines is not None:
            for pipeline in PipelineManager.pipelines:
                for version in PipelineManager.pipelines[pipeline]:
                    result = PipelineManager.get_pipeline_parameters(pipeline, version)
                    if result:
                        results.append(result)
        return results

    @staticmethod
    def get_pipeline_parameters(name, version):
        """Return the summary dict for name/version, or None if not loaded."""
        if not PipelineManager.is_pipeline_exists(name, version):
            return None
        # Fix: index with str(version) — is_pipeline_exists accepts an int
        # version via str(version), so a raw-index here raised KeyError.
        config = PipelineManager.pipelines[name][str(version)]
        params_obj = {
            "name": name,
            "version": version
        }
        if "type" in config:
            params_obj["type"] = config["type"]
        if "description" in config:
            params_obj["description"] = config["description"]
        if "parameters" in config:
            params_obj["parameters"] = config["parameters"]
        return params_obj

    @staticmethod
    def is_input_valid(name, version, request):
        """Validate request["parameters"] against the pipeline's JSON schema."""
        config_validation = PipelineManager.pipelines[name][str(version)].get("parameters", {})
        try:
            input_validator = jsonschema.Draft4Validator(
                schema=config_validation,
                format_checker=jsonschema.draft4_format_checker)
            input_validator.validate(request.get("parameters", {}))
            PipelineManager.logger.debug("Validation successful")
            return True
        except Exception:
            # Fix: was a bare "except:", which also swallowed SystemExit and
            # KeyboardInterrupt. Any validation failure still returns False.
            PipelineManager.logger.debug("Validation error in request payload")
            return False

    @staticmethod
    def create_instance(name, version, request):
        """Create, queue, and (if capacity allows) start a pipeline instance.

        :return: (instance_id, None) on success, (None, error_message) otherwise
        """
        PipelineManager.logger.info("Creating Instance of Pipeline {name}/{v}".format(name=name, v=version))
        if not PipelineManager.is_pipeline_exists(name, version):
            return None, "Invalid Pipeline or Version"
        if not PipelineManager.is_input_valid(name, version, request):
            return None, "Request parameters do not match JSON schema"
        pipeline_type = PipelineManager.pipelines[name][str(version)]['type']
        PipelineManager.pipeline_id += 1
        PipelineManager.pipeline_instances[PipelineManager.pipeline_id] = \
            PipelineManager.pipeline_types[pipeline_type](PipelineManager.pipeline_id,
                                                          PipelineManager.pipelines[name][str(version)],
                                                          ModelManager.models,
                                                          request)
        PipelineManager.pipeline_queue.append(PipelineManager.pipeline_id)
        PipelineManager.start()
        return PipelineManager.pipeline_id, None

    @staticmethod
    def start():
        """Start the next queued pipeline if under the concurrency limit."""
        if (PipelineManager.MAX_RUNNING_PIPELINES <= 0 or
                PipelineManager.running_pipelines < PipelineManager.MAX_RUNNING_PIPELINES) \
                and len(PipelineManager.pipeline_queue) != 0:
            pipeline_to_start = PipelineManager.pipeline_instances[PipelineManager.pipeline_queue.popleft()]
            if (pipeline_to_start is not None):
                PipelineManager.running_pipelines += 1
                pipeline_to_start.start()

    @staticmethod
    def pipeline_finished():
        """Callback when a running pipeline completes; starts the next queued one."""
        PipelineManager.running_pipelines -= 1
        PipelineManager.start()

    @staticmethod
    def remove_from_queue(id):
        """Remove a queued (not yet started) instance id.

        Raises ValueError if the id is not queued (deque.remove semantics).
        """
        PipelineManager.pipeline_queue.remove(id)

    @staticmethod
    def get_instance_parameters(name, version, instance_id):
        """Return instance parameters, or None if pipeline/instance unknown."""
        if not PipelineManager.is_pipeline_exists(name, version, instance_id):
            return None
        return PipelineManager.pipeline_instances[instance_id].params()

    @staticmethod
    def get_instance_status(name, version, instance_id):
        """Return instance status, or None if pipeline/instance unknown."""
        if not PipelineManager.is_pipeline_exists(name, version, instance_id):
            return None
        return PipelineManager.pipeline_instances[instance_id].status()

    @staticmethod
    def stop_instance(name, version, instance_id):
        """Stop an instance; returns the stop result, or None if unknown."""
        if not PipelineManager.is_pipeline_exists(name, version, instance_id):
            return None
        return PipelineManager.pipeline_instances[instance_id].stop()

    @staticmethod
    def is_pipeline_exists(name, version, instance_id=None):
        """True if name/version is loaded (and instance_id, when given, exists)."""
        if name not in PipelineManager.pipelines or \
                str(version) not in PipelineManager.pipelines[name]:
            return False
        if instance_id and instance_id not in PipelineManager.pipeline_instances:
            return False
        return True
import os
import json
import common.settings  # pylint: disable=import-error
from common.utils import logging  # pylint: disable=import-error

logger = logging.get_logger('ModelManager', is_static=True)

class ModelManager:
    # Loaded model configs; presumably model name -> version -> config,
    # None until load_config runs — TODO confirm against the full file.
    models = None

    @staticmethod
    def load_config(model_dir):
        """Scan model_dir (model/<version>/model.json layout) for model configs."""
        logger.info("Loading Models from Config Path {path}".format(
            path=os.path.abspath(model_dir)))
        # Warn (but do not fail) when the models directory is indirected.
        if os.path.islink(model_dir):
            logger.warning("Models directory is symbolic link")
        if os.path.ismount(model_dir):
            logger.warning("Models directory is mount point")
        models = {}
        for path in os.listdir(model_dir):
            try:
                full_path = os.path.join(model_dir, path)
                if os.path.isdir(full_path):
                    model = path
                    for version_dir in os.listdir(full_path):
                        version_path = os.path.join(full_path, version_dir)
                        if os.path.isdir(version_path):
                            # Version directory names must parse as integers.
                            version = int(version_dir)
                            config_path = os.path.join(version_path, "model.json")
                            # NOTE(review): this chunk appears truncated here —
                            # the matching except clause and the remainder of
                            # load_config are not visible.
'''
* Copyright (C) 2019 Intel Corporation.
*
* SPDX-License-Identifier: BSD-3-Clause
'''
import connexion
import six
from modules.PipelineManager import PipelineManager
from http import HTTPStatus
from common.utils import logging
logger = logging.get_logger('Default Controller', is_static=True)
from modules.ModelManager import ModelManager

# Shared response body for invalid pipeline/version/instance lookups.
bad_request_response = 'Invalid pipeline, version or instance'

def models_get():  # noqa: E501
    """models_get

    Return supported models # noqa: E501

    :rtype: List[ModelVersion]
    """
    try:
        logger.debug("GET on /models")
        return ModelManager.get_loaded_models()
        # NOTE(review): this chunk appears truncated — the except clause for
        # this try block is not visible.
import connexion
import json
import asyncio
from openapi_server import encoder
# NOTE(review): `sys` and `os` are used below but their imports are not
# visible in this chunk — presumably imported in the truncated header.
sys.path.append(os.path.dirname(__file__) + "/../../")
from common import settings
from modules.PipelineManager import PipelineManager
from modules.ModelManager import ModelManager
from threading import Thread
from common.utils import logging
from optparse import OptionParser

logger = logging.get_logger('main', is_static=True)

def get_options():
    """Parse command-line options; defaults come from environment variables."""
    parser = OptionParser()
    # PORT env var (default 8080) sets the HTTP listen port.
    parser.add_option("-p", "--port", action="store", type="int", dest="port",
                      default=int(os.getenv('PORT', '8080')))
    # FRAMEWORK env var selects the media backend (gstreamer or ffmpeg).
    parser.add_option("--framework", action="store", dest="framework",
                      choices=['gstreamer', 'ffmpeg'],
                      default=os.getenv('FRAMEWORK', 'gstreamer'))
    # NOTE(review): this chunk appears truncated — the remainder of
    # get_options (parsing and return) is not visible.
import string
import json
import time
import copy
import modules.Destination as Destination  # pylint: disable=import-error
from modules.Pipeline import Pipeline  # pylint: disable=import-error
import modules.GstGVAJSONMeta as GstGVAJSONMeta  # pylint: disable=import-error
from common.utils import logging  # pylint: disable=import-error
import gi  # pylint: disable=import-error

# Pin the GStreamer API version before importing from gi.repository.
gi.require_version('Gst', '1.0')
from gi.repository import Gst, GObject  # pylint: disable=import-error

logger = logging.get_logger('GSTPipeline', is_static=True)

class GStreamerPipeline(Pipeline):
    # One-time GStreamer/GObject initialization, executed when the class
    # definition is evaluated (i.e., on first import of this module).
    Gst.init(None)
    GObject.threads_init()

    def __init__(self, id, config, models):
        """Create an (unstarted) GStreamer-backed pipeline instance.

        :param id: numeric instance id assigned by PipelineManager
        :param config: pipeline config dict; must contain a 'template' key
        :param models: loaded model configurations (ModelManager.models)
        """
        self.config = config
        self.id = id
        self.pipeline = None        # Gst pipeline object, presumably built on start — TODO confirm
        self.template = config['template']
        self.models = models
        self.request = None
        self.state = None
        self.frame_count = 0
        # NOTE(review): this chunk appears truncated — the rest of the class
        # body is not visible here.
* Copyright (C) 2019 Intel Corporation. * * SPDX-License-Identifier: BSD-3-Clause ''' import os import json import common.settings # pylint: disable=import-error from common.utils import logging # pylint: disable=import-error import time from modules.ModelManager import ModelManager # pylint: disable=import-error from collections import deque import jsonschema as jsonschema import modules.schema as schema logger = logging.get_logger('PipelineManager', is_static=True) def import_pipeline_types(): pipeline_types = {} try: from modules.GStreamerPipeline import GStreamerPipeline # pylint: disable=import-error pipeline_types['GStreamer'] = GStreamerPipeline except Exception as error: logger.error("Error loading GStreamer: %s\n" % (error,)) try: from modules.FFmpegPipeline import FFmpegPipeline # pylint: disable=import-error pipeline_types['FFmpeg'] = FFmpegPipeline except Exception as error: logger.error("Error loading FFmpeg: %s\n" % (error,)) return pipeline_types
class PipelineManager:
    """Loads pipeline definitions from disk and manages pipeline instances.

    All state is class-level; the class acts as a static singleton.
    """

    pipelines = None            # pipeline name -> version -> config dict
    logger = logging.get_logger('PipelineManager', is_static=True)
    pipeline_types = import_pipeline_types(logger)  # type name -> pipeline class
    pipeline_instances = {}     # instance id -> pipeline object
    pipeline_state = {}
    pipeline_id = 0             # monotonically increasing instance id

    @staticmethod
    def load_config(pipeline_dir):
        """Walk pipeline_dir (pipeline/version/*.json layout) and load configs."""
        PipelineManager.logger.info(
            "Loading Pipelines from Config Path {path}".format(
                path=pipeline_dir))
        pipelines = {}
        for root, subdirs, files in os.walk(pipeline_dir):
            if os.path.abspath(root) == os.path.abspath(pipeline_dir):
                # Top level: each subdirectory is a pipeline name.
                for subdir in subdirs:
                    pipelines[subdir] = {}
            else:
                if len(files) == 0:
                    # Pipeline directory: each subdirectory is a version.
                    pipeline = os.path.basename(root)
                    pipelines[pipeline] = {}
                    for subdir in subdirs:
                        pipelines[pipeline][subdir] = {}
                else:
                    # Version directory: load json config file(s).
                    pipeline = os.path.basename(os.path.dirname(root))
                    version = os.path.basename(root)
                    for file in files:
                        path = os.path.join(root, file)
                        if path.endswith(".json"):
                            with open(path, 'r') as jsonfile:
                                config = json.load(jsonfile)
                                # Configs must declare both 'type' and 'description'.
                                if ('type' not in config) or ('description' not in config):
                                    continue
                                if config[
                                        'type'] in PipelineManager.pipeline_types:
                                    pipelines[pipeline][version] = config
                                else:
                                    # Drop the version placeholder added above.
                                    del pipelines[pipeline][version]
                                    PipelineManager.logger.error(
                                        "Pipeline %s with type %s not supported" % (pipeline, config['type']))
        # Remove pipelines with no valid versions
        pipelines = dict([(model, versions) for model, versions in pipelines.items() if len(versions) > 0])
        PipelineManager.pipelines = pipelines
        PipelineManager.logger.info("Completed Loading Pipelines")

    @staticmethod
    def get_loaded_pipelines():
        """Return parameter summaries for every loaded pipeline/version."""
        results = []
        if PipelineManager.pipelines is not None:
            for pipeline in PipelineManager.pipelines:
                for version in PipelineManager.pipelines[pipeline]:
                    result = PipelineManager.get_pipeline_parameters(
                        pipeline, version)
                    if result:
                        results.append(result)
        return results

    @staticmethod
    def get_pipeline_parameters(name, version):
        """Return the summary dict for name/version, or None if not loaded.

        NOTE(review): is_pipeline_exists compares str(version), but the
        lookups below index with the raw version — an int version that
        passes the check would raise KeyError here; verify callers always
        pass strings.
        """
        if not PipelineManager.is_pipeline_exists(name, version):
            return None
        params_obj = {"name": name, "version": version}
        if "type" in PipelineManager.pipelines[name][version]:
            params_obj["type"] = PipelineManager.pipelines[name][version][
                "type"]
        if "description" in PipelineManager.pipelines[name][version]:
            params_obj["description"] = PipelineManager.pipelines[name][
                version]["description"]
        if "parameters" in PipelineManager.pipelines[name][version]:
            params_obj["parameters"] = PipelineManager.pipelines[name][
                version]["parameters"]
        return params_obj

    @staticmethod
    def create_instance(name, version):
        """Create a pipeline instance; returns it, or None if name/version unknown."""
        PipelineManager.logger.info(
            "Creating Instance of Pipeline {name}/{v}".format(name=name,
                                                              v=version))
        if not PipelineManager.is_pipeline_exists(name, version):
            return None
        pipeline_type = PipelineManager.pipelines[name][str(version)]['type']
        PipelineManager.pipeline_id += 1
        PipelineManager.pipeline_instances[
            PipelineManager.
            pipeline_id] = PipelineManager.pipeline_types[pipeline_type](
                PipelineManager.pipeline_id,
                PipelineManager.pipelines[name][str(version)],
                ModelManager.models)
        return PipelineManager.pipeline_instances[PipelineManager.pipeline_id]

    @staticmethod
    def get_instance_parameters(name, version, instance_id):
        """Return instance parameters, or None if pipeline/instance unknown."""
        if not PipelineManager.is_pipeline_exists(name, version, instance_id):
            return None
        return PipelineManager.pipeline_instances[instance_id].params()

    @staticmethod
    def get_instance_status(name, version, instance_id):
        """Return instance status, or None if pipeline/instance unknown."""
        if not PipelineManager.is_pipeline_exists(name, version, instance_id):
            return None
        return PipelineManager.pipeline_instances[instance_id].status()

    @staticmethod
    def stop_instance(name, version, instance_id):
        """Stop an instance; returns the stop result, or None if unknown."""
        if not PipelineManager.is_pipeline_exists(name, version, instance_id):
            return None
        return PipelineManager.pipeline_instances[instance_id].stop()

    @staticmethod
    def is_pipeline_exists(name, version, instance_id=None):
        """True if name/version is loaded (and instance_id, when given, exists)."""
        if name not in PipelineManager.pipelines or \
                str(version) not in PipelineManager.pipelines[name]:
            return False
        if instance_id and instance_id not in PipelineManager.pipeline_instances:
            return False
        return True
def import_destination_types(logger):
    """Return a mapping of destination type name -> destination class.

    Each destination backend is imported independently; a backend whose
    import fails is logged and simply omitted from the mapping.
    """
    types = {}
    try:
        from modules.KafkaDestination import KafkaDestination  # pylint: disable=import-error
        types["kafka"] = KafkaDestination
    except Exception as error:
        # Fix: message previously lacked the ": " separator before the error
        # text ("...Kafka Destination%s"), unlike the FileDestination message.
        logger.error("Error loading Kafka Destination: %s\n" % (error, ))
    try:
        from modules.FileDestination import FileDestination  # pylint: disable=import-error
        types["file"] = FileDestination
    except Exception as error:
        logger.error("Error loading FileDestination: %s\n" % (error, ))
    return types

logger = logging.get_logger('DestinationTypes', is_static=True)
destination_types = import_destination_types(logger)

def create_instance(request):
    """Instantiate the destination named by request["destination"]["type"].

    :param request: dict with a "destination" sub-dict containing "type"
    :return: the destination object, or None (after logging) when the
             request is malformed or the type is unknown/unavailable
    """
    try:
        return destination_types[request["destination"]["type"]](
            request["destination"])
    except Exception as error:
        logger.error("Error creating destination: %s %s\n" % (request, error))
        return None
class ModelManager:
    """Loads model configuration files and exposes their parameters."""

    # model name -> version (int) -> config dict; None until load_config runs.
    models = None
    logger = logging.get_logger('ModelManager', is_static=True)

    @staticmethod
    def load_config(model_dir):
        """Scan model_dir (model/<int version>/model.json layout) for configs.

        Relative resource paths inside each config are made absolute against
        the version directory. A model whose loading raises is logged and
        skipped entirely.
        """
        ModelManager.logger.info("Loading Models from Config Path {path}".format(path=os.path.abspath(model_dir)))
        models = {}
        for path in os.listdir(model_dir):
            full_path = os.path.join(model_dir, path)
            if not os.path.isdir(full_path):
                continue
            model = path
            try:
                versions = {}
                for version_dir in os.listdir(full_path):
                    version_path = os.path.join(full_path, version_dir)
                    if not os.path.isdir(version_path):
                        continue
                    # Version directory names must parse as integers.
                    version = int(version_dir)
                    config_path = os.path.join(version_path, "model.json")
                    with open(config_path, 'r') as jsonfile:
                        config = json.load(jsonfile)
                    # Resolve resource references relative to the version dir.
                    for key in ('network', 'weights', 'proc', 'gallery',
                                'labels', 'features'):
                        if key in config:
                            config[key] = os.path.abspath(os.path.join(version_path, config[key]))
                    if 'outputs' in config:
                        for out_key in config['outputs']:
                            if 'labels' in config['outputs'][out_key]:
                                config['outputs'][out_key]['labels'] = os.path.abspath(
                                    os.path.join(version_path, config['outputs'][out_key]['labels']))
                    # Fix: register each version as it is parsed. The original
                    # assigned models[model][version] = config only after the
                    # loop, so just the last version of every model survived,
                    # and loop-leaked version/config could attach stale data.
                    versions[version] = config
            except Exception as error:
                ModelManager.logger.error("Error in Model Loading: {err}".format(err=error))
                continue
            if versions:
                models[model] = versions
        ModelManager.models = models
        ModelManager.logger.info("Completed Loading Models")

    @staticmethod
    def get_model_parameters(name, version):
        """Return {"name", "version"[, "type", "description"]} or None if unknown."""
        if name not in ModelManager.models or version not in ModelManager.models[name]:
            return None
        params_obj = {
            "name": name,
            "version": version
        }
        if "type" in ModelManager.models[name][version]:
            params_obj["type"] = ModelManager.models[name][version]["type"]
        if "description" in ModelManager.models[name][version]:
            params_obj["description"] = ModelManager.models[name][version]["description"]
        return params_obj

    @staticmethod
    def get_loaded_models():
        """Return parameter summaries for every loaded model/version."""
        results = []
        if ModelManager.models is not None:
            for model in ModelManager.models:
                for version in ModelManager.models[model].keys():
                    result = ModelManager.get_model_parameters(model, version)
                    if result:
                        results.append(result)
        return results
class PipelineManager:
    """Loads pipeline definitions from disk and manages pipeline instances.

    All state is class-level; the class acts as a static singleton.
    """

    pipelines = None            # pipeline name -> version -> config dict
    logger = logging.get_logger('PipelineManager', is_static=True)
    pipeline_types = import_pipeline_types(logger)  # type name -> pipeline class
    pipeline_instances = {}     # instance id -> pipeline object
    pipeline_state = {}
    pipeline_id = 0             # monotonically increasing instance id

    @staticmethod
    def load_config(pipeline_dir):
        """Walk pipeline_dir (pipeline/version/*.json layout) and load configs."""
        PipelineManager.logger.info(
            "Loading Pipelines from Config Path {path}".format(
                path=pipeline_dir))
        pipelines = {}
        for root, subdirs, files in os.walk(pipeline_dir):
            if os.path.abspath(root) == os.path.abspath(pipeline_dir):
                # Top level: each subdirectory is a pipeline name.
                for subdir in subdirs:
                    pipelines[subdir] = {}
            else:
                if len(files) == 0:
                    # Fix: derive names with os.path instead of splitting on
                    # '/', which broke on Windows separators and on paths
                    # with trailing slashes; consistent with the other loaders.
                    pipeline = os.path.basename(root)
                    pipelines[pipeline] = {}
                    for subdir in subdirs:
                        pipelines[pipeline][subdir] = {}
                else:
                    pipeline = os.path.basename(os.path.dirname(root))
                    version = os.path.basename(root)
                    for file in files:
                        # Fix: os.path.join instead of string concatenation.
                        path = os.path.join(root, file)
                        if path.endswith(".json"):
                            with open(path, 'r') as jsonfile:
                                config = json.load(jsonfile)
                                # Configs must declare both 'type' and 'description'.
                                if ('type' not in config) or ('description' not in config):
                                    continue
                                if config[
                                        'type'] in PipelineManager.pipeline_types:
                                    pipelines[pipeline][version] = config
                                else:
                                    # Drop the version placeholder added above.
                                    del pipelines[pipeline][version]
                                    PipelineManager.logger.error(
                                        "Pipeline %s with type %s not supported" % (pipeline, config['type']))
        # Remove pipelines with no valid versions
        pipelines = dict([(model, versions) for model, versions in pipelines.items() if len(versions) > 0])
        PipelineManager.pipelines = pipelines
        PipelineManager.logger.info("Completed Loading Pipelines")

    @staticmethod
    def get_loaded_pipelines():
        """Return parameter summaries for every loaded pipeline/version."""
        result = []
        if PipelineManager.pipelines is not None:
            for pipeline in PipelineManager.pipelines:
                for version in PipelineManager.pipelines[pipeline]:
                    result.append(
                        PipelineManager.get_pipeline_parameters(
                            pipeline, version))
        return result

    @staticmethod
    def get_pipeline_parameters(name, version):
        """Return the summary dict for a loaded name/version.

        Raises KeyError when name/version is not loaded — callers are
        expected to pass keys obtained from PipelineManager.pipelines.
        """
        params_obj = {
            "name": name,
            "version": version,
            "type": PipelineManager.pipelines[name][version]["type"],
            "description":
            PipelineManager.pipelines[name][version]["description"],
        }
        if "parameters" in PipelineManager.pipelines[name][version]:
            params_obj["parameters"] = PipelineManager.pipelines[name][
                version]["parameters"]
        return params_obj

    @staticmethod
    def create_instance(name, version):
        """Create a pipeline instance; returns it, or None (after logging) on error."""
        PipelineManager.logger.info(
            "Creating Instance of Pipeline {name}/{v}".format(name=name,
                                                              v=version))
        try:
            pipeline_type = PipelineManager.pipelines[name][str(
                version)]['type']
            PipelineManager.pipeline_id += 1
            PipelineManager.pipeline_instances[PipelineManager.pipeline_id] = \
                PipelineManager.pipeline_types[pipeline_type](PipelineManager.pipeline_id,
                                                              PipelineManager.pipelines[name][str(version)],
                                                              ModelManager.models)
            return PipelineManager.pipeline_instances[
                PipelineManager.pipeline_id]
        except Exception as e:
            PipelineManager.logger.error(e)
            return None

    @staticmethod
    def get_instance_parameters(instance_id):
        """Return parameters of an existing instance (KeyError if unknown)."""
        return PipelineManager.pipeline_instances[instance_id].params()

    @staticmethod
    def get_instance_status(instance_id):
        """Return status of an existing instance (KeyError if unknown)."""
        return PipelineManager.pipeline_instances[instance_id].status()

    @staticmethod
    def stop_instance(instance_id):
        """Stop an existing instance (KeyError if unknown)."""
        return PipelineManager.pipeline_instances[instance_id].stop()