def start_server(self):
    """Start a Server on a random port (8000-8999) and flip the UI to its running state."""
    ip, port = "0.0.0.0", random.randint(8000, 8999)
    if self.server is None:
        self.server = Server(ip, port, server_gui=self)
        self.server.start_server()
        self.start_button.setText("Stop")
        # BUG FIX: Qt signal connections are additive. Without disconnecting
        # the start handler first, every start/stop cycle stacks one more
        # slot on `clicked`, and a single click ends up invoking both
        # start_server and stop_server.
        self.start_button.clicked.disconnect(self.start_server)
        self.start_button.clicked.connect(self.stop_server)
        self.setWindowTitle(f"{ip}:{port}")
def __init__(
    self,
    db_manager_listening: str,
    db_manager_port: str,
    workload_sub_host: str,
    workload_pubsub_port: str,
) -> None:
    """Initialize a DatabaseManager."""
    # Endpoint of the workload publisher that database drivers subscribe to.
    self._workload_sub_host = workload_sub_host
    self._workload_pubsub_port = workload_pubsub_port
    # No drivers registered yet; populated via the "add database" call.
    self._databases: Dict[str, Database] = {}
    # Control server dispatching requests to this manager's handlers.
    self._server = Server(
        db_manager_listening,
        db_manager_port,
        self._get_server_calls(),
    )
class ServerGUI(QMainWindow):
    """Main window controlling a chat Server: a start/stop button plus a live client list."""

    # The running Server instance, or None while stopped.
    server = None

    def __init__(self):
        super().__init__()
        self.setWindowTitle("Stopped")
        self.start_button = QPushButton("Start")
        self.clients_list = QListWidget()
        self.start_button.clicked.connect(self.start_server)
        # Setting layout
        self.layout = QVBoxLayout()
        self.layout.addWidget(self.start_button)
        self.layout.addWidget(QLabel("Clients:"))
        self.layout.addWidget(self.clients_list)
        widget = QWidget()
        widget.setLayout(self.layout)
        self.setCentralWidget(widget)
        self.setIcon()

    def setIcon(self):
        """Set the window icon from icons/server.png."""
        appIcon = QIcon("icons/server.png")
        self.setWindowIcon(appIcon)

    def stop_server(self):
        """Shut the server down and return the UI to its 'Stopped' state."""
        self.server.stop_server()
        self.server = None
        # BUG FIX: Qt signal connections are additive — disconnect the stop
        # handler before reattaching the start handler, otherwise each
        # start/stop cycle leaves both slots attached to `clicked` and one
        # click fires both of them.
        self.start_button.clicked.disconnect(self.stop_server)
        self.start_button.clicked.connect(self.start_server)
        self.start_button.setText("Start")
        self.setWindowTitle("Stopped")

    def start_server(self):
        """Start a Server on a random port (8000-8999) and flip the UI to 'running'."""
        ip, port = "0.0.0.0", random.randint(8000, 8999)
        if self.server is None:
            self.server = Server(ip, port, server_gui=self)
            self.server.start_server()
            self.start_button.setText("Stop")
            # BUG FIX: remove the start slot before adding the stop slot
            # (same additive-connection issue as in stop_server).
            self.start_button.clicked.disconnect(self.start_server)
            self.start_button.clicked.connect(self.stop_server)
            self.setWindowTitle(f"{ip}:{port}")

    def update_client_list(self, clients):
        """Rebuild the client list widget from `clients` (objects exposing username and socket_id)."""
        self.clients_list.clear()
        for client in clients:
            self.clients_list.addItem(f"{client.username}:{client.socket_id}")
def __init__(
    self,
    generator_listening: str,
    generator_port: str,
    workload_listening: str,
    workload_pub_port: str,
) -> None:
    """Initialize a WorkloadGenerator.

    Args:
        generator_listening: Interface the control server binds to.
        generator_port: Port of the control server.
        workload_listening: Interface the workload PUB socket binds to.
        workload_pub_port: Port of the workload PUB socket.
    """
    self._workload_listening = workload_listening
    self._workload_pub_port = workload_pub_port
    # Dispatch table: request name -> handler, consumed by the control server.
    server_calls: Dict = {
        "start workload": self._call_start_workload,
        "get workload": self._call_get_workload,
        "stop workload": self._call_stop_workload,
    }
    self._server = Server(generator_listening, generator_port, server_calls)
    # Name of the active workload; None means "no workload running"
    # (hence the ignore on the `str` annotation).
    self._workload: str = None  # type: ignore
    # Number of queries emitted per tick; 0 while no workload is active.
    self._workload_frequency: int = 0
    self._init_server()
    self._init_scheduler()
"""Application bootstrap: prepare the database, optionally import data, then serve."""
from backend.config import Config
from backend.db_manager import DbManager
from backend.importer import Importer
from backend.server import Server

db_man = DbManager(Config.DB_NAME)

# Optionally recreate the schema from scratch.
if Config.START_CREATE_CLEAR_DB:
    db_man.create_clear()

# Optionally pull fresh data from the configured source before serving.
if Config.START_IMPORT_DATA:
    im = Importer(
        Config.IMPORT_URL_TEMPLATE,
        Config.IMPORT_ACCESS_KEY,
        Config.IMPORT_DATE_FROM,
        Config.IMPORT_DATE_TO,
        db_man,
    )
    im.import_data()

# Blocks here while the server runs.
if Config.START_RUN_SERVER:
    Server(Config.SERVER_IP, Config.SERVER_PORT, db_man).run()
class DatabaseManager(object):
    """A manager for database drivers.

    Owns one Database driver per registered database id and exposes them
    through a request/response Server whose dispatch table is built by
    _get_server_calls().  Usable as a context manager: close() runs on exit.
    """

    def __init__(
        self,
        db_manager_listening: str,
        db_manager_port: str,
        workload_sub_host: str,
        workload_pubsub_port: str,
    ) -> None:
        """Initialize a DatabaseManager.

        Args:
            db_manager_listening: Interface the control server binds to.
            db_manager_port: Port of the control server.
            workload_sub_host: Host of the workload publisher that newly
                added Database drivers subscribe to.
            workload_pubsub_port: Port of that workload publisher.
        """
        self._workload_sub_host = workload_sub_host
        self._workload_pubsub_port = workload_pubsub_port
        # Registered drivers, keyed by database id.
        self._databases: Dict[str, Database] = {}
        server_calls: Dict[str, Tuple[Callable[[Body], Response],
                                      Optional[Dict]]] = self._get_server_calls()
        self._server = Server(db_manager_listening, db_manager_port,
                              server_calls)

    def __enter__(self) -> "DatabaseManager":
        """Return self for a context manager."""
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_value: Optional[BaseException],
        traceback: Optional[TracebackType],
    ) -> Optional[bool]:
        """Call close with a context manager."""
        self.close()
        return None

    def _get_server_calls(self, ) -> Dict:
        """Map request names to their handler methods."""
        return {
            "add database": self._call_add_database,
            "delete database": self._call_delete_database,
            "start worker": self._call_start_worker,
            "close worker": self._call_close_worker,
            "get databases": self._call_get_databases,
            "get queue length": self._call_get_queue_length,
            "status": self._call_status,
            "get time intense metric": self._call_time_intense_metric,
            "get metric": self._call_metric,
        }

    def _call_add_database(self, body: Body) -> Response:
        """Add database and initialize driver for it."""
        # Reject duplicates: a driver with this id already exists -> 400.
        if body["id"] in self._databases:
            return get_response(400)
        db_instance = Database(
            body["id"],
            body["number_workers"],
            # Workload subscription endpoint handed to the new driver.
            "tcp://{:s}:{:s}".format(
                self._workload_sub_host,
                self._workload_pubsub_port,
            ),
        )
        self._databases[body["id"]] = db_instance
        return get_response(200)

    def _call_get_databases(self, body: Body) -> Response:
        """Get list of all databases."""
        databases = [{
            "id": id,
            "number_workers": database.number_workers
        } for id, database in self._databases.items()]
        response = get_response(200)
        response["body"]["databases"] = databases
        return response

    def _call_delete_database(self, body: Body) -> Response:
        """Close and remove the driver for body["id"]; 404 if unknown."""
        id: str = body["id"]
        # pop with default: missing ids fall through to the 404 branch.
        database: Optional[Database] = self._databases.pop(id, None)
        if database:
            database.close()
            del database
            return get_response(200)
        else:
            return get_response(404)

    def _call_status(self, body: Body) -> Response:
        """Report the worker-pool status of every registered database."""
        status = []
        for database_id, database in self._databases.items():
            status.append({
                "id": database_id,
                "worker_pool_status": database.get_worker_pool_status(),
            })
        response = get_response(200)
        response["body"]["status"] = status
        return response

    def _call_start_worker(self, body: Body) -> Response:
        """Start one worker on every database; 400 as soon as one refuses."""
        for database in self._databases.values():
            if not database.start_worker():
                return get_response(400)
        return get_response(200)

    def _call_close_worker(self, body: Body) -> Response:
        """Close one worker on every database; 400 as soon as one refuses."""
        for database in self._databases.values():
            if not database.close_worker():
                return get_response(400)
        return get_response(200)

    def _call_get_queue_length(self, body: Body) -> Response:
        """Report every database's current queue length."""
        response = get_response(200)
        response["body"]["databases"] = [{
            "id": id,
            "queue_length": database.get_queue_length()
        } for id, database in self._databases.items()]
        return response

    def _call_time_intense_metric(self, body: Body) -> Response:
        # do some work — simulated with a 200 ms sleep (blocks this handler)
        sleep(0.2)
        response = get_response(200)
        return response

    def _call_metric(self, body: Body) -> Response:
        # do some work — simulated with a 50 ms sleep (blocks this handler)
        sleep(0.05)
        response = get_response(200)
        return response

    def start(self) -> None:
        """Start the manager by starting the server."""
        self._server.start()

    def close(self) -> None:
        """Close the socket and context, exit all databases."""
        # Close drivers first, then the server that dispatches to them.
        for database in self._databases.values():
            database.close()
        self._server.close()
class WorkloadGenerator(object):
    """Object responsible for generating workload.

    A control Server accepts "start workload" / "get workload" /
    "stop workload" requests, while a background scheduler publishes the
    active workload once per second on a ZMQ PUB socket.
    """

    def __init__(
        self,
        generator_listening: str,
        generator_port: str,
        workload_listening: str,
        workload_pub_port: str,
    ) -> None:
        """Initialize a WorkloadGenerator.

        Args:
            generator_listening: Interface the control server binds to.
            generator_port: Port of the control server.
            workload_listening: Interface the workload PUB socket binds to.
            workload_pub_port: Port of the workload PUB socket.
        """
        self._workload_listening = workload_listening
        self._workload_pub_port = workload_pub_port
        # Dispatch table: request name -> handler, consumed by the server.
        server_calls: Dict = {
            "start workload": self._call_start_workload,
            "get workload": self._call_get_workload,
            "stop workload": self._call_stop_workload,
        }
        self._server = Server(generator_listening, generator_port, server_calls)
        # FIX: annotate honestly as Optional[str] instead of `str` plus
        # `# type: ignore` — None is the documented "no workload" state.
        self._workload: Optional[str] = None
        # Queries emitted per tick; 0 while no workload is active.
        self._workload_frequency: int = 0
        self._init_server()
        self._init_scheduler()

    def _init_scheduler(self) -> None:
        """Start a background job that publishes the workload every second."""
        self._scheduler = BackgroundScheduler()
        self._generate_workload_job = self._scheduler.add_job(
            func=self._generate_workload,
            trigger="interval",
            seconds=1,
        )
        self._scheduler.start()

    def __enter__(self) -> "WorkloadGenerator":
        """Return self for a context manager."""
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_value: Optional[BaseException],
        traceback: Optional[TracebackType],
    ) -> Optional[bool]:
        """Call close with a context manager."""
        self.close()
        return None

    def _init_server(self) -> None:
        """Create the ZMQ context and bind the workload PUB socket."""
        self._context = Context(io_threads=1)
        self._pub_socket = self._context.socket(PUB)
        self._pub_socket.bind("tcp://{:s}:{:s}".format(
            self._workload_listening, self._workload_pub_port))

    def _call_start_workload(self, body: Body) -> Response:
        """Activate the named workload at the requested frequency."""
        self._workload = body["workload_name"]
        self._workload_frequency = body["frequency"]
        return get_response(200)

    def _call_stop_workload(self, body: Body) -> Response:
        """Deactivate the current workload; publishing becomes a no-op."""
        self._workload = None
        self._workload_frequency = 0
        return get_response(200)

    def _generate_workload(self) -> None:
        """Publish `frequency` copies of the active workload (no-op when idle)."""
        if self._workload:
            response = get_response(200)
            response["body"]["querylist"] = [
                self._workload for _ in range(self._workload_frequency)
            ]
            self._pub_socket.send_json(response)

    def _call_get_workload(self, body: Body) -> Response:
        """Report the currently active workload name and frequency."""
        response = get_response(200)
        response["body"]["workload"] = {
            "workload_name": self._workload,
            "frequency": self._workload_frequency,
        }
        return response

    def start(self) -> None:
        """Start the generator by starting the server."""
        self._server.start()

    def close(self) -> None:
        """Close the socket and context."""
        self._generate_workload_job.remove()
        self._scheduler.shutdown()
        self._pub_socket.close()
        self._context.term()
        # FIX: also shut down the control server — it was previously leaked.
        # Mirrors DatabaseManager.close(), which closes its Server the same way.
        self._server.close()
def main(args):
    """Run one DDS experiment (simulation, emulation, or plain MPEG) and write stats.

    Builds a ServerConfig from the parsed CLI `args`, analyzes the video in
    the selected mode, optionally evaluates against ground truth, and
    appends a stats row via write_stats.
    """
    logging.basicConfig(
        format="%(name)s -- %(levelname)s -- %(lineno)s -- %(message)s",
        level=args.verbosity.upper())
    logger = logging.getLogger("dds")
    logger.addHandler(logging.NullHandler())
    # Make simulation objects
    logger.info(f"Starting server with high threshold of "
                f"{args.high_threshold} low threshold of "
                f"{args.low_threshold} tracker length of "
                f"{args.tracker_length}")
    config = ServerConfig(args.resolutions[0], args.resolutions[1], args.qp[0],
                          args.qp[1], args.bsize, args.high_threshold,
                          args.low_threshold, args.max_object_size,
                          args.min_object_size, args.tracker_length,
                          args.boundary, args.intersection_threshold,
                          args.tracking_threshold, args.suppression_threshold,
                          args.simulate, args.rpn_enlarge_ratio,
                          args.prune_score, args.objfilter_iou, args.size_obj)
    server = Server(config)
    logger.info("Starting client")
    client = Client(server, args.hname, config)
    # Mode selection below: simulation, emulation, or mpeg.
    mode = None
    results, bw = None, None
    if args.simulate:
        mode = "simulation"
        logger.warning("Running DDS in SIMULATION mode")
        # Run simulation
        logger.info(f"Analyzing video {args.video_name} with low resolution "
                    f"of {args.resolutions[0]} and high resolution of "
                    f"{args.resolutions[1]}")
        results, bw = client.analyze_video_simulate(
            args.video_name, args.low_images_path, args.high_images_path,
            args.high_results_path, args.low_results_path,
            args.enforce_iframes, args.mpeg_results_path,
            args.estimate_banwidth, args.debug_mode)
    elif not args.simulate and not args.hname and args.resolutions[-1] != -1:
        mode = "emulation"
        logger.warning(f"Running DDS in EMULATION mode on {args.video_name}")
        # Run emulation
        results, bw = client.analyze_video_emulate(args.video_name,
                                                   args.high_images_path,
                                                   args.enforce_iframes,
                                                   args.low_results_path,
                                                   args.debug_mode)
    elif not args.simulate and not args.hname:
        mode = "mpeg"
        logger.warning(f"Running in MPEG mode with resolution "
                       f"{args.resolutions[0]} on {args.video_name}")
        results, bw = client.analyze_video_mpeg(args.video_name,
                                                args.high_images_path,
                                                args.enforce_iframes)
    # Evaluation and writing results
    # Read Groundtruth results
    # NOTE(review): if none of the three branches above matched (e.g. hname
    # is set while simulate is False), bw is still None and this unpack
    # raises TypeError — confirm intended CLI combinations. `low`/`high`
    # are also unused after this line.
    low, high = bw
    f1 = 0
    stats = (0, 0, 0)
    number_of_frames = len(
        [x for x in os.listdir(args.high_images_path) if "png" in x])
    if args.ground_truth:
        ground_truth_dict = read_results_dict(args.ground_truth)
        logger.info("Reading ground truth results complete")
        tp, fp, fn, _, _, _, f1 = evaluate(number_of_frames - 1,
                                           results.regions_dict,
                                           ground_truth_dict,
                                           args.low_threshold, 0.5, 0.4, 0.4)
        stats = (tp, fp, fn)
        logger.info(f"Got an f1 score of {f1} "
                    f"for this experiment {mode} with "
                    f"tp {stats[0]} fp {stats[1]} fn {stats[2]} "
                    f"with total bandwidth {sum(bw)}")
    else:
        logger.info("No groundtruth given skipping evalution")
    # Write evaluation results to file
    write_stats(args.outfile, f"{args.video_name}", config, f1, stats, bw,
                number_of_frames, mode)
def main(server: Server):
    """Run the backend: delegate to the server's start (listen) loop."""
    server.start()
"""Backend entry point: read configuration from the environment and start the server."""
import os
import sys

from backend.logic import Logic
from backend.server import Server
from backend.storage import Storage


# noinspection PyShadowingNames
def main(server: Server):
    """Start the server (blocks until shutdown)."""
    server.start()


if __name__ == '__main__':
    # Port defaults to 8080; the storage URI is mandatory.
    listening_port = int(os.environ.get('LISTENING_PORT', '8080'))
    database_uri = os.environ.get('STORAGE_DB_URI')
    if not database_uri:
        print('STORAGE_DB_URI must be set for backend to run!', file=sys.stderr)
        # BUG FIX: exit with a non-zero status so shells/supervisors see the
        # misconfiguration as a failure; bare sys.exit() exits with status 0.
        sys.exit(1)
    storage = Storage(database_uri)
    logic = Logic(storage)
    server = Server(logic, listening_port)
    main(server)
"""Entry script: configure logging, then construct and run the backend Server."""
import logging

from backend.server import Server
from backend.settings import init_logging

# Logging must be configured before the first logger is fetched.
init_logging()
logger = logging.getLogger(__name__)

server = Server()
logger.info("Starting up")
server.run()
# coding=utf-8
"""Package entry point: construct the backend Server and start it.

Created on 2021/3/15 14:37 by wurenxi (__main__.py).
"""
from backend.server import Server

if __name__ == '__main__':
    server = Server()
    server.start()
"""Entry script: configure logging, then run a backend Server bound to localhost."""
import logging

from backend.server import Server
from backend.settings import init_logging

# Logging must be configured before the first logger is fetched.
init_logging()
logger = logging.getLogger(__name__)

server = Server(host="localhost")
logger.info("Starting up")
server.run()