def image_description():
    """Flask view: run object detection on an uploaded image.

    Reads the multipart field 'image', validates the sanitized filename and
    its extension against app.config['VALID_EXTENSIONS'], then feeds the raw
    bytes to app.config['IMAGE_DESCRIPTOR'].

    Returns:
        str: JSON-encoded detection output, or the configured
        'detection_failure' payload when the descriptor is not loaded or
        detection raises.

    Aborts:
        400: empty filename, missing extension, or unsupported extension.
    """
    file_handler = request.files['image']
    file_name = secure_filename(file_handler.filename)
    if file_name:
        head_part, tail_part = path.split(file_name)
        # rpartition tolerates multi-dot names ('a.b.jpg') and names without
        # a dot; the original split('.') raised ValueError on both instead
        # of rejecting the upload with a 400.
        tail_name, dot, tail_extension = tail_part.rpartition('.')
        # lower() accepts uppercase extensions ('IMG.JPG'); the configured
        # extension list is lowercase.
        if not dot or tail_extension.lower() not in app.config['VALID_EXTENSIONS']:
            logger.error('this extension is not valid')
            abort(400)
    else:
        logger.error('this filename is not valid')
        abort(400)
    try:
        if app.config['IMAGE_DESCRIPTOR'] is not None:
            logger.debug('start object detection')
            raw_data = file_handler.read()
            output = app.config['IMAGE_DESCRIPTOR'].detect(raw_data)
            return json.dumps(output)
        # Descriptor failed to load at import time (see module __init__).
        logger.debug('the descriptor is not loaded')
        logger.debug('please check the file [__init__.py]')
        return json.dumps(flask_constants['detection_failure'])
    except Exception:
        # Best-effort endpoint: detection errors degrade to a failure
        # payload rather than a 500.
        logger.debug('An exception was raised ...')
        return json.dumps(flask_constants['detection_failure'])
def server_start():
    """Build the Pyramid WSGI application and serve it forever.

    Registers the upload_csv and create_data_Set views (as both request
    methods and routed views), then blocks in serve_forever(). Any error
    is logged with its traceback.
    """
    try:
        with Configurator() as config:
            # Each endpoint: (name, url pattern, view callable).
            endpoints = (
                ('upload_csv', '/upload_csv/', upload_csv),
                ('create_data_Set', '/create_data_Set', create_data_Set),
            )
            for route_name, pattern, view in endpoints:
                config.add_request_method(view, route_name, reify=True)
                config.add_route(route_name, pattern)
                config.add_view(view,
                                route_name=route_name,
                                renderer='json',
                                request_method=("POST", "GET"))
            app = config.make_wsgi_app()
            logger.info("initializing xmpp server...")
            logger.info("Server is running on {0}:{1}".format(
                http_host, str(http_port)))
            server = make_server(http_host, http_port, app)
            server.serve_forever()
    except Exception:
        logger.error(msg="Error while running server", exc_info=True)
def router_process(router_port, publisher_port, source_images, target,
                   router_readyness, workers_states_queue):
    """Distribute `source_images` to worker processes over ZMQ.

    Binds a ROUTER socket (work dispatch) and a PUB socket (the 'KILL'
    broadcast). Each worker announces itself with a b'ready' frame; the
    router replies with a JSON job {'source': ..., 'target': ...} until
    every image has been dispatched, then broadcasts KILL and spins until
    every known worker has reported 'dead' on `workers_states_queue`.

    Args:
        router_port: TCP port the ROUTER socket binds to.
        publisher_port: TCP port the PUB (KILL) socket binds to.
        source_images: list of image paths to dispatch, one per job.
        target: target path forwarded verbatim to every worker.
        router_readyness: event set once the sockets are bound, so workers
            only start talking to a live router.
        workers_states_queue: queue of (pid, state) worker status updates.
    """
    # Pre-bind these so the `finally` block cannot raise NameError when
    # context/socket creation itself fails (defect in the original: the
    # cleanup referenced names that were never assigned).
    ctx = None
    router_socket = None
    publisher_socket = None
    try:
        ctx = zmq.Context()
        logger.info('the context was created ...')
        router_socket = ctx.socket(zmq.ROUTER)
        publisher_socket = ctx.socket(zmq.PUB)
        logger.info('router and publisher socket was initialized')
        router_socket.bind(f'tcp://*:{router_port}')
        publisher_socket.bind(f'tcp://*:{publisher_port}')
        logger.info('router and publisher socket was bound')
        router_socket_poller = zmq.Poller()
        router_socket_poller.register(router_socket, zmq.POLLIN)
        workers_states_map = {}  # pid -> last reported state
        # (leftover debug print(source_images) removed)
        nb_source_images = len(source_images)
        counter = 0  # number of jobs dispatched so far
        keep_routing = True
        router_readyness.set()
        logger.info('router process is ready')
        while keep_routing:
            # Evaluated at the top so the loop still runs one final
            # iteration after the last job was handed out.
            keep_routing = counter < nb_source_images
            router_events = dict(router_socket_poller.poll(100))
            # Drain worker state updates without blocking.
            while not workers_states_queue.empty():
                pid, sts = workers_states_queue.get()
                workers_states_map[pid] = sts
            if router_socket in router_events.keys():
                if router_events[router_socket] == zmq.POLLIN:
                    inc = router_socket.recv_multipart()
                    rid, sep, msg, pid = inc
                    if msg == b'ready':
                        if counter < nb_source_images:
                            contents = json.dumps({
                                'source': source_images[counter],
                                'target': target
                            }).encode()
                            logger.info(f'router gor {msg} for worker {pid}')
                            # Send this request to the first ready worker.
                            router_socket.send_multipart([
                                rid,   # routing id used by the ROUTER socket
                                b'',   # delimiter stripped by the REQ socket
                                b'',   # extra empty frame, kept for convenience
                                contents
                            ])
                            counter = counter + 1
        publisher_socket.send_multipart([b'KILL', b''])  # kill all workers
        logger.info(
            'router process pending for worker to finish cleaning ...!')
        start = time.time()
        # Spin (1 ms naps) until every known worker reported 'dead',
        # logging the state map at most once per second.
        while any(sts != 'dead' for sts in workers_states_map.values()):
            current_time = time.time()
            if current_time - start >= 1:
                start = current_time
                logger.info(workers_states_map)
            if not workers_states_queue.empty():
                pid, sts = workers_states_queue.get()
                workers_states_map[pid] = sts
            time.sleep(0.001)
        logger.info('all worker are free => router can terminate !')
    except zmq.ZMQError as e:
        logger.error('router => [Exception]zmqerror', e)
    except KeyboardInterrupt as e:
        logger.error('router => [Exception]keyboard', e)
    except Exception as e:
        logger.error('router => [Exception]global', e)
    finally:
        # Close only what was actually created.
        if publisher_socket is not None:
            publisher_socket.close()
        if router_socket is not None:
            router_socket.close()
        if ctx is not None:
            ctx.term()
        logger.info('router process is closed ...!')
def finder_process(router_port, publisher_port, process_id,
                   router_readyness, workers_states_queue):
    """Worker: pull similarity-search jobs from the router and run them.

    Connects a REQ socket to the router (job requests/replies) and a SUB
    socket to the publisher (KILL broadcasts). After the router signals
    readiness, the worker sends b'ready', then loops: on each job it runs
    `searching.search` as a subprocess and re-announces readiness; a KILL
    topic ends the loop. Liveness transitions ('alive'/'dead') are pushed
    to `workers_states_queue` for the router to track.

    Args:
        router_port: TCP port of the router's ROUTER socket.
        publisher_port: TCP port of the router's PUB socket.
        process_id: worker identity string (also used as ZMQ IDENTITY).
        router_readyness: event the worker waits on before first contact.
        workers_states_queue: queue receiving (process_id, state) updates.
    """
    # Pre-bind these so the `finally` block cannot raise NameError when
    # context/socket creation itself fails (defect in the original).
    ctx = None
    req_socket = None
    subscriber_socket = None
    try:
        ctx = zmq.Context()
        req_socket = ctx.socket(zmq.REQ)
        subscriber_socket = ctx.socket(zmq.SUB)
        req_socket.connect(f'tcp://localhost:{router_port}')
        subscriber_socket.connect(f'tcp://localhost:{publisher_port}')
        req_socket.setsockopt_string(zmq.IDENTITY, process_id)
        subscriber_socket.setsockopt_string(zmq.SUBSCRIBE, 'KILL')
        req_socket_poller = zmq.Poller()
        subscriber_socket_poller = zmq.Poller()
        req_socket_poller.register(req_socket, zmq.POLLIN)
        subscriber_socket_poller.register(subscriber_socket, zmq.POLLIN)
        logger.info(f'worker : {process_id} is initialized')
        router_readyness.wait()  # wait until the router is ready
        req_socket.send_multipart([b'ready', process_id.encode('utf-8')])
        workers_states_queue.put((process_id, 'alive'))
        keep_processing = True
        while keep_processing:
            subscriber_events = dict(subscriber_socket_poller.poll(100))
            if subscriber_socket in subscriber_events.keys():
                if subscriber_events[subscriber_socket] == zmq.POLLIN:
                    topic, msg = subscriber_socket.recv_multipart()
                    if topic.decode() == 'KILL':
                        logger.info(f'worker {process_id} got kill signal')
                        keep_processing = False
            req_events = dict(req_socket_poller.poll(100))
            if req_socket in req_events.keys():
                if req_events[req_socket] == zmq.POLLIN:
                    # Router sent [rid, b'', b'', contents]; the REQ socket
                    # strips the identity and first delimiter, leaving
                    # [b'', contents].
                    inc = req_socket.recv_multipart()
                    sep, incoming_req = inc
                    json_req = json.loads(incoming_req.decode('utf-8'))
                    logger.info(f'worker : {process_id} get req : {json_req}')
                    source, target = json_req['source'], json_req['target']
                    script = f'python -m searching.search --extractor models/vgg16.h5 --target {target} --input {source}'
                    logger.info('the next script will start : ')
                    logger.info(script)
                    # NOTE(review): shell=True interpolates source/target
                    # into a shell command line; safe only while both come
                    # from local configuration, not untrusted input.
                    response = subprocess.run(script, shell=True)
                    if response.returncode == 0:
                        logger.success(
                            f'the similarity was computed for {source}')
                    else:
                        logger.error(
                            f'some erros was triggered during the computation : {source}'
                        )
                    logger.info(f'worker {process_id} finish the work')
                    # Announce readiness for the next job.
                    req_socket.send_multipart(
                        [b'ready', process_id.encode('utf-8')])
    except zmq.ZMQError as e:
        logger.error(f'worker {process_id} => [Exception]zmqerror', e)
    except KeyboardInterrupt as e:
        logger.error(f'worker {process_id} => [Exception]keyboard', e)
    except Exception as e:
        logger.error(f'worker {process_id} => [Exception]global', e)
    finally:
        # Close only what was actually created, then report death so the
        # router's shutdown wait can complete.
        if subscriber_socket is not None:
            subscriber_socket.close()
        if req_socket is not None:
            req_socket.close()
        if ctx is not None:
            ctx.term()
        logger.info(f'worker {process_id} is closed ...!')
        workers_states_queue.put((process_id, 'dead'))
# Driver fragment: resolve input folders, then spawn the worker pool and the
# router process. NOTE(review): `current_dir`, `source`, `target`,
# `pull_files`, `parser_map`, the port variables and the shared
# event/queue objects are defined earlier in this script, outside this
# excerpt — confirm against the full file.
path_to_source = path.join(current_dir, '..', source)
path_to_target = path.join(current_dir, '..', target)  # NOTE(review): not used in this excerpt
source_images = pull_files(path_to_source)
try:
    # One finder (worker) process per configured worker; ids are
    # zero-padded 3-digit strings, also used as ZMQ identities.
    workers = []
    for idx in range(parser_map['nb_workers']):
        workers.append(
            mp.Process(target=finder_process,
                       args=[
                           router_port, publisher_port, '%03d' % idx,
                           router_readyness, workers_states_queue
                       ]))
        workers[-1].start()
    # The router dispatches `source_images` to the workers and broadcasts
    # KILL when done.
    server = mp.Process(target=router_process,
                        args=[
                            router_port, publisher_port, source_images,
                            target, router_readyness, workers_states_queue
                        ])
    server.start()
    # Join the router first (it only returns after every worker reported
    # 'dead'), then reap the worker processes.
    server.join()
    for wrk in workers:
        wrk.join()
except KeyboardInterrupt as e:
    logger.error('main => [Exception]keyboard', e)
# Flask application bootstrap: build the YOLOv3-based image descriptor once
# at import time and expose it (plus upload limits) through app.config.
from os import path
from flask import Flask
from logger.log import logger
from descriptor.descript import Descriptor

# Model files live one level above this package, under models/.
current_dir = path.dirname(path.realpath(__file__))
config_filename = path.join(current_dir, '..', 'models/yolov3.cfg')
weights_filename = path.join(current_dir, '..', 'models/yolov3.weights')

# Stays None when the model cannot be loaded; the detection view then
# answers with its 'detection failure' payload instead of crashing.
image_descriptor = None
try:
    # 0.5 / 0.3 — presumably confidence and NMS thresholds; confirm against
    # Descriptor's signature.
    image_descriptor = Descriptor(config_filename, weights_filename, 0.5, 0.3)
    logger.info('the descriptor is ready')
except FileNotFoundError as contents:
    logger.error(
        'some files are missing, please check the weight or config file')
except Exception as e:
    logger.error(e)

# Upload cap: the byte size of one uncompressed 1024x1024 RGB image.
max_row = 1024
max_col = 1024
max_channels = 3
# NOTE(review): 'ttf' is a font extension, not an image format — likely a
# typo for 'tif'; confirm before changing.
extentions = ['jpg', 'gif', 'png', 'ttf', 'jpeg']

app = Flask(__name__)
app.config['MAX_CONTENT_LENGTH'] = max_row * max_col * max_channels
app.config['VALID_EXTENSIONS'] = extentions
app.config['IMAGE_DESCRIPTOR'] = image_descriptor

# Imported last on purpose: app.views needs the `app` object defined above.
from app import views
from pyramid.config import Configurator from logger.log import logger from src.get_big_query import upload_csv, create_data_Set, create_schema import configparser from wsgiref.simple_server import make_server try: config = configparser.ConfigParser() config.read("config/config.ini") http_host = config.get("HTTP", "host") http_port = int(config.get("HTTP", "port")) except Exception as e: logger.error( msg="Error occured while reading configuration from config file", exc_info=True) def server_start(): try: with Configurator() as config: config.add_request_method(upload_csv, 'upload_csv', reify=True) config.add_request_method(create_data_Set, 'create_data_Set', reify=True) config.add_route('upload_csv', '/upload_csv/') config.add_route('create_data_Set', '/create_data_Set') config.add_view(upload_csv, route_name='upload_csv', renderer='json', request_method=("POST", "GET"))