def detector_process(filename_reader):
    """Greenlet pipeline: read image filenames from a pipe and run detection.

    For every image in which each classifier found at least one region,
    draw black rectangles over the regions, save the annotated copy under
    ``/tmp/detected/`` and print a summary line.
    """
    from detector import detect_in_files, eye_classifier, face_classifier

    out_dir = '/tmp/detected/'
    try:
        os.makedirs(out_dir)
    except OSError:
        # Best effort: the directory most likely already exists.
        pass

    def echo_features(img, features, filename):
        # Callback handed to detect_in_files for each processed image.
        import cv2
        # Require every classifier to have produced at least one region.
        if all(map(len, features.values())):
            for region_list in features.values():
                for rect in region_list:
                    top_left = (rect[0], rect[1])
                    bottom_right = (rect[0] + rect[2], rect[1] + rect[3])
                    cv2.rectangle(img, top_left, bottom_right, (0, 0, 0))
            base_name = os.path.split(filename)[1]
            cv2.imwrite(os.path.join(out_dir, base_name), img)
            print('detected: {} {}'.format(filename, features))

    classifiers = dict(face=face_classifier, eye=eye_classifier)
    filename_q = Queue()
    spawn_greenlets([
        (pipe_to_q, filename_reader, filename_q),
        (detect_in_files, echo_features, filename_q, classifiers),
    ])
def main():
    """Entry point: stream tweets matching 'lol' and echo them to stdout."""
    incoming_tweets_q = Queue()
    api = get_twitter_api()
    spawn_greenlets([
        (filter_twitter, api, incoming_tweets_q, ['lol']),
        (echo_statuses, incoming_tweets_q),
    ])
def renderer_process(buffer_reader, output_dir='/tmp/'):
    """Greenlet pipeline: configure plots from a buffer, render, echo results.

    NOTE(review): a second ``renderer_process`` definition appears later in
    this module and shadows this one -- confirm which is intended to be live.
    """
    to_plot = Queue()
    rendered = Queue()
    spawn_greenlets([
        (configure_plots, buffer_reader, to_plot),
        (plotter, to_plot, rendered, output_dir),
        (echo_queue, rendered),
    ])
def twitter_process(url_writer, filter_phrases=None):
    """Greenlet pipeline: filter the twitter stream and pipe image URLs out.

    :param url_writer: writable pipe end receiving extracted image URLs.
    :param filter_phrases: phrases passed to the twitter filter stream;
        defaults to no phrases.
    """
    # Fix: the original used a mutable default argument (``filter_phrases=[]``),
    # which is shared across calls; use the None-sentinel idiom instead.
    if filter_phrases is None:
        filter_phrases = []
    url_q = Queue()
    image_status_q = Queue()
    twitter_api = get_twitter_api()
    spawn_greenlets([
        (filter_twitter, twitter_api, image_status_q, filter_phrases),
        (extract_status_images, image_status_q, url_q),
        (q_to_pipe, url_q, url_writer),
    ])
def image_fetch_process(url_reader, filename_writer, output_dir='/tmp/gtwitcv/'):
    """Greenlet pipeline: read image URLs, fetch them, pipe filenames out.

    :param url_reader: readable pipe end supplying image URLs.
    :param filename_writer: writable pipe end receiving fetched filenames.
    :param output_dir: kept for interface compatibility; currently unused --
        fetched images go to ``fetch_tmp_dir`` below. TODO: confirm intent.
    """
    url_q = Queue()
    filename_q = Queue()
    # Fix: removed an unused local (``rendered_q = Queue()``) that was never
    # referenced by any greenlet in this pipeline.
    fetch_tmp_dir = '/tmp/gtwitimg/'
    spawn_greenlets([
        (pipe_to_q, url_reader, url_q),
        (fetch_image_url, url_q, filename_q, fetch_tmp_dir),
        (q_to_pipe, filename_q, filename_writer),
    ])
def sampler_process(buffer_writer, fn, phrase='lol'):
    """Greenlet pipeline: count occurrences of *phrase* in the twitter stream.

    NOTE(review): a later ``sampler_process`` definition in this module
    shadows this one -- confirm which is intended to be live.
    """
    raw_statuses = Queue()
    filtered = Queue()
    api = get_twitter_api()
    spawn_greenlets([
        (sampler, buffer_writer, fn, 2.0, 10.0),
        (filter_twitter, api, filtered, [phrase]),
        (extract_statuses, filtered, raw_statuses),
        (count_phrases, raw_statuses, phrase),
    ])
def renderer_process(buffer_reader, output_dir='/tmp/'):
    """Greenlet pipeline: render incoming plot buffers and tweet the results.

    If ``get_twitter_api`` raises ``KeyError`` (presumably missing
    credentials -- TODO confirm), the pipeline runs with a ``None`` API.
    Outside of DEBUG mode every greenlet target is wrapped in
    ``never_surrender``.
    """
    plot_q = Queue()
    rendered_q = Queue()
    try:
        api = get_twitter_api()
    except KeyError:
        api = None
    confs = [
        [configure_plots, buffer_reader, plot_q],
        [plotter, plot_q, rendered_q, output_dir],
        [tweet_rendered, api, rendered_q],
    ]
    if not DEBUG:
        # Rebuild each conf with its target wrapped (same effect as the
        # original in-place ``conf[0] = never_surrender(conf[0])``).
        confs = [[never_surrender(c[0])] + c[1:] for c in confs]
    spawn_greenlets(confs)
def sampler_process(output_writer, fn, phrase='lol'):
    """Greenlet pipeline: sample phrase counts and track top tweets.

    Fans the filtered status stream out to a phrase-counting branch and a
    top-tweet branch; aggregated sampler data is written to *output_writer*.
    Outside of DEBUG mode every greenlet target is wrapped in
    ``never_surrender``.
    """
    sample_buffer_q = Queue()
    raw_status_q = Queue()
    status_q = Queue()
    phrase_status_q = Queue()
    top_tweet_status_q = Queue()
    api = get_twitter_api()
    confs = [
        [sampler, sample_buffer_q, fn, TWEET_SAMPLE_WINDOW, TWEET_INTERVAL],
        [aggregate_sampler_data, output_writer, sample_buffer_q],
        [filter_twitter, api, status_q, [phrase]],
        [fanout, status_q, [phrase_status_q, top_tweet_status_q]],
        [extract_statuses, phrase_status_q, raw_status_q],
        [count_phrases, raw_status_q, phrase],
        # top-tweet branch
        [count_top_tweet, top_tweet_status_q],
        [periodic_top_tweets, api, TWEET_INTERVAL * 0.25, 10],
    ]
    if not DEBUG:
        # Rebuild each conf with its target wrapped (same effect as the
        # original in-place ``conf[0] = never_surrender(conf[0])``).
        confs = [[never_surrender(c[0])] + c[1:] for c in confs]
    spawn_greenlets(confs)