Example #1
def gen_video():
    streamer = Streamer('localhost', 9002)
    streamer.start()
    minutes = 0

    while True:
        if streamer.client_connected():
            time.sleep(minutes)

            path = 'output/file' + str(streamer.i) + '.mp4'
            with open(path, 'rb') as f:
                data = f.read()
            print("file ", streamer.i, " READ")
            # streamer.video = data

            # vid = cv2.VideoCapture(path)
            # fps = vid.get(cv2.CAP_PROP_FPS)      # OpenCV2 version 2 used "CV_CAP_PROP_FPS"
            # frameCount = int(vid.get(cv2.CAP_PROP_FRAME_COUNT))
            # duration = frameCount/fps
            # minutes = int(duration/60)
            minutes = 7  # hard-coded clip length; the commented cv2 code above derives it
            streamer.i += 1
            # yield streamer.get_video()
            yield data
Example #2
def gen():
  streamer = Streamer('0.0.0.0', 8000)
  streamer.start()

  while True:
    if streamer.streaming:
      yield (b'--frame\r\n'b'Content-Type: image/jpeg\r\n\r\n' + streamer.get_jpeg() + b'\r\n\r\n')
Example #3
    def __init__(self, fixtures_folder, parent=None):
        QWidget.__init__(self, parent)
        self.current_fixture = None
        self.fixtures_folder = fixtures_folder
        self.setWindowTitle("Frangitron DMX program editor")

        self.text = QPlainTextEdit()
        font = QFont("Monospace")
        font.setStyleHint(QFont.TypeWriter)
        font.setPixelSize(16)
        self.text.setFont(font)
        self.text.setStyleSheet(
            "color: white; background-color: rgb(30, 30, 30)")

        self.combo_fixture = QComboBox()

        self.frame_programs = QWidget()
        self.checkboxes_programs = list()
        self.layout_programs = QGridLayout(self.frame_programs)

        self.spinner_offset = QSpinBox()
        self.spinner_offset.setMinimum(1)
        self.spinner_offset.setMaximum(512)
        self.spinner_offset.setValue(1)
        self.spinner_offset.valueChanged.connect(self.address_changed)

        self.doc = QPlainTextEdit()
        self.doc.setReadOnly(True)
        self.doc.setFont(font)

        self.status = QLabel()

        layout = QGridLayout(self)
        layout.addWidget(self.combo_fixture, 0, 1)
        layout.addWidget(self.spinner_offset, 0, 2)
        layout.addWidget(self.frame_programs, 1, 1)
        layout.addWidget(self.text, 0, 0, 3, 1)
        layout.addWidget(self.doc, 2, 1, 1, 2)
        layout.addWidget(self.status, 3, 0, 1, 3)
        layout.setColumnStretch(0, 60)
        layout.setColumnStretch(1, 40)

        self.resize(1280, 800)

        self.streamer = Streamer(self.fixtures_folder)

        self.combo_fixture.addItems(sorted(self.streamer.fixtures))
        self.combo_fixture.currentIndexChanged.connect(self.fixture_changed)

        self.timer = QTimer()
        self.timer.timeout.connect(self.tick)
        self.timer.start(int(500 / FRAMERATE))  # QTimer.start() takes whole milliseconds
        self.should_reload = True

        self.fixture_changed()
Example #4
class Main:
    def __init__(self):

        logging.basicConfig(
            level=logging.DEBUG,
            format='%(asctime)s %(threadName)s\t%(levelname)-8s\t%(message)s')

        signal.signal(signal.SIGINT, self.exit)
        signal.signal(signal.SIGTERM, self.exit)

        self.is_restarted = False
        self.config = self.load_config()

        host = self.config['host']
        stream_token = self.config['stream_token']
        stream_config = self.config['stream_config']

        self.previous_status = ""

        logging.info('### PICAM-STREAM ###')
        logging.info('streaming to \'{}\''.format(host))

        self.s = Streamer(host, stream_token, stream_config)
        self.o = Observer(host, self.observer_event_handler)

    def load_config(self, config_path='./config.json'):
        with open(config_path, 'r') as f:
            config = json.load(f)
            return config

    def observer_event_handler(self, status):

        if self.previous_status != status:  # status has changed
            logging.debug('observer reported status \'{}\''.format(status))
            self.previous_status = status

        if status in ['disconnected', 'stopped', 'error']:
            if not self.is_restarted:
                logging.warning('(re)starting stream (status: {}) ...'.format(status))
                self.s.restart_stream()
                self.is_restarted = True
        else:
            self.is_restarted = False

    def start(self):
        self.o.start()

    def exit(self, signum, frame):
        logging.debug('signal {} received, exiting'.format(signum))

        self.o.stop()
        self.s.stop_stream()

        logging.info('bye!')
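A minimal entry point is not shown; a plausible sketch, assuming the script is run directly and that a config.json with the 'host', 'stream_token' and 'stream_config' keys sits next to it:

if __name__ == '__main__':
    # Hypothetical wiring for the Main class above; Main installs its own
    # SIGINT/SIGTERM handlers, so the process just starts the observer.
    main = Main()
    main.start()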
Example #5
def host2(listen_port, remote_port):
    s = Streamer(dst_ip="localhost",
                 dst_port=remote_port,
                 src_ip="localhost",
                 src_port=listen_port)
    # send small pieces of data
    for i in range(NUMS):
        buf = ("%d " % i)
        print("sending {%s}" % buf)
        s.send(buf.encode('utf-8'))
    receive(s)
    print("STAGE 2 TEST PASSED!")
Example #6
    def __init__(self, parent=None, *data):
        super(Play, self).__init__(parent)

        self.parent = parent
        self.id = data[0]

        # block and delete the downloaded audio binary when a new track is played
        self.lock_audio_load = False

        self.parent.is_stop = False

        self.sig.connect(self.parent.updateProgress)
        self.playback = Streamer(self.parent.volume)
        self.start()
Example #7
async def main():
    from colors import Col
    from datetime import datetime
    from time import perf_counter
    t = perf_counter()

    some_name = 'emilybarkiss'
    sample_sz = 350
    n_consumers = 100

    async with TwitchClient() as tc:
        streamer = Streamer(name=some_name)
        folnet = FollowerNetwork(streamer.uid)
        livestreams = LiveStreams()

        pipeline = RecommendationPipeline(streamer, folnet, livestreams,
                                          sample_sz)
        await pipeline(tc, n_consumers)

    streamer.display
    pipeline.folnet_pipe.display
    pipeline.live_stream_pipe.display

    print(f'{Col.magenta}🟊 N consumers: {n_consumers} {Col.end}')
    print(
        f'{Col.cyan}⏲ Total Time: {round(perf_counter() - t, 3)} sec {Col.end}'
    )
    print(
        f'{Col.red}\t««« {datetime.now().strftime("%I:%M.%S %p")} »»» {Col.end}'
    )
Example #8
    def __init__(self):
        StatusUpdateNode.__init__(self)
        Streamer.__init__(self)

        mask = EventsCodes.ALL_FLAGS["IN_CREATE"]  # watched events

        self.wm = WatchManager()
        self.notifier = Notifier(self.wm, self)

        if self.GetStreamerValue("src"):
            self.src = self.GetStreamerValue("src")
        else:
            self.src = "/tmp/motion/"

        self.wdd = self.wm.add_watch(self.src, mask, rec=True)

        if self.GetStreamerValue("host"):
            self.host = self.GetStreamerValue("host")
        else:
            self.host = "dogbert"

        if self.GetStreamerValue("username"):
            self.username = self.GetStreamerValue("username")
        else:
            self.username = "******"

        if self.GetStreamerValue("password"):
            self.password = self.GetStreamerValue("password")
        else:
            self.password = "******"

        if self.GetStreamerValue("dst"):
            self.dst = self.GetStreamerValue("dst")
        else:
            self.dst = "/home/arhivar/static_html/live/slides4"

        try:
            self.ssh = paramiko.SSHClient()
            self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
            self.ssh.connect(self.host, username=self.username, password=self.password)

            self.ftp = self.ssh.open_sftp()
        except Exception:
            self.error = True
            print("Cannot connect")
Example #9
def host1(listen_port, remote_port):
    s = Streamer(dst_ip="localhost",
                 dst_port=remote_port,
                 src_ip="localhost",
                 src_port=listen_port)
    receive(s)
    print("STAGE 1 TEST PASSED!")
    # send large chunks of data
    i = 0
    buf = ""
    while i < NUMS:
        buf += ("%d " % i)
        if len(buf) > 12345 or i == NUMS - 1:
            print("sending {%s}" % buf)
            s.send(buf.encode('utf-8'))
            buf = ""
        i += 1
    print("CHECK THE OTHER SCRIPT FOR STAGE 2 RESULTS.")
Example #10
	def __init__(self):
		Thread.__init__(self, target=Conman.run, name='Conman')
		settings = self.load_settings()
		self.queue    = Queue(100)
		self.backend  = FileSystem(out_queue=self.queue, **settings['backend'])
		self.streamer = Streamer(self.backend, self.queue)
		self.jsonwire = JsonWire('', 3484, self.queue, accept=False)
		self.backend.start()
		self.streamer.start()
		self.jsonwire.start()
Example #11
 def __init__(self, config_file):
     config = ConfigParser.ConfigParser()
     config.readfp(open(config_file))  # use the path passed in, not a hard-coded file
     self.db_name = config.get("Agent", "database")
     self.db = database.Database(self.db_name)
     self.wake_interval = int(config.get("Agent", "wake_every"))
     self.stopwords = open(config.get("Agent", "stop_words"),
                           'r').read().split()
     self.local_timezone = pytz.timezone("America/New_York")
     self.woke_at = self.utc_for(datetime.datetime.now())
     self.tasks = tasks.TaskTypes
     self.tweet_cache = []
     self.streamSampler = Streamer(0, self,
                                   config.get("Stream", "username"),
                                   config.get("Stream", "password"))
     self.filterSampler = Streamer(0, self,
                                   config.get("FilterStream", "username"),
                                   config.get("FilterStream", "password"))
     self.expire = False
     self.blackboard = {}
Example #12
 def __create_streamers(self):
     streamers = self.__load_streamers()
     streamer_ids = self.__get_streamers_id(streamers)
     for streamer in streamers:
         streamer_name = streamer.lower()
         self.__streamers[streamer_name] = Streamer(
             streamer_name,
             self.__capture_directory,
             streamer_ids.get(streamer_name),
             self.__complete_directory,
         )
Example #13
    def ready(self):
        print('Starting Streamer (From WebappConfig)')

        scraper_path = '\\'.join([
            os.path.dirname(os.path.abspath(__file__)), 'RepScraper',
            'scraper.py'
        ])
        sys.path.append('\\'.join(
            [os.path.dirname(os.path.abspath(__file__)), 'RepScraper']))
        from streamer import Streamer

        s = Streamer()
Example #14
def gen():
  streamer = Streamer('localhost', 9006)
  streamer.start()

  while True:
    if streamer.client_connected():
      yield (b'--frame\r\n'b'Content-Type: image/jpeg\r\n\r\n' + streamer.get_jpeg() + b'\r\n\r\n')
Example #15
def gen():
	streamer = Streamer('0.0.0.0', 8089)
	streamer.start()

	while True:
		if streamer.client_connected():
			frame = streamer.get_jpeg()
			yield (b'--frame\r\n'b'Content-Type: image/jpeg\r\n\r\n' + frame + b'\r\n')
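The gen() generators above yield parts of a multipart/x-mixed-replace HTTP body. A minimal sketch of how such a generator is typically served, assuming Flask (the framework is an assumption and is not shown in these examples):

from flask import Flask, Response

app = Flask(__name__)

@app.route('/video_feed')
def video_feed():
    # Each chunk yielded by gen() is one '--frame' part of the stream.
    return Response(gen(),
                    mimetype='multipart/x-mixed-replace; boundary=frame')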
Example #16
class VideoPlayer:
    def __init__(self):
        self.yt_player = YouTubePlayer()
        self.streamer = Streamer()
        self.file_player = FilePlayer()
        self.players_cache = Cache(_cache_size)

    def set_status_func(self, status_func):
        self.yt_player.set_status_func(status_func)
        self.streamer.set_status_func(status_func)

    def _get_player(self, url):
        c = self.players_cache.get(url)
        if c is not None:
            return c

        if self.file_player.can_play(url):
            c = self.file_player
        elif self.yt_player.can_play(url):
            c = self.yt_player
        elif self.streamer.can_play(url):
            c = self.streamer

        if c is not None:
            self.players_cache.add(url, c)
            return c
        return None

    def get_qualities(self, url):
        p = self._get_player(url)
        if p is None: return None
        else: return p.get_qualities(url)

    def can_play(self, url):
        try:
            return self._get_player(url) is not None
        except Exception:
            return False

    def is_playing(self):
        return self.yt_player.is_playing() or self.streamer.is_playing() \
             or self.file_player.is_playing()

    def play(self, url, quality):
        p = self._get_player(url)
        if p is None: raise Exception('No player found')
        p.play(url, quality)

    def is_playlist(self):
        return self.yt_player.is_playlist()

    def playlist_next(self):
        self.yt_player.playlist_next()

    def stop(self):
        self.yt_player.stop()
        self.streamer.stop()
        self.file_player.stop()
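A short usage sketch for the VideoPlayer facade above; the URL is a placeholder:

player = VideoPlayer()
url = 'https://example.com/some_video'  # hypothetical
if player.can_play(url):
    # _get_player() picks the first backend that accepts the URL and caches it.
    qualities = player.get_qualities(url)
    player.play(url, qualities[0] if qualities else None)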
Example #17
    def run(self):
        end_time = 0
        try:
            fake_stream = Streamer()
            # UNCOMMENT the following debug messages to check whether the worker
            # reports stream statistics correctly without disturbing the stream state.
            # self.logger.debug( "Stream: id: {}, outcome: {}".format(self.id, fake_stream.outcome.name))
            while True:
                if fake_stream.getNext() == 'x' and fake_stream.getNext() == 'A' and fake_stream.getNext() == 'd':
                    # self.logger.debug( "Successful Stream: id: {}, outcome: {}".format(self.id, fake_stream.outcome.name))
                    self.byte_count += 1
                    end_time = time.time()
                    self.status = Status.SUCCESS
                    break
                self.byte_count += 1

        except Exception as e:
            self.logger.error("Worker {}: Failed to process stream. Exception: {}, {}".format(self.id, type(e).__name__, e.args))
            self.status = Status.FAILURE
            self.byte_count = 0
            
        self.queue.put((self.id, end_time, self.byte_count, self.status))
Example #19
def gen():
    streamer = Streamer(os.getenv("SERVER_IP", 'localhost'),
                        os.getenv("PORT", '9092'), os.getenv("TOPIC", 'test'))
    streamer.start()

    while True:
        if streamer.client_connected():
            yield (b'--frame\r\n'
                   b'Content-Type: image/jpeg\r\n\r\n' + streamer.get_jpeg() +
                   b'\r\n\r\n')
Example #21
    def __init__(self,
                 streamer_name: str,
                 sample_sz=300,
                 max_followings=200,
                 min_mutual=3) -> None:
        self.sample_sz = sample_sz
        self.max_followings = max_followings
        self.min_mutual = min_mutual

        self.streamer = Streamer(name=streamer_name)
        self.folnet = FollowerNetwork(streamer_id=self.streamer.uid,
                                      min_mutual=self.min_mutual)
        self.live_streams = LiveStreams()

        self.pipeline = RecommendationPipeline(
            self.streamer,
            self.folnet,
            self.live_streams,
            max_followings=self.max_followings,
            sample_sz=self.sample_sz)
Example #22
    def test(self):
        print("Creating 3 output files.")
        output1 = open("out1.h264", "wb")
        output2 = open("out2.h264", "wb")
        output3 = open("out3.h264", "wb")

        print("Creating Streamers with the created outputs.")
        streamer1 = Streamer("h264", id_output=("1", output1))
        streamer2 = Streamer("h264", id_output=("2", output2))
        streamer3 = Streamer("h264", id_output=("3", output3))

        print("Checking if Streamer instances are same object.")
        self.assertIs(streamer1, streamer2)
        self.assertIs(streamer2, streamer3)

        print("Checking if Streamer instance has 3 outputs.")
        self.assertEqual(len(streamer1.output.outputs), 3)

        print("Recording to 3 outputs simultaneously for 5 seconds")
        streamer1.start()
        time.sleep(5)
        streamer1.stop()

        output1.close()
        output2.close()
        output3.close()

        print("Checking if 3 output files have same size.")
        self.assertEqual(
            os.stat("out1.h264").st_size,
            os.stat("out2.h264").st_size)
        self.assertEqual(
            os.stat("out2.h264").st_size,
            os.stat("out3.h264").st_size)

        print("Removing 3 output files.")
        os.remove("out1.h264")
        os.remove("out2.h264")
        os.remove("out3.h264")
Example #23
def prepare_bot(exchange: str, user: str):
    settings = load_settings(user)
    commons = Commons(user, settings['ema_spans_minutes'])
    all_coins = set(flatten([s.split('/') for s in commons.cc.markets]))
    settings['coins'] = [c for c in settings['coins'] if c in all_coins]
    all_margin_pairs = [
        f"{e['base']}/{e['quote']}"
        for e in commons.cc.sapi_get_margin_allpairs()
    ]
    settings['symbols'] = [
        s for c in settings['coins']
        if (s := f"{c}/{settings['quot']}") in all_margin_pairs
    ]
    commons.init(settings['symbols'])
    receiver_funcs = [commons.update_ema]
    vwap = Vwap(commons, settings)
    vwap.init()
    receiver_funcs.append(vwap.on_update)
    commons.start_call_limiter()
    streamer = Streamer(settings, receiver_funcs)
    return commons, vwap, streamer
Example #24
    def test(self):
        streamer1 = Streamer("h264")
        streamer2 = Streamer("mjpeg")
        streamer3 = Streamer("yuv420")
        streamer4 = Streamer("h264", resize="720p")

        self.assertIsNot(streamer1, streamer2)
        self.assertIsNot(streamer1, streamer3)
        self.assertIsNot(streamer1, streamer4)
        self.assertIsNot(streamer2, streamer3)
        self.assertIsNot(streamer2, streamer4)
        self.assertIsNot(streamer3, streamer4)

        self.assertIsInstance(streamer1, SplitFrameStreamer)
        self.assertIsInstance(streamer2, SplitFrameStreamer)
        self.assertIsInstance(streamer3, BaseStreamer)
        self.assertIsInstance(streamer4, SplitFrameStreamer)

        self.assertRaises(Exception, Streamer, "mjpeg", resize="1080p")

        streamer4.ready_to_stop = True
        streamer4 = Streamer("mjpeg", resize="1080p")

        self.assertRaises(Exception, Streamer, "mjpeg", resize="HD")
Example #25
    def run(self):
        address_used_internally = self.config['address_used_internally']
        range_port_init = self.config['internal_range_port']
        number_filter_columns = self.config['number_filter_columns']
        number_readers = self.config['number_readers']

        readers = []
        for i in range(number_readers):
            readers.append(
                Reader(self.config['reader'], address_used_internally,
                       range_port_init, i, number_readers))

        streamer = Streamer(address_used_internally, range_port_init,
                            address_used_internally, range_port_init + 1,
                            number_readers, number_filter_columns)

        filters_columns = []
        for i in range(number_filter_columns):
            filters_columns.append(
                FilterColumns(address_used_internally, range_port_init + 1,
                              address_used_internally, range_port_init + 2))

        input_ventilator = StreamerPublisher(address_used_internally,
                                             range_port_init + 2,
                                             self.outgoing_address,
                                             self.outgoing_port,
                                             number_filter_columns)

        for reader in readers:
            reader.start()
        streamer.start()
        for filter_columns in filters_columns:
            filter_columns.start()
        input_ventilator.start()

        for reader in readers:
            reader.join()
        streamer.join()
        for filter_columns in filters_columns:
            filter_columns.join()
        input_ventilator.join()
Example #26
def Open():
    Streamer("localhost", 8765).start()

    return "success"
        options={"sensor": sensor, "source": record}
        t=(str(office[0])+"c"+str(office[1])).replace(".",'d').replace("-",'n')
        uri=web_host+"/offices/"+t+"/api/sensorsdb"
        r=requests.put(uri,data=json.dumps(options),verify=False)
        if r.status_code==200 or r.status_code==201: return True
    except Exception as e:
        print("Exception: "+str(e), flush=True)
    return False

for simh in sim_hosts:
    if simh[1]=="0": continue
    port_scan.append("-p "+simh[1]+" "+simh[0])

scanner=Scanner()
streamer=None
if rtmp_host: streamer=Streamer()
while True:

    options=port_scan
    if dbp and not sim_hosts:
        try:
            r=dbp.bucketize("ip_text:* or port:*",["ip_text","port"],size=1000)
            if r:
                options.extend([k for k in r["ip_text"] if r["ip_text"][k]])
                options.extend(["-p "+str(k) for k in r["port"] if r["port"][k]])
        except Exception:
            print(traceback.format_exc(), flush=True)
            continue
        
    for ip,port in scanner.scan(" ".join(options)):
        # new or disconnected camera
Example #28
    def run(self):

        address_used_internally = self.config['address_used_internally']
        range_port_init = self.config['internal_range_port']

        number_of_filters_columns = self.config['number_of_filters_columns']
        number_of_filters_by_score = self.config['number_of_filters_by_score']

        streamer_input = StreamerSubscriber(self.incoming_address, self.incoming_port,
                                            address_used_internally, range_port_init,
                                            1, number_of_filters_columns)

        filters_columns = []
        for i in range(number_of_filters_columns):
            filters_columns.append(
                FilterColumns(address_used_internally, range_port_init,
                              address_used_internally, range_port_init + 1))

        streamer_filtered_columns = Streamer(address_used_internally, range_port_init + 1,
                                             address_used_internally, range_port_init + 2,
                                             number_of_filters_columns, number_of_filters_by_score)
        filters_by_score = []
        for i in range(number_of_filters_by_score):
            filters_by_score.append(
                FilterByScore(self.filter_points,
                              address_used_internally, range_port_init + 2,
                              address_used_internally, range_port_init + 3))
        streamer_filtered_points = Streamer(address_used_internally, range_port_init + 3,
                                            address_used_internally, range_port_init + 4,
                                            number_of_filters_by_score, 1)
        sum_up_points = SumUpPoints(address_used_internally, range_port_init + 4,
                                    address_used_internally, range_port_init + 5)
        sink = Sink(address_used_internally, range_port_init + 5, self.config['output_filename'])
        streamer_input.start()
        for filter_columns in filters_columns:
            filter_columns.start()
        streamer_filtered_columns.start()
        for filter_by_score in filters_by_score:
            filter_by_score.start()
        streamer_filtered_points.start()
        sum_up_points.start()
        sink.start()


        streamer_input.join()
        for filter_columns in filters_columns:
            filter_columns.join()
        streamer_filtered_columns.join()
        for filter_by_score in filters_by_score:
            filter_by_score.join()
        streamer_filtered_points.join()
        sum_up_points.join()
        sink.join()
Example #29
from streamer import Streamer
from db_analyzer import Analyzer

keyword_sets = {
    "DC": [
        "batman", "wonder woman", "aquaman", "the flash", "superman",
        "man of steel"
    ],
    "Marvel":
    ["iron man", "black widow", "thor", "hulk", "spider-man", "black panther"]
}
threads = {}

for name, keywords in keyword_sets.items():  # avoid shadowing the built-in 'set'
    threads[name] = Streamer(name, keywords)
    print('Starting stream on publisher', name)
    threads[name].start()

print('Starting analyzer')
threads['Analyzer'] = Analyzer()
threads['Analyzer'].start()

for name, thread in threads.items():
    print("joining on", name)
    thread.join()

for name in keyword_sets:
    print("stream", name, "status:", threads[name].status_code)
Example #30
# coding=utf-8
"""
Test client for the streamer
"""
import logging
from datetime import datetime
from streamer import Streamer
from utils import get_kw_from_file


logging.basicConfig(filename="../logs/twcrawl_log_%s.log" % datetime.now().strftime("%Y%m%d_%H%M"),
    format='%(asctime)-15s %(levelname)s : %(message)s',
    level=logging.INFO)

streamer = Streamer()
streamer.main(language="tr", track=get_kw_from_file())
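get_kw_from_file() is imported from utils but not shown. A plausible sketch, assuming a plain-text file with one tracking keyword per line (the file name and format are assumptions):

def get_kw_from_file(path='keywords.txt'):
    # One keyword per line; blank lines and surrounding whitespace are dropped.
    with open(path, encoding='utf-8') as f:
        return [line.strip() for line in f if line.strip()]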
Example #31
class Conman(Thread):
	label     = None
	state     = STARTING
	backend   = None
	streamer  = None
	jsonwire  = None
	queue     = None
	handlers  = {}
	connected = False
	accepting = False

	def __init__(self):
		Thread.__init__(self, target=Conman.run, name='Conman')
		settings = self.load_settings()
		self.queue    = Queue(100)
		self.backend  = FileSystem(out_queue=self.queue, **settings['backend'])
		self.streamer = Streamer(self.backend, self.queue)
		self.jsonwire = JsonWire('', 3484, self.queue, accept=False)
		self.backend.start()
		self.streamer.start()
		self.jsonwire.start()

	def load_settings(self):
		path = os.path.join(os.environ['DWITE_CFG_DIR'], 'conman.json')
		settings = {}
		if os.path.exists(path):
			f = open(path)
			try:
				settings = json.load(f)
			except Exception:
				print('ERROR: Could not load settings file %s' % path)
				settings = {}
			f.close()

		if 'backend' not in settings:
			settings['backend'] = FileSystem.dump_defaults()
		return settings

	def save_settings(self):
		path = os.path.join(os.environ['DWITE_CFG_DIR'], 'conman.json')
		try:
			f = open(path)  # read the existing settings before overwriting
			settings = json.load(f)
			f.close()
		except Exception:
			settings = {'backend':{}}
		settings['backend'] = self.backend.dump_settings()
		f = open(path, 'w')
		json.dump(settings, f, indent=4)
		f.close()

	def get_handler(self, msg):
		if msg.guid in self.handlers:
			return self.handlers[msg.guid]
		return None

	def stop(self, hard=False):
		self.streamer.stop()
		self.jsonwire.stop(hard)
		self.backend.stop()
		self.state = STOPPED

	def send_hail(self):
		def handle_hail(self, msg, orig_msg, user):
			assert type(msg) == JsonResult
			if msg.errno:
				print(msg.errstr)
				self.stop()
		guid = random.randint(1, 1000000)
		hail = Hail(guid, self.backend.name, 0, self.streamer.port)
		self.handlers[guid] = (hail, handle_hail, None)
		self.jsonwire.send(hail.serialize())

	def run(self):
		while self.state != STOPPED:

			if self.state == PAUSED:
				time.sleep(0.5)
				continue

			msg = None
			try:
				msg = self.queue.get(block=True, timeout=0.5)
			except Empty:
				if (not self.jsonwire.is_alive()) and self.state == RUNNING:
					self.state = STARTING
					self.connected = False
					self.jsonwire = JsonWire('', 3484, self.queue, accept=False)
					self.jsonwire.start()
				continue
			except Exception:
				print('VERY BAD!')
				traceback.print_exc()

			if type(msg) in [Accepting, Connected]:
				self.connected |= (type(msg) == Connected)
				self.accepting |= (type(msg) == Accepting)
				if self.connected and self.accepting and self.state == STARTING:
					# ready to hail the DM with all necessary info about conman
					# subsystems
					self.send_hail()
					self.state = RUNNING
				continue

			if isinstance(msg, JsonResult):
				if msg.guid in self.handlers:
					(orig_msg, handler, user) = self.get_handler(msg)
					handler(self, msg, orig_msg, user)
				else:
					print(msg)
				continue

			self.backend.in_queue.put(msg)

		self.save_settings()
Example #32
from streamer import Streamer
import cv2

streamer = Streamer(8888)
server = "http://rpi-6.wifi.local.cmu.edu:8888/video_feed"
VCap = cv2.VideoCapture(server)
if not VCap.isOpened():
    print("ERROR! Check the camera.")
    exit(0)
while True:
    ret, frame = VCap.read()
    if not ret:  # skip frames that failed to decode
        continue
    streamer.update_frame(frame)

    if not streamer.is_streaming:
        streamer.start_streaming()
    cv2.waitKey(10)
Example #33
import cv2

from streamer import Streamer
from visualizer import Visualizer
from detector import Detector
from publisher import Publisher

SOURCE = 1
PORT = "5555"
TOPIC = "obstacle_detector"
FRAME_SIZE = (960, 640)

streamer = Streamer(SOURCE)
streamer.start()

visualizer = Visualizer(streamer.frame)
visualizer.start()

detector = Detector(streamer.frame)
detector.start()

publisher = Publisher(port=PORT, topic=TOPIC)
publisher.start()

while True:
    if streamer.is_stopped or visualizer.is_stopped:
        streamer.stop()
        visualizer.stop()
        detector.stop()
        publisher.stop()
        break
Example #34
import os
import sys
from streamer import Streamer

if __name__ == '__main__':
    if len(sys.argv) == 3:
        if os.path.isdir(sys.argv[1]) and os.path.isfile(sys.argv[2]):
            from webapp import serve_webapp
            fixtures_folder = sys.argv[1]
            programs_file = sys.argv[2]
            serve_webapp(Streamer(fixtures_folder, programs_file))

        if sys.argv[1] == "editor" and os.path.isdir(sys.argv[2]):
            from editor import launch_editor
            fixtures_folder = sys.argv[2]
            launch_editor(fixtures_folder)
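Taken together, the argument checks above give the script two invocation forms: "python main.py <fixtures_folder> <programs_file>" to serve the webapp, and "python main.py editor <fixtures_folder>" to launch the editor.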
Example #35
    def run(self):

        address_used_internally = self.config['address_used_internally']
        range_port_init = self.config['internal_range_port']

        number_of_filter_scored = self.config['number_of_filter_scored']
        number_of_sum_up_players = self.config['number_of_sum_up_players']
        number_of_filters_columns = self.config['number_of_filters_columns']

        streamer_input = StreamerSubscriber(self.incoming_address, self.incoming_port,
                                            address_used_internally, range_port_init,
                                            1, number_of_filters_columns)

        filters_columns = []
        for i in range(number_of_filters_columns):
            filters_columns.append(
                FilterColumns(address_used_internally, range_port_init,
                              address_used_internally, range_port_init + 1))
        streamer_filtered_columns = Streamer(address_used_internally, range_port_init + 1,
                                             address_used_internally, range_port_init + 2,
                                             number_of_filters_columns, number_of_filter_scored)

        filters_scored = []
        for i in range(number_of_filter_scored):
            filters_scored.append(
                FilterScored(address_used_internally, range_port_init + 2,
                             address_used_internally, range_port_init + 3))
        streamer_scored_goals = StreamerPublisher(address_used_internally, range_port_init + 3,
                                                  address_used_internally, range_port_init + 4,
                                                  number_of_filter_scored, number_of_sum_up_players,
                                                  lambda x: x[1])

        # Add subscriber here
        players_summers = []
        for i in range(number_of_sum_up_players):
            players_summers.append(
                SumUpPlayers(self.config['sum_up_players'],
                             address_used_internally, range_port_init + 4,
                             address_used_internally, range_port_init + 5,
                             self.numerator_address, self.numerator_port))

        streamer_players = Streamer(address_used_internally, range_port_init + 5,
                                    address_used_internally, range_port_init + 6,
                                    number_of_sum_up_players, 1)

        ranking_maker = RankingMaker(address_used_internally, range_port_init + 6,
                                     address_used_internally, range_port_init + 7)

        sink = Sink(address_used_internally, range_port_init + 7, self.config['output_filename'])
        
        streamer_input.start()
        for filter_columns in filters_columns:
            filter_columns.start()
        streamer_filtered_columns.start()
        for filter_scored in filters_scored:
            filter_scored.start()
        streamer_scored_goals.start()
        for players_summer in players_summers:
            players_summer.start()
        streamer_players.start()
        ranking_maker.start()
        sink.start()


        streamer_input.join()
        for filter_columns in filters_columns:    
            filter_columns.join()
        streamer_filtered_columns.join()
        for filter_scored in filters_scored:
            filter_scored.join()
        streamer_scored_goals.join()
        for player_summer in players_summers:
            player_summer.join()
        streamer_players.join()
        ranking_maker.join()
        sink.join()
Example #36
class Client(object):

    """Basic Client class for polling server for jobs."""

    def __init__(
            self,
            base_url,
            token,
            ping_interval,
            update_interval,
            twitter_auth,
            output_config,
            source_addr=None):
        """construct client."""

        self.ping_interval = ping_interval
        self.update_interval = update_interval
        self.output_config = output_config
        self.source_addr = source_addr
        self.twitter_auth = twitter_auth  # used later by start_collection()
        self.active_job = None
        self.active_job_id = None
        self.log = logging.getLogger("Client")

        self.sm = ServerMessenger(
            base_url=base_url,
            token=token
        )

        self.job_checker = JobChecker(self.sm)


    def wait_for_job(self):
        """poll for an active job assignment."""

        while running is True:
            active_job = self.job_checker.getActiveJob()

            if active_job is not None:
                # store data
                self.active_job = active_job
                self.active_job_id = active_job["id"]

                # check status
                status = CaptureStatus(self.active_job["status"])
                if status.running():
                    return

            # sleep
            sleep(self.ping_interval)


    def start_collection(self):
        """Start collection."""
        # create our streamer
        collection_name = self.active_job["name"]
        self.listener = RotatingFileListener(
            collection_name=collection_name,
            **self.output_config
        )

        self.stream = Streamer(
            listener=self.listener,
            api_key=self.twitter_auth["api_key"],
            api_secret=self.twitter_auth["api_secret"],
            access_token=self.twitter_auth["access_token"],
            access_token_secret=self.twitter_auth["access_token_secret"],
            source_addr=self.source_addr
        )

        # set job id in server messenger
        self.sm.active_job_id = self.active_job_id


    def run_collection(self):
        """Run collection."""

        # start the collection
        self.start_collection()

        # make sure we got a valid stream
        if self.stream is None:
            self.log.error("stream was not started")
            return

        # old state defaults
        old_status = CaptureStatus(CaptureStatus.STATUS_UNKNOWN)

        # initialize keyword details
        term_checker = TermChecker(self.sm)

        # we haven't updated yet
        last_update = None

        # archived
        archived_date = None

        # run while valid
        while running is True and archived_date is None:

            # get job status from server
            status_msg = self.sm.getStatus()

            # if we got null, there's a problem with the server,
            # sleep and continue
            if status_msg is None:
                sleep(self.update_interval)
                continue

            # set up the status
            status = CaptureStatus(
                status_msg['status'] if 'status' in status_msg
                else CaptureStatus.STATUS_UNKNOWN)
            self.log.debug("got status: %d", status_msg['status'])



            # look for archived date and bail immediately
            archived_date = status_msg["archived_date"]
            if archived_date is not None:
                continue


            # are there any keyword changes?
            term_checker.checkTerms()


            # has the status changed?
            if old_status != status:
                self.log.info("changing status#1 %d -> %d", old_status, status)
                if status.isRunning():
                    if not self.stream.isRunning():
                        self.log.info("Starting stream")
                        self.stream.track_list = term_checker.terms
                        sm_total_count = status_msg['total_count']
                        if sm_total_count is not None:
                            self.listener.total = sm_total_count
                        self.stream.start()
                        # acknowledge that we have the newest keywords in here
                        term_checker.resetTermsChanged()
                elif status.isStopped():
                    if self.stream.isRunning():
                        self.log.info("Stopping stream")
                        self.stream.stop()
            elif term_checker.haveTermsChanged():
                self.stream.track_list = term_checker.terms
                if self.stream.isRunning():
                    self.log.debug("restarting streams for keywords")
                    self.stream.stop()
                    sleep(self.ping_interval)
                    self.stream.start()
                    term_checker.resetTermsChanged()


            # sleep
            sleep(self.ping_interval)

            # new status
            new_status = CaptureStatus(CaptureStatus.STATUS_UNKNOWN)
            if self.stream.isRunning():
                self.log.debug("stream exists and is running")
                if status != CaptureStatus.STATUS_STOPPING:
                    new_status = CaptureStatus.STATUS_STARTED
            else:
                if status != CaptureStatus.STATUS_STARTING:
                    self.log.debug(
                        "stream exists but is not running (forcing %d -> %d)",
                        status,
                        new_status)
                    new_status = CaptureStatus.STATUS_STOPPED



            # if there's a discrepancy
            if new_status != status and new_status != CaptureStatus.STATUS_UNKNOWN:
                self.log.info("changing status#2 %d -> %d", status, new_status)
                self.sm.updateStatus(new_status)

            # update the old status
            old_status = new_status

            # self.log.debug("running  - %s", running)

            # output status
            # send update status to server if we're running
            if self.stream.isRunning():
                self.listener.print_status()
                self.sm.pingServer(self.listener.total, self.listener.rate)

                do_update = False
                if last_update is None:
                    self.log.debug("initial update")
                    do_update = True
                else:
                    delta = datetime.now() - last_update
                    if delta.total_seconds() > self.update_interval:
                        self.log.debug(
                            "update delta: %f",
                            delta.total_seconds())
                        do_update = True
                    # else:
                        # self.log.debug("delta: %f", delta.total_seconds())

                # update to server
                if do_update is True:
                    self.sm.putUpdate(
                        self.listener.received,
                        self.listener.total,
                        self.listener.rate
                    )
                    last_update = datetime.now()


            else:
                self.log.debug("waiting for update")


        # wait for stream to stop
        if self.stream.isRunning():
            self.log.info("Stopping...")
            self.stream.stop()

            while self.stream.isRunning():
                self.log.info("Waiting for self.logger to stop")
                sleep(1)

        # allow our listener and stream to be deleted
        self.stream = None
        self.listener = None

    def run(self):
        """Start up the client running machine."""

        while running is True:
            # wait for an active job
            self.wait_for_job()

            if self.active_job is not None:
                self.log.info(
                    "working on job (id: %d, name: %s)",
                    self.active_job_id,
                    self.active_job["name"]
                )

                # start collection
                self.run_collection()
            else:
                sleep(self.ping_interval)
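A hypothetical wiring for the Client class above; every value is a placeholder, and the two dicts only show keys the class actually reads:

client = Client(
    base_url='https://example.com/api',
    token='<api-token>',
    ping_interval=5,
    update_interval=60,
    twitter_auth={'api_key': '...', 'api_secret': '...',
                  'access_token': '...', 'access_token_secret': '...'},
    output_config={},
)
client.run()  # loops for as long as the module-level 'running' flag is True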