Example 1
def main():
    # Init object
    Northernlion = stream.Stream("Northernlion")
    online = False
    # Run forever so it's always monitoring for streams
    print(f"Monitoring {Northernlion.channel}")
    while True:
        # If the channel is live, we start monitoring for what games they are playing
        Northernlion.liveCheck()
        if Northernlion.live:
            if not Northernlion.start:
                Northernlion.setStart()
                print(Northernlion.start)

            online = True
            print("Finding current game...")
            Northernlion.updateDocket()
            print(Northernlion.docket)

        elif online:
            # If the channel was online last time we checked but is no longer
            # Wait 3 minutes to make sure it doesn't come back online
            print(
                "Waiting 3 more minutes to make sure stream doesn't come back..."
            )
            for remaining in range(180, 0, -1):
                sys.stdout.write("\r")
                sys.stdout.write(f"{remaining} seconds remaining...")
                sys.stdout.flush()
                time.sleep(1)
            print()
            Northernlion.liveCheck()
            if not Northernlion.live:
                Northernlion.cleanDocket()
                print(f"Getting vod URL")
                Northernlion.findVOD()
                print(f"Getting top clip")
                Northernlion.findClip()
                print(f"Posting to Reddit")
                reddit.post(Northernlion)

                # Reset variables
                online = False
                del Northernlion
                Northernlion = stream.Stream("Northernlion")

        print("Sleeping for 1 minute before checking again...")
        for remaining in range(60, 0, -1):
            sys.stdout.write("\r")
            sys.stdout.write(f"{remaining} seconds remaining")
            sys.stdout.flush()
            time.sleep(1)
        print()
Example 2
 def control(self, t):
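     # A truthy t (re)starts the stream if it is not already alive; a falsy t stops and joins it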
     if t:
         if self.stream is not None:
             if self.stream.isAlive():
                 return {"running": True}
             else:
                 self.stream = streams.Stream()
         else:
             self.stream = streams.Stream()
         self.stream.start()
         return {"running": True}
     else:
         self.stream.stop()
         self.stream.join()
         return {"running": False}
Example 3
def startStreaming(new_streams, cid):
    # Loop through each stream in our list
    for active_stream in new_streams:
        # Connect to the stream
        with event_stream.Stream(active_stream, falcon, queue, config,
                                 cid) as active:
            start = False
            try:
                if active.partition in str(config["partition"]).split(","):
                    start = True
                if str(config["partition"]).lower() == "all":
                    start = True
            except KeyError:
                # Start all available streams if partition is not specified
                start = True
            if start:
                status.statusWrite(
                    "Starting listener on partition number {}...".format(
                        str(active.partition)))
                # Create a thread to handle stream processing, daemonize so the thread shuts down when we do
                t = threading.Thread(target=active.process, daemon=True)
                # Begin processing the stream's contents
                t.start()

    return True
Example 4
    def __init__(self, channelNames, name=None):
        name = str(name) if name is not None else self.__class__.__name__
        stream.Stream.__init__(self, channelNames, name='sensor.' + name)
        self.raw = stream.Stream(channelNames, self.name + '_raw')
        self.loadCalibration()

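        # Process new raw samples in newData whenever the raw stream updates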
        self.raw.updated.connect(self.newData)
Example 5
 def __init__(self, handles, data, rate, io_name, request_num, scan_rate):
     super(Thread, self).__init__()
     self.syncStream = stream.Stream(handles, rate)
     self.handles = handles
     self.data = data
     self.io_name = io_name
     self.request_num = request_num
     self.scan_rate = scan_rate
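     # namesToAddresses returns (addresses, data_types); keep only the register addresses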
     self.io_addr = ljm.namesToAddresses(len(io_name), io_name)[0]
Example 6
    def __init__(self, sensors):
        super().__init__()
        self.rotation_raw = stream.Stream(('pitch', 'roll', 'heading'), name='estimator.rotation_raw')

        self.sensors = sensors
        self.sensors.accelerometer.updated.connect(self.updateRotation)
        self.sensors.magnetometer.updated.connect(self.updateRotation)

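        # Fuse the raw accelerometer/magnetometer rotation estimate with the gyroscope via a complementary filter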
        self.rotation = filters.Complementary(self.rotation_raw, self.sensors.gyroscope, tau=50, name='estimator.rotation')
Example 7
import stream
from streamlist import StreamList

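# Register two Stream objects in a StreamList and print its JSON representation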
stream0 = stream.Stream('123', 'User')
stream1 = stream.Stream('999', 'Looser')
streamlist = StreamList()
streamlist.add_stream('123', stream0)
streamlist.add_stream('999', stream1)
print(streamlist.to_json())
Example 8
from __future__ import print_function

import os
import sys
sys.path.append('lib')

from itertools import izip
import numpy as np
import classify
import stream as img_stream
import serial
from log import sys_logger as logger

model = classify.Classify(10)
streamFactory = img_stream.Stream()
imgGenerator = streamFactory.from_camera(0)
ser = serial.Serial('mock')

scene = 0

logger.info('launched successfully')

while True:

    # check serial
    if ser.is_ready():
        cmd, payload = ser.get_payload()
        # fit the model
        if cmd == 0:
            sid = payload
Example 9
def cleanup(cohosts):
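    # Build a Stream object for each co-host channel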
    guests = []
    for channel in cohosts:
        guests.append(stream.Stream(channel, False))
    return guests
Example 10
 def setUp(self):
     self.stream = stream.Stream()
Example 11
def main():
    # Init variables to default values
    guests = cleanup(cohosts)
    Northernlion = stream.Stream("Northernlion", False)
    NLSS = stream.NLSS([], [])
    online = False
    startTime = None
    # Run forever so it's always monitoring for streams
    print(f"Monitoring {Northernlion.getName()}")
    while True:
        # If the channel is live, we start monitoring for what games they are playing
        if Northernlion.liveCheck():
            if not startTime:
                NLSS.setStart()
                startTime = NLSS.getStart()
                print(startTime)

            online = True
            print("Finding current game...")
            Northernlion.setGame()
            game = Northernlion.getGame()
            NLSS.addDocket(game)
            print(f"Checking for guests...")
            for guest in guests:
                if guest not in NLSS.getGuests():
                    guest.liveCheck()
                    if guest.getLive():
                        guest.setGame()
                        guestGame = guest.getGame()
                        if guestGame == game:
                            print(f"{guest.getName()} added to guests")
                            NLSS.addGuest(guest)
                else:
                    print(f"{guest.getName()} not added, already in list")

        elif online:
            # If the channel was online last time we checked but is no longer
            # Wait 3 minutes to make sure it doesn't come back online
            print("Waiting 3 more minutes to make sure stream doesn't come back...")
            for remaining in range(180, 0, -1):
                sys.stdout.write("\r")
                sys.stdout.write(f"{remaining} seconds remaining...")
                sys.stdout.flush()
                time.sleep(1)
            print()
            if not Northernlion.liveCheck():
                # Need to delete "unique" game entries, as sometimes the game being
                # played at the start is left over from last stream
                print(f"Cleaning entries from docket\n{NLSS.getDocket()}")
                NLSS.cleanDocket()
                print(f"Getting vod URL")
                NLSS.findVOD()
                print(f"Posting to Reddit")
                postReddit.post(NLSS.getDocket(),
                                NLSS.getVOD(), NLSS.getGuests(), NLSS.getClip())

                # Reset variables
                online = False
                startTime = None
                for guest in guests:
                    del guest
                del Northernlion
                del NLSS
                guests = cleanup(cohosts)
                Northernlion = stream.Stream("Northernlion", False)
                NLSS = stream.NLSS([], [])

        print("Sleeping for 1 minute before checking again...")
        for remaining in range(60, 0, -1):
            sys.stdout.write("\r")
            sys.stdout.write(f"{remaining} seconds remaining")
            sys.stdout.flush()
            time.sleep(1)
        print()
Example 12
    def loadOrigin(self, origin_id, orig, station_list):
        """
        Fetches Origin (and MT) from the scisola database
        and return it in origin object.
        Returns True/False and origin object/error.
        """

        _query = "SELECT `Origin`.`id`, `Origin`.`timestamp`, " + \
        "`Origin`.`datetime`, " + \
        "`Origin`.`magnitude`, `Origin`.`latitude`, " + \
        "`Origin`.`longitude`, `Origin`.`depth`, " + \
        "`Origin`.`automatic`, `Origin`.`results_dir`, " + \
        "`Event`.`id`, " + \
        "`Moment_Tensor`.`cent_shift`, " + \
        "`Moment_Tensor`.`cent_time`, `Moment_Tensor`." + \
        "`cent_latitude`, `Moment_Tensor`.`cent_longitude`, " + \
        "`Moment_Tensor`.`cent_depth`, `Moment_Tensor`." + \
        "`correlation`, `Moment_Tensor`.`var_reduction`, " + \
        "`Moment_Tensor`.`mw`, `Moment_Tensor`.`mrr`, " + \
        "`Moment_Tensor`.`mtt`, `Moment_Tensor`.`mpp`, " + \
        "`Moment_Tensor`.`mrt`, `Moment_Tensor`.`mrp`, " + \
        "`Moment_Tensor`.`mtp`, `Moment_Tensor`.`vol`, " + \
        "`Moment_Tensor`.`dc`, `Moment_Tensor`.`clvd`, " + \
        "`Moment_Tensor`.`mo`, `Moment_Tensor`.`strike`, " + \
        "`Moment_Tensor`.`dip`, `Moment_Tensor`.`rake`, " + \
        "`Moment_Tensor`.`strike_2`, `Moment_Tensor`.`dip_2`, " + \
        "`Moment_Tensor`.`rake_2`, `Moment_Tensor`.`p_azm`, " + \
        "`Moment_Tensor`.`p_plunge`, `Moment_Tensor`.`t_azm`, " + \
        "`Moment_Tensor`.`t_plunge`, `Moment_Tensor`.`b_azm`, " + \
        "`Moment_Tensor`.`b_plunge`, `Moment_Tensor`.`minSV`, " + \
        "`Moment_Tensor`.`maxSV`, `Moment_Tensor`.`CN`, " + \
        "`Moment_Tensor`.`stVar`, `Moment_Tensor`.`fmVar`, " + \
        "`Moment_Tensor`.`frequency_1`, `Moment_Tensor`." + \
        "`frequency_2`, `Moment_Tensor`.`frequency_3`, " + \
        "`Moment_Tensor`.`frequency_4` FROM `Origin` INNER JOIN " + \
        "`Event` ON `Origin`.`id` = `Event`.`Origin_id` INNER JOIN " + \
        "`Moment_Tensor` ON " + \
        "`Origin`.`id` = `Moment_Tensor`.`Origin_id` " + \
        "WHERE `Origin`.`id` = " + str(origin_id) + ";"

        _row = self.read([_query])[0][0]

        # converts string to datetime object
        _orig_tp = date.datetime.strptime(_row[1], "%Y/%m/%d %H:%M:%S.%f")

        orig = origin.Origin()
        orig.id = int(_row[0])
        orig.timestamp = _orig_tp
        orig.datetime = _row[2]
        orig.magnitude = float(_row[3])
        orig.latitude = float(_row[4])
        orig.longitude = float(_row[5])
        orig.depth = float(_row[6])
        orig.automatic = bool(_row[7])
        orig.results_dir = _row[8]
        orig.event_id = _row[9]

        orig.mt = origin.MomentTensor()

        orig.mt.cent_shift = int(_row[10])
        orig.mt.cent_time = float(_row[11])
        orig.mt.cent_latitude = float(_row[12])
        orig.mt.cent_longitude = float(_row[13])
        orig.mt.cent_depth = float(_row[14])
        orig.mt.correlation = float(_row[15])
        orig.mt.var_reduction = float(_row[16])
        orig.mt.mw = float(_row[17])
        orig.mt.mrr = float(_row[18])
        orig.mt.mtt = float(_row[19])
        orig.mt.mpp = float(_row[20])
        orig.mt.mrt = float(_row[21])
        orig.mt.mrp = float(_row[22])
        orig.mt.mtp = float(_row[23])
        orig.mt.vol = float(_row[24])
        orig.mt.dc = float(_row[25])
        orig.mt.clvd = float(_row[26])
        orig.mt.mo = float(_row[27])
        orig.mt.strike = float(_row[28])
        orig.mt.dip = float(_row[29])
        orig.mt.rake = float(_row[30])
        orig.mt.strike2 = float(_row[31])
        orig.mt.dip2 = float(_row[32])
        orig.mt.rake2 = float(_row[33])
        orig.mt.p_azm = float(_row[34])
        orig.mt.p_plunge = float(_row[35])
        orig.mt.t_azm = float(_row[36])
        orig.mt.t_plunge = float(_row[37])
        orig.mt.b_azm = float(_row[38])
        orig.mt.b_plunge = float(_row[39])
        orig.mt.minSV = float(_row[40])
        orig.mt.maxSV = float(_row[41])
        orig.mt.CN = float(_row[42])
        orig.mt.stVar = float(_row[43])
        orig.mt.fmVar = float(_row[44])
        orig.mt.frequency_1 = float(_row[45])
        orig.mt.frequency_2 = float(_row[46])
        orig.mt.frequency_3 = float(_row[47])
        orig.mt.frequency_4 = float(_row[48])

        _query = "SELECT DISTINCT `streamNetworkCode`, " + \
        "`streamStationCode` FROM `Stream_Contribution` " + \
        "WHERE `Origin_id` = " + str(orig.id) + ";"

        _rows = self.read([_query])[0]

        # get stations
        for _row in _rows:
            _station = stream.Station()
            _station.network = _row[0]
            _station.code = _row[1]

            _query = "SELECT `streamCode`, `var_reduction`, " + \
            "`mseed_path` FROM `Stream_Contribution` " + \
            "WHERE `Origin_id` = " + str(orig.id) + \
            " AND streamStationCode = '" + str(_station.code) + \
            "' ORDER BY `streamCode`;"

            _stream_rows = self.read([_query])[0]

            # get station's streams
            for _stream_row in _stream_rows:
                _stream = stream.Stream()
                _stream.code = _stream_row[0]
                _stream.reduction = float(_stream_row[1])
                _stream.mseed_path = _stream_row[2]
                _station.stream_list.append(_stream)

            station_list.append(_station)

        return orig, station_list
Example 13
    def loadStations(self, station_list):
        """
        Fetches Streams (and Stations) from the scisola database
        and saves them in station_list (list containing stations objects).
        Returns list -station_list-.
        """
        queries = []

        _query = "SELECT Station.code, Station.network, " + \
        "Station.description, Station.latitude, " + \
        "Station.longitude, Station.elevation, " + \
        "Station.priority FROM Station;"
        queries.append(_query)

        _query = "SELECT Station.network, Station.code, Stream.code, " + \
        "Station.description, Station.latitude, Station.longitude, " + \
        "Station.elevation, Stream.azimuth, Stream.dip, " + \
        "Stream.gain_sensor, Stream.gain_datalogger, " + \
        "Stream.norm_factor, Stream.nzeros, Stream.zeros_content, " + \
        "Stream.npoles, Stream.poles_content, Stream.priority " + \
        "FROM Station INNER JOIN Stream ON Station.id=Stream.station_id;"
        queries.append(_query)

        _station_rows, _stream_rows = self.read(queries)

        for _row in _station_rows:
            station = stream.Station()
            station.code = _row[0]
            station.network = _row[1]
            station.description = _row[2]
            station.latitude = _row[3]
            station.longitude = _row[4]
            station.elevation = _row[5]
            station.priority = int(_row[6])
            station_list.append(station)

        _stream_list = []
        for _row in _stream_rows:
            strm = stream.Stream()
            strm.station = stream.Station()
            strm.station.code = _row[1]
            strm.station.network = _row[0]
            strm.station.description = _row[3]
            strm.station.latitude = _row[4]
            strm.station.longitude = _row[5]
            strm.station.elevation = _row[6]
            strm.code = _row[2]
            strm.azimuth = _row[7]
            strm.dip = _row[8]
            strm.gain_sensor = _row[9]
            strm.gain_datalogger = _row[10]
            strm.norm_factor = _row[11]
            strm.nzeros = _row[12]
            strm.zeros_content = stream.blob2list(_row[13])
            strm.npoles = _row[14]
            strm.poles_content = stream.blob2list(_row[15])
            strm.priority = int(_row[16])
            _stream_list.append(strm)

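        # Attach each stream to its parent station by matching network and station code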
        for station in station_list:
            station.stream_list = [
                _stream for _stream in _stream_list
                if _stream.station.network == station.network
                and _stream.station.code == station.code
            ]

        return stream.removeEmptyStations(station_list)
Example 14
        #end='',                  # The end timestamp for the range of candles requested
        granularity='D',  # S5 S10 S15 S30 M1 M2 M3 M4 M5 M10 M15 M30 H1 H2 H3 H4 H6 H8 H12 D W M
        candleFormat='midpoint',  # midpoint or bidask
        #includeFirst='false',    # If it is set to “true”, the candlestick covered by the start timestamp will be returned
        #dailyAlignment=17,       # The hour of day used to align candles with hourly, daily, weekly, or monthly granularity
        #weeklyAlignment='Friday',# The day of the week used to align candles with weekly granularity
        #alignmentTimezone='America/New_York' # The timezone to be used for the dailyAlignment parameter
    )
    if response:
        print(str(response))

    #

    # Start real-time exchange-rate streaming
#   rates.start(accountId=account_id, instruments='AUD_CAD')
#   time.sleep(5)

# Shut down the API worker thread
    api.deinit()
    # Shut down the real-time exchange-rate streaming thread


#   rates.stop()

if __name__ == "__main__":
    api = rest.Api(environment, access_token)
    rates = stream.Stream(environment, access_token, True)
    events = stream.Stream(environment, access_token, False)
    main()
Example 15
        pass


if __name__ == '__main__':
    print("Start recording...")

    cli = deviceClient.Client(light_visualizer)

    cli_thread = threading.Thread(target=cli.run_client)
    cli_thread.daemon = True
    cli_thread.start()

    if len(sys.argv) >= 2:
        audio_stream = stream.FileStream(sys.argv[1])
    else:
        audio_stream = stream.Stream()
        lights_thread = threading.Thread(target=visualize,
                                         args=(light_visualizer, ))
        lights_thread.daemon = True
        lights_thread.start()

    light_visualizer.attach_stream(audio_stream)

    vis_thread = threading.Thread(target=light_visualizer.visualize)
    vis_thread.daemon = True
    vis_thread.start()

    if not is_pi:
        analyzer.update()

    vis_thread.join()