Example #1
 def main(self):
     #twitter authorization
     auth = OAuthHandler(AuthDetails.consumer_key, AuthDetails.consumer_secret)
     auth.set_access_token(AuthDetails.access_token, AuthDetails.access_token_secret)
     language = 'en'
     pt = ProcessTweet()
     searchTerm = pt.unicodetostring(self.searchTerm)
     stopAt = pt.unicodetostring(self.stopAt)
     # calls method to train the classifier
     tr = Training()
     (priors, likelihood) = tr.starttraining()
     #stream tweets from twitter
     twitterStream = Stream(auth, Listener(searchTerm, stopAt))
     twitterStream.filter(track=[searchTerm], languages=[language])
     sen = Sentiment()
     sentiment_tally = Counter()
     (sentiment_tally, tweet_list) = sen.gettweetstoanalyse(priors, likelihood, searchTerm)
     tr = Training()
     sen = Sentiment()
     (neutral, positive, negative) = sen.analyse(sentiment_tally)
     tweet_list = self.edittweetlists(tweet_list)
     #truncate streamtweets table
     self.removetweetsfromdatabase()
     #save training data
     tr.savetrainingdatatodb(priors, likelihood)
     return (neutral, positive, negative, tweet_list)
Example #2
    def add_link( self, url ):

        """
            Adds a link to the link widget.

            Only adds if it's not already present.
        """

        if url not in self.links:

            self.links.add( url )


            rowCounts = self.links_ui.rowCount()
            nextRow = rowCounts + 1
            nextPosition = rowCounts    # row count is the length, but position is zero-based

            self.links_ui.setRowCount( nextRow )

            urlEntry = QTableWidgetItem( url )
            statusEntry = QTableWidgetItem( '' )

            statusEntry.setTextAlignment( Qt.AlignCenter )

            urlEntry.setFlags( urlEntry.flags() & ~Qt.ItemIsEditable ) # not editable
            statusEntry.setFlags( statusEntry.flags() & ~Qt.ItemIsEditable ) # not editable

            self.links_ui.setItem( nextPosition, 0, urlEntry )
            self.links_ui.setItem( nextPosition, 1, statusEntry )


            # check if online
            stream = Stream( url.split() )

            stream.is_online( statusEntry )
Example #3
    def get(self):
        url = self.request.url
        stream_name = re.findall('searchStream=(\S+)',url)
        if len(stream_name) == 0:
            self.response.write(url)
        else:
            stream_name = re.findall('searchStream=(\S+)',url)[0]
            streams = Stream.query().fetch()
            nameList = list()
            for stream in streams:
                nameList.append(stream.name)

            index = list()
            for i in xrange(len(nameList)):
                index.append(LCS(nameList[i], stream_name))
            tmp = zip(index, nameList)
            tmp.sort(reverse = True)
            # we only show the five most relevant streams
            if len(tmp) < 5:
                showNum = len(tmp)
            else:
                showNum = 5
            self.response.write(SEARCH_PAGE_TEMPLATE)
            self.response.write('<p>%d results for <b>%s</b>,<br>  click on image to view stream</p>' % (showNum,stream_name))
            for i in xrange(showNum):
                stream = Stream.query(Stream.name==tmp[i][1]).fetch()[0]
                #self.response.write(stream.numberofpictures)
                if stream.numberofpictures > 0:
                    pictures=db.GqlQuery("SELECT * FROM Picture " +"WHERE ANCESTOR IS :1 "+"ORDER BY uploaddate DESC",db.Key.from_path('Stream',stream.name))
                    self.response.write('<table border="1" style="width:100%"><table style = "width:10%">')
                    self.response.out.write('<td><div style = "position:relative;"><a href = "%s"><img src="img?img_id=%s" ></img><div style = "position: absolute; left:150px; top:20px"></a>%s</div></div></td>' % (stream.url, pictures[0].key(),stream.name))
                    self.response.write('</table>')
                else:
                    self.response.out.write('<td><div style = "position:relative;"><a href = "%s"><img src="http://www.estatesale.com/img/no_image.gif" ></img><div style = "position: absolute; left:150px; top:20px"></a>%s</div></div></td>' % (stream.url, stream.name))
Example #4
 def _on_accept(self):
     _logger.debug('_on_accept')
     while True:
         try:
             sock, addr = self._fd.accept()
             _logger.debug('fd: %d accept fd: %d',
                           self._fd.fileno(), sock.fileno())
         except socket.error as msg:
             if msg.errno == errno.ECONNABORTED:
                 continue
             if msg.errno != errno.EAGAIN and msg.errno != errno.EINPROGRESS:
                 _logger.error('fd: %d, accept: %s',
                               self._fd.fileno(), os.strerror(msg.errno))
                 self._fd.close()
                 if self._onClosed is not None:
                     try:
                         self._onClosed(self)
                     except Exception as ex:
                         _logger.error('_onClosed: %s', str(ex))
                         _logger.exception(traceback.format_exc())
             return
         else:
             new_stream = Stream(sock, prefix=self._prefix)
             new_stream._connected = True
             try:
                 self._onAccepted(new_stream, addr)
             except Exception as e:
                 _logger.error('_onAccepted: %s', e)
                 _logger.exception(traceback.format_exc())
                 new_stream.close()
Example #5
    def get(self):
        search_item = self.request.get("search_item")
        print(search_item)
        streams = Stream.query().fetch()
        nameList = list()
        cover_list = []
        stream_list = []
        for stream in streams:
            nameList.append(stream.name)

        index = list()
        for i in xrange(len(nameList)):
            index.append(LCS(nameList[i], search_item))
        tmp = zip(index, nameList)
        tmp.sort(reverse=True)
        # we only show the five most relevant streams
        for i in xrange(len(tmp)):
            stream = Stream.query(Stream.name == tmp[i][1]).fetch()[0]
            stream_list.append(stream.name)
            # self.response.write(stream.numberofpictures)
            # if stream.numberofpictures > 0:
            #   pictures=db.GqlQuery("SELECT * FROM Picture " +"WHERE ANCESTOR IS :1 "+"ORDER BY uploaddate DESC",db.Key.from_path('Stream',stream.name))
            cover_list.append(stream.coverurl)

        dictPassed = {"streamList": stream_list, "displayCovers": cover_list}
        jsonObj = json.dumps(dictPassed, sort_keys=True, indent=4, separators=(",", ": "))
        print("write json")
        self.response.write(jsonObj)
Example #6
    def __pipe__(self, inpipe):
        accept_branch, reject_branch = itertools.tee(iter(inpipe))
        accept = lambda x: abs(x) <  self.thresh
        reject = lambda x: abs(x) >= self.thresh
        self.iterator = itertools.ifilter(accept, accept_branch)
        reject_branch = itertools.ifilter(reject, reject_branch)
        Stream.pipe(reject_branch, self.named_stream)
        return self
Example #7
 def handle(self):
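     # Respond to simple text commands from the client: return the current stream URL or stop the stream.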
     self.data = self.request.recv(1024).strip()
     if self.data == "getStreamUrl":
         stream_url = Stream.getStream()
         self.request.sendall(stream_url)
     elif self.data == "stopStream":
         Stream.stopStream()
         self.request.sendall("StreamClosed")
Example #8
    def __init__(self, **kw):
        """ Default constructor
        Creates and opens a serial port

        **kw - keyword arguments to pass into a pySerial serial port
        """
        Stream.__init__(self)
        self.port = serial.Serial(**kw)
Example #9
def main(argv):
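    # Start a Stream server on the port given on the command line.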
    if len(argv) < 2:
        print("usage: python server.py [port]")
        return 1
    port = int(argv[1])
    stream = Stream(port)
    stream.accept_incoming_connections()
    stream.start()
Example #10
    def test_produce(self):
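        # produce() should forward the data to the registered produce_cb callback.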
        test = []

        def test_produce_cb(stream, data):
            test.append(data)

        stream = Stream()
        stream.produce_cb = test_produce_cb
        stream.produce("test")
        self.assertEqual(test, ["test"])
Example #11
    def _on_decode_error(self, received):
        self._disable_heartbeat()
        self._stream._encoders = []
        backend = Stream(prefix="SIMPLE")

        def tunnel_ready_to_send(_):
            backend.start_receiving()

        def tunnel_send_buffer_full(_):
            backend.stop_receiving()

        def tunnel_received(_, data, _addr):
            backend.send(data)
            return backend.is_ready_to_send()

        def tunnel_closed(_):
            backend.close()

        def backend_received(_, data, _addr):
            self._stream.send(data)
            return self._stream.is_ready_to_send()

        def backend_closed(_self):
            self._stream.close()

        self._stream.set_on_ready_to_send(tunnel_ready_to_send)
        self._stream.set_on_send_buffer_full(tunnel_send_buffer_full)
        self._stream.set_on_received(tunnel_received)
        self._stream.set_on_closed(tunnel_closed)
        backend.set_on_received(backend_received)
        backend.set_on_closed(backend_closed)
        if received is not None and len(received) > 0:
            backend.send(received)
        backend.connect(UNKNOWN_CONN_ADDR, UNKNOWN_CONN_PORT)
Example #12
    def select_stream_from_entry( self ):

        """
            Gets the values from the ui elements, and executes the program in json mode, to determine if the values are valid
        """
        url = self.get_complete_url()
        split_url = url.split()

        self.messages_ui.append( 'Trying to open stream: {}'.format( url ) )

        stream = Stream( split_url )

        stream.start( self.messages_ui )
Example #13
class StreamTestCase(AsyncHTTPTestCase):
    def get_app(self):
        return Application([('/',HelloHandler)])

    def test_read(self):
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
        s.connect(("localhost", self.get_http_port()))
        self.stream = Stream(s, io_loop=self.io_loop)
        self.stream.write(b"GET / HTTP/1.0\r\n\r\n")
        self.stream.on("data", self.on_data1)
        self.wait()

    def on_data1(self, data):
        self.assertEqual(data.split("\r\n\r\n")[-1], "Hello")
        self.stop()
Example #14
    def test_consume(self):
        stream = Stream()

        # test consume without filter
        stream.queue.put(1)
        stream.queue.put(2)
        self.assertEqual(stream.consume(), 1)
        self.assertEqual(stream.consume(), 2)

        # test consume with filter
        stream.queue.put(1)
        stream.queue.put(2)
        stream.queue.put(3)

        self.assertEqual(stream.consume(lambda data: data == 2), 2)
        self.assertEqual(stream.consume(), 3)
Example #15
    def initialize(self):
        if self._stream is None:
            self._stream = Stream(prefix='TUNNEL')

        # self._stream.set_buffer_size(BUFF_SIZE)
        self._stream.set_tcp_no_delay()

        self._stream.append_send_handler(obscure.pack_data)
        self._stream.append_send_handler(obscure.random_padding)
        # self._stream.append_send_handler(obscure.gen_aes_encrypt())
        self._stream.append_send_handler(obscure.gen_xor_encrypt())
        # self._stream.append_send_handler(obscure.base64_encode)
        self._stream.append_send_handler(obscure.gen_http_encode(self._connect_to is not None))
        self._stream.append_receive_handler(obscure.gen_http_decode(self._connect_to is not None))
        # self._stream.append_receive_handler(obscure.base64_decode)
        self._stream.append_receive_handler(obscure.gen_xor_decrypt())
        # self._stream.append_receive_handler(obscure.gen_aes_decrypt())
        self._stream.append_receive_handler(obscure.unpad_random)
        self._stream.append_receive_handler(obscure.unpack_data)

        self._stream.set_on_ready_to_send(lambda _: self._on_tunnel_ready_to_send())
        self._stream.set_on_send_buffer_full(lambda _: self._on_tunnel_send_buffer_full())
        self._stream.set_on_received(lambda _, data, addr: self._on_received(data, addr))
        self._stream.set_on_fin_received(lambda _: self._on_fin_received())
        self._stream.set_on_closed(lambda _: self._on_closed())

        if self._connect_to is not None:
            self._stream.connect(*self._connect_to)
        else:
            self._stream.set_on_decode_error(lambda _, received: self._on_decode_error(received))
            self._stream.start_receiving()
        self._enable_heartbeat()
Example #16
    def post(self):
        original_url0 = self.request.headers["Referer"]
        original_url = original_url0
        if "%3D" not in original_url:
            original_url += "%3D%3D"
            original_url += users.get_current_user().nickname()

        stream_name = re.findall("=(.*)%3D%3D", original_url)
        if len(stream_name) < 1:
            stream_name = re.findall("%3D(.*)%3D%3D", original_url)[0]
        else:
            stream_name = stream_name[0]

        user_name = re.findall("%3D%3D(.*)\?", original_url)
        if len(user_name) < 1:
            user_name = re.findall("%3D%3D(.*)", original_url)[0]
        else:
            user_name = user_name[0]

        user_name = user_name.split("%40")
        if len(user_name) > 1:
            user_name = user_name[0] + "@" + user_name[1]
        else:
            user_name = user_name[0]

        # Change!# stream=Stream.query(Stream.name==stream_name, Stream.author_name==user_name).fetch()[0]
        stream = Stream.query(Stream.name == stream_name).fetch()[0]

        if users.get_current_user():
            # stream.subscribers.append(users.get_current_user().nickname())
            # users.get_current_user().email() = users.get_current_user().email().lower()
            stream.subscribers.append(users.get_current_user().email().lower())
        stream.put()

        self.redirect(original_url0)
Example #17
    def get(self):
        status = (0, 0)

        stream_name = re.findall("%3D(.*)", self.request.url)[0]

        # Change!# stream=Stream.query(Stream.name==stream_name, Stream.author==users.get_current_user()).fetch()[0]
        stream = Stream.query(Stream.name == stream_name).fetch()[0]

        if stream.author == users.get_current_user():
            status = (1, 1)
        elif users.get_current_user():
            status = (1, 0)
        else:
            self.redirect(users.create_login_url(self.request.url))

        pictures = db.GqlQuery(
            "SELECT * FROM Picture " + "WHERE ANCESTOR IS :1 " + "ORDER BY uploaddate DESC LIMIT 3",
            db.Key.from_path("Stream", stream_name),
        )

        uploadurl = blobstore.create_upload_url("/upload")
        showmoreurl = urllib.urlencode({"showmore": stream.name + "==" + users.get_current_user().nickname()})
        geoviewurl = urllib.urlencode({"geoview": stream.name + "==" + users.get_current_user().nickname()})
        template_values = {
            "user_name": users.get_current_user().nickname(),
            "showmoreurl": showmoreurl,
            "stream_name": stream_name,
            "pictures": pictures,
            "status": status,
            "uploadurl": uploadurl,
            "geoviewurl": geoviewurl,
        }
        template = JINJA_ENVIRONMENT.get_template("viewsinglestream_index.html")
        self.response.write(template.render(template_values))
Example #18
class Scanner(object):
    def initialize_scan(self, string):
        self.tokens = []
        self.stream = Stream(string)

    def produce(self, token):
        self.tokens.append(token)

    def tokenize(self):
        while self.tokenize_one():
            pass
        return self.tokens

    def tokenize_one(self):
        i = self.stream.index

        if self.stream.eof():
            self.eof()
            return False

        for tokenizer_pair in self.state:
            tokenizer = tokenizer_pair[0]
            action = tokenizer_pair[1]
            if tokenizer.applies(self.stream):
                j = self.stream.index
                action(self, tokenizer.token(self.stream, i, j))
                return True

        raise StandardError("No tokenizer applied.")
Example #19
class Test:
  def __init__(self, stream):
    self.display = Display()
    self.stream = Stream(stream)

  def parse(self):
    if not self.stream.line:
      return False
    parse = self.stream.line.split('#')
    if len(parse) == 2:
      try:
        self.result = int(parse[0].rstrip(' '))
        self.message = parse[1].rstrip(' ')
      except:
        return False
      if self.result != 0 and self.result != 1:
        return False
      return True
    return False

  def launch(self):
    success = 0
    total = 0
    while self.stream.read():
      if self.parse():
        if self.result == 1:
          success += 1
        else:
          self.display.error("Error on " + self.message)
        total += 1
    self.display.summary(success, total)
Example #20
    def get(self):
        status = (0, 0)

        stream_name = re.findall('%3D(.*)', self.request.url)[0]

        # Change!# stream=Stream.query(Stream.name==stream_name, Stream.author==users.get_current_user()).fetch()[0]
        stream = Stream.query(Stream.name == stream_name).fetch()[0]

        if stream.author == users.get_current_user():
            status = (1, 1)
        elif users.get_current_user():
            status = (1, 0)
        else:
            self.redirect(users.create_login_url(self.request.url))

        pictures = db.GqlQuery("SELECT * FROM Picture " + "WHERE ANCESTOR IS :1 " + "ORDER BY uploaddate DESC LIMIT 3", db.Key.from_path('Stream', stream_name))

        uploadurl = blobstore.create_upload_url('/upload')
        showmoreurl = urllib.urlencode({'showmore': stream.name + "==" + users.get_current_user().nickname()})
        geoviewurl = urllib.urlencode({'geoview': stream.name + "==" + users.get_current_user().nickname()})
        template_values = {
            'user_name': users.get_current_user().nickname(),
            'showmoreurl': showmoreurl,
            'stream_name': stream_name,
            'pictures': pictures,
            'status': status,
            'uploadurl': uploadurl,
            'geoviewurl': geoviewurl,
        }
        template = JINJA_ENVIRONMENT.get_template('viewsinglestream_index.html')
        self.response.write(template.render(template_values))
Example #21
    def setUp(self):
        print("Starting test...")
        self.freq = 10
        self._server = SocketWriter(freq=self.freq)  # First tests: producer at 10 Hz

        self.com = ComChannel("localhost")
        self.stream = Stream(self.com, 61000, maxlength=3)
Example #22
    def post(self):
        original_url0 = self.request.headers['Referer']
        original_url = original_url0
        if "%3D" not in original_url:
            original_url += '%3D%3D'
            original_url += users.get_current_user().nickname()

        stream_name = re.findall('=(.*)%3D%3D', original_url)
        if len(stream_name) < 1:
            stream_name = re.findall('%3D(.*)%3D%3D', original_url)[0]
        else:
            stream_name = stream_name[0]

        user_name = re.findall('%3D%3D(.*)\?', original_url)
        if len(user_name) < 1:
            user_name = re.findall('%3D%3D(.*)', original_url)[0]
        else:
            user_name = user_name[0]

        user_name = user_name.split('%40')
        if len(user_name) > 1:
            user_name = user_name[0] + '@' + user_name[1]
        else:
            user_name = user_name[0]

        # Change!# stream=Stream.query(Stream.name==stream_name, Stream.author_name==user_name).fetch()[0]
        stream = Stream.query(Stream.name == stream_name).fetch()[0]

        if users.get_current_user():
            # stream.subscribers.append(users.get_current_user().nickname())
            stream.subscribers.append(users.get_current_user().email())
        stream.put()

        self.redirect(original_url0)
Example #23
    def get(self, photoIndexes, currentLocation):
        passedInCoord = currentLocation.split('_')
        lat = float(passedInCoord[0])
        lon = float(passedInCoord[1])

        displayImageObjs = []
        # displayPhotoList = []

        stream_query = Stream.query()
        for stream in stream_query:
            photos = db.GqlQuery("SELECT * FROM Picture " + "WHERE ANCESTOR IS :1 " +"ORDER BY uploaddate DESC" , db.Key.from_path('Stream', stream.name))
            for photo in photos:
                photoUrl = images.get_serving_url(photo.imgkey)
                photoUrl = str(photoUrl) + "=s500"

                photoDict = {}
                photoDict["photoServingURL"] = photoUrl
                photoDict["date"] = str(photo.uploaddate)
                photoDict["loc"] = str(photo.loc)
                photoDict["streamName"] = str(stream.name)
                # photoDict["streamID"] = str(stream.key.id())
                photoCoord = str(photo.loc).split(',')
                plat = float(photoCoord[0])
                plon = float(photoCoord[1])
                R = 6373.0

                lat1 = radians(lat)
                lon1 = radians(lon)
                lat2 = radians(plat)
                lon2 = radians(plon)

                dlon = lon2 - lon1
                dlat = lat2 - lat1
                a = (sin(dlat / 2)) ** 2 + cos(lat1) * cos(lat2) * (sin(dlon / 2)) ** 2
                c = 2 * atan2(sqrt(a), sqrt(1-a))
                distance = R * c
                photoDict["actualDistance"] = distance
                if distance > 10:
                    photoDict["strDistance"] = str(distance).split('.', 1)[0] + 'km'
                else:
                    photoDict["strDistance"] = str(distance * 1000).split('.', 1)[0] + 'm'
                displayImageObjs.append(photoDict)

        displayImageObjs = sorted(displayImageObjs, key=lambda k: k['actualDistance'])
        passedPhotos = []
        morePhotos = "False"
        indexURL = photoIndexes
        indexList = str(photoIndexes).split('_')
        if len(displayImageObjs) - 1 > int(indexList[1]):
            for i in range(int(indexList[0]), int(indexList[1]) + 1):
                passedPhotos.append(displayImageObjs[i])
            indexURL = str(int(indexList[0]) + 16) + '_' + str(int(indexList[1]) + 16)
            morePhotos = "True"
        else:
            for i in range(int(indexList[0]), len(displayImageObjs)):
                passedPhotos.append(displayImageObjs[i])

        dictPassed = {'user': None, 'morePhotos': morePhotos, 'indexURL': indexURL, 'displayImageObjs': passedPhotos}  # 'displayPhotoList': displayStreamList
        jsonObj = json.dumps(dictPassed, sort_keys=True, indent=4, separators=(',', ': '))
        self.response.write(jsonObj)
Example #24
    def __init__(self, first, second):
        MiniEngine.__init__(self)

        self._first = first
        self._second = second

        # Construct the schema of the output stream.
        self._schema = Schema()
        for a in self._first.schema() + self._second.schema():
            self._schema.append(a)

        self._queue = Queue(100)
        
        self._first_ep = self._first.connect()
        self._first_ep.notify(self._queue)

        self._second_ep = self._second.connect()
        self._second_ep.notify(self._queue)
        
        self._output = Stream(
            self._schema,
            SortOrder(),
            'Join'
        )

        self._m = {
            self._first_ep: self._first,
            self._second_ep: self._second,
        }

        self._empty = 0
Example #25
    def __init__(self, query_stream, data_source, access_method):
        MiniEngine.__init__(self)
        self._query_stream = query_stream
        
        # Create an accessor for the combination of access method and data
        # source. This should fail if access method and data source are not
        # compatible.
        self._accessor = access_method(data_source)
        self._data_source = data_source
        self._access_method = access_method

        # make sure the accessor understands the query schema
        assert self._accessor.accepts(self._query_stream.schema())
        self._query_stream_ep = self._query_stream.connect()

        # Create an output stream for this data accessor. The schema of the
        # output stream is determined by the data source.
        output_schema = self._data_source.schema()
    
        # We can only reasonably infer the sort order if all of the query
        # attributes are included in the output schema.
        if query_stream.sort_order() in output_schema:
            # The new sort order is the same as that of the query stream.
            sort_order = query_stream.sort_order()
        else:
            # No sort order can be inferred; using empty.
            sort_order = SortOrder()

        self._output_stream = Stream(
            output_schema, 
            sort_order, 
            'DATA ACCESSOR'
        )
Example #26
 def get(self):
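     # Rebuild the stream_name_set entities from the current set of streams.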
     streams = Stream.query().fetch()
     ndb.delete_multi(stream_name_set.query().fetch(keys_only=True))
     for stream in streams:
         name_set = stream_name_set()
         name_set.name = stream.name
         name_set.put()
Example #27
    def __init__(self, input_stream, sort_attributes, all = False):
        MiniEngine.__init__(self)
        self._input_stream = input_stream
        self._input_ep = input_stream.connect()
        self._schema = self._input_stream.schema()
        self._all = all
        self._indices = []
        # Passed as attribues = [('name', comparison_function), ...]
        for a in sort_attributes:
            # Check if the given attribute exists in the schema.
            i = self._schema.index(a[0])
            t = self._schema[i].type()

            if a[1]:
                # If a comparison function is specified, use it.
                self._indices.append((i, a[1]))
            elif hasattr(t, '__cmp__'):
                # Otherwise test if the given type has a comparator and use
                # it.
                self._indices.append((i, None))
            else:
                raise Exception(('Type of attribute [%s] does not have '
                                 'a comparison operator.') % (a,))

        self._output_stream = Stream(
            self._schema,
            SortOrder(), 
            'SORT'
        )
Example #28
    def select_stream_from_link( self, tableWidgetItem ):

        row = tableWidgetItem.row()

        urlItem = self.links_ui.item( row, 0 )  # the url is in the first column

        url = urlItem.text()

        split_url = url.split()

        self.messages_ui.append( 'Trying to open stream: {}'.format( url ) )


        stream = Stream( split_url )

        stream.start( self.messages_ui )
Example #29
    def get(self):
        stream_name = self.request.get("stream_name")
        email = self.request.get("email")
        caption = []
        displayImages = []
        # print stream_name
        pictures = db.GqlQuery("SELECT * FROM Picture " + "WHERE ANCESTOR IS :1 " + "ORDER BY uploaddate DESC", db.Key.from_path('Stream', stream_name))
        stream = Stream.query(Stream.name == stream_name).fetch()[0]

        if stream.author_name.lower() != email.split("@", 1)[0]:
            count = CountViews.query(CountViews.name == stream.name, ancestor=ndb.Key('User', stream.author_name)).fetch()[0]
            count.numbers = count.numbers + 1
            count.totalviews = count.totalviews + 1
            count.put()

        for pic in pictures:
            url = images.get_serving_url(pic.imgkey)
            url = url + "=s500"
            displayImages.append(url)
            caption.append(pic.caption)
            # print url

        dictPassed = {'displayImages': displayImages, 'caption': caption, 'author': str(stream.author)}
        # dictPassed = {'displayImages': displayImages, 'caption': caption}
        jsonObj = json.dumps(dictPassed, sort_keys=True, indent=4, separators=(',', ': '))
        self.response.write(jsonObj)
Example #30
 def test_read(self):
     s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
     s.connect(("localhost", self.get_http_port()))
     self.stream = Stream(s, io_loop=self.io_loop)
     self.stream.write(b"GET / HTTP/1.0\r\n\r\n")
     self.stream.on("data", self.on_data1)
     self.wait()
Example #31
def zip_stream_f(in_streams):
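    # Create an output stream, attach a zip_stream agent that fills it from in_streams, and return it.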
    out_stream = Stream('output of zip_stream')
    zip_stream(in_streams, out_stream)
    return out_stream
Example #32
def test_split_agents():
    import numpy as np
    scheduler = Stream.scheduler

    s = Stream('s')

    u = Stream('u')
    v = Stream('v')
    w = Stream('w')

    y = Stream('y')
    z = Stream('z')

    # Test split
    # func operates on a single element of the single input stream and
    # return a list of elements, one for each output stream.
    def h(element):
        return [element + 1, element * 2]

    def h_args(element, addend, multiplier):
        return [element + addend, element * multiplier]

    in_stream_split = Stream('in_stream_split')
    r = Stream('r')
    t = Stream('t')
    e = split_element(func=h,
                      in_stream=in_stream_split,
                      out_streams=[r, t],
                      name='e')
    r_split, t_split = split_element_f(
        function=h,
        in_stream=in_stream_split,
        num_out_streams=2,
    )
    r_args, t_args = split_element_f(h_args,
                                     in_stream_split,
                                     2,
                                     addend=1,
                                     multiplier=2)

    scheduler.step()
    assert recent_values(r) == []
    assert recent_values(t) == []
    assert recent_values(r_split) == recent_values(r)
    assert recent_values(t_split) == recent_values(t)
    assert recent_values(r_args) == recent_values(r)
    assert recent_values(t_args) == recent_values(t)

    in_stream_split.extend(list(range(5)))
    scheduler.step()
    assert recent_values(r) == [1, 2, 3, 4, 5]
    assert recent_values(t) == [0, 2, 4, 6, 8]
    assert recent_values(r_split) == recent_values(r)
    assert recent_values(t_split) == recent_values(t)
    assert recent_values(r_args) == recent_values(r)
    assert recent_values(t_args) == recent_values(t)

    in_stream_split.append(10)
    scheduler.step()
    assert recent_values(r) == [1, 2, 3, 4, 5, 11]
    assert recent_values(t) == [0, 2, 4, 6, 8, 20]

    in_stream_split.extend([20, 100])
    scheduler.step()
    assert recent_values(r) == [1, 2, 3, 4, 5, 11, 21, 101]
    assert recent_values(t) == [0, 2, 4, 6, 8, 20, 40, 200]
    assert recent_values(r_split) == recent_values(r)
    assert recent_values(t_split) == recent_values(t)
    assert recent_values(r_args) == recent_values(r)
    assert recent_values(t_args) == recent_values(t)

    # Test split with kwargs
    def f_list(element, list_of_functions):
        return [f(element) for f in list_of_functions]

    def f_0(element):
        return element * 2

    def f_1(element):
        return element + 10

    x = Stream('x')
    rr = Stream('rr')
    tt = Stream('tt')
    ee = split_element(func=f_list,
                       in_stream=x,
                       out_streams=[rr, tt],
                       name='ee',
                       list_of_functions=[f_0, f_1])
    x.extend(list(range(5)))
    scheduler.step()
    assert recent_values(rr) == [0, 2, 4, 6, 8]
    assert recent_values(tt) == [10, 11, 12, 13, 14]

    # ------------------------------------
    # Test split with state
    # func operates on an element of the single input stream and state.
    # func returns a list with one element for each output stream.
    def h_state(element, state):
        return ([element + state, element * state], state + 1)

    r_state = Stream(name='r_state')
    t_state = Stream(name='t_state')
    in_stream_split_state = Stream('in_stream_split_state')

    e_state = split_element(func=h_state,
                            in_stream=in_stream_split_state,
                            out_streams=[r_state, t_state],
                            name='e',
                            state=0)

    scheduler.step()
    assert recent_values(r_state) == []
    assert recent_values(t_state) == []

    in_stream_split_state.extend(list(range(5)))
    scheduler.step()
    assert recent_values(r_state) == [0, 2, 4, 6, 8]
    assert recent_values(t_state) == [0, 1, 4, 9, 16]

    in_stream_split_state.append(20)
    scheduler.step()
    assert recent_values(r_state) == [0, 2, 4, 6, 8, 25]
    assert recent_values(t_state) == [0, 1, 4, 9, 16, 100]

    in_stream_split_state.extend([44, 93])
    scheduler.step()
    assert recent_values(r_state) == [0, 2, 4, 6, 8, 25, 50, 100]
    assert recent_values(t_state) == [0, 1, 4, 9, 16, 100, 264, 651]

    # ------------------------------------
    # Test split with state and args

    def hh_state(element, state, increment):
        return ([element + state, element * state], state + increment)

    rr_state = Stream(name='rr_state')
    tt_state = Stream(name='tt_state')
    in_stream_split_state_funcargs = Stream('in_stream_split_state_funcargs')

    ee_state_agent = split_element(func=hh_state,
                                   in_stream=in_stream_split_state_funcargs,
                                   out_streams=[rr_state, tt_state],
                                   name='ee_state_agent',
                                   state=0,
                                   increment=10)

    scheduler.step()
    assert recent_values(rr_state) == []
    assert recent_values(tt_state) == []

    in_stream_split_state_funcargs.extend(list(range(5)))
    scheduler.step()
    assert recent_values(rr_state) == [0, 11, 22, 33, 44]
    assert recent_values(tt_state) == [0, 10, 40, 90, 160]

    #------------------------------------------------------------------------------------------------
    #                                     UNZIP AGENT TESTS
    #------------------------------------------------------------------------------------------------

    s_unzip = Stream('s_unzip')
    u_unzip = Stream('u_unzip')
    x_unzip = Stream('x_unzip')

    # ------------------------------------
    # Test unzip
    unzip(in_stream=s_unzip, out_streams=[x_unzip, u_unzip])
    d_unzip_fn = unzip_f(s_unzip, 2)

    s_unzip.extend([(1, 10), (2, 15), (3, 18)])
    scheduler.step()
    assert recent_values(x_unzip) == [1, 2, 3]
    assert recent_values(u_unzip) == [10, 15, 18]
    assert recent_values(d_unzip_fn[0]) == x_unzip.recent[:3]
    assert recent_values(d_unzip_fn[1]) == u_unzip.recent[:3]

    s_unzip.extend([(37, 96)])
    scheduler.step()
    assert recent_values(x_unzip) == [1, 2, 3, 37]
    assert recent_values(u_unzip) == [10, 15, 18, 96]
    assert recent_values(d_unzip_fn[0]) == x_unzip.recent[:4]
    assert recent_values(d_unzip_fn[1]) == u_unzip.recent[:4]

    #------------------------------------------------------------------------------------------------
    #                                     SEPARATE AGENT TESTS
    #------------------------------------------------------------------------------------------------
    s_separate = Stream('s separate')
    u_separate = Stream('u separate')
    x_separate = Stream('x separate')

    d_separate = separate(in_stream=s_separate,
                          out_streams=[x_separate, u_separate],
                          name='d separate')
    x_sep_func, u_sep_func = separate_f(s_separate, 2)

    s_separate.extend([(0, 10), (1, 15), (0, 20)])
    scheduler.step()
    assert recent_values(x_separate) == [10, 20]
    assert recent_values(u_separate) == [15]
    assert x_sep_func.recent == x_separate.recent
    assert u_sep_func.recent == u_separate.recent

    s_separate.extend([(1, 96)])
    scheduler.step()
    assert recent_values(x_separate) == [10, 20]
    assert recent_values(u_separate) == [15, 96]
    assert recent_values(x_sep_func) == recent_values(x_separate)
    assert recent_values(u_sep_func) == recent_values(u_separate)

    #------------------------------------------------------------------------------------------------
    #                                     TIMED_UNZIP TESTS
    #------------------------------------------------------------------------------------------------
    # timed_unzip tests
    t_unzip = Stream()
    a_unzip = Stream('a_unzip')
    b_unzip = Stream('b_unzip')

    timed_unzip(t_unzip, [a_unzip, b_unzip])
    t_unzip_0, t_unzip_1 = timed_unzip_f(in_stream=t_unzip, num_out_streams=2)

    t_unzip.extend([(1, ["A", None]), (5, ["B", "a"]), (7, [None, "b"]),
                    (9, ["C", "c"]), (10, [None, "d"])])

    scheduler.step()
    assert recent_values(t_unzip_0) == [(1, 'A'), (5, 'B'), (9, 'C')]
    assert recent_values(t_unzip_1) == [(5, 'a'), (7, 'b'), (9, 'c'),
                                        (10, 'd')]
    assert recent_values(a_unzip) == recent_values(t_unzip_0)
    assert recent_values(b_unzip) == recent_values(t_unzip_1)

    #------------------------------------------------------------------------------------------------
    #                               TEST SPLIT WITH STREAM_ARRAY
    #------------------------------------------------------------------------------------------------
    # Test split_element with StreamArray
    x = StreamArray('x')
    y = StreamArray('y')
    z = StreamArray('z')

    def h_args(element, addend, multiplier):
        return [element + addend, element * multiplier]

    this_agent = split_element(func=h_args,
                               in_stream=x,
                               out_streams=[y, z],
                               addend=1.0,
                               multiplier=2.0,
                               name='this_agent')

    add_to_x = np.linspace(0.0, 4.0, 5)
    x.extend(add_to_x)
    scheduler.step()
    assert np.array_equal(recent_values(y), add_to_x + 1.0)
    assert np.array_equal(recent_values(z), add_to_x * 2.0)

    # Test separate with StreamArray
    x = StreamArray('x', dimension=2)
    y = StreamArray('y')
    z = StreamArray('z')

    separate(x, [y, z])
    x.append(np.array([1.0, 10.0]))
    scheduler.step()
    assert np.array_equal(recent_values(z), np.array([10.0]))
    assert np.array_equal(recent_values(y), np.array([]))

    x.extend(np.array([[0.0, 2.0], [1.0, 20.0], [0.0, 4.0]]))
    scheduler.step()
    assert np.array_equal(recent_values(z), np.array([10.0, 20.0]))
    assert np.array_equal(recent_values(y), np.array([2.0, 4.0]))

    # ------------------------------------------------------
    # TEST split_list
    # ------------------------------------------------------
    x = Stream('x')
    y = Stream('y')
    z = Stream('z')

    def f(lst):
        return [v * 2 for v in lst], [v * 10 for v in lst]

    split_list(f, x, [y, z])

    x.extend(list(range(3)))
    scheduler.step()
    assert recent_values(y) == [v * 2 for v in recent_values(x)]
    assert recent_values(z) == [v * 10 for v in recent_values(x)]

    x.append(100)
    scheduler.step()
    assert recent_values(y) == [v * 2 for v in recent_values(x)]
    assert recent_values(z) == [v * 10 for v in recent_values(x)]

    # ------------------------------------------------------
    # TEST split_window
    # ------------------------------------------------------
    def f(window):
        return max(window), min(window)

    x = Stream('x')
    y = Stream('y')
    z = Stream('z')

    split_window(func=f,
                 in_stream=x,
                 out_streams=[y, z],
                 window_size=2,
                 step_size=2)

    x.extend(list(range(7)))
    scheduler.step()
    assert recent_values(y) == [1, 3, 5]
    assert recent_values(z) == [0, 2, 4]

    def f(window):
        return max(window), min(window)

    x = Stream('x')
    y = Stream('y')
    z = Stream('z')

    split_window(func=f,
                 in_stream=x,
                 out_streams=[y, z],
                 window_size=3,
                 step_size=3)

    x.extend(list(range(12)))
    scheduler.step()
    assert recent_values(y) == [2, 5, 8, 11]
    assert recent_values(z) == [0, 3, 6, 9]

    # ------------------------------------------------------
    # TEST split_tuple
    # ------------------------------------------------------
    x = Stream('x')
    y = Stream('y')
    z = Stream('z')
    split_tuple(in_stream=x, out_streams=[y, z])
    x.append((0, 'A'))
    x.extend([(1, 'B'), (2, 'C')])
    scheduler.step()
    assert recent_values(y) == [0, 1, 2]
    assert recent_values(z) == ['A', 'B', 'C']

    def f(window):
        return max(window), min(window)

    x = Stream('x')
    y = Stream('y')
    z = Stream('z')

    split_window(func=f,
                 in_stream=x,
                 out_streams=[y, z],
                 window_size=3,
                 step_size=3)

    x.extend(list(range(12)))
    scheduler.step()
    assert recent_values(y) == [2, 5, 8, 11]
    assert recent_values(z) == [0, 3, 6, 9]

    print('TEST OF SPLIT IS SUCCESSFUL')
Example #33
 def create_stream(self, stream_id):
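     # Create a Stream for the given id, register it in self.streams, and return it.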
     s = Stream(self, stream_id)
     self.streams[stream_id] = s
     return s
Example #34
 def g(in_streams, out_streams):
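     # Increment the first input stream into a local stream s and print it.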
     s = Stream(name='s')
     increment(in_stream=in_streams[0], out_stream=s)
     print_stream(s, name=s.name)
Example #35
                f.write(bytearray(data['m_Script']['Array']))


SOURCE = r'[]'

for d, _, f in os.walk(SOURCE):
    base_dir = d[len(SOURCE):]
    for file_name in f:
        print(base_dir, file_name)

        with open(os.path.join(d, file_name), 'rb') as f:
            head = f.read(7)
            if head != b'UnityFS':
                continue
            try:
                arch = Archive(Stream(b'UnityFS' + f.read()))
            except:
                continue

        resources = {}

        cab = None
        for j in arch.files:
            if j.startswith('CAB-'):
                if j.endswith(('.resS', '.resource')):
                    resources[f'/{cab}/{j}'] = arch.files[j]
                else:
                    cab = j

        cab_path = os.path.join(base_dir, file_name.split('.')[0])
        for j in arch.files:
Example #36
def repeated_min_distance(string):
    """minimal distance between repeated letters or -1. functional."""
    return Stream(
        set(string)).map(lambda char: Stream(get_indices(
            char, string)).pair_consecutive().map(lambda pair: operator.sub(
                pair[0], pair[1])).min(-1)).filter(lambda x: x > -1).min(-1)
Example #37
def make_echo(spoken, delay, attenuation):
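    # The echo starts as delay zeros; each heard sample is fed back into the echo stream, scaled by attenuation.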
    echo = Stream(name='echo', initial_value=[0] * delay)
    heard = spoken + echo
    r_mul(in_stream=heard, out_stream=echo, arg=attenuation)
    return heard
Example #38
 def __init__(self, stream):
     self.display = Display()
     self.stream = Stream(stream)
Example #39
 def __init__(self):
     super(MEflows, self).__init__()
     self.add('pri', Stream())
     self.add('sec', Stream())
Example #40
    def get(self):
        print("test!!")
        dellsts = self.request.get_all("status")
        if (len(dellsts) > 0):
            streams = Stream.query(
                Stream.name.IN(dellsts),
                Stream.author == users.get_current_user()).fetch()
            counts = CountViews.query(
                CountViews.name.IN(dellsts),
                ancestor=ndb.Key('User',
                                 users.get_current_user().nickname())).fetch()
            for stream in streams:
                pictures = db.GqlQuery(
                    "SELECT * FROM Picture " + "WHERE ANCESTOR IS :1",
                    db.Key.from_path('Stream', stream.name))
                for pic in pictures:
                    images.delete_serving_url(pic.imgkey)
                    blobstore.delete(pic.imgkey)
                db.delete(pictures)
                pic_count = Count_pic.query(
                    ancestor=ndb.Key('Stream', stream.name))
                ndb.delete_multi(ndb.put_multi(pic_count))
                #print pic_count
            ndb.delete_multi(ndb.put_multi(streams))
            ndb.delete_multi(ndb.put_multi(counts))
        dellsts = self.request.get_all("status1")
        #self.response.write(len(dellsts))
        if (len(dellsts) > 0):
            streams = Stream.query(Stream.name.IN(dellsts)).fetch()
            for stream in streams:
                if (users.get_current_user()
                        and users.get_current_user().nickname()
                        in stream.subscribers):
                    stream.subscribers.remove(
                        users.get_current_user().nickname())
                    stream.put()

        picNum_list = []
        streams_1 = Stream.query(Stream.author == users.get_current_user()
                                 ).order(-Stream.creattime).fetch()
        for stream in streams_1:
            pic_count = Count_pic.query(
                ancestor=ndb.Key('Stream', stream.name)).fetch()[0]
            pictures = db.GqlQuery(
                "SELECT * FROM Picture " + "WHERE ANCESTOR IS :1 " +
                "ORDER BY uploaddate DESC",
                db.Key.from_path('Stream', stream.name))
            # print (stream.name, pic_count.numbers)
            # picNum_list.append(pic_count.numbers)
            picNum_list.append(pictures.count())
        streams = Stream.query().fetch()
        streams_2 = []
        count_list = []
        user_name = users.get_current_user().nickname()
        # url =users.create_login_url('/')
        #  if(users.get_current_user()):
        #user_name = users.get_current_user().nickname()
        url = users.create_logout_url('/')
        for stream in streams:
            if (users.get_current_user().email() in stream.subscribers):
                count = CountViews.query(CountViews.name == stream.name,
                                         ancestor=ndb.Key(
                                             'User',
                                             stream.author_name)).fetch()[0]
                pictures = db.GqlQuery(
                    "SELECT * FROM Picture " + "WHERE ANCESTOR IS :1 " +
                    "ORDER BY uploaddate DESC",
                    db.Key.from_path('Stream', stream.name))

                stream.numberofpictures = pictures.count()
                streams_2.append(stream)
                count_list.append(count.numbers)

        #else:
        #   self.redirect(url,permanent=False)

        template_values = {
            'user_name': user_name,
            'streams_1': streams_1,
            'streams_2': streams_2,
            'count_list': count_list,
            'url': url,
            "picNum_list": picNum_list
        }

        template = JINJA_ENVIRONMENT.get_template('management_index.html')
        self.response.write(template.render(template_values))
Example #41
class ModelCreator:

    def __init__(self, own_recv_socket_dict,
                 own_send_socket_dict=None,
                 remote_socket_dict=None,
                 config=RttNordicConfig,
                 event_filename=None,
                 event_types_filename=None,
                 log_lvl=logging.INFO):
        self.config = config
        self.event_filename = event_filename
        self.event_types_filename = event_types_filename
        self.csvfile = None

        timeouts = {
            'descriptions': None,
            'events': None
        }
        self.in_stream = Stream(own_recv_socket_dict, timeouts)

        if own_send_socket_dict is not None and remote_socket_dict is not None:
            self.sending = True
            self.out_stream = Stream(own_send_socket_dict, timeouts, remote_socket_dict=remote_socket_dict)
        else:
            self.sending = False

        self.timestamp_overflows = 0
        self.after_half = False

        self.processed_events = ProcessedEvents()
        self.temp_events = []
        self.submitted_event_type = None
        self.raw_data = EventsData([], {})
        self.event_processing_start_id = None
        self.event_processing_end_id = None
        self.submit_event = None
        self.start_event = None

        self.bufs = list()
        self.bcnt = 0

        self.logger = logging.getLogger('Profiler model creator')
        self.logger_console = logging.StreamHandler()
        self.logger.setLevel(log_lvl)
        self.log_format = logging.Formatter('[%(levelname)s] %(name)s: %(message)s')
        self.logger_console.setFormatter(self.log_format)
        self.logger.addHandler(self.logger_console)

    def shutdown(self):
        if self.csvfile is not None:
            self.processed_events.finish_writing_data_to_files(self.csvfile,
                                                               self.event_filename,
                                                               self.event_types_filename)

    def _get_buffered_data(self, num_bytes):
        buf = bytearray()
        while len(buf) < num_bytes:
            tbuf = self.bufs[0]
            size = num_bytes - len(buf)
            if len(tbuf) <= size:
                buf.extend(tbuf)
                del self.bufs[0]
            else:
                buf.extend(tbuf[0:size])
                self.bufs[0] = tbuf[size:]
        self.bcnt -= num_bytes
        return buf

    def _read_bytes(self, num_bytes):
        while True:
            if self.bcnt >= num_bytes:
                break
            try:
                buf = self.in_stream.recv_ev()
            except StreamError as err:
                self.logger.error("Receiving error: {}".format(err))
                self.close()
            if len(buf) > 0:
                self.bufs.append(buf)
                self.bcnt += len(buf)

        return self._get_buffered_data(num_bytes)

    def _timestamp_from_ticks(self, clock_ticks):
        ts_ticks_aggregated = self.timestamp_overflows * self.config['timestamp_raw_max']
        ts_ticks_aggregated += clock_ticks
        ts_s = ts_ticks_aggregated * self.config['ms_per_timestamp_tick'] / 1000
        return ts_s

    def transmit_all_events_descriptions(self):
        try:
            bytes = self.in_stream.recv_desc()
        except StreamError as err:
            self.logger.error("Receiving error: {}. Exiting".format(err))
            sys.exit()
        desc_buf = bytes.decode()
        f = StringIO(desc_buf)
        reader = csv.reader(f, delimiter=',')
        for row in reader:
            # An empty field is sent after the last event description
            if len(row) == 0:
                break
            name = row[0]
            id = int(row[1])
            data_type = row[2:len(row) // 2 + 1]
            data = row[len(row) // 2 + 1:]
            self.raw_data.registered_events_types[id] = EventType(name, data_type, data)
            if name not in ('event_processing_start', 'event_processing_end'):
                self.processed_events.registered_events_types[id] = EventType(name, data_type, data)

        self.event_processing_start_id = \
            self.raw_data.get_event_type_id('event_processing_start')
        self.event_processing_end_id = \
            self.raw_data.get_event_type_id('event_processing_end')

        if self.sending:
            event_types_dict = dict((k, v.serialize())
                    for k, v in self.processed_events.registered_events_types.items())
            json_et_string = json.dumps(event_types_dict)
            try:
                self.out_stream.send_desc(json_et_string.encode())
            except StreamError as err:
                self.logger.error("Error: {}. Unable to send data".format(err))
                sys.exit()

    def _read_single_event(self):
        id = int.from_bytes(
            self._read_bytes(1),
            byteorder=self.config['byteorder'],
            signed=False)
        et = self.raw_data.registered_events_types[id]

        buf = self._read_bytes(4)
        timestamp_raw = (
            int.from_bytes(
                buf,
                byteorder=self.config['byteorder'],
                signed=False))

        if self.after_half \
        and timestamp_raw < 0.4 * self.config['timestamp_raw_max']:
            self.timestamp_overflows += 1
            self.after_half = False

        if timestamp_raw > 0.6 * self.config['timestamp_raw_max']:
            self.after_half = True

        timestamp = self._timestamp_from_ticks(timestamp_raw)

        def process_int32(self, data):
            buf = self._read_bytes(4)
            data.append(int.from_bytes(buf, byteorder=self.config['byteorder'],
                                       signed=True))

        def process_uint32(self, data):
            buf = self._read_bytes(4)
            data.append(int.from_bytes(buf, byteorder=self.config['byteorder'],
                                       signed=False))

        def process_int16(self, data):
            buf = self._read_bytes(2)
            data.append(int.from_bytes(buf, byteorder=self.config['byteorder'],
                                       signed=True))

        def process_uint16(self, data):
            buf = self._read_bytes(2)
            data.append(int.from_bytes(buf, byteorder=self.config['byteorder'],
                                       signed=False))

        def process_int8(self, data):
            buf = self._read_bytes(1)
            data.append(int.from_bytes(buf, byteorder=self.config['byteorder'],
                                       signed=True))

        def process_uint8(self, data):
            buf = self._read_bytes(1)
            data.append(int.from_bytes(buf, byteorder=self.config['byteorder'],
                                       signed=False))

        def process_string(self, data):
            buf = self._read_bytes(1)
            buf = self._read_bytes(int.from_bytes(buf, byteorder=self.config['byteorder'],
                                                  signed=False))
            data.append(buf.decode())

        READ_BYTES = {
            "u8": process_uint8,
            "s8": process_int8,
            "u16": process_uint16,
            "s16": process_int16,
            "u32": process_uint32,
            "s32": process_int32,
            "s": process_string,
            "t": process_uint32
        }
        data = []
        for event_data_type in et.data_types:
            READ_BYTES[event_data_type](self, data)
        return Event(id, timestamp, data)

    def _send_event(self, tracked_event):
        event_string = tracked_event.serialize()
        try:
            self.out_stream.send_ev(event_string.encode())
        except StreamError as err:
            if err.args[1] != 'closed':
                self.logger.error("Error. Unable to send data: {}".format(err))
            # Receiver has been closed
            self.close()

    def _write_event_to_file(self, csvfile, tracked_event):
        try:
            csvfile.write(tracked_event.serialize() + '\r\n')
        except IOError:
            self.logger.error("Problem with accessing csv file")
            self.close()

    def transmit_events(self):
        if self.event_filename and self.event_types_filename:
            self.csvfile = self.processed_events.init_writing_data_to_files(
                self.event_filename,
                self.event_types_filename)
        while True:
            event = self._read_single_event()
            if self.raw_data.registered_events_types[event.type_id].name == PROFILER_FATAL_ERROR_EVENT_NAME:
                self.logger.error("Fatal error of Profiler on device! Event has been dropped. "
                                  "Data buffer has overflown. No more events will be received.")

            if event.type_id == self.event_processing_start_id:
                self.start_event = event
                for i in range(len(self.temp_events) - 1, -1, -1):
                    # comparing memory addresses of event processing start
                    # and event submit to identify matching events
                    if self.temp_events[i].data[0] == self.start_event.data[0]:
                        self.submit_event = self.temp_events[i]
                        self.submitted_event_type = self.submit_event.type_id
                        del self.temp_events[i]
                        break

            elif event.type_id == self.event_processing_end_id:
                # comparing memory addresses of event processing start and
                # end to identify matching events
                if self.submitted_event_type is not None and event.data[0] \
                            == self.start_event.data[0]:
                    tracked_event = TrackedEvent(
                            self.submit_event,
                            self.start_event.timestamp,
                            event.timestamp)
                    if self.csvfile is not None:
                        self._write_event_to_file(self.csvfile, tracked_event)
                    if self.sending:
                        self._send_event(tracked_event)
                    self.submitted_event_type = None

            elif not self.processed_events.is_event_tracked(event.type_id):
                tracked_event = TrackedEvent(event, None, None)
                if self.csvfile is not None:
                    self._write_event_to_file(self.csvfile, tracked_event)
                if self.sending:
                    self._send_event(tracked_event)

            else:
                self.temp_events.append(event)

    def start(self):
        self.transmit_all_events_descriptions()
        self.transmit_events()

    def close(self):
        self.logger.info("Real time transmission closed")
        self.shutdown()
        self.logger.info("Events data saved to files")
        sys.exit()
Example #42
def timed_unzip_f(in_stream, num_out_streams):
    out_streams = [Stream() for _ in range(num_out_streams)]
    timed_unzip(in_stream, out_streams)
    return out_streams
Example #43
        sage: from sage.combinat.species.generating_series import factorial_gen
        sage: g = factorial_gen()
        sage: [g.next() for i in range(5)]
        [1, 1, 2, 6, 24]
    """
    z = Integer(1)
    yield z
    yield z
    n = Integer(2)
    while True:
        z *= n
        yield z
        n += 1


factorial_stream = Stream(factorial_gen())


@cached_function
def CycleIndexSeriesRing(R):
    """
    Returns the ring of cycle index series. Note that it is just a
    LazyPowerSeriesRing whose elements have some extra methods.

    EXAMPLES::

        sage: from sage.combinat.species.generating_series import CycleIndexSeriesRing
        sage: R = CycleIndexSeriesRing(QQ); R
        Cycle Index Series Ring over Symmetric Functions over Rational Field in the powersum basis
        sage: R([1]).coefficients(4)
        [1, 1, 1, 1]
Example #44
class CheatGame:
    def __init__(self):
        self.engine = None
        self.roomKey = None
        self.localPlayer = None

        self.callStream = None
        self.turnStream = None

        self.takeTurnAfterCalls = False

    def printHeader(self):
        clearScreen()
        print("______________________________")
        print("            CHEAT             ")
        print("       by Anmol Parande       ")
        print("______________________________")

    def printWelcome(self, name):
        clearScreen()
        print(f"Welcome {name}. What would you like to do?")
        print("1. Join Room")
        print("2. Create Room")
        
    def start(self):
        self.printHeader()

        name = self.enterName()
        self.localPlayer = Player(name)
        
        self.printWelcome(name)
        choice = self.handleEntrance()
        clearScreen()
        if choice == 1:
            success = False
            while not success:
                self.roomKey = input("Please enter your room key: ")
                success = self.joinRoom()
        else:
            self.localPlayer.isHost = True
            self.roomKey = self.createRoom()

        self.engine = Engine(self.localPlayer)
        self.waitForOthers()

    def enterName(self):
        name = input("Please enter your name: ")
        return name

    def joinRoom(self):
        success, message = firebaseutils.joinRoom(self.roomKey, self.localPlayer.name)
        if not success:
            print(f"Error: Could not join room because {message}")
        return success

    def createRoom(self):
        success, message, data = firebaseutils.createRoom(self.localPlayer.name)
        if not success:
            print("Could not create room key. Please try again")
            os._exit(1)
        clearScreen()
        print(message)
        return data['roomKey']

    def waitForOthers(self):
        recentlyJoined = True
        def putFunc(data):
            nonlocal recentlyJoined
            for p in data:
                success = self.engine.addPlayer(p)
                if recentlyJoined and success:
                    print(f"{p} is already in the room")
                elif success:
                    print(f"{p} joined the room")

            recentlyJoined = False
        
        playerStream = Stream(putFunc)
        firebaseutils.listenToPlayers(playerStream, self.roomKey)

        if self.localPlayer.isHost:
            self.waitForStart()
            playerStream.close()

            self.engine.startGame()
            self.startGame()
        else:
            def shouldStartGame(stillWaiting):
                if not stillWaiting:
                    clearScreen()
                    print("The host has started the game.")
                    startStream.close()
                    playerStream.close()
                    try:
                        self.loadGame()
                    except firebaseutils.FirebaseError:
                        self.exitWithError()
                    
            startStream = Stream(shouldStartGame)
            firebaseutils.listenForStart(startStream, self.roomKey)

    def handleEntrance(self):
        choice = input()
        while choice != "1" and choice != "2":
            choice = input("Please enter either 1 or 2: ")
        
        return int(choice)

    def startGame(self):
        hands = self.engine.listHands()
        self.engine.orderPlayers()

        firebaseutils.startGame(self.roomKey, hands, self.engine.playerList)

        clearScreen()
        self.printTurns()

        self.turnStream = Stream(self.turnListener)
        firebaseutils.listenForTurn(self.turnStream, self.roomKey)

    def turnListener(self, data):
        if data is not None:
            actual = data.get("lastPlayedCard", None)
            calls = data.get('calls', [])
        else:
            actual = None
            calls = []

        self.engine.lastPlayedCard = actual

        if actual is not None and self.engine.currentPlayer().name != self.localPlayer.name:
            print(f"{self.engine.currentPlayer().name} played a {strFromValue(self.engine.currentRank + 2)}, but they might be lying")
            self.engine.registerTurn()
            self.takeTurnAfterCalls = True
            didCall = self.makeDecision() == 'c'
            firebaseutils.logCall(self.roomKey, self.localPlayer.name, didCall)
            
            self.callStream = Stream(self.callListener)
            firebaseutils.listenForCall(self.callStream, self.roomKey)
        else:
            self.engine.registerTurn()
            self.takeTurn()

    def callListener(self, data):
        if data is None:
            return

        result = self.engine.logCalls(data)
        if self.engine.isReadyForNextPlayer:
            clearScreen()
            if result == 0:
                if self.engine.previousPlayer().name == self.localPlayer.name:
                    print("Nobody thought you were bluffing :)")
                else:
                    print(f"Nobody thought {self.engine.previousPlayer().name} was bluffing")
            elif result == -1:
                if self.engine.previousPlayer().name == self.localPlayer.name:
                    print("You were called on your bluff! You just picked up the pile :(")
                else:
                    print(f"{self.engine.previousPlayer().name} was bluffing!")
            elif result == 1:
                if self.engine.previousPlayer().name == self.localPlayer.name:
                    print("People thought you bluffed, but they were wrong :)")
                else:
                    print(f"{self.engine.previousPlayer().name} was not bluffing! All players who thought they were have divided the pile amongst themselves")
            print()
            self.callStream.close()

            if self.engine.isGameOver():
                self.endGame()
            
            self.printTurns()
            if self.takeTurnAfterCalls:
                self.takeTurn()

    def takeTurn(self):
        if self.engine.currentPlayer().name != self.localPlayer.name:
            return

        cardHash = self.engine.takeTurn()
        clearScreen()

        firebaseutils.clearCalls(self.roomKey)
        firebaseutils.logTurn(self.roomKey, cardHash)

        print("Waiting for other players to call your bluff or let you pass")

        self.takeTurnAfterCalls = False
        self.callStream = Stream(self.callListener)
        firebaseutils.listenForCall(self.callStream, self.roomKey)

    def waitForStart(self):
        shouldExit = input("Press a key when you are ready to start the game: \n")
        while len(self.engine.players) == 1:
            shouldExit = input("At least two people need to be in the game to start: \n")

    def makeDecision(self):
        didCall = input("Type 'c' to call their bluff and 'p' to let them pass\n")
        while didCall != 'c' and didCall != 'p':
            didCall = input("Please type 'c' or 'p'")

        return didCall

    def loadGame(self):
        time.sleep(0.1)
        hands, turnList = firebaseutils.loadGameData(self.roomKey)
        self.engine.setGameState(hands, turnList)
        self.printTurns()
        turnStream = Stream(self.turnListener)
        firebaseutils.listenForTurn(turnStream, self.roomKey)

    def endGame(self):
        print(f"Game Over: {self.engine.previousPlayer().name} won")
        _thread.interrupt_main()
        os._exit(0)

    def exitWithError(self):
        print("Oops. Something went wrong. Gameplay was ended")
        _thread.interrupt_main()
        os._exit(1)

    def printTurns(self):
        if self.engine.currentPlayer().name == self.localPlayer.name:
            print("It is your turn")
        else:
            print(f"It is {self.engine.currentPlayer().name}'s turn")
        self.localPlayer.printHand()
        print()
Example #45
def examples():
    #-----------------------------------------------
    # Example with @fmap_w and @map_w
    @fmap_w
    def sum_window(window):
        return sum(window)

    @map_w
    def total_window(window):
        return sum(window)

    r = Stream()
    s = Stream()
    t = sum_window(r, window_size=2, step_size=2)
    total_window(in_stream=r, out_stream=s, window_size=2, step_size=2)

    r.extend(range(10))
    run()
    assert recent_values(t) == [1, 5, 9, 13, 17]
    assert recent_values(t) == recent_values(s)

    #-----------------------------------------------
    # Example with keyword argument
    @fmap_w
    def sum_add(v, addend):
        return sum(v) + addend

    s = Stream()
    t = sum_add(s, window_size=2, step_size=2, addend=10)
    s.extend(range(10))
    Stream.scheduler.step()
    assert recent_values(t) == [11, 15, 19, 23, 27]

    # Example with keyword argument using map_w
    @map_w
    def sum_add_relation(v, addend):
        return sum(v) + addend

    s = Stream()
    t = Stream()
    sum_add_relation(s, t, window_size=2, step_size=2, addend=10)
    s.extend(range(10))
    Stream.scheduler.step()
    assert recent_values(t) == [11, 15, 19, 23, 27]

    #-----------------------------------------------
    # Example with state and keyword argument
    @fmap_w
    def h(v, state, addend):
        next_state = state + 1
        return sum(v) + state + addend, next_state

    s = Stream()
    t = h(s, window_size=2, step_size=2, state=0, addend=10)
    s.extend(range(10))
    run()
    assert recent_values(t) == [11, 16, 21, 26, 31]

    #-----------------------------------------------
    # Output stream is the average of the max of
    # successive windows
    @fmap_w
    def h(window, state):
        count, total = state
        next_total = total + max(window)
        next_count = count + 1
        next_output = next_total / next_count
        next_state = next_count, next_total
        return next_output, next_state

    s = Stream()
    t = h(s, window_size=4, step_size=4, state=(0, 0.0))
    s.extend(range(20))
    run()
    assert recent_values(t) == [3.0, 5.0, 7.0, 9.0, 11.0]
    return
Example #46
def test_sink():
    import numpy as np
    scheduler = Stream.scheduler

    ## ----------------------------------------------
    ## # Examples from AssembleSoftware website: KEEP!!
    ## ----------------------------------------------
    ## def print_index(v, state, delimiter):
    ##     print str(state) + delimiter + str(v)
    ##     return state+1 # next state
    ## s = Stream()
    ## sink(print_index, s, 0, delimiter=':')
    ## s.extend(list(range(100,105)))

    ## s = Stream()
    ## def print_index(v, state, delimiter):
    ##     print str(state) + delimiter + str(v)
    ##     return state+1 # next state
    ## sink(print_index, s, 0, delimiter=':')
    ## s.extend(list(range(100,105)))
    # Set up parameters for call to stream_to_list
    ## ----------------------------------------------
    ## # Finished examples from AssembleSoftware website
    ## ----------------------------------------------

    #-----------------------------------------------
    # Set up parameters for call to sink
    print_list = []
    print_list_for_array = []

    def print_index(v, state, print_list):
        print_list.append(str(state) + ':' + str(v))
        return state + 1  # next state

    s = Stream('s')
    s_array = StreamArray('s_array', dtype=int)

    #-----------------------------------------------
    # Call sink with initial state of 0
    sink(func=print_index, in_stream=s, state=0, print_list=print_list)
    sink(func=print_index,
         in_stream=s_array,
         state=0,
         print_list=print_list_for_array)

    s.extend(list(range(100, 103)))
    s_array.extend(np.arange(100, 103))
    scheduler.step()
    assert print_list == ['0:100', '1:101', '2:102']
    assert print_list_for_array == print_list
    s.extend(list(range(200, 203)))
    scheduler.step()
    assert print_list == ['0:100', '1:101', '2:102', '3:200', '4:201', '5:202']

    #-----------------------------------------------
    input_stream = Stream('input stream')
    input_stream_array = StreamArray('input stream array', dtype=int)
    output_list = []
    output_list_array = []

    # Call stream_to_list with no function
    stream_to_list(input_stream, output_list)
    stream_to_list(input_stream_array, output_list_array)
    # A test
    a_test_list = list(range(100, 105))
    a_test_array = np.arange(100, 105)
    input_stream.extend(a_test_list)
    input_stream_array.extend(a_test_array)
    scheduler.step()
    assert output_list == a_test_list
    assert output_list_array == a_test_list

    #-----------------------------------------------
    # test stream to list with a function
    def h(v, multiplier, addend):
        return v * multiplier + addend

    ss = Stream('ss')
    ss_array = StreamArray('ss_array', dtype=int)
    l = []
    l_array = []
    stream_to_list(ss, l, h, multiplier=2, addend=100)
    stream_to_list(in_stream=ss_array,
                   target_list=l_array,
                   element_function=h,
                   multiplier=2,
                   addend=100)
    test_list = [3, 23, 14]
    ss.extend(test_list)
    ss_array.extend(np.array(test_list))
    scheduler.step()
    assert l == [v * 2 + 100 for v in test_list]
    assert l_array == l

    #-----------------------------------------------
    # test stream to list with a function and state
    def h(v, state, multiplier, addend):
        return v * multiplier + addend + state, v + state

    ss = Stream('ss')
    ss_array = StreamArray('ss_array', dtype=int)
    l = []
    l_array = []
    stream_to_list(ss, l, h, 0, multiplier=2, addend=100)
    stream_to_list(in_stream=ss_array,
                   target_list=l_array,
                   element_function=h,
                   state=0,
                   multiplier=2,
                   addend=100)
    test_list = [3, 23, 14]
    ss.extend(test_list)
    ss_array.extend(np.array(test_list))
    scheduler.step()
    assert l == [106, 149, 154]
    assert l_array == l

    ss = Stream('ss')
    ss_array = StreamArray('ss_array', dtype=int)
    l = []
    l_array = []
    stream_to_list(ss, l, h, 0, multiplier=2, addend=100)
    stream_to_list(in_stream=ss_array,
                   target_list=l_array,
                   element_function=h,
                   state=0,
                   multiplier=2,
                   addend=100)
    test_list = list(range(5))
    ss.extend(test_list)
    ss_array.extend(np.array(test_list))
    scheduler.step()
    assert l == [100, 102, 105, 109, 114]
    assert l_array == l

    # Test sink
    # func operates on a single element of the single input stream and does
    # not return any value.
    def p(v, lst):
        lst.append(v)

    in_stream_sink = Stream('in_stream_sink')
    a_list = []
    b_list = []
    sink_agent = sink_element(func=p,
                              in_stream=in_stream_sink,
                              name='sink_agent',
                              lst=a_list)
    sink(func=p, in_stream=in_stream_sink, lst=b_list)
    test_list = [1, 13, 29]
    in_stream_sink.extend(test_list)
    scheduler.step()
    assert a_list == test_list
    assert b_list == test_list

    # ------------------------------------
    # Test sink with state
    # func operates on a single element of the single input stream and state.
    # func does not return any value.

    def p_s(element, state, lst, stream_name):
        lst.append([stream_name, element])
        return state + 1

    in_stream_sink_with_state = Stream('s')
    c_list = []
    sink_with_state_agent = sink_element(func=p_s,
                                         in_stream=in_stream_sink_with_state,
                                         state=0,
                                         name='sink_with_state_agent',
                                         lst=c_list,
                                         stream_name='s')

    #------------------------------------------------------------------------------
    # Test sink as a function with state
    d_list = []
    sink(p_s, in_stream_sink_with_state, state=0, lst=d_list, stream_name='s')
    in_stream_sink_with_state.extend(list(range(2)))
    scheduler.step()
    assert c_list == [['s', 0], ['s', 1]]
    assert d_list == c_list

    # ------------------------------------
    # Test sink with side effect
    # func operates on a single element of the single input stream and state.
    # func does not return any value.

    def sink_with_side_effect_func(element, side_effect_list, f):
        side_effect_list.append(f(element))
        return None

    side_effect_list_0 = []
    side_effect_list_1 = []
    side_effect_list_2 = []

    def ff(element):
        return element * 2

    def fff(element):
        return element + 10

    stm = Stream('stm')

    sink_with_side_effect_agent_0 = sink_element(
        func=sink_with_side_effect_func,
        in_stream=stm,
        name='sink_with_side_effect_agent_0',
        side_effect_list=side_effect_list_0,
        f=ff)

    sink_with_side_effect_agent_1 = sink_element(
        func=sink_with_side_effect_func,
        in_stream=stm,
        name='sink_with_side_effect_agent_1',
        side_effect_list=side_effect_list_1,
        f=fff)

    def f_stateful(element, state):
        return element + state, element + state

    def f_stateful_2(element, state):
        return element * state, element + state

    target_stream_to_list_simple = []
    stream_to_list(stm, target_stream_to_list_simple)
    stream_to_list(in_stream=stm,
                   target_list=side_effect_list_2,
                   element_function=lambda v: 2 * v)
    target_stream_to_list_stateful = []
    stream_to_list(in_stream=stm,
                   target_list=target_stream_to_list_stateful,
                   element_function=f_stateful,
                   state=0)
    target_stream_to_list_stateful_2 = []
    stream_to_list(in_stream=stm,
                   target_list=target_stream_to_list_stateful_2,
                   element_function=f_stateful_2,
                   state=0)

    stream_to_file(stm, 'test1.txt')
    stream_to_file(stm, 'test2.txt', lambda v: 2 * v)
    stream_to_file(stm, 'test3.txt', f_stateful, state=0)

    is_py2 = sys.version[0] == '2'
    if is_py2:
        import Queue as queue
    else:
        import queue as queue
    queue_1 = queue.Queue()
    queue_2 = queue.Queue()
    queue_3 = queue.Queue()
    stream_to_queue(stm, queue_1)
    stream_to_queue(stm, queue_2, lambda v: 2 * v)
    stream_to_queue(stm, queue_3, f_stateful, 0)

    stm.extend(list(range(5)))
    scheduler.step()
    assert target_stream_to_list_stateful == [0, 1, 3, 6, 10]
    assert target_stream_to_list_stateful_2 == [0, 0, 2, 9, 24]
    assert side_effect_list_0 == [0, 2, 4, 6, 8]
    assert side_effect_list_1 == [10, 11, 12, 13, 14]
    assert side_effect_list_0 == side_effect_list_2
    assert target_stream_to_list_simple == list(range(5))

    with open('test1.txt') as the_file:
        file_contents_integers = [int(v) for v in (the_file.readlines())]
    assert file_contents_integers == recent_values(stm)

    with open('test2.txt') as the_file:
        file_contents_integers = [int(v) for v in (the_file.readlines())]
    assert file_contents_integers == [2 * v for v in recent_values(stm)]

    with open('test3.txt') as the_file:
        file_contents_integers = [int(v) for v in (the_file.readlines())]
    assert file_contents_integers == [0, 1, 3, 6, 10]
    os.remove('test1.txt')
    os.remove('test2.txt')
    os.remove('test3.txt')

    def h(v, multiplier, addend):
        return v * multiplier + addend

    ss = Stream()
    stream_to_file(ss, 'test4.txt', h, multiplier=2, addend=100)
    test_list = [3, 23, 14]
    ss.extend(test_list)
    scheduler.step()
    with open('test4.txt') as the_file:
        file_contents_integers = [int(v) for v in (the_file.readlines())]
    assert file_contents_integers == [v * 2 + 100 for v in test_list]
    os.remove('test4.txt')

    def h(v, state, multiplier, addend):
        return v * multiplier + addend + state, v + state

    ss = Stream()
    stream_to_file(ss, 'test5.txt', h, 0, multiplier=2, addend=100)
    test_list = [3, 23, 14]
    ss.extend(test_list)
    scheduler.step()
    with open('test5.txt') as the_file:
        file_contents_integers = [int(v) for v in (the_file.readlines())]
    scheduler.step()
    assert file_contents_integers == [106, 149, 154]
    os.remove('test5.txt')

    # ------------------------------------
    # Testing stream_to_queue
    def h(v, state, multiplier, addend):
        return v * multiplier + addend + state, v + state

    ss = Stream()
    queue_4 = queue.Queue()
    stream_to_queue(ss, queue_4, h, 0, multiplier=2, addend=100)
    test_list = [3, 23, 14]
    ss.extend(test_list)
    scheduler.step()
    queue_contents = []
    while not queue_4.empty():
        queue_contents.append(queue_4.get())
    assert queue_contents == [106, 149, 154]

    # Test with state and keyword arguments
    def h(v, state, multiplier, addend):
        return v * multiplier + addend + state, v + state

    ss = Stream()
    stream_to_queue(ss, queue_4, h, 0, multiplier=2, addend=100)
    test_list = [3, 23, 14]
    ss.extend(test_list)
    queue_contents = []
    scheduler.step()
    while not queue_4.empty():
        queue_contents.append(queue_4.get())
    assert queue_contents == [106, 149, 154]

    # Another test with state and keyword arguments
    ss = Stream()
    queue_5 = queue.Queue()
    stream_to_queue(ss, queue_5, h, 0, multiplier=2, addend=100)
    test_list = list(range(5))
    ss.extend(test_list)
    scheduler.step()
    queue_contents = []
    while not queue_5.empty():
        queue_contents.append(queue_5.get())
    assert queue_contents == [100, 102, 105, 109, 114]

    # Test stream_to_buffer
    s = Stream()
    buf = Buffer(max_size=10)
    stream_to_buffer(s, buf)
    test_list = list(range(5))
    s.extend(test_list)
    scheduler.step()
    assert buf.get_all() == test_list
    next_test = list(range(5, 10, 1))
    s.extend(next_test)
    scheduler.step()
    assert buf.read_all() == next_test
    assert buf.get_all() == next_test

    s = Stream('s')
    print_list = []

    def f(lst):
        print_list.extend(lst)

    sink_window(func=f, in_stream=s, window_size=4, step_size=2)
    s.extend(list(range(10)))
    scheduler.step()
    assert print_list == [0, 1, 2, 3, 2, 3, 4, 5, 4, 5, 6, 7, 6, 7, 8, 9]

    s = Stream('s')
    print_list = []

    def f(lst):
        print_list.extend(lst)

    sink_list(func=f, in_stream=s)
    s.extend(list(range(10)))
    Stream.scheduler.step()
    assert print_list == list(range(10))

    import numpy as np
    t = StreamArray('t', dtype='int')
    print_list = []

    def f(lst):
        print_list.extend(lst)

    sink_list(func=f, in_stream=t)
    t.extend(np.arange(10))
    Stream.scheduler.step()
    assert print_list == list(range(10))
    print('TEST OF SINK IS SUCCESSFUL')
Example #47
 def __init__(self,n_bits):#, arg):
     self.a = Stream(n_bits, name='a')
     self.b = Stream(n_bits, name='b')
     # I choose to define the output with the same number of bits as the inputs
     self.r = Stream(n_bits, name='r')
Example #48
                    help="video path")
parser.add_argument("-fps", required=False, dest="fps", default=-1, help="fps")
parser.add_argument("-fs",
                    required=False,
                    dest="fs",
                    default=1,
                    help="start frame")
parser.add_argument("-fe",
                    required=False,
                    dest="fe",
                    default=-1,
                    help="end frame, -1 for the last")
parser.add_argument("-t",
                    required=False,
                    dest="tmp",
                    default=None,
                    help="temp path")
args = parser.parse_args()

model_id = int(args.model_id)
thing = Thing(model_id)
vdata = dict(path=args.path if args.path else thing.cfg_thing.video["path"],
             fps=int(args.fps) if args.fps else thing.cfg_thing.video["fps"],
             fs=int(args.fs) if args.fs else thing.cfg_thing.video["fs"],
             fe=int(args.fe) if args.fe else thing.cfg_thing.video["fe"],
             tmp=args.tmp if args.tmp else thing.cfg_thing.video["tmp"])
stream = Stream(thing, vdata)
#stream.countVideo()
#stream.extractVideo()
stream.lastVideo()
Example #49
def map_list_f(func, in_stream, state=None, *args, **kwargs):
    out_stream = Stream(func.__name__ + in_stream.name)
    map_list(func, in_stream, out_stream, state, None, None, *args, **kwargs)
    return out_stream
"""
This is a example script to use the MP model on recorded Mode-S data
"""

import pandas as pd
import matplotlib.pyplot as plt
import mp_vis
from stream import Stream
import warnings

warnings.filterwarnings("ignore")

# specify the receiver location, the example receiver is located at TU Delft
stream = Stream(lat0=51.99, lon0=4.37, correction=True)

# you can set up the MP model parameters
stream.mp.N_AC_PTCS = 250
stream.mp.AGING_SIGMA = 180

# read the message dumps
adsb0 = pd.read_csv("data/adsb_raw_20180101_0900_utc.csv.gz")
ehs0 = pd.read_csv("data/ehs_raw_20180101_0900_utc.csv.gz")

# rounding up the timestamp to 1 second for batch process
adsb0.loc[:, "tsr"] = adsb0.ts.round().astype(int)
ehs0.loc[:, "tsr"] = ehs0.ts.round().astype(int)

ts0 = int(adsb0.tsr.min())
ts1 = int(adsb0.tsr.max())

for t in range(ts0, ts0 + 100):
Example #51
 def compute_func(in_streams, out_streams):
     y = Stream('Bloom output stream')
     bloom_filter_on_stream(in_stream=in_streams[0], out_stream=y, blm=blm)
     stream_to_file(in_stream=y, filename=out_filename)
Example #52
def make_out_stream(in_stream):
    if isinstance(in_stream, Stream):
        return Stream()
    if isinstance(in_stream, StreamArray):
        return StreamArray(dimension=in_stream.dimension,
                           dtype=in_stream.dtype)
Example #53
def extract_from_cab(data, cab_name, resources=None):
    cab = CABFile(Stream(data))

    for i in cab.object_data:
        if i[2][0] == TEXTURE_2D:
            data = cab.decode(i)
            name = bytearray(data['m_Name']['Array'])

            try:
                fmt = TextureFormat(data['m_TextureFormat'])
            except ValueError:
                print('Unable to decode:', data['m_TextureFormat'], name)
                continue
            size = (data['m_Width'], data['m_Height'])

            idat = data['image data']
            if not idat and resources:
                if 'm_StreamData' in data:
                    path = bytes(
                        data['m_StreamData']['path']['Array']).decode()
                    if not path.startswith('archive:'):
                        print('Unknown path:', path)
                        return
                    path = path[8:]
                    if path not in resources:
                        print('Unresolved resources:', path)
                        return
                    idat = resources[path]

            if fmt == TextureFormat.RGBA32:
                im = Image.frombytes('RGBA', size, bytes(idat))
            elif fmt == TextureFormat.RGB24:
                im = Image.frombytes('RGB', size, bytes(idat))
            elif fmt == TextureFormat.RGBA4444:
                im = Image.frombytes('RGBA', size, bytes(idat), 'raw',
                                     'RGBA;4B')

                a, b, g, r = im.split()
                im = Image.merge('RGBA', (r, g, b, a))
            elif fmt == TextureFormat.ETC2_RGBA8:
                ktx = bytes([
                    0xAB, 0x4B, 0x54, 0x58, 0x20, 0x31, 0x31, 0xBB, 0x0D, 0x0A,
                    0x1A, 0x0A
                ])
                ktx += struct.pack(
                    '<14I',
                    *(
                        0x04030201,
                        0,
                        0,
                        0,
                        0x9278,  # GL_COMPRESSED_RGBA8_ETC2_EAC
                        0x1908,  # RGBA
                        data['m_Width'],
                        data['m_Height'],
                        0,
                        0,
                        1,
                        1,
                        0,
                        len(idat)))
                with open('~tmp.ktx', 'wb') as f:
                    f.write(ktx)
                    f.write(idat)
                os.system('etcpack ~tmp.ktx ~tmp.tga')
                os.remove('~tmp.ktx')
                im = Image.open('~tmp.tga').copy()
                os.remove('~tmp.tga')
                os.remove('tmp.ppm')
                os.remove('alphaout.pgm')
            elif fmt == TextureFormat.ETC_RGB4:
                ktx = bytes([
                    0xAB, 0x4B, 0x54, 0x58, 0x20, 0x31, 0x31, 0xBB, 0x0D, 0x0A,
                    0x1A, 0x0A
                ])
                ktx += struct.pack(
                    '<14I',
                    *(
                        0x04030201,
                        0,
                        0,
                        0,
                        0x9274,  # GL_COMPRESSED_RGB8_ETC2
                        0x1907,  # RGB
                        data['m_Width'],
                        data['m_Height'],
                        0,
                        0,
                        1,
                        1,
                        0,
                        len(idat)))
                with open('~tmp.ktx', 'wb') as f:
                    f.write(ktx)
                    f.write(idat)
                os.system('etcpack ~tmp.ktx ~tmp.ppm')
                os.remove('~tmp.ktx')
                im = Image.open('~tmp.ppm').copy()
                os.remove('~tmp.ppm')
            else:
                print('Well crap!')
                quit()

            im = im.transpose(Image.FLIP_TOP_BOTTOM)

            num = 0
            os.makedirs(f'extracted/{cab_name}/', exist_ok=True)
            while True:
                pth = f'extracted/{cab_name}/tex2d_' + name.decode() + (
                    f'_{num}' if num else '') + '.png'
                if not os.path.exists(pth):
                    break
                num += 1
            im.save(pth)
        elif i[2][0] == TEXT_ASSET:
            data = cab.decode(i)

            name = bytearray(data['m_Name']['Array'])
            if b'skel' not in name and b'atlas' not in name:
                continue
            num = 0
            os.makedirs(f'extracted/{cab_name}/', exist_ok=True)
            while True:
                pth = f'extracted/{cab_name}/text_' + name.decode() + (
                    f'_{num}' if num else '')
                if not os.path.exists(pth):
                    break
                num += 1

            with open(pth, 'wb') as f:
                f.write(bytearray(data['m_Script']['Array']))
Example #54
import time
from pathlib import Path

from reader import RTMPStream, SaveFile
from stream import Stream

saveFile = SaveFile(Path("/data/zhangkai/VedioStream"), 2500)
rtmp = RTMPStream("rtmp://0.0.0.0:1935/live")
stream = Stream(input="rtsp://192.168.3.58", outputs=[rtmp, saveFile])
saveFile.start()
rtmp.start()

time.sleep(2)
stream.start()

rtmp.join()
Example #55
    def decode(self, obj, debug=False):
        object_id, data, type_tree = obj
        queue = list(type_tree[3])

        if debug:
            print('-' * 50)

        stream = Stream(data)
        stream.enidan = self.stream.enidan

        structure = {}
        current, align = [structure], [False]
        prev = None
        while queue:
            i = queue.pop(0)
            while i[0] != 0 and i[0] < len(current):
                current.pop(-1)
                if align.pop(-1):
                    t = (4 - (stream.tell() % 4)) % 4
                    assert stream.read_bytes(t) == b'\x00' * t

            multi = False
            if i[2] == b'int':
                d = stream.read_sint()
            elif i[2] == b'unsigned int':
                d = stream.read_int()
            elif i[2] == b'float':
                d = stream.read_float()
            elif i[2] == b'bool':
                d = stream.read_bool()
            elif i[2] == b'char':
                if isinstance(current[-1], list) and len(current[-1]) == 1:
                    multi = True
                    d = stream.read_bytes(current[-1][0])
                else:
                    d = stream.read_bytes(1)
            elif i[2] == b'UInt8':
                if isinstance(current[-1], list) and len(current[-1]) == 1:
                    multi = True
                    d = stream.read_bytes(current[-1][0])
                else:
                    d = stream.read_byte()
            elif i[1]:
                d = []
            else:
                d = None

            if debug:
                print(len(current),
                      str(stream.tell()).rjust(3),
                      ('  ' * i[0] + i[2].decode() + ': ').ljust(23) +
                      i[3].decode().ljust(25), hex(i[-1]),
                      str(d)[:30] + ('' if len(str(d)) <= 30 else '...'))

            if i[0] > len(current) and isinstance(current[-1], dict):
                current.append({})
                current[-2][prev[3].decode()] = current[-1]
                align.append(i[-1] & 0x4000)
            elif i[-1] & 0x4000 and not i[1]:
                t = (4 - (stream.tell() % 4)) % 4
                assert stream.read_bytes(t) == b'\x00' * t

            if isinstance(current[-1], list):
                if multi:
                    current[-1].extend(d)
                else:
                    current[-1].append(d)

                if current[-1] and len(current[-1]) == current[-1][0] + 1:
                    if not current[-1].pop(0):
                        queue.pop(0)
                    current.pop(-1)
                    if align.pop(-1):
                        t = (4 - (stream.tell() % 4)) % 4
                        assert stream.read_bytes(t) == b'\x00' * t
                elif len(current[-1]) > 1:
                    queue.insert(0, i)

            elif d is not None:
                current[-1][i[3].decode()] = d

            if isinstance(d, list):
                current.append(d)
                align.append(i[-1] & 0x4000)

            prev = i
        return structure
Example #56
from attribute import Code
from classfile import ClassFile
from const import Utf8Info
from stream import Stream

if __name__ == "__main__":
    with open('/Users/fxxing/Desktop/rt/java/lang/Byte.class', 'rb') as f:
        stream = Stream(f)
        cf = ClassFile(stream)
        # print(cf)
        for method in cf.methods:
            name: Utf8Info = cf.constants[method.name_index]
            print(name.bytes)
            code = method.get_attribute(Code)
            for ins in code.instructions:
                print('\t', ins)
Example #57
from olympe.messages.ardrone3.PilotingState import PositionChanged
from stream import Stream

if __name__ == '__main__':

    try:
        print('Creating drone')
        drone = get_drone('192.168.42.1')
        time.sleep(2)

        drone.connection()
        print('Established connection')
        time.sleep(2)

        print('Creating stream')
        stream = Stream(drone)
        time.sleep(10)

        print('Starting stream')
        stream.start()
        time.sleep(5)

        print('Stopping stream')
        stream.stop()

    finally:

        time.sleep(10)

        drone.disconnection()
Example #58
from stream import Stream
from tank import Tank
from system import System

for steps in [10, 50, 100, 200, 500]:
    tank_system = System()

    tank1 = Tank('Tank A')
    tank2 = Tank('Tank B', start_salt=20, line_color="-b")

    streams1_in = [Stream(6, 0.2), Stream(1, tank2)]
    streams1_out = [Stream(3, tank1), Stream(4, tank1)]

    for stream in streams1_in:
        tank1.add_stream_in(stream)

    for stream in streams1_out:
        tank1.add_stream_out(stream)

    streams2_in = [Stream(3, tank1)]
    streams2_out = [Stream(1, tank2), Stream(2, tank2)]

    for stream in streams2_in:
        tank2.add_stream_in(stream)

    for stream in streams2_out:
        tank2.add_stream_out(stream)

    tank_system.add_tank(tank1)
    tank_system.add_tank(tank2)
Example #59
import sys
import os
sys.path.append(os.path.abspath("../../IoTPy/"))
sys.path.append(os.path.abspath("../../IoTPy/helper_functions"))
sys.path.append(os.path.abspath("../../IoTPy/core"))
sys.path.append(os.path.abspath("../../IoTPy/agent_types"))

from stream import Stream, StreamArray
from stream import _no_value, _multivalue
from check_agent_parameter_types import *
from recent_values import recent_values
from op import map_window

# In the following examples, x is a stream that is an input stream of
# the agents. 
x = Stream('x')

#----------------------------------------------------------------    
# Example with no state and no additional parameters
#----------------------------------------------------------------
s = Stream()
map_window(func=sum, in_stream=x, out_stream=s,
           window_size=2, step_size=10)
# out_stream[j] = \
# func(in_stream[j*step_size : j*step_size + window_size])
# for all j.
# So:
#       s[j] = x[10*j] + x[10*j+1], for all j
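
# A quick concrete check of the rule above (a sketch, assuming the scheduler
# semantics used by the other examples in this collection):
x.extend(list(range(30)))
Stream.scheduler.step()
# Windows start at indices 0, 10 and 20, so
#   s[0] = x[0] + x[1] = 1,  s[1] = x[10] + x[11] = 21,  s[2] = x[20] + x[21] = 41
assert recent_values(s) == [1, 21, 41]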

#----------------------------------------------------------------    
# Example with state and no additional parameters
Example #60
        def target():
            """
            This is the target function of this process. This function has the
            following steps:
            1. Create in_streams of this process, i.e., the in_streams of
               the compute_func of the process.
            2. Create in_stream_signals, with an in_stream_signal corresponding
               to each in_stream.
            3. Create out_streams of this process, i.e. out_streams of the
               compute_func of this process.
            4. Create the computational agent (compute_func) of this process.
            5. For each out_stream of compute_func, create an agent to copy the
               out_stream to its buffer, and then copy the buffer to each
               in_stream to which it is connected.
            6. For each in_stream of compute_func, create an agent to copy its
               input buffer into the in_stream.
            7. Create the scheduler for this process. Starting the scheduler
               starts the thread that executes compute_func for this agent.
            8. Create the source threads for each source in this process. The
               source_thread gets data from a source, puts the data into a
               buffer, and then copies the buffer to each in_queue to which the
               source is connected.
            9. Start the scheduler and source threads.
            10. Join the scheduler and source threads.
               
            """
            # STEP 1
            # CREATE THE IN_STREAMS OF COMPUTE_FUNC
            # and compute the dict, name_to_stream.
            # in_streams is the list of in_stream of this process.
            self.in_streams = []
            # name_to_stream is a dict where the key is the name of an
            # input or output stream and the value is the stream itself.
            self.name_to_stream = {}
            for in_stream_name, in_stream_type in self.in_stream_names_types:
                in_stream = Stream(name=in_stream_name)
                self.in_streams.append(in_stream)
                self.name_to_stream[in_stream_name] = in_stream

            # STEP 2
            # CREATE IN_STREAM_SIGNALS which is a list of input streams, with
            # one in_stream_signal for each in_stream.
            # in_stream_signal[j] is the stream that tells
            # this process that it has data to be read into
            # in_stream[j]. The name of an in_stream_signal associated with an
            # in_stream called 's' is 's_signal_'.
            self.in_stream_signals = []
            for in_stream in self.in_streams:
                in_stream_signal_name = in_stream.name + '_signal_'
                in_stream_signal = Stream(name=in_stream_signal_name)
                self.in_stream_signals.append(in_stream_signal)
                self.name_to_stream[in_stream_signal_name] = in_stream_signal

            # STEP 3
            # CREATE THE OUT_STREAMS FOR COMPUTE_FUNC.
            # out_streams is a list of the output streams of this process.
            self.out_streams = []
            for out_stream_name, out_stream_type in self.out_stream_names_types:
                out_stream = Stream(out_stream_name)
                self.out_streams.append(out_stream)
                self.name_to_stream[out_stream_name] = out_stream

            # STEP 4
            # CREATE THE COMPUTE AGENT FOR THIS PROCESS.
            self.compute_func(self.in_streams, self.out_streams,
                              **self.keyword_args)

            # STEP 5
            # CREATE AGENTS TO COPY EACH OUT_STREAM OF COMPUTE_FUNC TO IN_STREAMS.
            # Note: Create an agent for each out_stream of compute_func and
            # create an agent for each source. This agent copies the elements
            # in each out_stream into the in_streams to which it is connected.
            # See copy_stream().
            #
            # self.out_stream_names_types is a list of pairs:
            #             (out_stream_name, out_stream_type)
            for out_stream_name, out_stream_type in self.out_stream_names_types:
                # STEP 5.1: Get parameters of each agent.
                # Step 5.1.1 Get the out_stream with the specified name.
                out_stream = self.name_to_stream[out_stream_name]
                # Step 5.1.2 Get the buffer and buffer_ptr into which this out_stream
                # is copied.
                buffer, buffer_ptr = self.out_to_buffer[out_stream_name]
                # Step 5.1.3 Get the list of pairs (q, in_stream_signal_name) connected
                # to this out_stream
                q_and_in_stream_signal_names = \
                    self.out_to_q_and_in_stream_signal_names[out_stream_name]
                # STEP 5.2: Make agent that copies out_stream to the in_streams to
                # which it is connected. The input stream to this agent is out_stream.
                # stream_name is a keyword argument of copy_stream().
                sink_list(func=self.copy_stream,
                          in_stream=out_stream,
                          stream_name=out_stream_name)

            # STEP 6
            # CREATE AGENTS TO COPY BUFFERS TO IN_STREAMS.
            # For each in_stream of this process, create an agent that
            # copies data from the input buffer of this in_stream into
            # the in_stream.
            # This agent subscribes to the in_stream_signal associated
            # with this in_stream. When in_stream_signal gets a message
            # (start, end) this agent copies the buffer segment between
            # start and end into the in_stream.
            # copy_buffer_segment() is the function executed by the agent
            # when a new message arrives. This function extends out_stream
            # with the segment of the buffer specified by the message.
            for in_stream_name, in_stream_type in self.in_stream_names_types:
                in_stream_signal_name = in_stream_name + '_signal_'
                # Get the in_stream_signal stream from its name.
                in_stream_signal = self.name_to_stream[in_stream_signal_name]
                # Get the in_stream from its name
                in_stream = self.name_to_stream[in_stream_name]
                # Get the buffer that feeds this in_stream.
                buffer, buffer_ptr = self.in_to_buffer[in_stream_name]
                # Create agents
                sink_element(func=copy_buffer_segment,
                             in_stream=in_stream_signal,
                             out_stream=in_stream,
                             buffer=buffer,
                             in_stream_type=in_stream_type)

            # STEP 7
            # CREATE A NEW STREAM.SCHEDULER FOR THIS PROCESS
            # Specify the scheduler, input_queue and name_to_stream for
            # this process.
            Stream.scheduler = ComputeEngine(self)
            # input_queue is the queue into which all streams for this
            # process are routed.
            Stream.scheduler.input_queue = self.in_queue
            # The scheduler for a process uses a dict, name_to_stream.
            # name_to_stream[stream_name] is the stream with the name stream_name.
            Stream.scheduler.name_to_stream = self.name_to_stream

            # STEP 8
            # CREATE SOURCE_THREADS
            source_threads = []
            for source_name, description in self.sources.items():
                # thread_creation_func returns a thread which
                # gets data from a source with name source_name and then
                # uses self.copy_stream to copy the data into a
                # buffer associated with this source, and
                # informs all in_streams connected to this source that
                # new data has arrived.
                thread_target = description['func']
                if 'keyword_args' in description.keys():
                    self.source_keyword_args = description['keyword_args']
                else:
                    self.source_keyword_args = {}
                # Get the source_thread for the source with this name.
                #source_thread = thread_creation_func(self.copy_stream, source_name)
                source_thread = self.create_source_thread(
                    thread_target, source_name, **self.source_keyword_args)
                source_threads.append(source_thread)

            # STEP 9
            # START SOURCE THREADS AND START SCHEDULER.
            # Starting the scheduler starts a thread --- the main thread --- of this
            # process. The scheduler thread gets a ready agent from the in_queue of
            # this process and then executes the next step of the agent.
            Stream.scheduler.start()
            for source_thread in source_threads:
                source_thread.start()

            # STEP 10
            # JOIN SOURCE THREADS AND JOIN SCHEDULER.
            for source_thread in source_threads:
                source_thread.join()

            Stream.scheduler.join()
            return