Example #1
    def get_movies(self, page=0, limit=5, filter=None, order='m.title ASC'):
        sql = "select m.title, m.description, m.airdate, m.full_path, m.parent_dir, m.added, m.id, datetime(m.watched, 'localtime'), m.runtime, m.poster from movie m"

        if filter is not None:
            _f = Filter("movie", {"movie": "m"})
            sql += _f.genFilter(filter)

        sql += " ORDER BY %s LIMIT ?, ?" % order

        movies = self.sql.select(sql, (page * limit, limit,))

        html = []
        for movie in movies:
            content = {}
            content["id"] = movie[6]
            content["title"] = movie[0] if movie[0] is not None else movie[4]
            content["desc"] = movie[1] if movie[1] is not None else ""
            content["path"] = "file:///opt/sybhttpd/localhost.drives/NETWORK_SHARE/mediaui/Movies/%s" % (movie[3])
            content["fanart"] = movie[9] if movie[9] is not None else "movie_fanart.png"
            content["added"] = self.convert_date_to_dayssince(movie[5])
            content["watchedicon"] = "/images/unwatched.png" if movie[7] is None else "/images/watched.png"
            content["watcheddate"] = self.convert_watched_to_dayssince(movie[7])
            content["fulltitle"] = content["title"]
            content["runtime"] = self.convert_runtime_seconds(movie[8])
            content["year"] = self.strip_year_from_date(movie[2])

            if len(content["desc"]) > 300:
                content["desc"] = content["desc"][:300] + "..."

            if len(content["title"]) > 30:
                content["title"] = content["title"][:30] + "..."

            html.append(content)

        return html
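A note on the query above: the ORDER BY clause is spliced in with %-formatting (SQL placeholders cannot bind ORDER BY), which is only safe while `order` never carries user input; the LIMIT values, by contrast, are correctly bound as parameters. A minimal sketch of whitelisting the sort key before interpolation (the key-to-clause map here is hypothetical, not part of the original code):

# Hypothetical whitelist: map caller-supplied keys to fixed ORDER BY clauses,
# so only known-safe SQL fragments are ever interpolated into the query.
ALLOWED_ORDERS = {
    "title": "m.title ASC",
    "added": "m.added DESC",
}

def safe_order(key, default="title"):
    # Unknown keys fall back to the default instead of reaching the SQL string.
    return ALLOWED_ORDERS.get(key, ALLOWED_ORDERS[default])

print(safe_order("added"))     # m.added DESC
print(safe_order("evil SQL"))  # m.title ASC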
Example #2
 def onBtnLoadCur(self):
     filter = Filter()
     filter.exec()
     if filter.ret:
         print(filter.sel)
         self.plotCur = filter.sel
Example #3
    def __init__(self, args, current=None):
        '''
        Constructor
        '''
        options = [1.0, 5.0, 10.0]

        Filter.__init__(self, args, options, current)
Example #4
    def __init__(self, args, current=None):
        '''
        Constructor
        '''
        options = list(range(25, 250, 50))

        Filter.__init__(self, args, options, current)
Example #5
def main():
    HOME_PATH = "D:\\SENTIFI_DATA\\filter\\"

    FILE_PATH_TO_JSON_MESSAGES = HOME_PATH + "messages.json"
    FILE_PATH_TO_JSON_CRITERIA = HOME_PATH + "criteria.json"

    json_messages = IOUtils().read_json_data_from_file(FILE_PATH_TO_JSON_MESSAGES)
    json_criteria = IOUtils().read_json_data_from_file(FILE_PATH_TO_JSON_CRITERIA)

    list_criteria = []
    for criteria in json_criteria:
        item = SentifiSearchItem(criteria)
        list_criteria.append(item)

    list_messages = []
    for json_item in json_messages:
        message = SentifiMessage(json_item)
        list_messages.append(message)

    #For each message, compare its soid to criteria's soid
    for message in list_messages:
        for criteria in list_criteria:
            if message.soid == criteria.soid:
                filter = Filter(message, criteria)
                filter.apply()

    for msg in list_messages:
        msg.display()

    return list_messages
Example #6
    def __init__(self, args, current=None):
        '''
        Constructor
        '''
        options = ["Exclude loans with public records"]

        Filter.__init__(self, args, options, current)
Example #7
    def __init__(self, args, current=None):
        '''
        Constructor
        '''
        options = [12, 24, 60]

        Filter.__init__(self, args, options, current)
Example #8
    def __init__(self, args, current=None):
        '''
        Constructor
        '''
        options = [float(n) for n in range(5000, 30000, 5000)]

        Filter.__init__(self, args, options, current)
Example #9
    def __init__(self, args, current=None):
        '''
        Constructor
        '''
        purpose_bitmap = {'other':              1 << 0,
                          'debt_consolidation': 1 << 1,
                          'educational':        1 << 2,
                          'credit_card':        1 << 3,
                          'car':                1 << 4,
                          'home_improvement':   1 << 5,
                          'small_business':     1 << 6,
                          'vacation':           1 << 7,
                          'moving':             1 << 8,
                          'wedding':            1 << 9,
                          'house':              1 << 10,
                          'medical':            1 << 11,
                          'major_purchase':     1 << 12,
                          'renewable_energy':   1 << 13,
                          }

        self.conversion_table = purpose_bitmap.copy()

        options = self.powerBitSet(purpose_bitmap.values())

        Filter.__init__(self, args, options, current)
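Because every purpose above owns a single bit, any combined option value can be decoded back into purpose names with a bitwise AND; a standalone sketch using an abridged copy of the bitmap from the constructor:

# Abridged copy of the purpose bitmap defined in the constructor above.
purpose_bitmap = {'other':              1 << 0,
                  'debt_consolidation': 1 << 1,
                  'credit_card':        1 << 3}

def decode_purposes(mask, bitmap=purpose_bitmap):
    # A purpose is selected when its bit is set in the combined mask.
    return sorted(name for name, bit in bitmap.items() if mask & bit)

print(decode_purposes((1 << 0) | (1 << 3)))  # ['credit_card', 'other']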
Example #10
 def getGrayscaleImagesImp(images):
     if isinstance(images, collections.abc.Iterable):
         grayscaleImages = []
         for image in images:
             grayscaleImages.append(Filter.getGrayscale(image))
         return grayscaleImages
     else:
         return Filter.getGrayscale(images)
Example #11
 def getCannyImp(images):
     if isinstance(images, collections.abc.Iterable):
         cannyImages = []
         for image in images:
             cannyImages.append(Filter.getCanny(image))
         return cannyImages
     else:
         return Filter.getCanny(images)
Example #12
    def __init__(self, args, current=None):
        '''
        Construct a set similar to this based on the passed in grades
         'A',
         'AB',
         'ABC',
         'ABCD',
         'ABCDE',
         'ABCDEF',
         'ABCDEFG',
         'B',
         'BC',
         'BCD',
         'BCDE',
         'BCDEF',
         'BCDEFG',
         'C',
         'CD',
         'CDE',
         'CDEF',
         'CDEFG',
         'D',
         'DE',
         'DEF',
         'DEFG',
         'E',
         'EF',
         'EFG',
         'F',
         'FG',
         'G'
        '''

        options = []
        grades_bitmap = {'A': 1 << 0,
                         'B': 1 << 1,
                         'C': 1 << 2,
                         'D': 1 << 3,
                         'E': 1 << 4,
                         'F': 1 << 5,
                         'G': 1 << 6,
                        }

        self.conversion_table = grades_bitmap.copy()
        self.reverse_table = {v:k for k, v in grades_bitmap.items()}
        num_grades = len(args.grades)
        for i in range(1, num_grades + 1):
            for j in range(num_grades):
                if (j + i) <= num_grades:
                    grades = args.grades[j:j + i]
                    grades_bit_value = 0
                    for grade in grades:
                        grades_bit_value += grades_bitmap[grade]
                    options.append(grades_bit_value)
                    self.conversion_table[grades] = grades_bit_value
                    self.reverse_table[grades_bit_value] = grades

        Filter.__init__(self, args, options, current)
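The nested loop above enumerates every contiguous run of grades and ORs the single-letter bits together, which is exactly the set the docstring lists; a standalone sketch of the same enumeration, assuming args.grades is the string 'ABCDEFG':

grades = 'ABCDEFG'
grades_bitmap = {g: 1 << i for i, g in enumerate(grades)}

options = []
for length in range(1, len(grades) + 1):           # run length: 1..7
    for start in range(len(grades) - length + 1):  # run start position
        run = grades[start:start + length]
        value = 0
        for g in run:
            value |= grades_bitmap[g]              # OR the per-letter bits
        options.append((run, value))

print(options[:4])  # [('A', 1), ('B', 2), ('C', 4), ('D', 8)]
print(options[-1])  # ('ABCDEFG', 127)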
Example #13
    def __init__(self, args, current=None):
        '''
        Constructor
        '''
        options = [0, 1, 2, 3, 4, 5]

        Filter.__init__(self, args, options, current)

        self.reverse_table = ["MORTGAGE", "OWN", "RENT", "OTHER", "NONE", "NULL"]
Example #14
    def onBtnLoadRef(self):
        filter = Filter()
        filter.exec()
        if filter.ret:
            print(filter.sel)
            self.plotsRef = filter.sel
Example #15
    def __init__(self, args, current=None):
        '''
        Constructor
        '''
        options = [
                    1 << 0,  # 0
                    1 << 0 | 1 << 1 | 1 << 2 | 1 << 3,  # 0, 1-3
                    1 << 0 | 1 << 1 | 1 << 2 | 1 << 3 | 1 << 4,  # 0, 1-3, 4
                    1 << 1 | 1 << 2 | 1 << 3,  # 1-3
                    1 << 1 | 1 << 2 | 1 << 3 | 1 << 4,  # 1-3, 4
                    1 << 4,  # 4
                    1 << 5 | 1 << 6 | 1 << 7 | 1 << 8 | 1 << 9 | 1 << 10 | 1 << 11,  # 5 - 11
                    ]

        Filter.__init__(self, args, options, current)
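Each option above is a union of single-bit flags (the comments suggest bucket ranges such as "0" and "1-3"), so checking whether a record's bucket falls inside a selected option would be a bitwise AND; a minimal sketch under that assumption:

# Assumption: a record carries exactly one bucket bit, and an option matches
# when that bit is inside the option's mask.
selected_option = 1 << 0 | 1 << 1 | 1 << 2 | 1 << 3  # "0, 1-3"
record_bucket = 1 << 2
print(bool(selected_option & record_bucket))  # True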
Example #16
    def added(self, id = None, page=0, play='false', f = None, date = None, dir = None):
        try:
            page = int(page)
        except (TypeError, ValueError):
            page = 0
        limit = 17

        if play == 'true' and id is not None:
            if date is None:
                id = None
                play = 'false'
            else:
                try:
                    callingDate = datetime.strptime(date, "%a %b %d %H:%M:%S %Y %Z")
                    currentDate = datetime.utcnow() - timedelta(seconds = 60)
                    # Sat Feb 23 19:35:57 2013 GMT popcorn hour example call
                    #callingDate = datetime.utcfromtimestamp(date)
                    if callingDate < currentDate:
                        id = None
                        play = 'false'
                        logger.warn('added', 'Play request time < now - 60 (%s < %s).', str(callingDate), str(currentDate))
                except ValueError:
                    id = None
                    play = 'false'
                    logger.error('added', 'Movie.py', 'Error converting UTC Javascript date for %s.', date)

        if id is not None:
            self.content.update_media_watched("movie",id)

        filters = ()
        if f is not None:
            pFilter = Filter.ParseFilter(f)
            if pFilter is not None:
                filters += pFilter,

        movies=self.content.get_movies(page, limit, filters, 'm.added DESC')
        total = self.content.get_total_items("movie m", "m.id", None, Filter("movie", {"movie": "m"}, filters))
        pPage = self.content.get_prev_page(page, limit, "movie", total)
        nPage = self.content.get_next_page(page, limit, "movie", total)
        filterLetters = self.content.get_available_letters("movie m", "m.file_name", None, None)
        tmpl = lookup.get_template("movies/movies.html")

        startOn = 'moviename_0'
        if id is not None and play == 'true':
            startOn = 'movieid_' + id
        elif dir is not None:
            startOn = 'moviename_' + str(len(movies)-1)

        return tmpl.render(movies = movies
            , prevPage = pPage
            , nextPage = nPage
            , totalPages = int(ceil(float(total) / limit))
            , page = page
            , play = play
            , selected = Filter.getFilterValue(Filter.FILTER_LETTER, filters)
            , filterUrl = "f=" + (f if f is not None else '')
            , filterLetters = filterLetters
            , pageName = 'added'
            , id = id
            , startOn = startOn)
Example #17
    def show(self, page=0, f = None):
        limit = 17
        try:
            page = int(page)
        except (TypeError, ValueError):
            page = 0

        filters = ()
        if f is not None:
            pFilter = Filter.ParseFilter(f)
            if pFilter is not None:
                filters += pFilter,

        content = self.content.get_tv_shows_html(page, limit, filters)

        total = self.content.get_total_items("show s", "s.id", None, Filter("tv", {"episode": "e", "show": "s"}, filters))
        prev = self.content.get_prev_page(page, limit, "episode", total)
        next = self.content.get_next_page(page, limit, "episode", total)
        content["total"] = int(ceil(float(total) / limit))
        filterLetters = self.content.get_available_letters("show s", "s.title", None, None)

        tmpl = lookup.get_template("tv/show.html")
        return tmpl.render(content = content
            , prev = prev
            , next = next
            , selected = Filter.getFilterValue(Filter.FILTER_LETTER_EPISODE, filters)
            , filterUrl = "f=" + (f if f is not None else '')
            , filterLetters = filterLetters)
Example #18
    def watchme(self, page=0, f=None):
        try:
            page = int(page)
        except (TypeError, ValueError):
            page = 0
        limit = 17
        filter = ((Filter.FILTER_NOTSEEN, ),)
        if f is not None:
            pFilter = Filter.ParseFilter(f)
            if pFilter is not None:
                filter += pFilter,

        episodes=self.content.get_latest_tv_html(page, limit, filter)
        total = self.content.get_total_items("episode e", "e.id", [" inner join show s on s.id = e.show "], filter)
        pPage = self.content.get_prev_page(page, limit, "episode", total)
        nPage = self.content.get_next_page(page, limit, "episode", total)
        filterLetters = self.content.get_available_letters("episode e", "s.title", [ " inner join show s on s.id = e.show "], filter)

        tmpl = lookup.get_template("tv/watchme.html")
        return tmpl.render(episodes=episodes
            , prevPage = pPage
            , nextPage = nPage
            , totalPages = int(ceil(float(total) / limit))
            , page = page
            , selected = Filter.getFilterValue(Filter.FILTER_LETTER_EPISODE, filter)
            , filterUrl = "f=" + (f if f is not None else '')
            , filterLetters = filterLetters)
Example #19
 def test_list(self):
     event = Event(0)
     event.the_list = range(10)
     cfg_ana = cfg.Analyzer(
         Filter,
         output = 'filtered',
         input_objects = 'the_list',
         filter_func = lambda x : x%2 == 0
         )
     cfg_comp = cfg.Component(
         'test',
         files = []
         )
     filter = Filter(cfg_ana, cfg_comp, self.outdir)
     filter.process(event)
     self.assertItemsEqual(event.filtered, [0,2,4,6,8])
Example #20
 def test_dict(self):
     event = Event(0)
     event.the_dict = dict( [ (x, x**2) for x in range(10) ] )
     cfg_ana = cfg.Analyzer(
         Filter,
         output = 'filtered',
         input_objects = 'the_dict',
         filter_func = lambda x : x == 9
         )
     cfg_comp = cfg.Component(
         'test',
         files = []
         )
     filter = Filter(cfg_ana, cfg_comp, self.outdir)
     filter.process(event)
     self.assertDictEqual(event.filtered, {3:9})
Example #21
def recordOutput():
	output = (
		"\nTOTAL FILTER TIME:\t " + str(time) +
		"\n\nAVERAGE TIME PER FRAME:\t " + str(time / numFrames) +
		"\n\nAVERAGE FRAMES PER SECOND:\t " + str(1 / (time / numFrames)) +
		"\n\nTOTAL TIME INCLUDING CAPTURE:\t " + str(total) +
		"\n\nAVERAGE TIME PER FRAME INCLUDING CAPTURE:\t " + str(total / numFrames) +
		"\n\nAVERAGE FRAMES PER SECOND INCLUDING CAPTURE:\t " + str(1 / (total / numFrames)) +
		"\n\nTOTAL KNOWN CAPTURE TIME:\t " + str(timeCapture) +
		"\n\nAVERAGE CAPTURE TIME PER FRAME:\t " + str(timeCapture / numFrames) +
		"\n\nAVERAGE CAPTURE FPS:\t " + str(1 / (timeCapture / numFrames)) +
		"\n\nSTOPPED AT FRAME:\t " + str(numFrames) + " OUT OF: " + str(controlFrames) + "\n\n"
	)
	print output
	if doOutput:
		# Append mode creates the file if it does not already exist,
		# so no separate open/close existence check is needed.
		f = open(outputDir, "a")
		f.write(output)
		f.write("================================================================")
		f.write("\n\n")
		f.write("================================================================")
		f.close()
	queue.close()
	queue.join_thread()
	p.join()
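The per-frame time and FPS figures printed above are reciprocals of each other; a quick worked check with stand-in numbers (not from the original run):

elapsed, numFrames = 4.0, 120       # stand-in totals
print(elapsed / numFrames)          # 0.0333... seconds per frame
print(1 / (elapsed / numFrames))    # 30.0 frames per second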
Example #22
 def confirmPush(self):
     limbList = []
     for p in self.selection:
         if self.selection[p] == 1:
             limbList.append(p)
     self.pbar.setValue(0)
     homedir = os.getcwd()
     filt = Filter(homedir)
     filt.dataProcess()
     self.pbar.setValue(25)
     select = RandomSelector(homedir)
     select.dataProcess()
     self.pbar.setValue(50)
     st = StaticAnalyzer(homedir,limbList)
     st.dataProcess()
     self.pbar.setValue(75)
     c = Classifier(homedir)
     count,rate,total,result = c.staticClassify()
     self.pbar.setValue(100)
     reply = QtGui.QMessageBox.question(self, 'Static Analysis Result',"Total number is %d"%(total)+"\nCorrect number is %d"%(count)+"\nCorrect rate is %f"%(100*rate)+"%", QtGui.QMessageBox.Yes)
Example #23
    def get_new_filter(self):
        """
        Set self.fil to a new Filter reflecting the current status of the
        GUI elements
        """

        # Get the status of GUI elements
        # TODO: support for one date field but not the other
        # (should be done mostly in Filter class)
        if self.begin_date_field.text \
           and self.begin_date_field.text != DEFAULT_BEGIN_DATE_TEXT \
           and self.end_date_field.text \
           and self.end_date_field.text != DEFAULT_END_DATE_TEXT:
            daterange = (datetime.strptime(self.begin_date_field.text,
                                           DATEFORMAT),
                         datetime.strptime(self.end_date_field.text,
                                           DATEFORMAT))
        else:
            daterange = None

        # User
        user = [self.userids[u] for u, cb in self.user_checkboxes.iteritems() \
                if cb.value]
        if len(user) == len(self.user_checkboxes): #all users selected
            user = None

        # Server
        server = [self.serverids[s] for s, cb in self.server_checkboxes.iteritems() \
                  if cb.value]
        if len(server) == len(self.server_checkboxes):
            server = None

        # Search String List
        search_string = SearchStringList(self.any_all_radiogroup.value)
        search_string.extend([fld.text for fld in self.search_string_fields \
                              if fld.text and not fld.text.startswith('Query Search String ')])

        # Query types
        query_type = [qtype for qtype, cb in self.query_type_checkboxes.iteritems() if cb.value]
        query_type = clean_list(query_type)
        if not query_type or len(query_type) == len(querytypes):
            #all types selected
            query_type = None

        # Create the filter object
        self.fil = Filter(daterange = daterange,
                          user = user,
                          server = server,
                          search_string = search_string,
                          query_type = query_type,
                          negate = self.negate.value)
Example #24
    def get_latest_tv(self, page=0, limit=5, _filter=None):
        sql = """
            select s.title, e.season, e.episode, e.title, e.full_path, e.file_name, e.description, e.added
            ,s.fanart, s.banner, s.poster, datetime(e.watched, 'localtime'), e.id
            from episode e
            inner join show s on s.id = e.show
            """

        if _filter is not None:
            f = Filter("tv", {"episode": "e", "show": "s"})
            sql += f.genFilter(_filter)

        sql += """
            order by added desc
            limit ?, ?"""

        episodes = self.sql.select(sql, (page * limit, limit,))
        content = []

        for episode in episodes:
            content.append(
                {"show": episode[0]
                    , "season": episode[1]
                    , "episode": episode[2]
                    , "title": episode[3]
                    , "full_path": episode[4]
                    , "file_name": episode[5]
                    , "desc": episode[6]
                    , "added": episode[7]
                    , "fanart": episode[8]
                    , "banner": episode[9]
                    , "poster": episode[10]
                    , "watched": episode[11]
                    , "id": episode[12]
                }
            )

        return content
Example #25
    def get_tv_shows_html(self, page=0, limit=5, f=None):
        sql = """
                SELECT s.title, s.fanart
                , (select count(e.id) from episode e where e.show = s.id)
                , (select group_concat(distinct e2.season) from episode e2 where e2.show = s.id)
                , s.id
                , (select MAX(e3.added) from episode e3 where e3.show = s.id order by e3.added DESC)
                FROM show s
              """

        if f is not None:
            _f = Filter("tv", {"episode": None, "show": "s"})
            sql += _f.genFilter(f)

        sql += " ORDER BY s.title ASC LIMIT ?, ?"

        results = self.sql.select(sql, (page * limit, limit,))

        content = {
            "shows": []
            , "total": 0
            , "page": page
        }

        for result in results:
            content["shows"].append(
                {
                    "fulltitle": result[0] #title
                    , "fanart": result[1] if result[1] is not None else "tv_fanart.png" #fanart
                    , "epscount": result[2] #episode count
                    , "shorttitle": result[0][:30]
                    , "seasons": sorted(result[3].split(","), key=int) if result[3] is not None else ""
                    , "id": result[4]
                    , "lastadded": self.convert_date_to_dayssince(result[5])
                }
            )

        return content
Example #26
def filter_entries(filter_str=''):
    """
    Enter and handle filter mode.

    Prompts for filter conditions to filter the list of entries and commands to handle entries in bulk.

    :param filter_str:      An optional initial filter condition string.
    :return:                None.
    """
    f = Filter(list(diary.entries))  # Pass a copy of diary.entries to prevent skipping when using `remove`
    if filter_str:
        handle_add_filter_condition(f, filter_str)
    else:
        display_filters(f)

    while True:
        cmd = get_input('{} (filter mode)> '.format(PROMPT),
                        condition=lambda x: x != '',  # Do not accept blank string
                        err_msg='')  # No error message if blank string is entered

        if f.is_valid_condition(cmd):
            handle_add_filter_condition(f, cmd)
        elif cmd in ['quit', 'q']:
            break
        elif cmd in ['clear', 'c']:
            f.reset()
            display_filters(f)
        elif cmd in ['l', 'list']:
            display_filters(f)
        else:  # Otherwise a diary command has been entered
            cmd, f_args = process_input(cmd)  # Separate command and arguments
            if cmd in [remove, edit, priority, extend]:  # These are the only commands available in filter mode
                for obj in f.objects:
                    new_args = '{} {}'.format(obj.uid, f_args)  # Insert UID of each entry one at a time
                    cmd(new_args)
                break
Example #27
 def __init__(self):
     '''
     Constructor initiates the filter, along with the taggers which will be used,
     and loads the corpora.
     '''   
     self.FF = Filter()
     
     try:
          #Attempt to open .pkl file and load.
         input = open("./Corpus/Brown-Uni.pkl", 'rb')
         self.unigram_tagger = load(input)
         input.close() 
     except IOError as e:   
         self.brown_tagged_sents = nltk.corpus.brown.tagged_sents(simplify_tags=True)
         t0 = nltk.DefaultTagger('NN')
         t1 = nltk.UnigramTagger(self.brown_tagged_sents, backoff=t0)
         t2 = nltk.BigramTagger(self.brown_tagged_sents, backoff=t1)
         self.unigram_tagger = nltk.UnigramTagger(self.brown_tagged_sents, backoff=t2)
         
         output = open("./Corpus/Brown-Uni.pkl", 'wb')
         dump(self.unigram_tagger, output, -1)
         output.close()
Example #28
    def __init__(self, config, rawserver):
        self.config = config
        self.response_size = config['tracker_response_size']
        self.dfile = config['tracker_dfile']
        self.natcheck = config['tracker_nat_check']
        favicon = config['tracker_favicon']
        self.parse_dir_interval = config['tracker_parse_dir_interval']
        self.favicon = None
        if favicon:
            try:
                h = open(favicon, 'rb')
                self.favicon = h.read()
                h.close()
            except:
                print '**warning** specified favicon file -- %s -- does not exist.' % favicon

        self.rawserver = rawserver
        self.cached = {}
        self.cached_t = {}
        self.times = {}
        self.state = {}
        self.seedcount = {}
        self.allowed_IPs = None
        self.banned_IPs = None
        if config['tracker_allowed_ips'] or config['tracker_banned_ips']:
            self.allowed_ip_mtime = 0
            self.banned_ip_mtime = 0
            self.read_ip_lists()
        self.only_local_override_ip = config['tracker_only_local_override_ip']
        if self.only_local_override_ip == 2:
            self.only_local_override_ip = not config['tracker_nat_check']
        if exists(self.dfile):
            try:
                h = open(self.dfile, 'rb')
                if self.config['tracker_dfile_format'] == ITRACKDBFORMAT_BENCODE:
                    ds = h.read()
                    tempstate = bdecode(ds)
                else:
                    tempstate = pickle.load(h)
                h.close()
                if not tempstate.has_key('peers'):
                    tempstate = {'peers': tempstate}
                statefiletemplate(tempstate)
                self.state = tempstate
            except:
                print '**warning** statefile ' + self.dfile + ' corrupt; resetting'

        self.downloads = self.state.setdefault('peers', {})
        self.completed = self.state.setdefault('completed', {})
        self.becache = {}
        for infohash, ds in self.downloads.items():
            self.seedcount[infohash] = 0
            for x, y in ds.items():
                ip = y['ip']
                if self.allowed_IPs and not self.allowed_IPs.includes(ip) or self.banned_IPs and self.banned_IPs.includes(ip):
                    del ds[x]
                    continue
                if not y['left']:
                    self.seedcount[infohash] += 1
                if y.get('nat', -1):
                    continue
                gip = y.get('given_ip')
                if is_valid_ip(gip) and (not self.only_local_override_ip or local_IPs.includes(ip)):
                    ip = gip
                self.natcheckOK(infohash, x, ip, y['port'], y['left'])

        for x in self.downloads.keys():
            self.times[x] = {}
            for y in self.downloads[x].keys():
                self.times[x][y] = 0

        self.trackerid = createPeerID('-T-')
        seed(self.trackerid)
        self.reannounce_interval = config['tracker_reannounce_interval']
        self.save_dfile_interval = config['tracker_save_dfile_interval']
        self.show_names = config['tracker_show_names']
        rawserver.add_task(self.save_state, self.save_dfile_interval)
        self.prevtime = clock()
        self.timeout_downloaders_interval = config['tracker_timeout_downloaders_interval']
        rawserver.add_task(self.expire_downloaders, self.timeout_downloaders_interval)
        self.logfile = None
        self.log = None
        if config['tracker_logfile'] and config['tracker_logfile'] != '-':
            try:
                self.logfile = config['tracker_logfile']
                self.log = open(self.logfile, 'a')
                sys.stdout = self.log
                print '# Log Started: ', isotime()
            except:
                print '**warning** could not redirect stdout to log file: ', sys.exc_info()[0]

        if config['tracker_hupmonitor']:

            def huphandler(signum, frame, self = self):
                try:
                    self.log.close()
                    self.log = open(self.logfile, 'a')
                    sys.stdout = self.log
                    print '# Log reopened: ', isotime()
                except:
                    print '**warning** could not reopen logfile'

            signal.signal(signal.SIGHUP, huphandler)
        self.allow_get = config['tracker_allow_get']
        self.t2tlist = T2TList(config['tracker_multitracker_enabled'], self.trackerid, config['tracker_multitracker_reannounce_interval'], config['tracker_multitracker_maxpeers'], config['tracker_multitracker_http_timeout'], self.rawserver)
        if config['tracker_allowed_list']:
            if config['tracker_allowed_dir']:
                print '**warning** allowed_dir and allowed_list options cannot be used together'
                print '**warning** disregarding allowed_dir'
                config['tracker_allowed_dir'] = ''
            self.allowed = self.state.setdefault('allowed_list', {})
            self.allowed_list_mtime = 0
            self.parse_allowed()
            self.remove_from_state('allowed', 'allowed_dir_files')
            if config['tracker_multitracker_allowed'] == ITRACKMULTI_ALLOW_AUTODETECT:
                config['tracker_multitracker_allowed'] = ITRACKMULTI_ALLOW_NONE
            config['tracker_allowed_controls'] = 0
        elif config['tracker_allowed_dir']:
            self.allowed = self.state.setdefault('allowed', {})
            self.allowed_dir_files = self.state.setdefault('allowed_dir_files', {})
            self.allowed_dir_blocked = {}
            self.parse_allowed()
            self.remove_from_state('allowed_list')
        else:
            self.allowed = None
            self.remove_from_state('allowed', 'allowed_dir_files', 'allowed_list')
            if config['tracker_multitracker_allowed'] == ITRACKMULTI_ALLOW_AUTODETECT:
                config['tracker_multitracker_allowed'] = ITRACKMULTI_ALLOW_NONE
            config['tracker_allowed_controls'] = 0
        self.uq_broken = unquote('+') != ' '
        self.keep_dead = config['tracker_keep_dead']
        self.Filter = Filter(rawserver.add_task)
        aggregator = config['tracker_aggregator']
        if aggregator == 0:
            self.is_aggregator = False
            self.aggregator_key = None
        else:
            self.is_aggregator = True
            if aggregator == 1:
                self.aggregator_key = None
            else:
                self.aggregator_key = aggregator
            self.natcheck = False
        send = config['tracker_aggregate_forward']
        if not send:
            self.aggregate_forward = None
        else:
            try:
                self.aggregate_forward, self.aggregate_password = send
            except:
                self.aggregate_forward = send
                self.aggregate_password = None

        self.cachetime = 0
        self.track_cachetimeupdate()
Example #29
class Tracker():

    def __init__(self, config, rawserver):
        self.config = config
        self.response_size = config['tracker_response_size']
        self.dfile = config['tracker_dfile']
        self.natcheck = config['tracker_nat_check']
        favicon = config['tracker_favicon']
        self.parse_dir_interval = config['tracker_parse_dir_interval']
        self.favicon = None
        if favicon:
            try:
                h = open(favicon, 'rb')
                self.favicon = h.read()
                h.close()
            except:
                print '**warning** specified favicon file -- %s -- does not exist.' % favicon

        self.rawserver = rawserver
        self.cached = {}
        self.cached_t = {}
        self.times = {}
        self.state = {}
        self.seedcount = {}
        self.allowed_IPs = None
        self.banned_IPs = None
        if config['tracker_allowed_ips'] or config['tracker_banned_ips']:
            self.allowed_ip_mtime = 0
            self.banned_ip_mtime = 0
            self.read_ip_lists()
        self.only_local_override_ip = config['tracker_only_local_override_ip']
        if self.only_local_override_ip == 2:
            self.only_local_override_ip = not config['tracker_nat_check']
        if exists(self.dfile):
            try:
                h = open(self.dfile, 'rb')
                if self.config['tracker_dfile_format'] == ITRACKDBFORMAT_BENCODE:
                    ds = h.read()
                    tempstate = bdecode(ds)
                else:
                    tempstate = pickle.load(h)
                h.close()
                if not tempstate.has_key('peers'):
                    tempstate = {'peers': tempstate}
                statefiletemplate(tempstate)
                self.state = tempstate
            except:
                print '**warning** statefile ' + self.dfile + ' corrupt; resetting'

        self.downloads = self.state.setdefault('peers', {})
        self.completed = self.state.setdefault('completed', {})
        self.becache = {}
        for infohash, ds in self.downloads.items():
            self.seedcount[infohash] = 0
            for x, y in ds.items():
                ip = y['ip']
                if self.allowed_IPs and not self.allowed_IPs.includes(ip) or self.banned_IPs and self.banned_IPs.includes(ip):
                    del ds[x]
                    continue
                if not y['left']:
                    self.seedcount[infohash] += 1
                if y.get('nat', -1):
                    continue
                gip = y.get('given_ip')
                if is_valid_ip(gip) and (not self.only_local_override_ip or local_IPs.includes(ip)):
                    ip = gip
                self.natcheckOK(infohash, x, ip, y['port'], y['left'])

        for x in self.downloads.keys():
            self.times[x] = {}
            for y in self.downloads[x].keys():
                self.times[x][y] = 0

        self.trackerid = createPeerID('-T-')
        seed(self.trackerid)
        self.reannounce_interval = config['tracker_reannounce_interval']
        self.save_dfile_interval = config['tracker_save_dfile_interval']
        self.show_names = config['tracker_show_names']
        rawserver.add_task(self.save_state, self.save_dfile_interval)
        self.prevtime = clock()
        self.timeout_downloaders_interval = config['tracker_timeout_downloaders_interval']
        rawserver.add_task(self.expire_downloaders, self.timeout_downloaders_interval)
        self.logfile = None
        self.log = None
        if config['tracker_logfile'] and config['tracker_logfile'] != '-':
            try:
                self.logfile = config['tracker_logfile']
                self.log = open(self.logfile, 'a')
                sys.stdout = self.log
                print '# Log Started: ', isotime()
            except:
                print '**warning** could not redirect stdout to log file: ', sys.exc_info()[0]

        if config['tracker_hupmonitor']:

            def huphandler(signum, frame, self = self):
                try:
                    self.log.close()
                    self.log = open(self.logfile, 'a')
                    sys.stdout = self.log
                    print '# Log reopened: ', isotime()
                except:
                    print '**warning** could not reopen logfile'

            signal.signal(signal.SIGHUP, huphandler)
        self.allow_get = config['tracker_allow_get']
        self.t2tlist = T2TList(config['tracker_multitracker_enabled'], self.trackerid, config['tracker_multitracker_reannounce_interval'], config['tracker_multitracker_maxpeers'], config['tracker_multitracker_http_timeout'], self.rawserver)
        if config['tracker_allowed_list']:
            if config['tracker_allowed_dir']:
                print '**warning** allowed_dir and allowed_list options cannot be used together'
                print '**warning** disregarding allowed_dir'
                config['tracker_allowed_dir'] = ''
            self.allowed = self.state.setdefault('allowed_list', {})
            self.allowed_list_mtime = 0
            self.parse_allowed()
            self.remove_from_state('allowed', 'allowed_dir_files')
            if config['tracker_multitracker_allowed'] == ITRACKMULTI_ALLOW_AUTODETECT:
                config['tracker_multitracker_allowed'] = ITRACKMULTI_ALLOW_NONE
            config['tracker_allowed_controls'] = 0
        elif config['tracker_allowed_dir']:
            self.allowed = self.state.setdefault('allowed', {})
            self.allowed_dir_files = self.state.setdefault('allowed_dir_files', {})
            self.allowed_dir_blocked = {}
            self.parse_allowed()
            self.remove_from_state('allowed_list')
        else:
            self.allowed = None
            self.remove_from_state('allowed', 'allowed_dir_files', 'allowed_list')
            if config['tracker_multitracker_allowed'] == ITRACKMULTI_ALLOW_AUTODETECT:
                config['tracker_multitracker_allowed'] = ITRACKMULTI_ALLOW_NONE
            config['tracker_allowed_controls'] = 0
        self.uq_broken = unquote('+') != ' '
        self.keep_dead = config['tracker_keep_dead']
        self.Filter = Filter(rawserver.add_task)
        aggregator = config['tracker_aggregator']
        if aggregator == 0:
            self.is_aggregator = False
            self.aggregator_key = None
        else:
            self.is_aggregator = True
            if aggregator == 1:
                self.aggregator_key = None
            else:
                self.aggregator_key = aggregator
            self.natcheck = False
        send = config['tracker_aggregate_forward']
        if not send:
            self.aggregate_forward = None
        else:
            try:
                self.aggregate_forward, self.aggregate_password = send
            except:
                self.aggregate_forward = send
                self.aggregate_password = None

        self.cachetime = 0
        self.track_cachetimeupdate()

    def track_cachetimeupdate(self):
        self.cachetime += 1
        self.rawserver.add_task(self.track_cachetimeupdate, 1)

    def aggregate_senddata(self, query):
        url = self.aggregate_forward + '?' + query
        if self.aggregate_password is not None:
            url += '&password=' + self.aggregate_password
        rq = Thread(target=self._aggregate_senddata, args=[url])
        rq.setName('AggregateSendData' + rq.getName())
        rq.setDaemon(True)
        rq.start()

    def _aggregate_senddata(self, url):
        try:
            h = urlopen(url)
            h.read()
            h.close()
        except:
            return

    def get_infopage(self):
        try:
            if not self.config['tracker_show_infopage']:
                return (404,
                 'Not Found',
                 {'Content-Type': 'text/plain',
                  'Pragma': 'no-cache'},
                 alas)
            red = self.config['tracker_infopage_redirect']
            if red:
                return (302,
                 'Found',
                 {'Content-Type': 'text/html',
                  'Location': red},
                 '<A HREF="' + red + '">Click Here</A>')
            s = StringIO()
            s.write('<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">\n<html><head><title>freestream Tracker Statistics</title>\n')
            if self.favicon is not None:
                s.write('<link rel="shortcut icon" href="/favicon.ico">\n')
            s.write('</head>\n<body>\n<h3>freestream Tracker Statistics</h3>\n')
            if self.config['tracker_allowed_dir']:
                if self.show_names:
                    names = [ (self.allowed[hash]['name'], hash) for hash in self.allowed.keys() ]
                else:
                    names = [ (None, hash) for hash in self.allowed.keys() ]
            else:
                names = [ (None, hash) for hash in self.downloads.keys() ]
            if not names:
                s.write('<p>Not tracking any files yet...</p>\n')
            else:
                names.sort()
                tn = 0
                tc = 0
                td = 0
                tt = 0
                ts = 0
                nf = 0
                if self.config['tracker_allowed_dir'] and self.show_names:
                    s.write('<table summary="files" border="1">\n<tr><th>info hash</th><th>torrent name</th><th align="right">size</th><th align="right">complete</th><th align="right">downloading</th><th align="right">downloaded</th><th align="right">transferred</th></tr>\n')
                else:
                    s.write('<table summary="files">\n<tr><th>info hash</th><th align="right">complete</th><th align="right">downloading</th><th align="right">downloaded</th></tr>\n')
                for name, hash in names:
                    l = self.downloads[hash]
                    n = self.completed.get(hash, 0)
                    tn = tn + n
                    c = self.seedcount[hash]
                    tc = tc + c
                    d = len(l) - c
                    td = td + d
                    if self.config['tracker_allowed_dir'] and self.show_names:
                        if self.allowed.has_key(hash):
                            nf = nf + 1
                            sz = self.allowed[hash]['length']
                            ts = ts + sz
                            szt = sz * n
                            tt = tt + szt
                            if self.allow_get == 1:
                                url = self.allowed[hash].get('url')
                                if url:
                                    linkname = '<a href="' + url + '">' + name + '</a>'
                                else:
                                    linkname = '<a href="/file?name=' + quote(name) + '">' + name + '</a>'
                            else:
                                linkname = name
                            s.write('<tr><td><code>%s</code></td><td>%s</td><td align="right">%s</td><td align="right">%i</td><td align="right" class="downloading">%i</td><td align="right">%i</td><td align="right">%s</td></tr>\n' % (b2a_hex(hash),
                             linkname,
                             size_format(sz),
                             c,
                             d,
                             n,
                             size_format(szt)))
                    else:
                        s.write('<tr><td><code>%s</code></td><td align="right"><code>%i</code></td><td align="right"><code>%i</code></td><td align="right"><code>%i</code></td></tr>\n' % (b2a_hex(hash),
                         c,
                         d,
                         n))

                ttn = 0
                for i in self.completed.values():
                    ttn = ttn + i

                if self.config['tracker_allowed_dir'] and self.show_names:
                    s.write('<tr><td align="right" colspan="2">%i files</td><td align="right">%s</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i/%i</td><td align="right">%s</td></tr>\n' % (nf,
                     size_format(ts),
                     tc,
                     td,
                     tn,
                     ttn,
                     size_format(tt)))
                else:
                    s.write('<tr><td align="right">%i files</td><td align="right">%i</td><td align="right">%i</td><td align="right">%i/%i</td></tr>\n' % (nf,
                     tc,
                     td,
                     tn,
                     ttn))
                s.write('</table>\n<ul>\n<li><em>info hash:</em> SHA1 hash of the "info" section of the metainfo (*.torrent)</li>\n<li><em>complete:</em> number of connected clients with the complete file</li>\n<li><em>downloading:</em> number of connected clients still downloading</li>\n<li><em>downloaded:</em> reported complete downloads (total: current/all)</li>\n<li><em>transferred:</em> torrent size * total downloaded (does not include partial transfers)</li>\n</ul>\n<hr>\n<address>%s (%s)</address>\n' % (version_short, isotime()))
            s.write('</body>\n</html>\n')
            return (200,
             'OK',
             {'Content-Type': 'text/html; charset=iso-8859-1'},
             s.getvalue())
        except:
            print_exc()
            return (500,
             'Internal Server Error',
             {'Content-Type': 'text/html; charset=iso-8859-1'},
             'Server Error')

    def scrapedata(self, hash, return_name = True):
        l = self.downloads[hash]
        n = self.completed.get(hash, 0)
        c = self.seedcount[hash]
        d = len(l) - c
        f = {'complete': c,
         'incomplete': d,
         'downloaded': n}
        if return_name and self.show_names and self.config['tracker_allowed_dir']:
            f['name'] = self.allowed[hash]['name']
        return f

    def get_scrape(self, paramslist):
        fs = {}
        if paramslist.has_key('info_hash'):
            if self.config['tracker_scrape_allowed'] not in [ITRACKSCRAPE_ALLOW_SPECIFIC, ITRACKSCRAPE_ALLOW_FULL]:
                return (400,
                 'Not Authorized',
                 {'Content-Type': 'text/plain',
                  'Pragma': 'no-cache'},
                 bencode({'failure reason': 'specific scrape function is not available with this tracker.'}))
            for hash in paramslist['info_hash']:
                if self.allowed is not None:
                    if self.allowed.has_key(hash):
                        fs[hash] = self.scrapedata(hash)
                elif self.downloads.has_key(hash):
                    fs[hash] = self.scrapedata(hash)

        else:
            if self.config['tracker_scrape_allowed'] != ITRACKSCRAPE_ALLOW_FULL:
                return (400,
                 'Not Authorized',
                 {'Content-Type': 'text/plain',
                  'Pragma': 'no-cache'},
                 bencode({'failure reason': 'full scrape function is not available with this tracker.'}))
            if self.allowed is not None:
                keys = self.allowed.keys()
            else:
                keys = self.downloads.keys()
            for hash in keys:
                fs[hash] = self.scrapedata(hash)

        return (200,
         'OK',
         {'Content-Type': 'text/plain'},
         bencode({'files': fs}))

    def get_peers(self, infohash):
        data = ''
        if infohash not in self.downloads:
            data = 'no peers'
        else:
            data = str(self.downloads[infohash])
        return (200,
         'OK',
         {'Content-Type': 'text/plain'},
         data)

    def get_file_by_name(self, name):
        for hash, rec in self.allowed.iteritems():
            if 'name' in rec and rec['name'] == name:
                return self.get_file(hash)

        return (404,
         'Not Found',
         {'Content-Type': 'text/plain',
          'Pragma': 'no-cache'},
         alas)

    def get_file(self, hash):
        if not self.allow_get:
            return (400,
             'Not Authorized',
             {'Content-Type': 'text/plain',
              'Pragma': 'no-cache'},
             'get function is not available with this tracker.')
        if not self.allowed.has_key(hash):
            return (404,
             'Not Found',
             {'Content-Type': 'text/plain',
              'Pragma': 'no-cache'},
             alas)
        fname = self.allowed[hash]['file']
        fpath = self.allowed[hash]['path']
        return (200,
         'OK',
         {'Content-Type': 'application/x-bittorrent',
          'Content-Disposition': 'attachment; filename=' + fname},
         open(fpath, 'rb').read())

    def get_tstream_from_httpseed(self, httpseedurl):
        if not self.allow_get:
            return (400,
             'Not Authorized',
             {'Content-Type': 'text/plain',
              'Pragma': 'no-cache'},
             'get function is not available with this tracker.')
        wanturlhash = sha(httpseedurl).digest()
        found = False
        for infohash, a in self.allowed.iteritems():
            for goturlhash in a['url-hash-list']:
                if goturlhash == wanturlhash:
                    found = True
                    break

            if found:
                break

        if not found or not self.allowed.has_key(infohash):
            return (404,
             'Not Found',
             {'Content-Type': 'text/plain',
              'Pragma': 'no-cache'},
             alas)
        fname = self.allowed[infohash]['file']
        fpath = self.allowed[infohash]['path']
        print >> sys.stderr, 'tracker: get_stream: Sending', fname
        return (200,
         'OK',
         {'Content-Type': 'application/x-bittorrent',
          'Content-Disposition': 'attachment; filename=' + fname},
         open(fpath, 'rb').read())

    def check_allowed(self, infohash, paramslist):
        if self.aggregator_key is not None and not (paramslist.has_key('password') and paramslist['password'][0] == self.aggregator_key):
            return (200,
             'Not Authorized',
             {'Content-Type': 'text/plain',
              'Pragma': 'no-cache'},
             bencode({'failure reason': 'Requested download is not authorized for use with this tracker.'}))
        if self.allowed is not None:
            if not self.allowed.has_key(infohash):
                return (200,
                 'Not Authorized',
                 {'Content-Type': 'text/plain',
                  'Pragma': 'no-cache'},
                 bencode({'failure reason': 'Requested download is not authorized for use with this tracker.'}))
            if self.config['tracker_allowed_controls']:
                if self.allowed[infohash].has_key('failure reason'):
                    return (200,
                     'Not Authorized',
                     {'Content-Type': 'text/plain',
                      'Pragma': 'no-cache'},
                     bencode({'failure reason': self.allowed[infohash]['failure reason']}))
        if paramslist.has_key('tracker'):
            if self.config['tracker_multitracker_allowed'] == ITRACKMULTI_ALLOW_NONE or paramslist['peer_id'][0] == self.trackerid:
                return (200,
                 'Not Authorized',
                 {'Content-Type': 'text/plain',
                  'Pragma': 'no-cache'},
                 bencode({'failure reason': 'disallowed'}))
            if self.config['tracker_multitracker_allowed'] == ITRACKMULTI_ALLOW_AUTODETECT and not self.allowed[infohash].has_key('announce-list'):
                return (200,
                 'Not Authorized',
                 {'Content-Type': 'text/plain',
                  'Pragma': 'no-cache'},
                 bencode({'failure reason': 'Requested download is not authorized for multitracker use.'}))

    def add_data(self, infohash, event, ip, paramslist):
        peers = self.downloads.setdefault(infohash, {})
        ts = self.times.setdefault(infohash, {})
        self.completed.setdefault(infohash, 0)
        self.seedcount.setdefault(infohash, 0)

        def params(key, default = None, l = paramslist):
            if l.has_key(key):
                return l[key][0]
            return default

        myid = params('peer_id', '')
        if len(myid) != 20:
            raise ValueError, 'id not of length 20'
        if event not in ('started', 'completed', 'stopped', 'snooped', None):
            raise ValueError, 'invalid event'
        port = long(params('port', ''))
        if port < 0 or port > 65535:
            raise ValueError, 'invalid port'
        left = long(params('left', ''))
        if left < 0:
            raise ValueError, 'invalid amount left'
        uploaded = long(params('uploaded', ''))
        downloaded = long(params('downloaded', ''))
        peer = peers.get(myid)
        islocal = local_IPs.includes(ip)
        mykey = params('key')
        if peer:
            auth = peer.get('key', -1) == mykey or peer.get('ip') == ip
        else:
            auth = None
        gip = params('ip')
        if is_valid_ip(gip) and (islocal or not self.only_local_override_ip):
            ip1 = gip
        else:
            ip1 = ip
        if params('numwant') is not None:
            rsize = min(int(params('numwant')), self.response_size)
        else:
            rsize = self.response_size
        if DEBUG:
            log('itracker::add_data: infohash', infohash, 'event', event, 'ip', ip, 'gip', gip, 'port', port, 'myid', myid, 'mykey', mykey, 'auth', auth)
        if event == 'stopped':
            if peer:
                if auth:
                    if DEBUG:
                        log('itracker::add_data: delete peer: infohash', infohash, 'myid', myid)
                    self.delete_peer(infohash, myid)
        elif not peer:
            ts[myid] = clock()
            peer = {'ip': ip,
             'port': port,
             'left': left}
            if mykey:
                peer['key'] = mykey
            if gip:
                peer['given ip'] = gip
            if port:
                if not self.natcheck or islocal:
                    peer['nat'] = 0
                    self.natcheckOK(infohash, myid, ip1, port, left)
                else:
                    NatCheck(self.connectback_result, infohash, myid, ip1, port, self.rawserver)
            else:
                peer['nat'] = 1073741824
            if event == 'completed':
                self.completed[infohash] += 1
            if not left:
                self.seedcount[infohash] += 1
            if DEBUG:
                log('itracker::add_data: add new peer: myid', myid, 'peer', peer)
            peers[myid] = peer
        else:
            if not auth:
                return rsize
            ts[myid] = clock()
            if not left and peer['left']:
                self.completed[infohash] += 1
                self.seedcount[infohash] += 1
                if not peer.get('nat', -1):
                    for bc in self.becache[infohash]:
                        bc[1][myid] = bc[0][myid]
                        del bc[0][myid]

            if peer['left']:
                peer['left'] = left
            if port:
                recheck = False
                if ip != peer['ip']:
                    peer['ip'] = ip
                    recheck = True
                if gip != peer.get('given ip'):
                    if gip:
                        peer['given ip'] = gip
                    elif peer.has_key('given ip'):
                        del peer['given ip']
                    recheck = True
                natted = peer.get('nat', -1)
                if recheck:
                    if natted == 0:
                        l = self.becache[infohash]
                        y = not peer['left']
                        for x in l:
                            try:
                                del x[y][myid]
                            except KeyError:
                                pass

                        if not self.natcheck or islocal:
                            del peer['nat']
                if natted and natted < self.natcheck:
                    recheck = True
                if recheck:
                    if not self.natcheck or islocal:
                        peer['nat'] = 0
                        self.natcheckOK(infohash, myid, ip1, port, left)
                    else:
                        NatCheck(self.connectback_result, infohash, myid, ip1, port, self.rawserver)
        return rsize

    def peerlist(self, infohash, stopped, tracker, is_seed, return_type, rsize):
        data = {}
        seeds = self.seedcount[infohash]
        data['complete'] = seeds
        data['incomplete'] = len(self.downloads[infohash]) - seeds
        if self.config['tracker_allowed_controls'] and self.allowed[infohash].has_key('warning message'):
            data['warning message'] = self.allowed[infohash]['warning message']
        if tracker:
            data['interval'] = self.config['tracker_multitracker_reannounce_interval']
            if not rsize:
                return data
            cache = self.cached_t.setdefault(infohash, None)
            if not cache or len(cache[1]) < rsize or cache[0] + self.config['tracker_min_time_between_cache_refreshes'] < clock():
                bc = self.becache.setdefault(infohash, [[{}, {}], [{}, {}], [{}, {}]])
                cache = [clock(), bc[0][0].values() + bc[0][1].values()]
                self.cached_t[infohash] = cache
                shuffle(cache[1])
                cache = cache[1]
            data['peers'] = cache[-rsize:]
            del cache[-rsize:]
            return data
        data['interval'] = self.reannounce_interval
        if stopped or not rsize:
            data['peers'] = []
            return data
        bc = self.becache.setdefault(infohash, [[{}, {}], [{}, {}], [{}, {}]])
        len_l = len(bc[0][0])
        len_s = len(bc[0][1])
        if not len_l + len_s:
            data['peers'] = []
            return data
        l_get_size = int(float(rsize) * len_l / (len_l + len_s))
        cache = self.cached.setdefault(infohash, [None, None, None])[return_type]
        if cache and (not cache[1] or is_seed and len(cache[1]) < rsize or len(cache[1]) < l_get_size or cache[0] + self.config['tracker_min_time_between_cache_refreshes'] < self.cachetime):
            cache = None
        if not cache:
            peers = self.downloads[infohash]
            vv = [[], [], []]
            for key, ip, port in self.t2tlist.harvest(infohash):
                if not peers.has_key(key):
                    vv[0].append({'ip': ip,
                     'port': port,
                     'peer id': key})
                    vv[1].append({'ip': ip,
                     'port': port})
                    vv[2].append(compact_peer_info(ip, port))

            cache = [self.cachetime, bc[return_type][0].values() + vv[return_type], bc[return_type][1].values()]
            shuffle(cache[1])
            shuffle(cache[2])
            self.cached[infohash][return_type] = cache
            for rr in xrange(len(self.cached[infohash])):
                if rr != return_type:
                    try:
                        self.cached[infohash][rr][1].extend(vv[rr])
                    except:
                        pass

        if len(cache[1]) < l_get_size:
            peerdata = cache[1]
            if not is_seed:
                peerdata.extend(cache[2])
            cache[1] = []
            cache[2] = []
        else:
            if not is_seed:
                peerdata = cache[2][l_get_size - rsize:]
                del cache[2][l_get_size - rsize:]
                rsize -= len(peerdata)
            else:
                peerdata = []
            if rsize:
                peerdata.extend(cache[1][-rsize:])
                del cache[1][-rsize:]
        if return_type == 2:
            peerdata = ''.join(peerdata)
        data['peers'] = peerdata
        return data
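
    # compact_peer_info (imported by this module) is assumed to follow the
    # standard BitTorrent "compact" peer encoding: the 4 packed bytes of the
    # IPv4 address followed by the port as an unsigned 2-byte big-endian
    # integer. A minimal sketch under that assumption:
    #
    #   import socket, struct
    #   def compact_peer_info(ip, port):
    #       return socket.inet_aton(ip) + struct.pack('>H', port)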

    def get(self, connection, path, headers):
        real_ip = connection.get_ip()
        ip = real_ip
        if is_ipv4(ip):
            ipv4 = True
        else:
            try:
                ip = ipv6_to_ipv4(ip)
                ipv4 = True
            except ValueError:
                ipv4 = False

        if self.config['tracker_logfile']:
            self.getlog(ip, path, headers)
        if self.allowed_IPs and not self.allowed_IPs.includes(ip) or self.banned_IPs and self.banned_IPs.includes(ip):
            return (400,
             'Not Authorized',
             {'Content-Type': 'text/plain',
              'Pragma': 'no-cache'},
             bencode({'failure reason': 'your IP is not allowed on this tracker'}))
        nip = get_forwarded_ip(headers)
        if nip and not self.only_local_override_ip:
            ip = nip
            try:
                ip = to_ipv4(ip)
                ipv4 = True
            except ValueError:
                ipv4 = False

        paramslist = {}

        def params(key, default = None, l = paramslist):
            # paramslist is bound through the default argument, so the helper
            # reads the dict populated below; each value is a list of the
            # query-string occurrences, of which the first wins
            if l.has_key(key):
                return l[key][0]
            return default

        try:
            scheme, netloc, path, pars, query, fragment = urlparse(path)
            if self.uq_broken == 1:
                path = path.replace('+', ' ')
                query = query.replace('+', ' ')
            path = unquote(path)[1:]
            for s in query.split('&'):
                if s:
                    i = s.find('=')
                    if i == -1:
                        break
                    kw = unquote(s[:i])
                    paramslist.setdefault(kw, [])
                    paramslist[kw] += [unquote(s[i + 1:])]

            if DEBUG:
                _t_start_request = time()
                print >> sys.stderr, 'tracker: Got request /' + path + '?' + query
            if path == '' or path == 'index.html':
                return self.get_infopage()
            if path == 'file':
                if paramslist.has_key('name'):
                    return self.get_file_by_name(params('name'))
                else:
                    return self.get_file(params('info_hash'))
            if path == 'tlookup':
                return self.get_tstream_from_httpseed(unquote(query))
            if path == 'favicon.ico' and self.favicon is not None:
                return (200,
                 'OK',
                 {'Content-Type': 'image/x-icon'},
                 self.favicon)
            if path == 'scrape':
                return self.get_scrape(paramslist)
            if path == 'peers':
                infohash = params('infohash')
                if infohash is None:
                    raise ValueError('no infohash')
                return self.get_peers(infohash)
            if path != 'announce':
                return (404,
                 'Not Found',
                 {'Content-Type': 'text/plain',
                  'Pragma': 'no-cache'},
                 alas)
            filtered = self.Filter.check(real_ip, paramslist, headers)
            if filtered:
                return (400,
                 'Not Authorized',
                 {'Content-Type': 'text/plain',
                  'Pragma': 'no-cache'},
                 bencode({'failure reason': filtered}))
            infohash = params('info_hash')
            if not infohash:
                raise ValueError('no info hash')
            notallowed = self.check_allowed(infohash, paramslist)
            if notallowed:
                return notallowed
            event = params('event')
            rsize = self.add_data(infohash, event, ip, paramslist)
        except ValueError as e:
            if DEBUG:
                print_exc()
            return (400,
             'Bad Request',
             {'Content-Type': 'text/plain'},
             'you sent me garbage - ' + str(e))

        if self.aggregate_forward and not paramslist.has_key('tracker'):
            self.aggregate_senddata(query)
        if self.is_aggregator:
            return (200,
             'OK',
             {'Content-Type': 'text/plain',
              'Pragma': 'no-cache'},
             bencode({'response': 'OK'}))
        if params('compact') and ipv4:
            return_type = 2
        elif params('no_peer_id'):
            return_type = 1
        else:
            return_type = 0
        data = self.peerlist(infohash, event == 'stopped', params('tracker'), not params('left'), return_type, rsize)
        if paramslist.has_key('scrape'):
            data['scrape'] = self.scrapedata(infohash, False)
        if DEBUG:
            print >> sys.stderr, 'Tracker: request time:', time() - _t_start_request
        return (200,
         'OK',
         {'Content-Type': 'text/plain',
          'Pragma': 'no-cache'},
         bencode(data))
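
    # For reference, the announce request handled above looks roughly like
    # (hedged example; the parameters follow the standard BitTorrent
    # announce convention):
    #
    #   /announce?info_hash=...&peer_id=...&port=6881&uploaded=0
    #       &downloaded=0&left=0&compact=1&event=started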

    def natcheckOK(self, infohash, peerid, ip, port, not_seed):
        if DEBUG:
            print >> sys.stderr, 'tracker: natcheck: Recorded success'
        bc = self.becache.setdefault(infohash, [[{}, {}], [{}, {}], [{}, {}]])
        bc[0][not not_seed][peerid] = Bencached(bencode({'ip': ip,
         'port': port,
         'peer id': peerid}))
        bc[1][not not_seed][peerid] = Bencached(bencode({'ip': ip,
         'port': port}))
        bc[2][not not_seed][peerid] = compact_peer_info(ip, port)

    def natchecklog(self, peerid, ip, port, result):
        year, month, day, hour, minute, second, a, b, c = localtime(time())
        print '%s - %s [%02d/%3s/%04d:%02d:%02d:%02d] "!natcheck-%s:%i" %i 0 - -' % (ip,
         quote(peerid),
         day,
         months[month],
         year,
         hour,
         minute,
         second,
         ip,
         port,
         result)

    def getlog(self, ip, path, headers):
        year, month, day, hour, minute, second, a, b, c = localtime(time())
        print '%s - %s [%02d/%3s/%04d:%02d:%02d:%02d] "GET %s HTTP/1.1" 100 0 - -' % (ip,
         ip,
         day,
         months[month],
         year,
         hour,
         minute,
         second,
         path)

    def connectback_result(self, result, downloadid, peerid, ip, port):
        record = self.downloads.get(downloadid, {}).get(peerid)
        if record is None or (record['ip'] != ip and record.get('given ip') != ip) or record['port'] != port:
            if self.config['tracker_log_nat_checks']:
                self.natchecklog(peerid, ip, port, 404)
            if DEBUG:
                print >> sys.stderr, 'tracker: natcheck: No record found for tested peer'
            return
        if self.config['tracker_log_nat_checks']:
            if result:
                x = 200
            else:
                x = 503
            self.natchecklog(peerid, ip, port, x)
        if not record.has_key('nat'):
            record['nat'] = int(not result)
            if result:
                self.natcheckOK(downloadid, peerid, ip, port, record['left'])
        elif result and record['nat']:
            record['nat'] = 0
            self.natcheckOK(downloadid, peerid, ip, port, record['left'])
        elif not result:
            record['nat'] += 1
            if DEBUG:
                print >> sys.stderr, 'tracker: natcheck: Recorded failed attempt'

    def remove_from_state(self, *l):
        for s in l:
            try:
                del self.state[s]
            except:
                pass

    def save_state(self):
        self.rawserver.add_task(self.save_state, self.save_dfile_interval)
        h = None
        try:
            h = open(self.dfile, 'wb')
            if self.config['tracker_dfile_format'] == ITRACKDBFORMAT_BENCODE:
                h.write(bencode(self.state))
            else:
                pickle.dump(self.state, h, -1)
        except:
            if DEBUG:
                print_exc()
        finally:
            if h is not None:
                h.close()

    def parse_allowed(self, source = None):
        if DEBUG:
            print >> sys.stderr, 'tracker: parse_allowed: Source is', source, 'alloweddir', self.config['tracker_allowed_dir']
        if source is None:
            self.rawserver.add_task(self.parse_allowed, self.parse_dir_interval)
        if self.config['tracker_allowed_dir']:
            r = parsedir(self.config['tracker_allowed_dir'], self.allowed, self.allowed_dir_files, self.allowed_dir_blocked, ['.torrent', TRIBLER_TORRENT_EXT])
            self.allowed, self.allowed_dir_files, self.allowed_dir_blocked, added, garbage2 = r
            if DEBUG:
                print >> sys.stderr, 'tracker: parse_allowed: Found new', `added`
            self.state['allowed'] = self.allowed
            self.state['allowed_dir_files'] = self.allowed_dir_files
            self.t2tlist.parse(self.allowed)
        else:
            f = self.config['tracker_allowed_list']
            if self.allowed_list_mtime == os.path.getmtime(f):
                return
            try:
                r = parsetorrentlist(f, self.allowed)
                self.allowed, added, garbage2 = r
                self.state['allowed_list'] = self.allowed
            except (IOError, OSError):
                print '**warning** unable to read allowed torrent list'
                return

            self.allowed_list_mtime = os.path.getmtime(f)
        for infohash in added.keys():
            self.downloads.setdefault(infohash, {})
            self.completed.setdefault(infohash, 0)
            self.seedcount.setdefault(infohash, 0)

    def read_ip_lists(self):
        self.rawserver.add_task(self.read_ip_lists, self.parse_dir_interval)
        f = self.config['tracker_allowed_ips']
        if f and self.allowed_ip_mtime != os.path.getmtime(f):
            self.allowed_IPs = IP_List()
            try:
                self.allowed_IPs.read_fieldlist(f)
                self.allowed_ip_mtime = os.path.getmtime(f)
            except (IOError, OSError):
                print '**warning** unable to read allowed_IP list'

        f = self.config['tracker_banned_ips']
        if f and self.banned_ip_mtime != os.path.getmtime(f):
            self.banned_IPs = IP_Range_List()
            try:
                self.banned_IPs.read_rangelist(f)
                self.banned_ip_mtime = os.path.getmtime(f)
            except (IOError, OSError):
                print '**warning** unable to read banned_IP list'

    def delete_peer(self, infohash, peerid):
        try:
            dls = self.downloads[infohash]
            peer = dls[peerid]
            if not peer['left']:
                self.seedcount[infohash] -= 1
            if not peer.get('nat', -1):
                l = self.becache[infohash]
                y = not peer['left']
                for x in l:
                    del x[y][peerid]

            del self.times[infohash][peerid]
            del dls[peerid]
        except:
            if DEBUG:
                print_exc()

    def expire_downloaders(self):
        for x in self.times.keys():
            for myid, t in self.times[x].items():
                if t < self.prevtime:
                    self.delete_peer(x, myid)

        self.prevtime = clock()
        if self.keep_dead != 1:
            for key, value in self.downloads.items():
                if len(value) == 0 and (self.allowed is None or not self.allowed.has_key(key)):
                    del self.times[key]
                    del self.downloads[key]
                    del self.seedcount[key]

        self.rawserver.add_task(self.expire_downloaders, self.timeout_downloaders_interval)
Пример #30
0
			# Open file for appending a new line of data
			writer   = csv.writer(f, delimiter='\t', lineterminator='\n')  # Writer will separate data with tabs
			writer.writerow(self.varNames)									# Write header to first row (list of variable names)

			for cycle in self.data:
				writer.writerow(cycle)		 # Write a row of data to the file

#####################################
#      Main during unit testing     #
#####################################
if __name__ == "__main__":
	testLogger = Logger()
	LP = Filter(order=10, cutoff=0.01, channelNum=1)

	x = np.arange(0, 7, 0.01)
	# print(x)
	lastTime = time.perf_counter()
	
	for i in range(len(x)):
		curtime = time.perf_counter()
		originalSignal = sin(x[i])
		inFilter = sin(x[i])+(np.random.rand(1)-0.5)
		outFilter = LP.runFIR(inFilter)
		testLogger.getData([('originalSignal', originalSignal), ('posx', inFilter[0]), ('posxFiltered', outFilter[0]), ('dt', curtime-lastTime)])
		testLogger.saveData()
		lastTime = curtime
		
	testLogger.saveDataToFile()
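
# runFIR above is assumed to be a streaming FIR step: push the newest sample
# into a delay line and return the dot product with the tap coefficients.
# A per-sample sketch under that assumption (self.taps would come from the
# order/cutoff design parameters passed to Filter above):
#
#   def runFIR(self, x_new):
#       self.delay_line = np.roll(self.delay_line, 1)
#       self.delay_line[0] = x_new
#       return np.dot(self.taps, self.delay_line)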
Пример #31
0
class SuperHash(UserDict):
    """Interface and parent class for all hash/dict based objects. """

    filter = Filter()
    sample = "none"

    def __init__(self, log, filter_filename="__none__"):

        # Call parent init
        UserDict.__init__(self)

        if log[0] != "__none__" and filter_filename != "__none__":
            # Setup log and filter
            self.filter = Filter(filter_filename)
            self.fill(log)

        elif log[0] != "__none__":
            # Setup log without filter
            self.fill(log)
            
        else:
            # Create empty filter
            pass

    def fill(self, log):
        """Interface method which is flled in by subclasses"""
        pass

    def increment(self, key, entry):
        """Adds a new entry to superhash data structures.
        Similar to append for a list"""

        # Check to make sure it exists
        if key not in self:
            self[key] = [0, []]

        # Increment the hashed count
        # Create an array of un-hashed values for sampling later
        self[key][0] += 1
        self[key][1].append(entry)

    def display(self):
        """Displays all entries held in the SuperHash structure"""

        global logging

        # Set static sample threshold
        sample_threshold = 3

        # Debugging
        logging.info("Sample Type: "+self.sample)

        # Print out the dictionary first sorted by the word with
        # the most entries with an alphabetical subsort
        for key in sorted(sorted(self.keys()), \
            cmp=lambda x,y: cmp(self[y][0], self[x][0])):

            # Print all lines as sample
            if self.sample == "all":
                print str(self[key][0]) + ":	" + \
                choice(self[key][1]).log_entry

            elif self.sample == "none":
                print str(self[key][0]) + ":	"+str(key)

            elif self.sample == "threshold":
                # Print sample for small values below/equal to threshold
                if self[key][0] <= sample_threshold:
                    print str(self[key][0]) + ":	" + \
                    self[key][1][0].log_entry
                else:
                    print str(self[key][0]) + ":	" + str(key)
            else:
                print "That type of sampling is not supported:", self.sample
                sys.exit(16)

    def fingerprint(self):
        """
        Remove all fingerprints from a given LogHash and replace with a
        single string
        """

        global logging

        # Declarations & Variables
        threshold_coefficient = 0.31
        fingerprints = []
        fingerprint_files = ["__none__"]

        # Load & assign fingerprint files
        prefixes = [ \
            "/var/lib/petit/fingerprints/", \
            "/usr/local/petit/var/lib/fingerprints/", \
            "/opt/petit/var/lib/fingerprints/"]

        for prefix in prefixes:
            if os.path.exists(prefix) and len(os.listdir(prefix)) >= 1:

                # Process in order from largest to smallest which prevents 
                # double labeling with similar fingerprints
                fingerprint_files = os.listdir(prefix)
                fingerprint_files = [os.path.join(prefix, f) for f in fingerprint_files]
                fingerprint_files.sort(key=lambda x: os.path.getsize(x))
                fingerprint_files.reverse()
                break

        if fingerprint_files[0] == "__none__":
            print "Could not locate fingerprint files: ", prefix
            sys.exit()

        for fingerprint_file in fingerprint_files:
            if re.search("fp",fingerprint_file):

                # Build a Log for the fingerprint
                log = CrunchLog(fingerprint_file)

                # Build a SuperHash
                x = SuperHash.manufacture(log, "hash.stopwords")

                # Remove the prefix & set name
                x.file_name = re.sub(prefix, "", fingerprint_file)
                fingerprints.append(x)


        # Iterate each fingerprint
        for fingerprint in fingerprints:

            logging.info("Testing Fingerprint:"+fingerprint.file_name)

            # Reset counter for each fingerprint
            count = 0
            threshold = (len(fingerprint) * threshold_coefficient)
            logging.info("Threshold:"+str(threshold))

            # Look for fingerprint
            for key in fingerprint.keys():
                if key in self:
                    count = count+1

                # If the threshold is reached, remove every line of the
                # fingerprint; saves time over searching every entry
                if count > threshold:
                    logging.info("Found Fingerprint:"+fingerprint.file_name)
                    for key in fingerprint.keys():

                        # Key found, plenty to remove
                        if key in self:
                            del self[key]

                    # Force the sample entry to be the same as the key
                    # and based off of the filename of the fingerprint
                    fingerprint[key][1][0].log_entry = fingerprint.file_name
                    self.increment(fingerprint.file_name, fingerprint[key][1][0]) 
                    break

            logging.info("Count: "+str(count))

    def manufacture(log, filter):
        """Factory method which creates new SuperHash of correct subtype"""

        # Select the correct build method
        if log.contains(SyslogEntry):
            LogHash = SyslogHash
        elif log.contains(RSyslogEntry):
            LogHash = SyslogHash
        elif log.contains(ApacheAccessEntry):
            LogHash = ApacheLogHash
        elif log.contains(ApacheErrorEntry):
            LogHash = ApacheLogHash
        elif log.contains(SnortEntry):
            LogHash = SnortLogHash
        elif log.contains(RawEntry):
            LogHash = RawLogHash
        elif log.contains(SecureLogEntry):
            LogHash = SecureLogHash
        else:
            print "Could not determine what type of objects are contained in generic Log"""
            sys.exit(15)

        # Build and return the correct subclass instance based on log file type
        return LogHash(log, filter)

    manufacture = staticmethod(manufacture)
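
# A hedged usage sketch of the factory above (CrunchLog and the
# "hash.stopwords" filter file come from the surrounding petit codebase):
#
#   log = CrunchLog("/var/log/messages")
#   sh = SuperHash.manufacture(log, "hash.stopwords")
#   sh.sample = "threshold"
#   sh.display()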
Пример #32
0
def filtrar():

    #Get the file that was uploaded
    #The files must have a column name for this to work
    f = request.files['archivo1']
    fstring = f.read().decode("utf-8-sig").encode("utf-8")
    fieldnames = [
        'valor'
    ]  #Assign a column name I know, since no name was defined for the input file
    diccionario_csv = [{
        k: v
        for k, v in row.items()
    } for row in csv.DictReader(
        fstring.splitlines(), fieldnames=fieldnames, skipinitialspace=True)]

    #Drop the first element, because the reader adds the column name I set (fieldnames)
    diccionario_csv.pop(0)

    #Build the input-signal array for the filter
    arreglo_senal_entrada = []
    for x in diccionario_csv:
        arreglo_senal_entrada.append(float(x['valor']))

    #Create the signal
    senal = Signal()
    senal.y = arreglo_senal_entrada

    #Get the coefficients of the filter that will be used to filter the signal
    #(18 values, b0..b17; presumably six (b0, b1, b2) second-order sections)
    arreglo_coeficientes = []
    for i in range(18):
        arreglo_coeficientes.append(float(request.form['b%d' % i]))
    print("***** COEFICIENTES *****")
    print(arreglo_coeficientes)

    #Create the filter with the filter coefficients
    #(sample coefficient set, kept for reference):
    #arreglo_coeficientes = [0.5999402, -0.5999402, 0, 1, -0.7265425, 0, 1, -2, 1, 1, -1.52169043, 0.6, 1, -2, 1, 1, -1.73631017, 0.82566455]

    filtro = Filter(arreglo_coeficientes)

    #Filter the input signal
    salida = filtro.filter(senal)

    #Generate the plots
    fig, (ax1, ax2) = chart.subplots(2, 1, sharex=True)
    ax1.plot(senal.t, senal.y)
    ax1.set_title('Entrada del filtro')
    ax1.axis([0, 1, -10, 10])

    ax2.plot(salida.t, salida.y)
    ax2.set_title('Salida del filtro')
    ax2.axis([0, 1, -10, 10])
    ax2.set_xlabel('Tiempo [segundos]')

    chart.tight_layout()
    chart.savefig("salida.png", transparent=True)

    #Return a simple response
    return jsonify(
        estado=200,
        mensaje='La operación de filtrar se realizó con éxito',
    )
Пример #33
0
class Anchor:
    """Anchor class: represents an anchor"""
    def __init__(self, x, y, z, name="unnamed", color='red'):
        self.x = x
        self.y = y
        self.z = z
        self.color = color
        self.name = name
        self.shown = False
        self.isActive = False
        self.distance = {}
        self.raw_distance = {}
        self.rangings = {}
        self.correction_filter = Filter('COR')
        # initialization of the ranging lists for the two robots
        self.init_rangings()

        # generates the visual representation of the anchor in the 3D engine
        self.create_sphere()

        # displays the name of the anchor in the 3D engine
        self.create_text()

    def init_rangings(self):
        """generates an entry for each robot id in the rangings dictionary"""
        for id_bot in bots_id:
            # adding an entry in the rangings dictionary for each robot id
            self.rangings[id_bot] = []
            self.raw_distance[id_bot] = 0
            
    def set_raw_distance(self,distance,robotname):
        """ sets the unfiltered distance between the anchor and the given robot"""
        self.raw_distance[robotname] = distance
        #print("distance set " + str(distance))

    def get_raw_distance(self,robotname):
        """ gets the unfiltered distance between the anchor and the given robot"""
        return(self.raw_distance[robotname])

    def update_rangings(self, distance, target):
        """updates the list of the last NB_RANGINGS rangings"""
        global correction_coeff
        global correction_offset

        # the correction filter is assumed to apply a per-anchor linear
        # calibration of the form: corrected = distance * coeff + offset
        corrected_distance = self.correction_filter.apply([distance, correction_coeff[self.name], correction_offset[self.name]])[0]

        if self.rangings[target] == []:
            # if this is the first ranging, write the first distance
            # repeatedly to fill up the list
            for i in range(1, NB_RANGINGS):
                self.rangings[target].append(corrected_distance)
        else:
            self.rangings[target].append(corrected_distance)
            # removes first element to maintain list size
            self.rangings[target].pop(0)

    def create_sphere(self):
        """Create the anchors representation for the 3D world"""
        self.model = loader.load_model("smiley")
        self.model.set_color(utils.colors[self.color])
        self.model.set_scale(0.1)
        self.model.set_pos(self.x, self.y, self.z)

    def create_text(self):
        """Create display text with anchor name"""
        self.label = TextNode('anchor label {}'.format(self.name))
        self.label.set_text(self.name)
        if (bots_id[0] in self.distance and bots_id[1] in self.distance):
            self.label.set_text(self.name + ": " + str(self.get_distance(bots_id[0])) + " / " + str(self.get_distance(bots_id[1])))
            
        self.label.set_card_decal(True)
        self.label.set_text_color(utils.colors[self.color])
        self.label_np = render.attach_new_node(self.label)
        self.label_np.set_pos(self.x - 0.1, self.y + 0.1, self.z)
        self.label_np.set_scale(0.2)
        self.label_np.look_at(-base.cam.get_x(), -base.cam.get_y(), -base.cam.get_z())
        taskMgr.add(self.update_text_task, 'update text {}'.format(self.name))

    def show(self):
        """Displays anchor"""
        if not self.shown:
            self.model.reparent_to(render)
            self.shown = True

    def hide(self):
        """Hides anchor"""
        if self.shown:
            self.model.detach_node()
            self.shown = False

    def get_distance(self, robot_id):
        """ gets the filtered distance between the anchor and the given robot"""
        if (robot_id in self.distance):
            
            return self.distance[robot_id]
        else:
            # unknown robot id 
            return(0)

    def set_distance(self, distance, robot_id):
        """ sets the filtered distance between the anchor and the given robot"""
        if robot_id in self.distance:
            self.distance[robot_id] = distance
        else:
            self.distance[robot_id] = distance
            print("new robot id added")
    
    
    def split_dist(self, distance):
        """Truncates a distance string to its first five characters"""
        return distance[:5]

    def update_text_task(self, task):
        """Updates the text angle for it to always face the camera"""
        self.label_np.look_at(-base.cam.get_x(), -base.cam.get_y(), -base.cam.get_z())
        if (bots_id[0] in self.distance):
            self.label.set_text(self.name + ": " + self.split_dist( str( self.get_distance(bots_id[0]) ) ) )
        return task.cont
Пример #34
0
t_end = mono_traj.t[-1]

traj = VisualTraj("kf")
for i, t in enumerate(mono_traj.t):
    current_vis = mono_traj.at_index(i)

    # initialisation
    if i == 0:
        x0 = States(p0, v0, q0, bw0, ba0, scale0, p_BC_0, q_BC_0)
        cov0 = np.square(np.diag(stdevs0))

        num_states = x0.size
        num_meas = 7
        num_control = len(u0)

        kf = Filter(num_states, num_meas, num_control)
        kf.dt = 0.
        kf.set_states(x0)
        kf.set_covariance(cov0)

        current_imu = imu_traj.at_index(i)
        kf.om_old = current_imu.om
        kf.acc_old = current_imu.acc

        old_t = t
        old_ti = t

        continue

    # propagate
    imu_queue = imu_traj.get_queue(old_t, t)
Пример #35
0
    def getLabelList(self, torrentList):
        labels = FilterList()
        for torrent in torrentList:
            labels.append(Filter(torrent.label, 1))
        return labels
Пример #36
0
class RobotThread(threading.Thread):
    def __init__(self,
                 group=None,
                 target=None,
                 name=None,
                 args=(),
                 kwargs=None,
                 verbose=None):
        super(RobotThread, self).__init__(group=group,
                                          target=target,
                                          name=name)
        self.args = args
        self.kwargs = kwargs

        self._continueFlag = True  # flag to continue running the thread
        self._resetFlag = False  # flag to reset everything
        self._runControl = False  # process control

        # Initialize the pigpio library
        self._pi = pigpio.pi()

        # Initialize all hardware
        self.InitMPU6050()
        self.InitMotors()
        self.InitControl()

        # Store current time
        self._currTimePrev = time.time()

        # Sleep time for control loop
        self._sleepTime = 0.1

        return

    @property
    def CtrlErr(self):
        return self._ctrlErr

    @property
    def CtrlErrRate(self):
        return self._ctrlErrRate

    @property
    def CtrlOutput(self):
        return self._ctrlOutput

    @property
    def Kp(self):
        return self._P

    @Kp.setter
    def Kp(self, val):
        self._P = val
        self._pid.setKp(val)

    @property
    def Ki(self):
        return self._I

    @Ki.setter
    def Ki(self, val):
        self._I = val
        self._pid.setKi(val)

    @property
    def Kd(self):
        return self._D

    @Kd.setter
    def Kd(self, val):
        self._D = val
        self._pid.setKd(val)

    def InitMPU6050(self):

        # Accelerometer Values and Device
        self._mpuIdx = ['x', 'y', 'z']
        self._a = [0.0, 0.0, 0.0]  # Acceleration (g's)
        self._w = [0.0, 0.0, 0.0]  # Angular rate (deg/s)
        self._wFilt = [0.0, 0.0, 0.0]  # Filtered Angular rate (deg/s)
        self._theta = [0.0, 0.0, 0.0]  # Angle (deg)
        self._thetaFilt = [0.0, 0.0, 0.0]  # Filtered Angle (deg)
        self._a_off = [0.66, -0.26, 10.22 - 9.81]
        self._w_off = [-1.07, 0.67, -1.55]
        #self._a_off = [0.0, 0.0, 0.0]
        #self._w_off = [0.0, 0.0, 0.0]
        self._thetaXFilt = ComplementaryFilter(alpha=0.05)  # alpha = 0.02
        self._thetaXFilt2 = Filter(timeConst=0.0)
        self._wXFilt = Filter(timeConst=0.0)
        self._mpu6050 = mpu6050(0x68)  # 0x68 - default I2C slave addr
        self._mpu6050.set_accel_range(mpu6050.ACCEL_RANGE_2G)
        self._mpu6050.set_gyro_range(mpu6050.GYRO_RANGE_250DEG)

        return
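
    # ComplementaryFilter above is assumed to implement the usual gyro/accel
    # blend per update, with dt the sample interval:
    #
    #   theta = (1 - alpha) * (theta_prev + w * dt) + alpha * theta_accel
    #
    # The small alpha (0.05) trusts the integrated gyro for fast motion while
    # letting the accelerometer angle correct long-term drift.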

    def InitMotors(self):

        # Motor Controllers
        # Pins - PWM, FWD, REV
        #self._motorLookupV = [0, 12]
        #self._motorLookupPWM = [0, 100]
        self._motorLookupV = [0.0, 1.0, 1.1, 10.4, 11.0, 11.5, 11.8, 12.0]
        self._motorLookupPWM = [0.0, 1.0, 2.0, 80.0, 85.0, 90.0, 95.0, 100.0]

        self._motorLookup = LookupTable(\
            self._motorLookupV, self._motorLookupPWM)

        self._motorLeft = MotorControl_pigpio(\
            self._pi, 13, 19, 26)
        self._motorRight = MotorControl_pigpio(\
            self._pi, 16, 20, 21)

        self._motorLeft.SetVoltageLookupTable(\
            self._motorLookup)
        self._motorRight.SetVoltageLookupTable(\
            self._motorLookup)

        # Motor Encoders
        # TODO

        return

    def InitControl(self):

        self._P = 0.8
        self._I = 14.0
        self._D = 0.12
        self._maxIntegralOutput = 9.0
        self._pid = PID(self._P, self._I, self._D)
        self._pid.setWindup(100.0)
        self._ctrlOutputMin = 1.0  # Volts, min motor output
        self._ctrlOutputMax = 12.0  # Volts, max motor output

        return

    def ProcessMPU6050(self):

        # Get the raw data
        self._mpu6050AccelData = self._mpu6050.get_accel_data()
        self._mpu6050GyroData = self._mpu6050.get_gyro_data()
        #self._mpu6050AccelData = {'x': 0.0, 'y': 0.0, 'z': 9.81}
        #self._mpu6050GyroData = {'x': 0.0, 'y': 0.0, 'z': 0.0}

        # Subtract out calibration offsets
        for i in range(0, 3):  # (0, 1, 2)
            self._a[i] = self._mpu6050AccelData[self._mpuIdx[i]] - \
                self._a_off[i]
            self._w[i] = self._mpu6050GyroData[self._mpuIdx[i]] - \
                self._w_off[i]

        # Calculate angle from accelerometer data
        self._theta[0] = math.degrees( \
            math.atan2(self._a[1], self._a[2])) # atan2(ay, az)

        # Complementary filter on accel and gyro data
        thetaFilt = self._thetaXFilt.Filter(self._dT, self._w[0],
                                            self._theta[0])

        # Filter the resulting angle
        self._thetaFilt[0] = self._thetaXFilt2.Filter(self._dT, thetaFilt)

        # Filter the angular velocity for the controller derivative term
        self._wFilt[0] = self._wXFilt.Filter(self._dT, self._w[0])

        # If the robot has tipped over, stop trying to control
        if math.fabs(self._thetaFilt[0]) > 30.0:
            self._runControl = False

        return

    def ProcessControl(self):
        # Calculate the control error and rate
        self._ctrlErr = self._thetaFilt[0]
        self._ctrlErrRate = -self._wFilt[0]

        # Adjust the max integral windup
        if self._pid.Ki > 0.0:
            self._pid.setWindup(self._maxIntegralOutput / self._pid.Ki)

        # Run the PID controller
        self._ctrlOutput = self._pid.update(self._ctrlErr, self._ctrlErrRate)

        # Control saturation
        if self._ctrlOutput > self._ctrlOutputMax:
            self._ctrlOutput = self._ctrlOutputMax
        if self._ctrlOutput < -self._ctrlOutputMax:
            self._ctrlOutput = -self._ctrlOutputMax

        # Clear integrator if not running
        if not self._runControl:
            self._pid.ITerm = 0.0

        return

    def ProcessMotors(self):
        # If not running control, set both motors to brake
        if not self._runControl:
            self._motorLeft.Brake()
            self._motorRight.Brake()
            return

        # Set directions for left and right motors
        self._ctrlLeft = self._ctrlOutput
        self._ctrlRight = -self._ctrlOutput
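        # (opposite signs are presumably because the motors are mounted
        # mirrored on the chassis, so equal-and-opposite voltages drive the
        # wheels in the same direction)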

        # Write the motor control signal
        self._motorLeft.SetVoltage(self._ctrlLeft)
        self._motorRight.SetVoltage(self._ctrlRight)

        # Process feedback from encoders
        # TODO

        return

    def run(self):
        while self._continueFlag:

            #print self._runControl

            # Calculate time delta
            self._currTime = time.time()
            self._dT = self._currTime - self._currTimePrev
            self._currTimePrev = self._currTime

            # Read accelerometer and gyro data and process
            self.ProcessMPU6050()

            # Run PID Controller
            self.ProcessControl()

            # Run motor output
            self.ProcessMotors()

            #logging.debug('running with %s and %s', self.args, self.kwargs)
            #time.sleep(self._sleepTime)

        # Stop condition
        self._pi.stop()

        return

    def StartControl(self):
        self._runControl = True

    def StopControl(self):
        self._runControl = False

    def Stop(self):
        self._continueFlag = False

    def Reset(self):
        self._resetFlag = False

    def ProcessCommands(self, cmd, conn):
        #print cmd
        next = 0
        if cmd[0] == "CMD":
            if cmd[1] == "START":
                self.StartControl()
                next = 2
            if cmd[1] == "STOP":
                self.StopControl()
                next = 2

        if cmd[0] == "GET":
            if cmd[1] == "GAINS":
                resp = "RESP GAINS {0} {1} {2} ".format(
                    self.Kp, self.Ki, self.Kd)
                conn.send(resp)
                next = 2
            if cmd[1] == "PARAMS":
                resp = "RESP PARAMS {:.4f} {:.4f} {:.4f} {:.4f} {:.4f} {:.4f} ".format(\
                    self.CtrlErr, self.CtrlErrRate, self.CtrlOutput, \
                    self._pid.PTerm, self._pid.ITerm * self._pid.Ki, self._pid.DTerm * self._pid.Kd)
                conn.send(resp)
                next = 2

        if cmd[0] == "SET":
            if cmd[1] == "GAINS":
                self.Kp = float(cmd[2])
                self.Ki = float(cmd[3])
                self.Kd = float(cmd[4])
                resp = "RESP GAINS {0} {1} {2} ".format(
                    self.Kp, self.Ki, self.Kd)
                conn.send(resp)
                next = 5

        if (next < len(cmd)) and (next > 0):
            self.ProcessCommands(cmd[next:], conn)
Пример #37
0
						output = filt.run(filt.image)

					capture.truncate(0)
					capture.seek(0)
					e2 = cv2.getTickCount()
					time += (e2 - e1) / cv2.getTickFrequency()
					numFrames += 1
					if controlFrames != 0 and numFrames >= controlFrames:
						break
			elif cType == 3:
				camera.capture_sequence(outputs(), 'jpeg', use_video_port=True)
		except KeyboardInterrupt:
			breaking = True
		tot2 = cv2.getTickCount()
		totalTime = (tot2-tot1) / cv2.getTickFrequency()
		# print "Test %i with settings %i: Captured %i frames at total fps of %.2f, with internal fps of %.2f. Saw a contour in %i frames." % (i, cType, numFrames, numFrames/totalTime, numFrames/time, goodFrames)
		out += "Test %i with settings %i:\tCaptured %i frames at total fps of %.2f, with internal fps of %.2f. Saw a contour in %i frames.\n" % (i, cType, numFrames, numFrames/totalTime, numFrames/time, filt.contourCount)
	if breaking:
		print "BREAKING OUT"
		break
try:
	f = open(outputDir, "r")
except:
	f = open(outputDir, "w")
	f.write(str(outputDir)+": A place to store data from various Baseline tests.")
f.close()
print out
f = open(outputDir, "a")
f.write(out)
f.close()
Пример #38
0
from ArucoCamera import ArucoCamera
from KinectCamera import KinectCamera
from pykinect2 import PyKinectV2
from pykinect2.PyKinectV2 import *
from pykinect2 import PyKinectRuntime
from pykinect2.PyKinectV2 import _CameraSpacePoint
from Filter import Filter
import time
import math

a = 441
b = 45
x0 = 30

kinect = KinectCamera()
KinXFilter = Filter(5)
KinYFilter = Filter(5)
KinZFilter = Filter(5)
KinPitchFilter = Filter(5)
KinYawFilter = Filter(5)
KinRollFilter = Filter(5)

Cam1 = ArucoCamera(1)

CamXFilter = Filter(5)
CamYFilter = Filter(5)
CamZFilter = Filter(5)
CamPitchFilter = Filter(5)
CamYawFilter = Filter(5)
CamRollFilter = Filter(5)
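
# Filter(5) above presumably smooths each pose channel over a window of the
# last 5 samples. A minimal sketch of such a filter, assuming a plain moving
# average (the real Filter class is imported from Filter.py and may differ):
#
#   class Filter:
#       def __init__(self, size):
#           self.size = size
#           self.samples = []
#
#       def update(self, value):
#           self.samples.append(value)
#           if len(self.samples) > self.size:
#               self.samples.pop(0)
#           return sum(self.samples) / float(len(self.samples))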
Пример #39
0
    def update_positions_task(self, task):
        """Updates moving entities positions"""
        # increments positions updates counter;
        # is triggered by update_sock_task when new data is sent into the socket
        filter_type = None

        robotname = self.processed_robot
        if robotname in self.robots:
            robot = self.robots[robotname]
        else:
            # quits the task if invalid robot id is received
            print("invalid robot id has been received")
            return (0)

        # computes multilateration to localize the robot
        self.world.get_target(self.robots[robotname])

        if (PLAYBACK):
            # reading  data for playback
            for i in range(len(self.anchors)):
                if (self.playback_counter >= len(self.playback_rangings)):
                    # quitting
                    print("playback file finished !")
                    d_print(self.playback_counter)
                    self.done = True
                    return (task.done)
                data = self.playback_rangings[self.playback_counter]
                self.world.update_anchor(id_to_name(data['anchorID']),
                                         data['distance'], data['botID'],
                                         'SAT')
                self.playback_counter += 1

        pos = self.world.localize(robotname)

        robot.set_pos(pos)
        robot.compute_speed()
        robot.compute_acc()

        # applying saturation filter
        if filter_type == 'SAT':

            pre_pos = robot.get_pre_pos()

            if not ('SAT' in self.filters):
                self.filters['SAT'] = Filter('SAT')

            filtered_pos = self.filters['SAT'].apply([
                pre_pos, pos,
                robot.get_speed_vector(),
                robot.get_abs_acc(), STEP
            ])[0]
            # replaces the raw values with the filtered results
            robot.set_pos(filtered_pos, replace_last=True)
            robot.compute_speed(replace_mode=True)
            robot.compute_acc(replace_mode=True)

        robot.display_pos()

        # writing position in logs

        pos = robot.get_pos()
        (x, y, z) = pos
        self.log_pos(position(robotname, x, y, z))

        # computes the robot speed based on position variation
        robot.compute_speed()

        # computes the robot acceleration based on speed variation
        robot.compute_acc()

        if not robot.shown:
            robot.show()

        return (task.done)
Пример #40
0
class RequestHandler():
    #######################################################
    # Constructor, save the socket (data handler) that created this request handler
    #######################################################
    def __init__(self, data_handler):
        self.data_handler = data_handler
        self.io = FileIO(data_handler)
        self.filter = Filter(self.io, self.data_handler)

    #######################################################
    # Receive a message from the web application through the socket
    # Handle the request by referring the data to a helper method
    #
    # This could be considered the server-socket side API
    #######################################################
    def receive(self, message):
        try:
            request = json.loads(message)
            if 'type' not in request:
                self.data_handler.send({'error': 'invalid call'})
                return

            if request['type'] == 'ping':
                self.ping_request(request)

            elif request['type'] == 'nearest':
                self.nearest_request(request)

            elif request['type'] == 'all_restaurants':
                self.all_restaurants_request(request)

            elif request['type'] == 'update':
                self.update_dataset_request(request)

            elif request['type'] == 'equal':
                self.equal_request(request)

            elif request['type'] == 'categories':
                self.categories_request(request)

            elif request['type'] == 'reviews':
                self.yelpreviews_request(request)

        except ValueError:
            print "Error, {} is not a json object".format(message)
            self.data_handler.send(
                {'error': "{} is not a json object".format(message)})
        except KeyError as e:
            self.data_handler.send({
                'error':
                "Missing parameter {0} in: {1}".format(e.args[0], message)
            })

    #######################################################
    # Helper methods for handling specific requests       #
    #######################################################

    def ping_request(self, request):
        self.data_handler.send({'ping': 'pong'})

    def nearest_request(self, request):
        lat = float(request['latitude'])
        lon = float(request['longitude'])
        price = request['price']
        limit = request['limit']
        categories = None
        if 'categories' in request:
            categories = request['categories']
        self.data_handler.send(
            self.filter.get_nearest_restaurant(lat, lon, price, categories,
                                               limit))

    def all_restaurants_request(self, request):
        self.data_handler.send(
            {'all_restaurants': self.filter.get_all_restaurants()})

    def update_dataset_request(self, request):
        self.data_handler.send({'updating': True})
        self.io.update_dataset()

    def equal_request(self, request):
        current_restaurant = request['restaurant']
        limit = request['limit']
        self.data_handler.send(
            self.filter.get_equal_restaurants(current_restaurant, limit))

    def categories_request(self, request):
        self.data_handler.send({'categories': self.filter.get_categories()})

    def yelpreviews_request(self, request):
        yelp_id = request['yelp_id']
        self.data_handler.send(
            {'reviews': self.filter.get_yelpreviews(yelp_id)})
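
# A hedged example of the wire format receive() expects (field values are
# illustrative; the handler only requires the keys shown):
#
#   handler = RequestHandler(data_handler)
#   handler.receive('{"type": "nearest", "latitude": "48.1", '
#                   '"longitude": "11.6", "price": "2", "limit": 5}')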