Example 1
0
    def FetchTableData(self, args):
        """Fetch a table's worth of data.

        Args:
            args: the split URL path. args[1] holds comma-separated options
                (only 'nowideinterpolation' is recognised), args[2] the time
                specification parsed by ParseTimeField, and args[3] the
                URL-encoded, comma-separated sensor names.

        Returns:
            A tuple (start, end, step_size, returned, values) where
            returned is the ordered list of series names and values maps
            each series name to its list of data points.
        """

        # NOTE(review): sensor_names is not referenced in this method;
        # the global declaration is kept for safety.
        global sensor_names

        db = MySQLdb.connect(user=FLAGS.dbuser, db=FLAGS.dbname)
        cursor = db.cursor(MySQLdb.cursors.DictCursor)
        (ranges, times, step_size) = self.ParseTimeField(args[2])
        if not step_size:
            # Default to one-minute resolution.
            step_size = 60

        self.log('Time ranges: %s' % repr(ranges))

        # Parse table options: only wide interpolation can be toggled here.
        wideinterp = True
        for param in args[1].split(','):
            self.log('Considering table param: %s' % param)
            if param == 'nowideinterpolation':
                wideinterp = False
        self.log('Data fetch parameters: wideinterpolation = %s' % wideinterp)

        sensors = mhttp.urldecode(args[3]).split(',')

        values = {}
        returned = []
        max_window_size = ranges[0][1] - ranges[0][0]

        # Fetch data points for the table
        for sensor in sensors:
            for r in ranges:
                self.log(
                    'Fetching values for %s between %s and %s (%s seconds)' %
                    (sensor, r[0], r[1], r[1] - r[0]))
                # NOTE(review): despite the name, this keeps the LAST
                # range's window size, not the maximum — confirm intent.
                max_window_size = r[1] - r[0]
                (t_values, t_redirects) = self.ResolveSensor({}, {}, cursor,
                                                             sensor, r[0],
                                                             r[1], step_size,
                                                             wideinterp)

                if len(ranges) == 1:
                    # A single range: expose every resolved series directly.
                    for unique_sensor in t_values.keys():
                        values[unique_sensor] = t_values[unique_sensor]
                        returned.append(unique_sensor)
                        self.log('Creating simple value for %s' %
                                 unique_sensor)

                else:
                    # Multiple ranges: qualify the series name with the
                    # human-readable time label for this range.
                    t = times[ranges.index(r)]
                    k = '%s %s' % (sensor, t)
                    values[k] = t_values.get(sensor, [])
                    returned.append(k)
                    self.log('Creating meta value %s from %s at %s' %
                             (k, sensor, t))

        return (int(ranges[0][0]), int(ranges[0][0] + max_window_size),
                step_size, returned, values)
Example 2
0
    def handleurl_deletetag(self, urlfile):
        """Delete this tag from the given tracks.

        urlfile is expected to look like /<x>/<y>/tag=<encoded>/<id,id,...>.
        """

        (_, _, tag_encoded, tracks) = urlfile.split('/')
        tag = mhttp.urldecode(tag_encoded.split('=')[1])
        # Track ids come straight from the URL: coerce them to integers so
        # they cannot be used for SQL injection.
        tracks = [str(int(t)) for t in tracks.split(',')]

        self.log('Deleting tag %s from %s' % (tag, repr(tracks)))
        # NOTE(review): the tag is still interpolated into the SQL string;
        # escape backslashes and double quotes as a stop-gap. A
        # parameterized query would be the proper fix if db.ExecuteSql
        # supports one.
        tag_sql = tag.replace('\\', '\\\\').replace('"', '\\"')
        db.ExecuteSql('delete from tags where tag="%s" and '
                      'track_id in (%s);' % (tag_sql, ','.join(tracks)))
        db.ExecuteSql('commit;')
        self.sendfile('done.html')
Example 3
0
    def handleurl_addtag(self, urlfile):
        """Add this tag to the given tracks.

        urlfile is expected to look like /<x>/<y>/tag=<encoded>/<id,id,...>.
        """

        (_, _, tag_encoded, tracks) = urlfile.split('/')
        tag = mhttp.urldecode(tag_encoded.split('=')[1])
        tracks = tracks.split(',')

        # NOTE(review): the tag is interpolated into the SQL string; escape
        # backslashes and double quotes as a stop-gap, and coerce track ids
        # to integers so URL input cannot inject SQL. A parameterized query
        # would be the proper fix if db.ExecuteSql supports one.
        tag_sql = tag.replace('\\', '\\\\').replace('"', '\\"')
        for track in tracks:
            db.ExecuteSql('insert ignore into tags(tag, track_id) '
                          'values("%s", %s);' % (tag_sql, int(track)))
            db.ExecuteSql('commit;')
        self.sendfile('done.html')
Example 4
0
    def handleurl_flash(self, urlpath, post_data):
        """Pretty graphs done with flash wrapped in HTML.

        With fewer than four path components, lists the available sensors;
        otherwise embeds a flash chart whose data is fetched from the
        equivalent /json/ URL.
        """

        global sensor_names

        db = MySQLdb.connect(user=FLAGS.dbuser, db=FLAGS.dbname)
        cursor = db.cursor(MySQLdb.cursors.DictCursor)

        args = urlpath.split('/')
        (ranges, times, step_size) = self.ParseTimeField(args[2])
        # Navigation: a day back / forward, plus alternate renderings of
        # the same time specification.
        links = [
            self.time_link(urlpath, args[2], ranges[0], -one_day, '&lt;&lt;'),
            self.time_link(urlpath, args[2], ranges[0], one_day, '&gt;&gt;'),
            '<a href="/chart/%s">Static</a>' % '/'.join(args[2:]),
            '<a href="/table/%s">Table</a>' % '/'.join(args[2:]),
            '<a href="/csv/%s">CSV</a>' % '/'.join(args[2:]),
            '<a href="/json/%s">JSON</a>' % '/'.join(args[2:])
        ]

        self.log('Time ranges: %s' % repr(ranges))

        data = []
        head = []
        if len(args) < 4:
            # No sensors requested: show what is available instead.
            data = self.AvailableSensors(cursor, ranges[0])

        else:
            sensors = mhttp.urldecode(args[3]).split(',')
            data = [
                '<h1>%s</h1><ul>' % ', '.join(sensors),
                '<div id="my_chart"></div>', '</ul>'
            ]

            # Use a context manager so the template file is closed even if
            # the read fails.
            with open('ofc-helper.html') as f:
                template = f.read()
            head = substitute.substitute(
                template,
                subst={'data_file': urlpath.replace('flash', 'json')})

        self.sendfile('index.html',
                      subst={
                          'head': head,
                          'data': '\n'.join(data),
                          'links': ' '.join(links),
                          'refresh': '3600'
                      })
Example 5
0
    def handleurl_tag(self, urlfile):
        """Show songs with a given tag."""

        # This handles how soundmanager uses null sounds stupidly
        if urlfile.endswith('/null.mp3'):
            self.senderror(404, 'No such file')
            return

        (_, _, tag_encoded) = urlfile.split('/')
        tag = mhttp.urldecode(tag_encoded.split('=')[1])
        # NOTE(review): the tag is user input interpolated into SQL; escape
        # backslashes and double quotes as a stop-gap. The proper fix is a
        # parameterized query, but renderbrowseresults takes a SQL string.
        tag_sql = tag.replace('\\', '\\\\').replace('"', '\\"')
        sql = (
            'select * from tags inner join tracks on tags.track_id = tracks.id '
            'where tag="%s";' % tag_sql)
        self.log('Tag browse SQL = %s' % sql)
        results = self.renderbrowseresults(sql, includemissing=True)

        tags = {}
        tags['results'] = '\n'.join(results)
        tags['tag'] = tag
        tags['tag_encoded'] = tag_encoded
        self.sendfile('tag.html', subst=tags)
Example 6
0
    def handleurl_chart(self, urlpath, post_data):
        """Pretty graphs wrapped in HTML.

        With fewer than four path components, lists the available sensors;
        otherwise embeds a static chart image from the equivalent /image/
        URL. Refreshes every 60 seconds.
        """

        global sensor_names

        connection = MySQLdb.connect(user=FLAGS.dbuser, db=FLAGS.dbname)
        cursor = connection.cursor(MySQLdb.cursors.DictCursor)

        parts = urlpath.split('/')
        (ranges, times, step_size) = self.ParseTimeField(parts[2])

        # Navigation: a day back / forward, then alternate renderings of
        # the same time specification.
        tail = '/'.join(parts[2:])
        links = [
            self.time_link(urlpath, parts[2], ranges[0], -one_day, '&lt;&lt;'),
            self.time_link(urlpath, parts[2], ranges[0], one_day, '&gt;&gt;'),
        ]
        for label, prefix in (('Flash', 'flash'), ('Table', 'table'),
                              ('CSV', 'csv'), ('JSON', 'json')):
            links.append('<a href="/%s/%s">%s</a>' % (prefix, tail, label))

        self.log('Time ranges: %s' % repr(ranges))

        if len(parts) < 4:
            # No sensors requested: show what is available instead.
            body = self.AvailableSensors(cursor, ranges[0])
        else:
            sensors = mhttp.urldecode(parts[3]).split(',')
            body = [
                '<h1>%s</h1><ul>' % ', '.join(sensors),
                '<img src="%s">' % urlpath.replace('chart', 'image'),
                '</ul>'
            ]

        self.sendfile('index.html',
                      subst={
                          'head': '',
                          'data': '\n'.join(body),
                          'links': ' '.join(links),
                          'refresh': '60'
                      })
Example 7
0
    def handleurl_browse(self, file, post_data):
        """Browse the database.

        Parses the posted filter form, builds a SQL query over tracks
        joined with the current user's usersummary rows, and renders the
        results into browse.html.
        """

        global blogic

        # Parse filters. Each filter has the raw form value, a version
        # compiled into an rlike pattern, and (for checkboxes) the
        # 'checked' attribute for re-rendering the form.
        filters = {
            'artist_filter': '',
            'artist_filter_compiled': '.*',
            'album_filter': '',
            'album_filter_compiled': '.*',
            'track_filter': '',
            'track_filter_compiled': '.*',
            'recent_filter': '',
            'recent_filter_compiled': '',
            'recent_checked': '',
            'unplayed_filter': '',
            'unplayed_filter_compiled': '',
            'unplayed_checked': '',
            'random_filter': 'Random',
            'random_filter_compiled': '',
            'random_checked': 'checked',
        }

        # I am sure there is a better way than this
        if post_data:
            for l in post_data.split('\r\n'):
                if len(l) > 0:
                    for arg in l.split('&'):
                        # NOTE(review): this breaks if a value contains '='
                        # — a split with maxsplit=1 would be safer, left
                        # unchanged to preserve behavior.
                        (name, value) = arg.split('=')
                        if value:
                            value = mhttp.urldecode(value.replace('+', ' '))
                            filters['%s_filter' % name] = value
                            # Spaces match flexibly against '_' and '+'.
                            filters['%s_filter_compiled' %
                                    name] = value.replace(' ', '[ _+]+')

        recent_sql = ''
        if filters['recent_filter'] == 'Recent':
            recent_sql = 'and (to_days(now()) - to_days(creation_time)) < 15'
            filters['recent_checked'] = 'checked'

        unplayed_sql = ''
        if filters['unplayed_filter'] == 'Unplayed':
            unplayed_sql = ('and last_played=makedate(1970,1) and '
                            'last_skipped=makedate(1970,1)')
            filters['unplayed_checked'] = 'checked'

        random_sql_cols = ''
        random_sql_order = ''
        if filters['random_filter'] == 'Random':
            random_sql_cols = ', %s' % business.GenerateRankSql(2)
            random_sql_order = 'idx desc,'
            filters['random_checked'] = 'checked'

        # Only cap the result set when no text filter narrows it.
        if (filters['artist_filter'] or filters['album_filter']
                or filters['track_filter']):
            limit_sql = ''
        else:
            limit_sql = 'limit 100'

        # Bug fix: the user clause previously contained a literal
        # ("******") instead of a %s placeholder, leaving nine arguments
        # for eight placeholders — the % operation would raise TypeError.
        sql = (
            'select *%s from tracks join usersummary on usersummary.user="%s" and usersummary.track_id = id '
            'where artist rlike "%s" and album rlike "%s" and song rlike "%s" '
            '%s %s order by %s artist, song, album, number %s;' %
            (random_sql_cols,
             blogic.getclientsetting(self.client_id, 'user', 'shared'),
             filters['artist_filter_compiled'],
             filters['album_filter_compiled'],
             filters['track_filter_compiled'], unplayed_sql, recent_sql,
             random_sql_order, limit_sql))
        self.log('Browse SQL = %s' % sql)

        results = self.renderbrowseresults(sql)
        filters['results'] = '\n'.join(results)
        self.sendfile('browse.html', subst=filters)
Example 8
0
    def handleurl_image(self, urlpath, post_data):
        """Pretty graphs with the HTML.

        Builds a Google-chart line graph of the requested sensors over the
        requested time ranges and redirects the client to the chart URL.

        NOTE(review): range() with a float stop (MIN_Y..MAX_Y + 1.0) and
        the bare '/' divisions below imply Python 2 semantics — confirm
        before porting; under Python 3 the range call raises TypeError and
        the divisions produce floats.
        """

        global sensor_names

        db = MySQLdb.connect(user=FLAGS.dbuser, db=FLAGS.dbname)
        cursor = db.cursor(MySQLdb.cursors.DictCursor)

        args = urlpath.split('/')
        (ranges, times, step_size) = self.ParseTimeField(args[2])
        self.log('Time ranges: %s' % repr(ranges))

        # TODO(mikal): add options parsing here
        sensors = mhttp.urldecode(args[3]).split(',')
        wideinterp = True
        size = (600, 400)
        for param in args[1].split(','):
            self.log('Considering table param: %s' % param)
            if param == 'nowideinterpolation':
                wideinterp = False

            elif param.startswith('size'):
                # e.g. size=800x600
                x, y = param.split('=')[1].split('x')
                size = (int(x), int(y))

        self.log('Data fetch parameters: wideinterpolation = %s' % wideinterp)

        # Build a chart
        chart = SimpleLineChart(size[0], size[1], y_range=[MIN_Y, MAX_Y])
        chart.set_title('Sensors')
        chart.set_colours([
            '0000FF', '00FF00', 'FF0000', 'dd5500', 'ee11ff', '88ddff',
            '44cc00', 'bb0011', '11aaff'
        ])
        chart.set_grid(0, 20, 5, 5)

        # Chart axes: the right axis rescales the left by 50, in thousands.
        left_axis = []
        right_axis = []
        for v in range(MIN_Y, MAX_Y + 1.0, 5):
            left_axis.append('%s' % v)
            right_axis.append('%.01fk' % ((v * 50.0) / 1000))
        chart.set_axis_labels(Axis.LEFT, left_axis)
        chart.set_axis_labels(Axis.RIGHT, right_axis)

        # Bottom axis: about five time labels across the first range.
        bottom_axis = []
        for v in range(ranges[0][0], ranges[0][1] + 1,
                       max(1, (ranges[0][1] - ranges[0][0]) / 5)):
            # Renamed from 'tuple', which shadowed the builtin.
            lt = time.localtime(v)
            if len(ranges) == 1:
                bottom_axis.append('%d/%d %02d:%02d' %
                                   (lt[2], lt[1], lt[3], lt[4]))
            else:
                bottom_axis.append('%02d:%02d' % (lt[3], lt[4]))
        chart.set_axis_labels(Axis.BOTTOM, bottom_axis)

        # Determine how many values will be on the graph
        returned = []
        for sensor in sensors:
            for item in self.ResolveWouldReturn(cursor, sensor, ranges[0][0],
                                                ranges[0][1]):
                if item not in returned:
                    returned.append(item)
        self.log('Calculations will return: %s' % repr(returned))

        # Fetch those values.
        # NOTE(review): raises ZeroDivisionError if no series resolve;
        # confirm whether callers guarantee a non-empty sensor list.
        step_size = ((ranges[0][1] - ranges[0][0]) /
                     (MAX_READINGS_PER_GRAPH / (len(returned) * len(ranges))))
        self.log('%d time series, each pixel is %d seconds' %
                 (len(returned) * len(ranges), step_size))

        # Put the values on the chart
        legend = []
        for r in ranges:
            values = {}
            redirects = {}

            for sensor in sensors:
                (values,
                 redirects) = self.ResolveSensor(values, redirects, cursor,
                                                 sensor, r[0], r[1], step_size,
                                                 wideinterp)

            for value in returned:
                self.log('Adding %s' % value)
                chart.add_data(values[value])

                if len(ranges) == 1:
                    legend.append(value)
                else:
                    # Qualify with the time label when overlaying ranges.
                    legend.append('%s %s' % (value, times[0]))
            times = times[1:]

        chart.set_legend(legend)

        # Add markers for recorded events within the first range.
        cursor.execute('select * from events where '
                       'epoch_seconds > %d and epoch_seconds < %d '
                       'order by epoch_seconds asc;' %
                       (ranges[0][0], ranges[0][1]))
        for row in cursor:
            chart.add_marker(0,
                             (row['epoch_seconds'] - ranges[0][0]) / step_size,
                             'o', '00ff00', 10)

        self.sendredirect(chart.get_url())