Example #1
File: views.py Project: ox-it/OPMS
import datetime #Needed for datetime.timedelta below; debug is an OPMS helper module assumed in scope.

def create_metric_textfiles(traffic_to_plot, metrics_to_plot):
    """Put together CSVs of dates and traffic."""
    if traffic_to_plot:
        #Find the full date range spanned by the traffic.
        start = min(t.date for t in traffic_to_plot)
        stop = max(t.date for t in traffic_to_plot)

        #Build a contiguous list of every date from start to stop inclusive.
        x = start
        date_range = [start]
        while x != stop:
            x += datetime.timedelta(days=1)
            date_range.append(x)
    else:
        date_range = []
        debug.onscreen('WARNING: No traffic to plot. Did you put any in the database?')

    #Timeplot is designed to take in CSV text files, so build one as a string for each metric:
    metric_textfiles = {}
    for m in metrics_to_plot:
        metric_textfile_strlist = []
        append = metric_textfile_strlist.append #Cache the bound method to avoid re-looking it up inside the loops.
        for d in date_range:
            sd = str(d)
            for t in traffic_to_plot:
                if t.date == d and t.metric == m:
                    append('%s,%s' % (sd, t.count))
        #Joined with a literal backslash-n, presumably so the escape becomes a real
        #newline when the string is embedded in JavaScript on the page.
        metric_textfiles[m.id] = '\\n'.join(metric_textfile_strlist)
    return metric_textfiles
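
A quick way to exercise create_metric_textfiles, using hypothetical namedtuple stand-ins for the project's Traffic and Metric models (the real ones are Django models, but only .date, .count, .metric and .id are touched here):

import collections
import datetime

Metric = collections.namedtuple('Metric', ['id'])
Traffic = collections.namedtuple('Traffic', ['date', 'count', 'metric'])

m = Metric(id=1)
traffic = [
    Traffic(date=datetime.date(2012, 1, 1), count=10, metric=m),
    Traffic(date=datetime.date(2012, 1, 3), count=25, metric=m),
]
# One CSV-style string per metric id, spanning the full date range:
print(create_metric_textfiles(traffic, [m]))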
Example #2
    def scan_url(self, url):
        # Uses the Python 2 stdlib modules urllib2, time and datetime, plus the project's debug helper.
        USER_AGENT = 'OPMS/1.0 (Ubuntu 10.04; Virtual Server) Django/1.4.0'
        request = urllib2.Request(url)
        request.add_header('User-Agent', USER_AGENT)
        opener = urllib2.build_opener()
        time_of_request = datetime.datetime.utcnow()
        start = time.time()
        response = opener.open(request)  # Headers have arrived once this returns...
        ttfb = time.time() - start       # ...so this approximates time-to-first-byte.
        output = response.read()
        status = 200  # Presumed: opener.open() raises urllib2.HTTPError on non-2xx responses.
        ttlb = time.time() - start       # Time-to-last-byte, measured after the whole body is read.
        debug.onscreen("{0}:{1} - TTFB={2} - TTLB={3}".format(time_of_request, url, ttfb, ttlb))
        return status, time_of_request, ttfb, ttlb
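
For a rough modern equivalent of the timing logic above, here is a minimal standalone Python 3 sketch (urllib.request replaces the Python 2 urllib2 used in OPMS; the URL is only an example):

import time
import urllib.request

def timed_fetch(url, user_agent='OPMS/1.0'):
    request = urllib.request.Request(url, headers={'User-Agent': user_agent})
    start = time.time()
    response = urllib.request.urlopen(request)  # raises urllib.error.HTTPError on non-2xx
    ttfb = time.time() - start  # headers received: approximate time-to-first-byte
    response.read()
    ttlb = time.time() - start  # body fully read: time-to-last-byte
    return response.getcode(), ttfb, ttlb

print(timed_fetch('http://example.com/'))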
Example #3
File: views.py Project: ox-it/OPMS
def event_add(request, event=None, error='', message=''):
    """Adds a new event to the database. Optionally, it can replace an existing event instead."""
    categories = Category.objects.all()
    error_fields = []

    try:
        widget = bool(request.POST['widget'])
    except KeyError:
        widget = False
    if widget:
        url = request.POST['url']
        detail = request.POST['description']
        title = request.POST['title']
        try:
            timestamp = request.POST['timestamp']
            datetimestamp = parse(timestamp)
        except (KeyError, ValueError):
            timestamp = ''  # Make sure the debug line below can't raise a NameError.
            datetimestamp = datetime.datetime.now()
            debug.onscreen('WARNING: Widget returned a datetime we couldn\'t process. Defaulting to now.')
        debug.onscreen('Autocompleting form from widget... ' + url + str(timestamp) + title)
        default_event = Event(date=datetimestamp.date(), title=title, detail=detail,
                              category=Category.objects.filter(description='Found on the internet')[0],
                              user_email='')
    else:
        default_event = Event(date=datetime.date.today(), title='', detail='',
                              category=Category.objects.filter(description='Events')[0],
                              user_email='')

    added = bool(request.POST.get('add', False))
    action = request.POST.get('action', 'add')

    if added:
        try:
            new_date = parse(request.POST['date'], dayfirst=True)
        except (KeyError, ValueError):
            error += ' Date invalid or not specified.'

        try:
            new_detail = request.POST['detail']
            if new_detail == '':
                error += ' Event text is blank.'
        except KeyError:
            error += ' No event text provided.'

        try:
            new_title = request.POST['title']
            if new_title == '':
                error += ' Title is blank.'
        except KeyError:
            error += ' No event title provided.'

        try:
            #The [0] is OK: category_id is a primary key, so the queryset has at most one element.
            new_category = Category.objects.filter(pk=int(request.POST['category_id']))[0]
        except (KeyError, ValueError, IndexError):
            error += ' Category invalid or nonexistent.'

        try:
            new_user_email = request.POST['user_email']
            if new_user_email == '':
                error += ' You haven\'t provided your e-mail address.'
        except KeyError:
            error += ' No user e-mail address provided.'

        if error == '':
            new_event = Event(date=new_date, title=new_title, detail=new_detail,
                              category=new_category, user_email=new_user_email)
            try:
                new_event.full_clean()
                try:
                    new_event.save()
                    message += 'Your event was added to the database.'
                except Exception:
                    error += 'Failed to access the database.'
            except ValidationError as ve:
                for k in ve.message_dict.keys():
                    error_fields.append(k)
                    for m in ve.message_dict[k]:
                        error += m + ' '
                default_event = new_event

    if action in ('saveandaddanother', 'add') or error != '':
        return render_to_response('feedback/event_add.html',
            {'categories': categories,
             'error': error,
             'added': added,
             'message': message,
             'error_fields': error_fields,
             'event': default_event},
            context_instance=RequestContext(request))
    elif action == 'save':
        return index(request, error=error, message=message)
    else:
        error += 'Invalid submit action requested.'
        return render_to_response('feedback/event_add.html',
            {'categories': categories,
             'error': error,
             'added': added,
             'message': message,
             'error_fields': error_fields,
             'event': default_event},
            context_instance=RequestContext(request))
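
The validation pattern above (run full_clean(), then flatten ValidationError.message_dict into an error string plus a list of offending field names) can be pulled out into a helper; a minimal sketch, where collect_errors is a hypothetical name rather than part of OPMS:

from django.core.exceptions import ValidationError

def collect_errors(instance):
    """Run full_clean() on a model instance and flatten per-field messages, as event_add does."""
    error, error_fields = '', []
    try:
        instance.full_clean()
    except ValidationError as ve:
        for field, messages in ve.message_dict.items():
            error_fields.append(field)
            for m in messages:
                error += m + ' '
    return error, error_fields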
Example #4
File: views.py Project: ox-it/OPMS
def index(request, error='', message='', tag=None, tag_id=None, comment_id=None, event_id=None, metric_id=None):
    """Display a report on either a given tag, or a default set of comments, events and metrics if a tag is not specified."""
    if tag:
        metrics_to_plot = Metric.objects.filter(tags=tag)
    else:
        metrics_to_plot = Metric.objects.filter(source='appleweekly')

    traffic_to_plot = []
    for metric in metrics_to_plot:
        metric_traffic = list(Traffic.objects.filter(metric=metric))
        if metric_traffic:
            traffic_to_plot.extend(metric_traffic) #extend, not append: keep a flat list of Traffic objects

    chart = False

    for m in metrics_to_plot:
        if m.source == 'appleweekly':
            try:
                #Import Apple weekly summary metrics, but just for one-time use - don't save them in the db.
                append = traffic_to_plot.append #Avoid re-looking up .append in the middle of all those loops.
                for w in AppleWeeklySummary.merged.all():
                    for field in AppleWeeklySummary._meta._fields(): #A list of field objects from the model in the stats app
                        if field.verbose_name == m.appleweeklyfield: #verbose_name is set in stats/models/apple_summary.py
                            append(Traffic(date=w.week_beginning, count=w.__dict__[field.name], metric=m))
            except Exception:
                debug.onscreen('WARNING: Can\'t find any Apple summary data. Have you imported it?')
        elif m.source in ('itu-collection-chart', 'itu-item-chart'):
            try:
                #Add the first chartrecord of each day to traffic_to_plot
                if m.source == 'itu-collection-chart':
                    chartrecords = ItuCollectionChartScan.objects.filter(itucollection=m.itucollection).order_by('date')
                else:
                    chartrecords = ItuItemChartScan.objects.filter(ituitem=m.ituitem).order_by('date')
                dates = []
                for chartrecord in chartrecords:
                    if chartrecord.date.date() not in dates:
                        dates.append(chartrecord.date.date())
                for date in dates:
                    chartrecords_day = [c for c in chartrecords if c.date.date() == date]
                    traffic_to_plot.append(
                        Traffic(date=date, count=(-1 * chartrecords_day[0].position), metric=m))
                chart = True
            except Exception:
                error += 'Failed to process traffic for an %s.' % m.source
        elif m.source == 'itu-#tc':
            try:
                dates_processed = []
                for tc_scan in ItuScanLog.objects.filter(mode=2).order_by('time'):
                    date = tc_scan.time.date()
                    if date not in dates_processed:
                        dates_processed.append(date)
                        tc_count = ItuCollectionChartScan.objects.filter(scanlog=tc_scan,
                            itucollection__institution=m.ituinstitution).count()
                        traffic_to_plot.append(Traffic(date=date, count=tc_count, metric=m))
            except Exception:
                error += 'Failed to process traffic for the # of collections in the top 200.'
        elif m.source == 'itu-#ti':
            try:
                dates_processed = []
                for ti_scan in ItuScanLog.objects.filter(mode=3).order_by('time'):
                    date = ti_scan.time.date()
                    if date not in dates_processed:
                        dates_processed.append(date)
                        ti_count = ItuItemChartScan.objects.filter(scanlog=ti_scan,
                            ituitem__institution=m.ituinstitution).count()
                        traffic_to_plot.append(Traffic(date=date, count=ti_count, metric=m))
            except Exception:
                error += 'Failed to process traffic for the # of items in the top 200.'

    #NOTE: We do not need to handle the temporal range of comments and events since this is done automatically by Timeplot.

    from_itunes_u = Category.objects.get(description='From iTunes U')
    #Create comments in the feedback database if they don't already exist.
    for itu_comment in ItuComment.objects.filter(ituinstitution__name='Oxford University'):
        #Only save the comment if an identical one isn't already in the database.
        if Comment.objects.filter(detail=itu_comment.detail).count() == 0:
            comment = Comment(
                date=itu_comment.date,
                time=datetime.time(0, 0, 0),
                source=itu_comment.itucollectionhistorical.name + ' - comment by ' + itu_comment.source,
                detail=itu_comment.detail,
                user_email='*****@*****.**',
                moderated=True,
                category=from_itunes_u,
                itu_source=itu_comment
            )
            comment.save()
    if tag:
        comments_to_plot = Comment.objects.filter(moderated=True, tags=tag)
        events_to_plot = Event.objects.filter(moderated=True, tags=tag)
    else:
        comments_to_plot = Comment.objects.filter(moderated=True)
        events_to_plot = Event.objects.filter(moderated=True)

    categories_to_plot = []
    for (category_id,) in comments_to_plot.values_list('category').distinct():
        categories_to_plot.append(Category.objects.get(id=category_id))
    for (category_id,) in events_to_plot.values_list('category').distinct():
        category = Category.objects.get(id=category_id)
        if category not in categories_to_plot:
            categories_to_plot.append(category)

    return render_to_response('feedback/index.html', {
        'metrics_to_plot': metrics_to_plot,
        'metric_textfiles': create_metric_textfiles(traffic_to_plot, metrics_to_plot),
        'categories_to_plot': categories_to_plot,
        'comments_to_plot': comments_to_plot,
        'events': events_to_plot,
        'chart': chart,
        'error': error,
        'message': message,
        'tag': tag,
        'tag_id': tag_id,
        'tags': Tag.objects.all(),
        'comment_id': comment_id,
        'event_id': event_id,
        'metric_id': metric_id,
    }, context_instance=RequestContext(request))
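
The chart metrics above repeatedly pick out the first scan record of each calendar day. That de-duplication step can be tested in isolation; a standalone sketch with a hypothetical Record stand-in for the chart scan models:

import collections
import datetime

Record = collections.namedtuple('Record', ['date', 'position'])

def first_record_per_day(records):
    """Given records ordered by timestamp, keep only the first one seen on each calendar day."""
    first_seen = collections.OrderedDict()
    for r in records:
        day = r.date.date()
        if day not in first_seen:
            first_seen[day] = r
    return first_seen

records = [
    Record(datetime.datetime(2012, 5, 1, 9, 0), 3),
    Record(datetime.datetime(2012, 5, 1, 21, 0), 5),  # same day: ignored
    Record(datetime.datetime(2012, 5, 2, 9, 0), 2),
]
for day, r in first_record_per_day(records).items():
    print('%s %d' % (day, -1 * r.position))  # positions are negated for plotting, as in index()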
Example #5
    def handle_label(self, path, **options):
        verbosity = int(options.get('verbosity', 0))
        if verbosity > 1:
            debug.DEBUG = True

#        # Some basic checking
#        if not filename.endswith('.txt'):
#           raise CommandError("This is not a text (.txt) log file.\n\n")

        # Scan the directory for files and compare them to names in the existing LogFile list. Import the first X new files.
        found_files_list = self._list_files(path)
        found_files_list.sort() # Trust the naming conventions to put a sortable date in the filenames
        import_file_limit = min(21, len(found_files_list))
        # Only work on the one file when debug is switched on
        if debug.DEBUG:
            import_file_limit = 1
        print "{0} files have been found. Importing up to {1} of them now.".format(
            len(found_files_list),
            import_file_limit
        )
        for filename in found_files_list:
            if import_file_limit > 0:
                # Reset statistics
                self.import_stats['filename'] = filename
                self.import_stats['line_counter'] = 0
                self.import_stats['line_count'] = 0
                self.import_stats['import_starttime'] = datetime.datetime.now(pytz.utc)
                self.import_stats['import_startline'] = int(options.get('start_at_line', 1))

                # This only needs setting/getting once per call of this function
                logfile_obj, created = self._logfile(filename, 'itu-raw')
                if not created:
                    err_string = "This file has already been imported: ({0})".format(filename)
                    debug.onscreen(err_string)
                    continue

                import_file_limit -= 1

                print "Import of [{0}] started at {1:%Y-%m-%d %H:%M:%S}\n".format(
                    filename,
                    datetime.datetime.now(pytz.utc)
                )

                # Create an error log per import file
                debug.errorlog_start(filename + '_import-error.log')

                # Send the file off to be parsed
                self._parsefile(logfile_obj)

                # Final stats output at end of file
                try:
                    # Note: .seconds ignores any whole-day component of the timedelta; fine for imports shorter than a day.
                    self.import_stats['import_duration'] = float((datetime.datetime.now(pytz.utc) - self.import_stats.get('import_starttime')).seconds)
                    self.import_stats['import_rate'] = float(self.import_stats.get('line_counter') - self.import_stats.get('import_startline')) / \
                                                       self.import_stats['import_duration']
                except ZeroDivisionError:
                    self.import_stats['import_rate'] = 0

                # Write the error cache to disk
                debug.errorlog_stop()

                print """
                    Import finished at {0:%Y-%m-%d %H:%M:%S}
                    {1:d} Lines parsed over {2:.1f} seconds
                    Giving a rate of {3:.3f} lines/sec
                    """.format(
                        datetime.datetime.now(pytz.utc),
                        self.import_stats.get('line_counter'),
                        self.import_stats.get('import_duration'),
                        self.import_stats.get('import_rate')
                    )

        return None
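
The lines-per-second figure reported at the end of each file is straightforward arithmetic on the stats dictionary; a minimal standalone sketch of the same calculation (values are illustrative):

import datetime

def import_rate(start, end, start_line, end_line):
    """Lines parsed per second, guarding against a zero-length interval."""
    duration = float((end - start).seconds)  # same .seconds convention as handle_label
    try:
        return (end_line - start_line) / duration
    except ZeroDivisionError:
        return 0

start = datetime.datetime(2012, 6, 1, 12, 0, 0)
end = start + datetime.timedelta(seconds=40)
print(import_rate(start, end, 1, 100001))  # 2500.0 lines/sec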
Example #6
    def _parseline(self, entrydict, logfile_obj):
#        # Build the log entry dictionary
#        arle = {
#            "logfile" : logfile_obj,
#            "artist_id" : long(entrydict.get("artist_id")),
#            "itunes_id" : long(entrydict.get("itunes_id")),
#            "action_type" : self._action_type_validation(entrydict.get("action_type")),
#            "title" : entrydict.get("title","Unknown"),
#            "url" : entrydict.get("url",""),
#            "episode_id" : long(entrydict.get("episode_id",0)),
#            "episode_title" : entrydict.get("episode_title",None),
#            "episode_type" : entrydict.get("episode_type",None),
#            "storefront" : int(entrydict.get("storefront",0)),
#            "user_agent" : self._user_agent(entrydict.get("useragent","")),
#            "ipaddress" : self._ip_to_domainname(entrydict.get("ip_address",None)),
#            "timestamp" : self._parse_timestamp(entrydict.get("timestamp")),
#            "user_id" : entrydict.get("user_id","")
#        }
        debug.onscreen("_parseline():entrydict=" + str(entrydict))

        # Build the log entry dictionary
        arle = AppleRawLogEntry()
        arle.logfile = logfile_obj
        try:
            arle.artist_id = long(entrydict.get("artist_id"))
        except (ValueError, TypeError):
            arle.artist_id = -1
        try:
            arle.itunes_id = long(entrydict.get("itunes_id"))
        except (ValueError, TypeError):
            arle.itunes_id = -1
        arle.action_type = self._action_type_validation(entrydict.get("action_type"))
        arle.title = entrydict.get("title", "Unknown")
        arle.url = entrydict.get("url", "")
        try:
            arle.episode_id = long(entrydict.get("episode_id"))
        except (ValueError, TypeError):
            arle.episode_id = None
        arle.episode_title = entrydict.get("episode_title", None)
        arle.episode_type = entrydict.get("episode_type", None)
        arle.storefront = self._storefront(entrydict.get("storefront", "0"))
        arle.user_agent = self._user_agent(entrydict.get("useragent", ""))
        arle.ipaddress = self._ip_to_domainname(entrydict.get("ip_address", None))
        arle.timestamp = self._parse_timestamp(entrydict.get("timestamp"))
        arle.user_id = entrydict.get("user_id", "")
        arle.save(force_insert=True)

        # Add to the daily summary dictionary
        if self.summary.get("date", None) is None:
            self.summary["date"] = "{0:%Y-%m-%d}".format(arle.timestamp)
        # Map each action type onto its summary key; anything unrecognised is tallied as "unknown".
        summary_keys = {
            "AutoDownload": "auto_download",
            "Browse": "browse",
            "Download": "download",
            "DownloadAll": "download_all",
            "Stream": "stream",
            "Subscribe": "subscribe",
            "SubscriptionEnclosure": "subscription_enclosure",
        }
        key = summary_keys.get(arle.action_type, "unknown")
        self.summary[key] = self.summary.get(key, 0) + 1

        return None
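
The per-action tallying at the end of _parseline is a plain frequency count; collections.Counter expresses the same idea more compactly. A sketch with made-up action strings (key_map mirrors the summary keys above):

import collections

key_map = {
    'AutoDownload': 'auto_download',
    'Browse': 'browse',
    'Download': 'download',
    'DownloadAll': 'download_all',
    'Stream': 'stream',
    'Subscribe': 'subscribe',
    'SubscriptionEnclosure': 'subscription_enclosure',
}
summary = collections.Counter()
for action in ['Browse', 'Download', 'Browse', 'SomethingNew']:
    summary[key_map.get(action, 'unknown')] += 1
print(dict(summary))  # {'browse': 2, 'download': 1, 'unknown': 1}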