def __init__(self):
    """Load feedfilter configuration from the environment and config file.

    Environment variables:
        FEED_FILTER_CONF: config directory override
            (default: ``~/.feedfilter``).
        DEBUG: debug-mode flag string, parsed by ``utils.toBool``
            (default ``"false"``).

    Side effects: sets ``self.confdir``, ``self.debug_mode`` and
    ``self.configs`` (a ``ConfigParser`` loaded from
    ``<confdir>/feedfilter.conf``; a missing file is silently ignored
    by ``ConfigParser.read``).
    """
    # Fall back to expanduser("~") so an unset HOME does not make
    # os.path.join raise TypeError on a None component.
    self.confdir = os.getenv(
        "FEED_FILTER_CONF",
        os.path.join(os.getenv("HOME", os.path.expanduser("~")), ".feedfilter"),
    )
    self.debug_mode = utils.toBool(os.getenv("DEBUG", "false"))
    self.configs = configparser.ConfigParser()
    self.configs.read(os.path.join(self.confdir, "feedfilter.conf"))
def read_settings():
    """Populate module-level configuration globals from env vars and
    ``feedfilter.conf``.

    Reads the ``DEFAULT`` section first, then any section whose name is a
    substring of the module-level ``url`` (NOTE(review): ``url`` and the
    initial value of ``sitename`` are not defined in this function and are
    presumably module globals set elsewhere — verify against the caller).
    ``DEBUG=dev`` in the environment forces debug logging and a fixed
    output file.
    """
    global sitename
    global logfile, loglevel_file, loglevel_stderr, appendlvl
    global confdir, outputfile
    global cmp_threshold, threshold, title_scale
    # read env-variables
    # NOTE(review): unlike the class-based loader elsewhere in this file,
    # os.getenv('HOME') here has no fallback and may be None.
    confdir = os.getenv('FEED_FILTER_CONF', os.path.join(os.getenv('HOME'), ".feedfilter"))
    debug_mode = os.getenv('DEBUG', "False")
    # read configfile
    configs = configparser.ConfigParser()
    configs.read(os.path.join(confdir, 'feedfilter.conf'))
    # default settings (used when the config file does not override them)
    threshold = 1
    cmp_threshold = 0.35
    title_scale = 2
    logfile = None
    loglevel_file = 'INFO'
    loglevel_stderr = 'CRITICAL'
    appendlvl = False
    outputfile = None
    for section in configs:
        if section == 'DEFAULT':
            config = configs[section]
        elif url.find(section) != -1:
            # Site-specific section: its name must occur inside the feed URL.
            config = configs[section]
            sitename = config.get('sitename', sitename)
        else:
            continue
        # Each matching section overrides the values accumulated so far;
        # iteration order means DEFAULT is applied before site sections.
        threshold = float(config.get('threshold', threshold))
        cmp_threshold = float(config.get('cmp_threshold', cmp_threshold))
        title_scale = float(config.get('title_scale', title_scale))
        logfile = config.get('logfile', logfile)
        loglevel_file = config.get('loglevel', loglevel_file)
        loglevel_stderr = config.get('verboselevel', loglevel_stderr)
        appendlvl = utils.toBool(config.get('appendlvl', appendlvl))
        outputfile = config.get('outputfile', outputfile)
    # Developer mode: force verbose logging and a fixed output file.
    if debug_mode == "dev":
        loglevel_file = 'DEBUG'
        loglevel_stderr = 'DEBUG'
        outputfile = 'output.xml'
def plotAll(csvDirectory, includeDownload=False, wasForeground=False, blocking=False):
    """Plot one CSV or every ``*.csv`` in a directory, then show the figures.

    If ``csvDirectory`` names an existing file, a single plot is produced
    from ``csvDirectory + '.csv'``; otherwise every CSV file directly
    inside the directory is plotted (each path is printed as it is
    processed). Flags are normalized through ``utils.toBool``.
    """
    # Normalize the string/bool flags once up front.
    show_download = utils.toBool(includeDownload)
    in_foreground = utils.toBool(wasForeground)
    block_flag = utils.toBool(blocking)
    if os.path.isfile(csvDirectory):
        plot.plot(csvDirectory + '.csv', show_download, in_foreground, block_flag)
    else:
        for csv_path in glob.glob(csvDirectory + '/*.csv'):
            print(csv_path)
            plot.plot(csv_path, show_download, in_foreground, block_flag)
    plt.show()
def plot(file, showDownload, wasForeground, aggregatedTime=1, blocking=True):
    """Bar-plot uploaded (and collected download) traffic from a usage CSV.

    Rows are split by foreground state: with ``wasForeground`` false, only
    rows that are background now and were background for the two previous
    rows are kept; otherwise rows that are (or recently were) foreground
    are kept. Skips two header rows, converts epoch-millisecond timestamps
    to datetimes and byte counts to kB, then shows a single bar chart.
    """
    # NOTE(review): the format string lacks a space before "showDownload:".
    print("PLOT: file:" + file + "showDownload:" + str(showDownload) + " wasForeground:" + str(wasForeground) + " blocking:" + str(blocking))
    # Column indices of the expected CSV layout.
    record_id_idx = 0
    record_time_idx = 1
    package_name_idx = 2
    foreground_time_usage_idx = 3
    last_time_use_idx = 4
    downloaded_data_idx = 5
    uploaded_data_idx = 6
    was_foreground_idx = 7
    boot_idx = 8
    # Arguments may arrive as strings (e.g. from a CLI); normalize them.
    showDownload = utils.toBool(showDownload)
    wasForeground = utils.toBool(wasForeground)
    blocking = utils.toBool(blocking)
    aggregatedTime = int(aggregatedTime)
    dataY = []
    dataY_download = []
    dataX = []
    dataX_download = []
    with open(file, 'rt') as csvfile:
        reader = csv.reader(csvfile, delimiter=',', quotechar='"')
        # lastF / lastF1 track the foreground flag of the previous two rows.
        lastF = 1
        lastF1 = 1
        title = ""
        # Skip the two header rows.
        next(reader)
        next(reader)
        begin = 0
        end = 0
        for row in reader:
            #print("row 9")
            #print(row[9])
            # Track first and latest record timestamps for bar-width scaling.
            if (begin == 0):
                begin = int(row[record_time_idx])
            if (int(row[record_time_idx]) > end):
                end = int(row[record_time_idx])
            if (wasForeground == False):
                title = "data in background"
                # Keep rows that are background now and were for the last
                # two rows, to exclude traffic trailing a foreground session.
                if (row[was_foreground_idx] == "0" and int(lastF) == 0 and int(lastF1) == 0):
                    #print("addded")
                    if (int(row[uploaded_data_idx]) > 0):
                        dataX.append(
                            datetime.datetime.fromtimestamp(
                                int(row[record_time_idx]) / 1000))
                        dataY.append(int(row[uploaded_data_idx]) / 1024)
                    if int(row[downloaded_data_idx]) > 0:
                        dataY_download.append(
                            int(row[downloaded_data_idx]) / 1024)
                        dataX_download.append(
                            datetime.datetime.fromtimestamp(
                                int(row[record_time_idx]) / 1000))
                # NOTE(review): lastF is stored as a string here (compared
                # via int(lastF) above), unlike the sibling plot variants.
                lastF1 = lastF
                lastF = row[was_foreground_idx]
            else:
                title = "data in foreground"
                # Keep rows that are foreground now or were within the last
                # two rows.
                # NOTE(review): this branch never updates lastF/lastF1, so
                # they keep their initial value 1 — confirm intended.
                if (row[was_foreground_idx] == "1" or int(lastF) == 1 or int(lastF1) == 1):
                    if (int(row[uploaded_data_idx]) > 0):
                        dataX.append(
                            datetime.datetime.fromtimestamp(
                                int(row[record_time_idx]) / 1000))
                        dataY.append(int(row[uploaded_data_idx]) / 1024)
                    if int(row[downloaded_data_idx]) > 0:
                        dataY_download.append(
                            int(row[downloaded_data_idx]) / 1024)
                        dataX_download.append(
                            datetime.datetime.fromtimestamp(
                                int(row[record_time_idx]) / 1000))
    # Bar width scaled to the observed time span (ms -> minutes -> 5-min units).
    width = 1 / ((end - begin) / aggregatedTime / 1000 / 60 / 5) * 2
    #print(dataX)
    fig, f = plt.subplots()
    #plt.plot_date(dataX, dataY, 'ro')
    # NOTE(review): both branches plot dataX/dataY (upload); only the color
    # and legend label differ — confirm the "download" label is intended.
    if (showDownload == True):
        r1 = f.bar(dataX, dataY, width, color="b")
        f.legend(['download'])
    else:
        r1 = f.bar(dataX, dataY, width, color="r")
        f.legend(['upload'])
    plt.ylabel('Uploaded data [kB]')
    plt.title(title)
    f.xaxis_date()
    plt.show(blocking)
def plot(file, updown="upload", bar=True, aggregatedTime=1):
    """Plot upload or download traffic split into background / foreground /
    in-between series.

    Each data row is classified as background (background now and for the
    previous two rows), foreground (flag == "1"), or in-between ("ibtw"),
    and rendered either as dot plots or as log-scale bar charts.
    """
    # NOTE(review): the format string lacks a space before "updown:".
    print("PLOT: file:" + file + "updown:" + str(updown) + " bar:" + str(bar))
    # Column indices of the expected CSV layout.
    record_id_idx = 0
    record_time_idx = 1
    package_name_idx = 2
    foreground_time_usage_idx = 3
    last_time_use_idx = 4
    downloaded_data_idx = 5
    uploaded_data_idx = 6
    was_foreground_idx = 7
    boot_idx = 8
    # bar may arrive as a string; normalize it.
    bar = utils.toBool(bar)
    dataY_back = []
    dataY_fore = []
    dataX_back = []
    dataX_fore = []
    dataX_bf = []
    dataY_bf = []
    aggregatedTime = int(aggregatedTime)
    with open(file, 'rt') as csvfile:
        reader = csv.reader(csvfile, delimiter=',', quotechar='"')
        # lastF / lastF1 track the foreground flag of the previous two rows.
        lastF = 1
        lastF1 = 1
        title = ""
        # Skip the two header rows.
        next(reader)
        next(reader)
        begin = 0
        end = 0
        for row in reader:
            #print("row 9")
            #print(row[9])
            # Track first and latest record timestamps for bar-width scaling.
            if (begin == 0):
                begin = int(row[record_time_idx])
            if (int(row[record_time_idx]) > end):
                end = int(row[record_time_idx])
            if (updown == "download"):
                #print("addded")
                if (int(row[downloaded_data_idx]) > 0):
                    # Background: background now and for the two prior rows.
                    if (row[was_foreground_idx] == "0" and lastF == 0 and lastF1 == 0):
                        dataX_back.append(
                            datetime.datetime.fromtimestamp(
                                int(row[record_time_idx]) / 1000))
                        dataY_back.append(int(row[downloaded_data_idx]) / 1024)
                    elif (row[was_foreground_idx] == "1"):
                        dataY_fore.append(int(row[downloaded_data_idx]) / 1024)
                        dataX_fore.append(
                            datetime.datetime.fromtimestamp(
                                int(row[record_time_idx]) / 1000))
                    else:
                        # Background row shortly after foreground ("ibtw").
                        dataY_bf.append(int(row[downloaded_data_idx]) / 1024)
                        dataX_bf.append(
                            datetime.datetime.fromtimestamp(
                                int(row[record_time_idx]) / 1000))
                # Shift the foreground-history window for the next row.
                lastF1 = lastF
                lastF = int(row[was_foreground_idx])
                print(lastF)
                print(lastF1)
                print("___")
            else:
                if (int(row[uploaded_data_idx]) > 0):
                    if (row[was_foreground_idx] == "0" and lastF == 0 and lastF1 == 0):
                        dataX_back.append(
                            datetime.datetime.fromtimestamp(
                                int(row[record_time_idx]) / 1000))
                        dataY_back.append(int(row[uploaded_data_idx]) / 1024)
                    elif (row[was_foreground_idx] == "1"):
                        dataY_fore.append(int(row[uploaded_data_idx]) / 1024)
                        dataX_fore.append(
                            datetime.datetime.fromtimestamp(
                                int(row[record_time_idx]) / 1000))
                    else:
                        dataY_bf.append(int(row[uploaded_data_idx]) / 1024)
                        dataX_bf.append(
                            datetime.datetime.fromtimestamp(
                                int(row[record_time_idx]) / 1000))
                # Shift the foreground-history window for the next row.
                lastF1 = lastF
                lastF = int(row[was_foreground_idx])
    #print(dataX)
    if (updown == "download"):
        plt.title("download")
    else:
        plt.title("upload")
    if (bar == False):
        # Scatter view: one dot series per classification.
        plt.figure()
        plt.plot_date(dataX_back, dataY_back, 'ro')
        plt.plot_date(dataX_bf, dataY_bf, 'co')
        plt.plot_date(dataX_fore, dataY_fore, 'go')
        plt.ylabel('Data[kB]')
    else:
        fig, f = plt.subplots()
        #plt.plot_date(dataX, dataY, 'ro')
        # Bar width scaled to the observed span (ms -> minutes -> 5-min units).
        width = 1 / ((end - begin) / aggregatedTime / 1000 / 60 / 5) * 2
        print(width)
        r1 = f.bar(dataX_back, dataY_back, width, color="r")
        r2 = f.bar(dataX_bf, dataY_bf, width, color="c")
        r3 = f.bar(dataX_fore, dataY_fore, width, color="b")
        f.legend(["background", "ibtw", "foreground"])
        plt.ylabel('Uploaded data [kB]')
        # NOTE(review): title is still "" here, so this clears the
        # "download"/"upload" title set above — confirm intended.
        plt.title(title)
        f.xaxis_date()
        f.set_yscale('log')
    plt.show(True)
def plot(file, alsoDownload, wasForeground=False, blocking=True):
    """Dot-plot upload (and optionally download) traffic from a usage CSV.

    In background mode only strictly-background rows are plotted; in
    foreground mode plain foreground rows go into dataX/dataY and rows
    just after user activity into the *_o ("after activity") series.
    """
    # NOTE(review): the format string lacks a space before "alsoDownload:".
    print("PLOT: file:" + file + "alsoDownload:" + str(alsoDownload) + " wasForeground:" + str(wasForeground) + " blocking:" + str(blocking))
    # Column indices of the expected CSV layout.
    record_id_idx = 0
    record_time_idx = 1
    package_name_idx = 2
    foreground_time_usage_idx = 3
    last_time_use_idx = 4
    downloaded_data_idx = 5
    uploaded_data_idx = 6
    was_foreground_idx = 7
    boot_idx = 8
    # Arguments may arrive as strings; normalize them.
    alsoDownload = utils.toBool(alsoDownload)
    wasForeground = utils.toBool(wasForeground)
    blocking = utils.toBool(blocking)
    dataY = []
    dataY_download = []
    dataX = []
    dataX_download = []
    # "_o" series: traffic observed right after user activity.
    dataX_o = []
    dataY_o = []
    dataX_download_o = []
    dataY_download_o = []
    with open(file, 'rt') as csvfile:
        reader = csv.reader(csvfile, delimiter=',', quotechar='"')
        # lastF / lastF1 track the foreground flag of the previous two rows.
        lastF = 1
        lastF1 = 1
        title = ""
        # Skip the two header rows.
        next(reader)
        next(reader)
        for row in reader:
            #print("row 9")
            #print(row[9])
            if (wasForeground == False):
                title = "data in background"
                # Keep rows that are background now and were for the last
                # two rows.
                if (row[was_foreground_idx] == "0" and lastF == 0 and lastF1 == 0):
                    #print("addded")
                    if (int(row[uploaded_data_idx]) > 0):
                        dataX.append(
                            datetime.datetime.fromtimestamp(
                                int(row[record_time_idx]) / 1000))
                        dataY.append(int(row[uploaded_data_idx]) / 1024)
                    if int(row[downloaded_data_idx]) > 0:
                        dataY_download.append(
                            int(row[downloaded_data_idx]) / 1024)
                        dataX_download.append(
                            datetime.datetime.fromtimestamp(
                                int(row[record_time_idx]) / 1000))
                # Shift the foreground-history window for the next row.
                lastF1 = lastF
                lastF = int(row[was_foreground_idx])
                print(lastF)
                print(lastF1)
                print("___")
            else:
                title = "data in foreground"
                if (row[was_foreground_idx] == "1"):
                    if (int(row[uploaded_data_idx]) > 0):
                        dataX.append(
                            datetime.datetime.fromtimestamp(
                                int(row[record_time_idx]) / 1000))
                        dataY.append(int(row[uploaded_data_idx]) / 1024)
                    if int(row[downloaded_data_idx]) > 0:
                        dataY_download.append(
                            int(row[downloaded_data_idx]) / 1024)
                        dataX_download.append(
                            datetime.datetime.fromtimestamp(
                                int(row[record_time_idx]) / 1000))
                lastF1 = lastF
                lastF = int(row[was_foreground_idx])
                # Background row right after foreground: "after activity".
                # NOTE(review): lastF was just overwritten with this row's
                # flag, so when row == "0" the lastF == 1 test can never
                # hold and only lastF1 matters — confirm intended.
                if (row[was_foreground_idx] == "0" and (lastF == 1 or lastF1 == 1)):
                    if (int(row[uploaded_data_idx]) > 0):
                        dataX_o.append(
                            datetime.datetime.fromtimestamp(
                                int(row[record_time_idx]) / 1000))
                        dataY_o.append(int(row[uploaded_data_idx]) / 1024)
                    if int(row[downloaded_data_idx]) > 0:
                        dataY_download_o.append(
                            int(row[downloaded_data_idx]) / 1024)
                        dataX_download_o.append(
                            datetime.datetime.fromtimestamp(
                                int(row[record_time_idx]) / 1000))
    #print(dataX)
    f, ax = plt.subplots()
    plt.plot_date(dataX, dataY, 'ro')
    plt.plot_date(dataX_o, dataY_o, 'yo')
    if (alsoDownload == True):
        plt.plot_date(dataX_download, dataY_download, 'bo')
        plt.plot_date(dataX_download_o, dataY_download_o, 'co')
        ax.legend(('Upload', 'upload after user activity', 'download',
                   'download after user activity'),
                  numpoints=1)
    else:
        ax.legend(('Upload', 'upload after user activity'), numpoints=1)
    plt.ylabel('Uploaded data [kB]')
    plt.title(title)
    plt.show(blocking)
def __init__(self, context, request):
    """Parse mobile-app feed request parameters into view attributes.

    Copies the (case-insensitive) form parameters, resolves zone names to
    UIDs through the portal catalog, builds the Article factory and
    determines the subsite timezone. Python 2 code (``iteritems``).
    """
    super(MobappBaseView, self).__init__(context, request)
    # Case-insensitive copy of the request form, so parameter-name casing
    # from the app does not matter.
    self.cirequest = utils.CaseInsensitiveDict()
    for key, value in request.form.iteritems():
        self.cirequest[key]=value
    #although this should ALWAYS be called in subsite context...
    self.subsite = context.getSubsite()
    self.SiteID = self.cirequest.get('SiteID', 0) #Ignored.
    # Zones may be given by ID and/or by name, pipe-separated.
    self.ZoneID = self.cirequest.get("ZoneID", "")
    self.ZoneIDs = self.ZoneID.split("|")
    if self.ZoneIDs == [""]:
        self.ZoneIDs = []
    self.Zone = self.cirequest.get("Zone","")
    self.Zones = self.Zone.split("|")
    if self.Zones == [""]:
        self.Zones = []
    self.DayCount = int(self.cirequest.get("DayCount", self.DefaultDays))
    # Clamp the requested article count to the class maximum.
    self.Count = int(self.cirequest.get("Count", 30))
    if self.Count > self.MaxArticles:
        self.Count=self.MaxArticles
    self.ArticleID = self.cirequest.get("ArticleId", None)
    self.AuthorID = self.cirequest.get("AuthorId", None)
    # Boolean feature flags controlling which fields the feed emits and
    # whether they are wrapped in CDATA.
    self.Title = utils.toBool(self.cirequest.get("Title", True))
    self.TitleAsCDATA = utils.toBool(self.cirequest.get("TitleAsCDATA", False))
    self.Intro = utils.toBool(self.cirequest.get("Introduction", True))
    self.IntroAsCDATA = utils.toBool(self.cirequest.get('IntroductionAsCDATA', False))
    self.Content = utils.toBool(self.cirequest.get("Content", True))
    self.ContentAsCDATA = utils.toBool(self.cirequest.get("ContentAsCDATA", False))
    self.Image = utils.toBool(self.cirequest.get("Image", True))
    self.ImageTitle = utils.toBool(self.cirequest.get("ImageTitle", True))
    self.Authors = utils.toBool(self.cirequest.get("Authors", True))
    self.AuthorsFullName = utils.toBool(self.cirequest.get("AuthorsFullName", False))
    self.html = utils.toBool(self.cirequest.get("html", False))
    # HTML output implies CDATA wrapping for title and content.
    if self.html:
        self.ContentAsCDATA = True
        self.TitleAsCDATA = True
    self.catalog = getToolByName(self.context, 'portal_catalog')
    if self.Zones:
        #we got some Section names we have to look up ID's for
        for sectionName in self.Zones:
            ### IS there a way to get an exact match??????
            brains = self.catalog.searchResults(portal_type="Section",Title=sectionName)
            uid = brains[0].UID #first element by default
            # Prefer a case-insensitive exact title match over the
            # catalog's fuzzy first hit.
            for brain in brains:
                if brain.Title.upper()==sectionName.upper():
                    uid=brain.UID
                    break
            self.ZoneIDs.append(uid)
    self.Article = Types.ArticleFactory(lookupObj_func=self.subsite.reference_catalog.lookupObject, request=self.request)
    #set the timezone
    tzOffset = getattr(self.context, 'getTimezoneOffset', lambda: 0)()
    #we assume US/Eastern when we don't have a timezone offset set.
    #XXX we shouldn't set numeric offsets, we should set tzinfo objects to support DST
    if tzOffset == 0 or tzOffset is None:
        self.subsiteTz = pytz.timezone('US/Eastern')
    else:
        #convert to minutes
        offset = int(tzOffset*60)
        self.subsiteTz = pytz.FixedOffset(offset)
def __init__(self, context, request):
    """Parse mobile-app feed request parameters into view attributes.

    Reformatted duplicate of the variant above in this file — keep the
    two in sync. Copies the (case-insensitive) form parameters, resolves
    zone names to UIDs via the portal catalog, builds the Article factory
    and determines the subsite timezone. Python 2 code (``iteritems``).
    """
    super(MobappBaseView, self).__init__(context, request)
    # Case-insensitive copy of the request form, so parameter-name casing
    # from the app does not matter.
    self.cirequest = utils.CaseInsensitiveDict()
    for key, value in request.form.iteritems():
        self.cirequest[key] = value
    #although this should ALWAYS be called in subsite context...
    self.subsite = context.getSubsite()
    self.SiteID = self.cirequest.get('SiteID', 0) #Ignored.
    # Zones may be given by ID and/or by name, pipe-separated.
    self.ZoneID = self.cirequest.get("ZoneID", "")
    self.ZoneIDs = self.ZoneID.split("|")
    if self.ZoneIDs == [""]:
        self.ZoneIDs = []
    self.Zone = self.cirequest.get("Zone", "")
    self.Zones = self.Zone.split("|")
    if self.Zones == [""]:
        self.Zones = []
    self.DayCount = int(self.cirequest.get("DayCount", self.DefaultDays))
    # Clamp the requested article count to the class maximum.
    self.Count = int(self.cirequest.get("Count", 30))
    if self.Count > self.MaxArticles:
        self.Count = self.MaxArticles
    self.ArticleID = self.cirequest.get("ArticleId", None)
    self.AuthorID = self.cirequest.get("AuthorId", None)
    # Boolean feature flags controlling which fields the feed emits and
    # whether they are wrapped in CDATA.
    self.Title = utils.toBool(self.cirequest.get("Title", True))
    self.TitleAsCDATA = utils.toBool(
        self.cirequest.get("TitleAsCDATA", False))
    self.Intro = utils.toBool(self.cirequest.get("Introduction", True))
    self.IntroAsCDATA = utils.toBool(
        self.cirequest.get('IntroductionAsCDATA', False))
    self.Content = utils.toBool(self.cirequest.get("Content", True))
    self.ContentAsCDATA = utils.toBool(
        self.cirequest.get("ContentAsCDATA", False))
    self.Image = utils.toBool(self.cirequest.get("Image", True))
    self.ImageTitle = utils.toBool(self.cirequest.get("ImageTitle", True))
    self.Authors = utils.toBool(self.cirequest.get("Authors", True))
    self.AuthorsFullName = utils.toBool(
        self.cirequest.get("AuthorsFullName", False))
    self.html = utils.toBool(self.cirequest.get("html", False))
    # HTML output implies CDATA wrapping for title and content.
    if self.html:
        self.ContentAsCDATA = True
        self.TitleAsCDATA = True
    self.catalog = getToolByName(self.context, 'portal_catalog')
    if self.Zones:
        #we got some Section names we have to look up ID's for
        for sectionName in self.Zones:
            ### IS there a way to get an exact match??????
            brains = self.catalog.searchResults(portal_type="Section", Title=sectionName)
            uid = brains[0].UID #first element by default
            # Prefer a case-insensitive exact title match over the
            # catalog's fuzzy first hit.
            for brain in brains:
                if brain.Title.upper() == sectionName.upper():
                    uid = brain.UID
                    break
            self.ZoneIDs.append(uid)
    self.Article = Types.ArticleFactory(
        lookupObj_func=self.subsite.reference_catalog.lookupObject,
        request=self.request)
    #set the timezone
    tzOffset = getattr(self.context, 'getTimezoneOffset', lambda: 0)()
    #we assume US/Eastern when we don't have a timezone offset set.
    #XXX we shouldn't set numeric offsets, we should set tzinfo objects to support DST
    if tzOffset == 0 or tzOffset is None:
        self.subsiteTz = pytz.timezone('US/Eastern')
    else:
        #convert to minutes
        offset = int(tzOffset * 60)
        self.subsiteTz = pytz.FixedOffset(offset)