def get_context_data(self, **kwargs):
    """Add product/version info and install-directory paths + sizes to the template context.

    Returns the context dict produced by the parent view, extended with an
    'info' dict (version/upgrade state) and WEBSAFETY_* settings keys.
    """
    context = super(ViewSafetySystemInfo, self).get_context_data(**kwargs)

    v = Version()
    context['info'] = {
        'product_name': 'Web Safety for Squid Proxy',
        'installed_version': v.installed,
        'latest_version': v.latest,
        # 0 - no_need_to_upgrade, 1 - may_upgrade, 2 - should_upgrade, 3 - must_upgrade
        'need_to_upgrade': v.need_to_upgrade(),
        'whats_new': v.whats_new,
    }

    # Hardcoded install locations and their on-disk sizes.
    # NOTE: the original wrapped sizes in long(), which does not exist on
    # Python 3; int() behaves identically on Python 2 (auto-promotes).
    etc_dir = Paths.etc_dir()
    var_dir = Paths.var_dir()
    bin_dir = Paths.bin_dir()

    context['WEBSAFETY_ETC_DIR'] = etc_dir
    context['WEBSAFETY_ETC_DIR_SIZE'] = int(FolderInfo(etc_dir).get_size())
    context['WEBSAFETY_VAR_DIR'] = var_dir
    context['WEBSAFETY_VAR_DIR_SIZE'] = int(FolderInfo(var_dir).get_size())
    context['WEBSAFETY_BIN_DIR'] = bin_dir
    context['WEBSAFETY_BIN_DIR_SIZE'] = int(FolderInfo(bin_dir).get_size())

    context['WEBSAFETY_VERSION'] = Build.version()
    context['WEBSAFETY_ARCH'] = Distrib.arch()
    context['WEBSAFETY_DISTRIB'] = Distrib.name()
    context['WEBSAFETY_SYSTEM'] = System.name()

    return context
def get_context_data(self, **kwargs):
    """Add monitor-database engine/size and upload-spool size to the template context.

    For sqlite the size is the database file's size on disk; for MySQL it is
    queried (best effort) from information_schema. On failure db_size stays 0.
    """
    context = super(View_MonitorInfo, self).get_context_data(**kwargs)

    engine = DATABASES['monitor']['ENGINE']
    is_sqlite = (engine == 'django.db.backends.sqlite3')

    context['is_sqlite'] = is_sqlite
    context['db_engine'] = engine

    if is_sqlite:
        # sqlite stores the whole database in a single file
        context['db_size'] = os.path.getsize(DATABASES['monitor']['NAME'])
    else:
        # For MySQL ask information_schema directly. This view exists only on
        # MySQL — running the same query against sqlite raises
        # "no such table: information_schema.TABLES", hence the sqlite branch
        # above and the best-effort default of 0 here.
        context['db_size'] = 0
        try:
            cursor = connection.cursor()
            cursor.execute("SELECT sum(round(((data_length + index_length) / 1024 / 1024 / 1024), 2)) as 'Size in GB' FROM information_schema.TABLES WHERE table_schema = 'websafety_monitor'")
            row = cursor.fetchone()
            context['db_size'] = row[0]
        except Exception:
            # deliberate best effort: leave db_size at 0 rather than break the page
            pass

    # size of files queued for upload in the monitor spool directory
    context['upload_size'] = FolderInfo(os.path.join(Paths.var_dir(), "monitor")).get_size()

    return context
def get(self, request, *args, **kwargs):
    """Return a JSON status report for the wsmgrd daemon as a one-element array.

    The response carries disk usage of the monitor spool, database type/size,
    and per-process stats for the wsmgrd daemon; 'error'/'desc' are set when
    the process list cannot be read or the daemon is not running.
    """
    # default response skeleton
    data = {'error': False, 'desc': '', 'info': {}, 'stats': {}}

    monitor_dir = os.path.join(Paths.var_dir(), "monitor")

    # free/used disk space where the monitor spool lives
    (success, info) = CommandDiskFree().run(monitor_dir)
    if success:
        data['stats']['disk'] = {
            'free': info['avail'],
            'used': info['used'],
            'total': info['size'],
            'ratio': info['ratio'].replace('%', ''),
        }

    # how many bytes are still queued for upload
    data['stats']['queue'] = FolderInfo(monitor_dir).get_size()

    # Branch on the configured engine BEFORE measuring: the original always
    # called get_mysqldb_size() (a live MySQL query) and then discarded the
    # result when running on sqlite.
    if DATABASES['monitor']['ENGINE'] == 'django.db.backends.sqlite3':
        data['stats']['dbtype'] = 'sqlite'
        data['stats']['dbsize'] = os.path.getsize(DATABASES['monitor']['NAME'])
    else:
        data['stats']['dbtype'] = 'mysql'
        data['stats']['dbsize'] = self.get_mysqldb_size()

    # list processes owned by the websafety user
    processes = []
    try:
        processes = CommandPs(WsUser.name()).run()
    except Exception as e:
        data['error'] = True
        data['desc'] = str(e)

    # find the wsmgrd binary among them and copy its stats
    wsmgrd_path = Paths.bin_dir() + "/wsmgrd"
    found = False
    for process in processes:
        if wsmgrd_path in process['path']:
            found = True
            data['info']['path'] = process['path']
            data['info']['pid'] = process['pid']
            data['info']['user'] = process['user']

            data['stats']['cpu_time'] = process['cpu_time']
            data['stats']['cpu_usage'] = process['cpu_usage']
            data['stats']['mem_size'] = int(process['mem_size'])
            data['stats']['mem_usage'] = process['mem_usage']

    # no wsmgrd daemon means something is really bad — tell the caller
    if not found:
        data['error'] = True
        data['desc'] = 'the wsmgrd daemon is not running'

    # add the processes anyway
    data['info']['processes'] = processes

    # and store as array
    return HttpResponse(json.dumps([data], ensure_ascii=True), content_type='application/json')
def get_queryset(self):
    """Return the custom categories as a list of dicts with on-disk metadata.

    Always re-syncs the file system into the CustomCategory table first, then
    reports each category's directory, size, and the mtime of its 'domains' file.
    """
    # we must always sync file system -> table on every get
    CustomCategorySyncer().sync_from_disk()

    object_list = []
    for item in CustomCategory.objects.order_by('name'):
        # renamed from 'dir' to avoid shadowing the builtin
        cat_dir = os.path.join(Paths.var_dir(), "spool", "categories_custom", item.name)
        object_list.append({
            'name': item.name,
            'title': item.title,
            'dir': cat_dir,
            'size': FolderInfo(cat_dir).get_size(),
            'modified': FileInfo(os.path.join(cat_dir, "domains")).last_modified(),
        })
    return object_list
def sync_from_disk(self):
    """Rebuild the CustomCategory table from the categories_custom spool directory.

    Scans immediate subdirectories that contain a 'domains' file, drops all
    existing CustomCategory rows, and recreates one row per directory found.
    """
    base_dir = os.path.join(Paths.var_dir(), "spool", "categories_custom")

    # next(os.walk(...))[1] yields immediate subdirectories only.
    # The original used os.walk(dir).next(), which is Python-2-only;
    # the next() builtin works on both Python 2 and 3.
    found = []
    for sub in next(os.walk(base_dir))[1]:
        path = os.path.join(base_dir, sub)
        if os.path.exists(os.path.join(path, "domains")):
            found.append({'name': sub, 'path': path})

    # drop existing categories
    CustomCategory.objects.all().delete()

    # upload all new
    for entry in found:
        category = CustomCategory()
        category.name = entry['name']
        category.title = entry['name'].title()
        category.description = "%s, size on disk %s" % (
            entry['path'], FolderInfo(entry['path']).get_size())
        category.save()
def get_size(self, folder):
    """Return the total on-disk size of *folder* as reported by FolderInfo."""
    info = FolderInfo(folder)
    return info.get_size()