def get_context_data(self, **kwargs):
    """Extend the template context with version info and install-folder stats."""
    context = super(ViewSafetySystemInfo, self).get_context_data(**kwargs)

    version = Version()

    # versioning block shown on the system info page
    context['info'] = {
        'product_name': 'Web Safety for Squid Proxy',
        'installed_version': version.installed,
        'latest_version': version.latest,
        # 0 - no_need_to_upgrade, 1 - may_upgrade, 2 - should_upgrade, 3 - must_upgrade
        'need_to_upgrade': version.need_to_upgrade(),
        'whats_new': version.whats_new
    }

    # add hardcoded settings: install locations plus their on-disk sizes
    for key, folder in (('WEBSAFETY_ETC_DIR', Paths.etc_dir()),
                        ('WEBSAFETY_VAR_DIR', Paths.var_dir()),
                        ('WEBSAFETY_BIN_DIR', Paths.bin_dir())):
        context[key] = folder
        context[key + '_SIZE'] = long(FolderInfo(folder).get_size())

    # build and platform identification
    context['WEBSAFETY_VERSION'] = Build.version()
    context['WEBSAFETY_ARCH'] = Distrib.arch()
    context['WEBSAFETY_DISTRIB'] = Distrib.name()
    context['WEBSAFETY_SYSTEM'] = System.name()

    return context
def archive(self):
    """Store a zipped snapshot of /opt/websafety/etc in the shared cluster folder.

    Takes a consistent copy of the sqlite config database first (under a write
    lock), prunes old archives so only the most recent ones remain, then zips
    the whole etc folder into "<timestamp>-<version>.zip".
    """
    # ensure the shared cluster folder exists
    zip_dir = os.path.join(Paths.var_dir(), "cluster")
    if not os.path.exists(zip_dir):
        os.makedirs(zip_dir)

    # copy the database; BEGIN IMMEDIATE takes a write lock so the file copy
    # cannot race with a concurrent writer
    backup_db_file = os.path.join(Paths.etc_dir(), "config.sqlite")
    db_file = os.path.join(Paths.var_dir(), "db", "config.sqlite")

    # BUG FIX: acquire the cursor OUTSIDE the try block - previously, if
    # connection.cursor() raised, the finally clause referenced an unbound
    # name 'c' and masked the real error with a NameError
    c = connection.cursor()
    try:
        c.execute("BEGIN IMMEDIATE")
        shutil.copyfile(db_file, backup_db_file)
    finally:
        c.close()
        connection.rollback()

    # remove old configs so that, together with the archive added below, only
    # the last 10 remain. BUG FIX: os.listdir() order is unspecified - sort by
    # name (files are "<timestamp>-<version>.zip", so name order is age order)
    # to make sure the OLDEST archives are the ones removed
    files = sorted(f for f in os.listdir(zip_dir)
                   if os.path.isfile(os.path.join(zip_dir, f)))
    for f in files[:-9]:
        os.remove(os.path.join(zip_dir, f))

    # source and target paths
    etc_dir = Paths.etc_dir()
    zip_file = os.path.join(
        zip_dir, "%s-%s.zip" % (str(self.timestamp), str(self.version)))

    # zip it
    zipdir(etc_dir, zip_file)
def get(self, request, *args, **kwargs):
    """Report wsmgrd daemon status, disk usage and monitor DB size as JSON.

    Returns an HttpResponse with a single-element JSON array; 'error'/'desc'
    are set when the process list cannot be read or wsmgrd is not running.
    """
    # allocate default response
    data = {'error': False, 'desc': '', 'info': {}, 'stats': {}}

    # dump amount of free place in the monitor upload folder
    monitor_dir = os.path.join(Paths.var_dir(), "monitor")
    (success, info) = CommandDiskFree().run(monitor_dir)
    if success:
        data['stats']['disk'] = {
            'free': info['avail'],
            'used': info['used'],
            'total': info['size'],
            'ratio': info['ratio'].replace('%', '')
        }

    # we also need to know how many files to upload
    data['stats']['queue'] = FolderInfo(monitor_dir).get_size()

    # database size. FIX: branch on the configured engine first - previously
    # get_mysqldb_size() was always executed (a useless MySQL round-trip) and
    # its result then discarded whenever the monitor DB was sqlite
    if DATABASES['monitor']['ENGINE'] == 'django.db.backends.sqlite3':
        data['stats']['dbtype'] = 'sqlite'
        data['stats']['dbsize'] = os.path.getsize(DATABASES['monitor']['NAME'])
    else:
        data['stats']['dbtype'] = 'mysql'
        data['stats']['dbsize'] = self.get_mysqldb_size()

    # get processes of the wsmgrd daemon
    processes = []
    try:
        processes = CommandPs(WsUser.name()).run()
    except Exception as e:
        data['error'] = True
        data['desc'] = str(e)

    # see if wsmgrd binary is there and fill the stats
    found = False
    for process in processes:
        if process['path'].find(Paths.bin_dir() + "/wsmgrd") != -1:
            found = True
            data['info']['path'] = process['path']
            data['info']['pid'] = process['pid']
            data['info']['user'] = process['user']
            data['stats']['cpu_time'] = process['cpu_time']
            data['stats']['cpu_usage'] = process['cpu_usage']
            data['stats']['mem_size'] = int(process['mem_size'])
            data['stats']['mem_usage'] = process['mem_usage']

    # no wsmgrd daemon means something is really bad, tell the caller
    if not found:
        data['error'] = True
        data['desc'] = 'the wsmgrd daemon is not running'

    # add the processes anyway
    data['info']['processes'] = processes

    # and store as array
    return HttpResponse(json.dumps([data], ensure_ascii=True), content_type='application/json')
def do_reset(self):
    """Reset the configuration database to factory defaults, keeping a backup.

    Raises Exception when the factory-default database file is missing.
    """
    db_folder = os.path.join(Paths.var_dir(), "db")
    src_db = os.path.join(db_folder, "config.sqlite.default")
    dst_db = os.path.join(db_folder, "config.sqlite")
    bak_db = os.path.join(db_folder, "config.sqlite.backup")

    # the factory-default database must be present
    if not os.path.isfile(src_db):
        raise Exception(
            "Default database '%s' not found or not accessible!" % src_db)

    # keep the current database as the (single) backup copy
    if os.path.isfile(bak_db):
        os.unlink(bak_db)
    shutil.copy2(dst_db, bak_db)

    # finally overwrite the current database with the default one
    shutil.copy2(src_db, dst_db)
def set(self, value):
    """Set the system timezone and persist it to the timezone.setting file.

    Returns the (ret, stdout, stderr) tuple from the elevated helper script;
    raises Exception when the timezone name is unknown to pytz.
    """
    assert(len(value) > 0)

    # refuse timezones that pytz does not know about
    if value not in pytz.all_timezones:
        raise Exception("Wrong timezone %s (not found pytz.all_timezones)" % value)

    # delegate the actual system change to the elevated helper script
    args = [
        os.path.join(Paths.bin_dir(), "timezone.py"),
        "--timezone=%s" % value,
        "--system=%s" % System.name(),
        "--distrib=%s" % Distrib.name()
    ]
    (ret, stdout, stderr) = CommandElevated().run(args)

    # a zero return value means the system zone was set; remember the choice
    # in the console's timezone.setting file too
    if ret == 0:
        tz_file = os.path.join(Paths.var_dir(), "console", "console", "timezone.setting")
        with open(tz_file, "w") as fout:
            fout.write(value)

    return (ret, stdout, stderr)
def get(self):
    """Return the wssyncd log contents, or a readable error description."""
    log_path = os.path.join(Paths.var_dir(), "log", "wssyncd.log")
    try:
        contents = FileReader(log_path).read()
    except Exception as e:
        contents = "%s does not exist yet or is not accessible, error - %s" % (log_path, str(e))
    return contents
def get_context_data(self, **kwargs):
    """Add monitoring DB engine/size and upload-folder size to the context."""
    context = super(View_MonitorInfo, self).get_context_data(**kwargs)

    # set if we use sqlite and how many ips we have
    engine = DATABASES['monitor']['ENGINE']
    is_sqlite = (engine == 'django.db.backends.sqlite3')

    # amend context
    context['is_sqlite'] = is_sqlite
    context['db_engine'] = engine

    if is_sqlite:
        # a sqlite database is a single file - just stat it
        context['db_size'] = os.path.getsize(DATABASES['monitor']['NAME'])
    else:
        # for mysql we ask it directly; best effort, fall back to 0 on error
        context['db_size'] = 0
        try:
            cursor = connection.cursor()
            cursor.execute("SELECT sum(round(((data_length + index_length) / 1024 / 1024 / 1024), 2)) as 'Size in GB' FROM information_schema.TABLES WHERE table_schema = 'websafety_monitor'")
            row = cursor.fetchone()

            # here we get error - no such table: information_schema.TABLES why???
            context['db_size'] = row[0]
        except Exception as e:
            pass

    context['upload_size'] = FolderInfo(os.path.join(Paths.var_dir(), "monitor")).get_size()
    return context
def categorize(self, domain):
    """Run the external categories_checker binary for a domain.

    Returns the list of category names, or an empty list when the checker
    fails, errors out or reports no categories.
    """
    try:
        # the checker binary carries an .exe suffix on windows
        name = "categories_checker"
        if System.WS_WINDOWS == System.name():
            name += ".exe"
        exe = os.path.join(Paths.bin_dir(), name)

        args = [
            exe,
            "--definitions=%s" % (os.path.join(
                Paths.var_dir(), "spool", "categories", "definitions.dat")),
            "--domain=%s" % domain
        ]
        (exit_code, stdout, stderr) = Command().run(args)

        if 0 == exit_code:
            # output looks like "[cat1:cat2:...]" - peel brackets, then split
            data = stdout.strip().strip("]").strip("[").strip()
            if len(data) == 0:
                return []
            return data.split(':')
    except Exception as e:
        pass
    return []
def collect_zip(self):
    """Assemble the uploaded backup archive into a temp file.

    Returns the full path of the written websafety_backup.zip.
    """
    # will write zip data to a temp folder, recreated from scratch.
    # FIX: ignore_errors=True replaces the previous bare 'except:' which
    # silently swallowed every exception type (even KeyboardInterrupt)
    folder = os.path.join(Paths.var_dir(), "temp", "websafety_restore")
    shutil.rmtree(folder, ignore_errors=True)
    os.makedirs(folder)

    # collect all uploaded chunks into it
    data = self.request.FILES['file']
    name = os.path.join(folder, "websafety_backup.zip")
    try:
        os.unlink(name)
    except OSError:
        # the file did not exist (the folder was just created) - fine
        pass
    with open(name, "wb") as fout:
        for chunk in data.chunks():
            fout.write(chunk)

    # fine, zip is there
    return name
def collect_license(self):
    """Write the uploaded license plus current certificate files to a temp folder.

    Returns the temp folder path holding license.pem, diladele.pem and
    websafety.pem.
    """
    # recreate the temp upload folder from scratch
    folder = os.path.join(Paths.var_dir(), "temp", "license_upload")
    try:
        shutil.rmtree(folder)
    except Exception as e:
        pass
    os.makedirs(folder)

    # stream all uploaded chunks into license.pem
    uploaded = self.request.FILES['file']
    name = os.path.join(folder, "license.pem")
    try:
        os.unlink(name)
    except Exception as e:
        pass
    with open(name, "wb") as fout:
        for chunk in uploaded.chunks():
            fout.write(chunk)

    # also copy out the diladele.pem and websafety.pem files into temp folder too
    for pem in ("diladele.pem", "websafety.pem"):
        shutil.copy(os.path.join(Paths.etc_dir(), pem), os.path.join(folder, pem))

    # fine, license is there
    return folder
def get(self, request, *args, **kwargs):
    """Report wsicapd daemon status, disk usage and ICAP connections as JSON."""
    # default response skeleton
    data = {'error': False, 'desc': '', 'info': {}, 'stats': {}}

    # dump amount of free place
    (success, info) = CommandDiskFree().run(Paths.var_dir())
    if success:
        data['stats']['disk'] = {
            'free': info['avail'],
            'used': info['used'],
            'total': info['size'],
            'ratio': info['ratio'].replace('%', '')
        }

    # get processes of the wsicapd daemon
    processes = []
    try:
        processes = CommandPs(WsUser.name()).run()
    except Exception as e:
        data['error'] = True
        data['desc'] = str(e)

    # see if wsicapd binary is there and fill the stats
    found = False
    for process in processes:
        if process['path'].find(Paths.bin_dir() + "/wsicapd") != -1:
            found = True
            data['info']['path'] = process['path']
            data['info']['pid'] = process['pid']
            data['info']['user'] = process['user']
            data['stats']['cpu_time'] = process['cpu_time']
            data['stats']['cpu_usage'] = process['cpu_usage']
            data['stats']['mem_size'] = int(process['mem_size'])
            data['stats']['mem_usage'] = process['mem_usage']

    # no wsicapd daemon means something is really bad, tell the caller
    if not found:
        data['error'] = True
        data['desc'] = 'the wsicapd daemon is not running'

    # add the processes anyway
    data['info']['processes'] = processes

    # now read the connections on the configured icap port (best effort)
    connections = []
    try:
        icap_port = str(Network.objects.first().wsicap_port)
        connections = CommandNetstat().run(icap_port)
    except Exception as e:
        pass
    data['info']['connections'] = connections

    # and store as array
    return HttpResponse(json.dumps([data], ensure_ascii=True), content_type='application/json')
def run(self):
    """Return the disk usage ratio (e.g. '42%') of the var-dir partition.

    Runs 'df -k' on Paths.var_dir() and scans the first data line for the
    percentage token; returns '0%' when df fails or no token is found.
    """
    (exit_code, stdout, stderr) = Command.run(self, ["df", "-k", Paths.var_dir()])
    if exit_code == 0:
        # df prints a header line first; the data line for the queried path
        # comes second. BUG FIX: the original assigned the LIST returned by
        # filter(None, ...) and then called .split(' ') on it, which raises
        # AttributeError - the percentage was never actually returned
        for line in stdout.split('\n')[1:2]:
            for token in line.split(' '):
                if token.find('%') != -1:
                    return token
    return '0%'
def switch_to_sqlite():
    """Switch the monitoring database from MySQL back to SQLite and reset it.

    Removes the MySQL flag file if present, then swaps in a fresh copy of the
    default sqlite database; raises Exception when a leftover backup blocks
    the operation.
    """
    # first see if the flag is there
    flag = os.path.join(Paths.var_dir(), "console", "console", "database.mysql")
    if os.path.exists(flag):
        logging.info(
            "Current system uses MySQL as monitoring database (flag file %s exists)." % flag)
        logging.info("Removing flag to switch to SQLite...")
        os.remove(flag)
        logging.info("Flag removed successfully")

    # now resetting the SQLite database to default state
    db_folder = os.path.join(Paths.var_dir(), "db")
    db_cur = os.path.join(db_folder, "monitor.sqlite")
    db_old = os.path.join(db_folder, "monitor.sqlite.old")
    db_def = os.path.join(db_folder, "monitor.sqlite.default")

    # refuse to clobber a leftover backup - the user must look at it first
    if os.path.exists(db_old):
        logging.error(
            "Old version of database backup exists (%s), please remove it manually to continue." % db_old)
        raise Exception("FAILURE")

    # move the current one to the old one
    os.rename(db_cur, db_old)

    # if case when there is no default database we must generate it
    if not os.path.exists(db_def):
        CommandDatabase().run(["--db=sqlite"])

    # now copy default to normal one, then drop the backup
    shutil.copy(db_def, db_cur)
    os.remove(db_old)
def get(self, request, *args, **kwargs):
    """Serve the contents of a generated report file.

    Reads var/reports/<name>/<file> and returns it as an HttpResponse; a read
    failure propagates to Django's standard error handling, exactly as the
    original 'except ...: raise' did.
    """
    job_name = self.kwargs["name"]
    job_file = self.kwargs["file"]
    fullname = os.path.join(Paths.var_dir(), "reports", job_name, job_file)

    # read the whole report file; the original wrapped this in a try block
    # whose only handler re-raised, which is equivalent to no handler at all
    with open(fullname, "r") as fin:
        data = fin.read()

    # BUG FIX: the original read 'data' but returned nothing - a Django view
    # must return an HttpResponse, so None here breaks the request.
    # NOTE(review): text/plain assumed - confirm against the report format
    return HttpResponse(data, content_type='text/plain')
def do_restore(self, unpack_dir, version2, version4):
    """Replace the configuration database with one from an unpacked backup.

    Only allowed when the backup was produced by exactly this build version;
    the current database is kept aside as config.sqlite.backup.
    """
    # restore is ONLY possible for the same version
    if version4 != Build.version():
        raise Exception(
            "Cannot restore, version mismatch: uploaded %s != our %s" % (version4, Build.version()))

    # we do restore by replacing the database
    db_folder = os.path.join(Paths.var_dir(), "db")
    src_db = os.path.join(unpack_dir, "var", "config.sqlite")
    dst_db = os.path.join(db_folder, "config.sqlite")
    bak_db = os.path.join(db_folder, "config.sqlite.backup")

    if not os.path.isfile(src_db):
        raise Exception(
            "Backup database '%s' not found or not accessible!" % src_db)

    # keep the current database as a backup, then move the uploaded one in place
    if os.path.isfile(bak_db):
        os.unlink(bak_db)
    shutil.copy2(dst_db, bak_db)
    shutil.move(src_db, dst_db)
def get(self):
    """Return a dict mapping each definition-set name to its current file date."""
    spool = os.path.join(Paths.var_dir(), "spool")
    reader = DefinitionFile()
    # every definition set keeps its state in <spool>/<name>/current.xml
    return dict(
        (name, reader.get_date(os.path.join(spool, name, "current.xml")))
        for name in ('adblock', 'privacy', 'categories', 'youtube', 'iwf')
    )
def dump_db(self):
    """Dump the Django database to config.dump.json via a temporary .new file.

    The dump is written to a .new file and swapped in only after a successful
    dump, so a failure never destroys the previous good copy; errors are
    printed rather than raised.
    """
    output_cur = os.path.join(Paths.var_dir(), "db", "config.dump.json")
    output_new = output_cur + ".new"

    # make sure no stale temp file is left around from a previous run
    if os.path.isfile(output_new):
        os.unlink(output_new)

    try:
        call_command('dumpdata', '--output=%s' % output_new)
        if os.path.isfile(output_cur):
            os.unlink(output_cur)
        shutil.move(output_new, output_cur)
    except Exception as e:
        print(str(e))
def get(self, request, *args, **kwargs):
    """Send the generated PDF report for a job, or its log while not yet ready."""
    job_name = self.kwargs["name"]

    # without ReportLab we cannot produce PDFs at all - explain how to get it
    if not valid_pdf_install():
        return HttpResponse(
            "<body><h2>ReportLab is not installed</h2><p>Please visit <a href=\"https://docs.diladele.com/\">https://docs.diladele.com</a> " +
            "for installation instructions (for example for " +
            "<a href='https://docs.diladele.com/administrator_guide_6_1/install/ubuntu16/apache.html'>Ubuntu 16 LTS</a>).</p></body>",
            content_type="text/html"
        )

    # a finished job has data/report.pdf; otherwise fall back to the job log
    directory = os.path.join(Paths.var_dir(), "reports", job_name)
    if os.path.isfile(os.path.join(directory, "data", "report.pdf")):
        return self.send_pdf_report(directory, job_name)
    return self.send_pdf_log(directory)
def get(self):
    """Return the larger of the users/ips counts from report.json, 0 if absent."""
    users = 0
    ips = 0
    report_json = os.path.join(Paths.var_dir(), "db", "report.json")
    try:
        with open(report_json) as fin:
            data = json.load(fin)
        users = data['users']
        ips = data['ips']
    except Exception as e:
        # best effort - the report may not have been generated yet
        pass
    return max(users, ips)
def get_queryset(self):
    """List custom categories with their on-disk folder, size and mtime."""
    # we must always sync file system -> table on every get
    CustomCategorySyncer().sync_from_disk()

    base = os.path.join(Paths.var_dir(), "spool", "categories_custom")

    # construct the list, one entry per category ordered by name
    object_list = []
    for item in CustomCategory.objects.order_by('name'):
        category_dir = os.path.join(base, item.name)
        object_list.append({
            'name': item.name,
            'title': item.title,
            'dir': category_dir,
            'size': FolderInfo(category_dir).get_size(),
            'modified': FileInfo(os.path.join(category_dir, "domains")).last_modified()
        })
    return object_list
def sync_from_disk(self):
    """Rebuild the CustomCategory table from the folders found on disk.

    An immediate sub-folder of spool/categories_custom counts as a category
    only when it contains a 'domains' file.
    """
    base = os.path.join(Paths.var_dir(), "spool", "categories_custom")

    # collect candidate categories from the file system
    found = []
    for sub in next(os.walk(base))[1]:
        path = os.path.join(base, sub)
        if os.path.exists(os.path.join(path, "domains")):
            found.append({'name': sub, 'path': path})

    # drop existing categories
    CustomCategory.objects.all().delete()

    # upload all new
    for entry in found:
        category = CustomCategory()
        category.name = entry['name']
        category.title = entry['name'].title()
        category.description = "%s, size on disk %s" % (
            entry['path'], FolderInfo(entry['path']).get_size())
        category.save()
def switch_to_mysql():
    """Switch the monitoring database to MySQL and (re)initialize it.

    Creates the database.mysql flag file when absent, then hands the MySQL
    connection settings to the database-initializing script.
    """
    # first see if the flag is there
    flag = os.path.join(Paths.var_dir(), "console", "console", "database.mysql")
    if not os.path.exists(flag):
        logging.info(
            "Current system uses SQLite as monitoring database (flag file %s does not exist)." % flag)
        logging.info("Adding flag to switch to MySQL...")
        with open(flag, "w") as fout:
            fout.write(
                "Switches local monitoring database to MySQL (connection settings are in /opt/websafety/var/console/console/settings.py)"
            )
        logging.info("Flag added successfully")
    else:
        logging.warning(
            "Current system already uses MySQL as monitoring database (flag file %s exists)." % flag)
        logging.warning(
            "Your current monitoring database will be reinitialized")

    # pass the MySQL settings as command line parameters to the database initializing script
    mysql = WEBSAFETY_MONITOR_DB_MYSQL
    args = [
        "--db=mysql",
        "--host=%s" % mysql['HOST'],
        "--port=%d" % int(mysql['PORT']),
        "--user=%s" % mysql['USER'],
        "--pass=%s" % mysql['PASSWORD']
    ]

    # and generate
    CommandDatabase().run(args)
def get(self, request, *args, **kwargs):
    """Build a zip backup of the configuration and send it as a download.

    GET parameters (via BackUpForm) select what to include: the sqlite
    database, the license, the certificates and the Active Directory files.
    """
    # construct the form from GET parameters
    form = BackUpForm(request.GET)

    # call to populate cleaned data array; the result is intentionally
    # ignored - missing flags simply fall back to the defaults used below
    form.is_valid()

    # recreate the temp folder structure. FIX: ignore_errors=True replaces
    # the previous bare 'except:' which swallowed every exception type
    folder = os.path.join(Paths.var_dir(), "temp", "websafety_backup")
    shutil.rmtree(folder, ignore_errors=True)

    etc_dir = os.path.join(folder, "etc")
    var_dir = os.path.join(folder, "var")

    # and recreate (etc_dir itself is created by copytree below)
    os.makedirs(var_dir)

    # copy db
    if form.cleaned_data.get('include_sqlite', True):
        shutil.copy2(os.path.join(Paths.var_dir(), "db", "config.sqlite"),
                     os.path.join(var_dir, "config.sqlite"))
        shutil.copy2(
            os.path.join(Paths.var_dir(), "db", "config.dump.json"),
            os.path.join(var_dir, "config.dump.json"))

    # construct patterns to ignore, based on what the user chose to exclude
    to_ignore = []
    if not form.cleaned_data.get('include_lic', False):
        to_ignore.append("license.pem")
    if not form.cleaned_data.get('include_certs', False):
        to_ignore.extend(["myca.pem", "myca.der"])
    if not form.cleaned_data.get('include_ad', False):
        to_ignore.extend(["krb5.keytab", "krb5.conf", "ldaps.pem"])
    shutil.copytree(Paths.etc_dir(), etc_dir,
                    ignore=shutil.ignore_patterns(*to_ignore))

    # pack it
    today = datetime.date.today()
    arc_name = "websafety_backup_%s_%s" % (Build.version(),
                                           today.strftime("%Y_%m_%d"))
    arc_path = os.path.join(Paths.var_dir(), "temp", arc_name)
    shutil.make_archive(arc_path, 'zip', folder)

    # and send it
    with open(arc_path + ".zip", "rb") as fin:
        response = HttpResponse(fin.read(),
                                content_type="application/x-compressed")
    response['Content-Disposition'] = "attachment; filename=\"%s.zip\"" % arc_name

    # and remove the archive now that it is fully read into the response
    os.unlink(arc_path + ".zip")

    # and return response
    return response
def get():
    """Return the full path of the websafety log folder."""
    log_folder = os.path.join(Paths.var_dir(), "log")
    return log_folder