def generate(self, contents):
    """Write *contents* to a temp file and apply it as the network settings.

    Stages the rendered interfaces file in a temporary file under
    etc/node (removed automatically on close) and invokes the elevated
    network_debian.py helper on it.

    Raises:
        Exception: if contents is empty or the helper exits non-zero.
    """
    # explicit validation instead of assert - asserts are stripped under -O
    if not contents:
        raise Exception("Cannot generate network settings: empty contents")

    # temp file lives in /opt/websafety/etc/node so the elevated helper can read it
    prefix = "etc_network_interfaces."
    folder = os.path.join(Paths.etc_dir(), "node")

    # delete=True removes the temp file automatically when the block exits
    with tempfile.NamedTemporaryFile(prefix=prefix, dir=folder, delete=True) as temp:
        temp.write(contents)
        temp.flush()  # ensure data hits disk before the helper opens it

        # call the sudoing binary
        exe = os.path.join(Paths.bin_dir(), "network_debian.py")
        args = [
            exe,
            "--file=%s" % temp.name,
            "--system=%s" % System.name(),
            "--distrib=%s" % Distrib.name(),
        ]
        (exit_code, stdout, stderr) = CommandElevated().run(args)
        if exit_code != 0:
            raise Exception(
                "Cannot generate network settings. Error: %d, STDOUT: %s, STDERR: %s" % (exit_code, stdout, stderr))
def archive(self):
    """Zip the etc directory into the shared cluster folder.

    Snapshots the config database under a BEGIN IMMEDIATE transaction
    (writers are blocked during the copy), prunes old archives so only
    the most recent ones remain, then zips etc into
    <timestamp>-<version>.zip.
    """
    # ensure the shared cluster folder exists
    zip_dir = os.path.join(Paths.var_dir(), "cluster")
    if not os.path.exists(zip_dir):
        os.makedirs(zip_dir)

    # copy the database while holding a write lock for a consistent snapshot
    backup_db_file = os.path.join(Paths.etc_dir(), "config.sqlite")
    db_file = os.path.join(Paths.var_dir(), "db", "config.sqlite")
    # cursor is acquired OUTSIDE the try: the original acquired it inside,
    # so a failure there made the finally clause raise NameError on c.close()
    c = connection.cursor()
    try:
        c.execute("BEGIN IMMEDIATE")
        shutil.copyfile(db_file, backup_db_file)
    finally:
        c.close()
        connection.rollback()

    # remove all old configs, only the newest stay (9 old + the new one = 10);
    # sort the names so "old" really means oldest - os.listdir order is arbitrary
    files = sorted(
        f for f in os.listdir(zip_dir)
        if os.path.isfile(os.path.join(zip_dir, f))
    )
    for f in files[:-9]:
        os.remove(os.path.join(zip_dir, f))

    # source and target paths
    etc_dir = Paths.etc_dir()
    zip_file = os.path.join(
        zip_dir, "%s-%s.zip" % (str(self.timestamp), str(self.version)))

    # zip it
    zipdir(etc_dir, zip_file)
def set(self, value):
    """Set the system timezone to *value* and persist it for the console.

    Validates the name against pytz, runs the elevated timezone.py
    helper, and on success writes the timezone.setting file.

    Returns:
        (ret, stdout, stderr) tuple from the elevated command.

    Raises:
        Exception: if value is empty or not a known pytz timezone.
    """
    # explicit validation instead of assert - asserts are stripped under -O
    if not value:
        raise Exception("Timezone value may not be empty")

    # check the provided timezone indeed exists in the pytz
    if value not in pytz.all_timezones:
        raise Exception("Wrong timezone %s (not found pytz.all_timezones)" % value)

    # save the new timezone into the system via the sudoing helper
    exe = os.path.join(Paths.bin_dir(), "timezone.py")
    args = [
        exe,
        "--timezone=%s" % value,
        "--system=%s" % System.name(),
        "--distrib=%s" % Distrib.name(),
    ]
    (ret, stdout, stderr) = CommandElevated().run(args)

    # the system zone is set if return value is 0
    if ret == 0:
        # also generate the timezone.setting file read by the console app
        tz_file = os.path.join(Paths.var_dir(), "console", "console", "timezone.setting")
        with open(tz_file, "w") as fout:
            fout.write(value)

    # and return
    return (ret, stdout, stderr)
def categorize(self, domain):
    """Return the list of categories for *domain*; empty list on any failure."""
    try:
        binary = "categories_checker"
        if System.name() == System.WS_WINDOWS:
            binary += ".exe"
        exe = os.path.join(Paths.bin_dir(), binary)

        definitions = os.path.join(
            Paths.var_dir(), "spool", "categories", "definitions.dat")

        (exit_code, stdout, stderr) = Command().run([
            exe,
            "--definitions=%s" % definitions,
            "--domain=%s" % domain,
        ])
        if exit_code == 0:
            # checker prints something like "[cat1:cat2]" - peel the
            # brackets and surrounding whitespace, then split on ':'
            payload = stdout.strip().strip("]").strip("[").strip()
            if payload:
                return payload.split(':')
            return []
    except Exception:
        # best effort by design: any failure means "no categories"
        pass
    return []
def collect_license(self):
    """Assemble the uploaded license and current pem files in a temp folder.

    Returns the folder path holding license.pem, diladele.pem and
    websafety.pem ready for validation.
    """
    folder = os.path.join(Paths.var_dir(), "temp", "license_upload")

    # start from a clean folder; it may legitimately not exist yet
    try:
        shutil.rmtree(folder)
    except Exception:
        pass
    os.makedirs(folder)

    # stream the uploaded chunks into license.pem
    upload = self.request.FILES['file']
    target = os.path.join(folder, "license.pem")
    try:
        os.unlink(target)
    except Exception:
        pass
    with open(target, "wb") as fout:
        for chunk in upload.chunks():
            fout.write(chunk)

    # also copy out the diladele.pem and websafety.pem files into the temp folder
    for pem in ("diladele.pem", "websafety.pem"):
        shutil.copy(
            os.path.join(Paths.etc_dir(), pem),
            os.path.join(folder, pem))

    # fine, license is there
    return folder
def get(self, request, *args, **kwargs):
    """Return JSON health/stats for the wsicapd daemon.

    The response is a one-element JSON array holding a dict with
    'error'/'desc' flags plus 'info' (process + connections) and
    'stats' (disk, cpu, memory) sections.
    """
    # allocate default response
    data = {'error': False, 'desc': '', 'info': {}, 'stats': {}}
    # dump amount of free place on the var partition
    (success, info) = CommandDiskFree().run(Paths.var_dir())
    if success:
        disk = {
            'free': info['avail'],
            'used': info['used'],
            'total': info['size'],
            # strip '%' so the caller gets a bare number
            'ratio': info['ratio'].replace('%', '')
        }
        data['stats']['disk'] = disk
    # get processes of the websafety user; failure here is reported to the caller
    processes = []
    try:
        processes = CommandPs(WsUser.name()).run()
    except Exception as e:
        data['error'] = True
        data['desc'] = str(e)
    # see if wsicapd binary is there and fill the stats
    found = False
    for process in processes:
        if process['path'].find(Paths.bin_dir() + "/wsicapd") != -1:
            found = True
            data['info']['path'] = process['path']
            data['info']['pid'] = process['pid']
            data['info']['user'] = process['user']
            data['stats']['cpu_time'] = process['cpu_time']
            data['stats']['cpu_usage'] = process['cpu_usage']
            data['stats']['mem_size'] = int(process['mem_size'])
            data['stats']['mem_usage'] = process['mem_usage']
    # no wsicapd daemon means something is really bad - tell the caller
    if not found:
        data['error'] = True
        data['desc'] = 'the wsicapd daemon is not running'
    # add the processes anyway
    data['info']['processes'] = processes
    # now read the connections on the icap port; best effort, empty list on failure
    connections = []
    try:
        icap_port = str(Network.objects.first().wsicap_port)
        connections = CommandNetstat().run(icap_port)
    except Exception as e:
        pass
    # add the connections
    data['info']['connections'] = connections
    # and store as array
    return HttpResponse(json.dumps([data], ensure_ascii=True), content_type='application/json')
def form_valid(self, form):
    """Save Kerberos authenticator settings from the validated form.

    Always regenerates krb5.conf for the given realm and stores the
    uploaded keytab (if any); when the user asks to enable the
    authenticator, verifies both files exist on disk and fails the
    form otherwise.
    """
    try:
        # get realm
        realm = form.cleaned_data['realm']
        # debug check it is fine - the form is expected to deliver a
        # non-empty, upper-case realm
        assert(len(realm) > 0)
        assert(realm.upper() == realm)
        # always update the kerberos conf file
        self.write_krb5_conf(realm)
        # update the kerberos keytab file if provided by user
        self.write_keytab(form.cleaned_data['krb5_spn'], form.cleaned_data.get('keytab', None))
        # see what we must do
        enable = form.cleaned_data['krb5_enable']
        if enable:
            # user wants to enable the authenticator - check everything is in place
            keytab = os.path.join(Paths.etc_dir(), "krb5.keytab")
            krb5conf = os.path.join(Paths.etc_dir(), "krb5.conf")
            if os.path.isfile(krb5conf) and os.path.isfile(keytab):
                # good everything is fine, proceed
                pass
            else:
                # no-no, we cannot enable!
                if not os.path.isfile(keytab):
                    # attach a field-level error to 'keytab' in addition
                    # to the overall form error raised below
                    errors = form._errors.setdefault("keytab", ErrorList())
                    errors.append(u"This field is required to enable Kerberos authenticator.")
                    errstr = "Cannot enable Kerberos authenticator. KeyTab file '%s' is not found! Please click on the Browse button and upload the KeyTab from your computer." % keytab
                else:
                    errstr = "Cannot enable Kerberos authenticator. krb5.conf file '%s' is not found!" % krb5conf
                # and fail
                raise Exception(errstr)
        # if we got here everything is fine (either enabled or disabled successfully)
        return super(ViewAuthNegotiateSchemeEdit, self).form_valid(form)
    except Exception as e:
        # surface message plus traceback in the form's non-field errors
        form.errors['__all__'] = form.error_class([
            "%s\n%s" % (str(e), traceback.format_exc())
        ])
    # failure
    return super(ViewAuthNegotiateSchemeEdit, self).form_invalid(form)
def get(self, request, *args, **kwargs):
    """Return JSON health/stats for the wsmgrd daemon.

    The response is a one-element JSON array with 'error'/'desc' flags
    plus 'info' (process list) and 'stats' (disk, upload queue,
    database type/size, cpu, memory) sections.
    """
    # allocate default response
    data = {'error': False, 'desc': '', 'info': {}, 'stats': {}}

    # dump amount of free space on the monitor partition
    (success, info) = CommandDiskFree().run(os.path.join(Paths.var_dir(), "monitor"))
    if success:
        data['stats']['disk'] = {
            'free': info['avail'],
            'used': info['used'],
            'total': info['size'],
            # strip '%' so the caller gets a bare number
            'ratio': info['ratio'].replace('%', '')
        }

    # we also need to know how many files are waiting to be uploaded
    data['stats']['queue'] = FolderInfo(os.path.join(Paths.var_dir(), "monitor")).get_size()

    # database type and size; only query mysql when we are NOT on sqlite
    # (the original unconditionally called get_mysqldb_size even on sqlite installs)
    if DATABASES['monitor']['ENGINE'] == 'django.db.backends.sqlite3':
        data['stats']['dbtype'] = 'sqlite'
        data['stats']['dbsize'] = os.path.getsize(DATABASES['monitor']['NAME'])
    else:
        data['stats']['dbtype'] = 'mysql'
        data['stats']['dbsize'] = self.get_mysqldb_size()

    # get processes of the websafety user; failure here is reported to the caller
    processes = []
    try:
        processes = CommandPs(WsUser.name()).run()
    except Exception as e:
        data['error'] = True
        data['desc'] = str(e)

    # see if wsmgrd binary is there and fill the stats
    found = False
    for process in processes:
        if process['path'].find(Paths.bin_dir() + "/wsmgrd") != -1:
            found = True
            data['info']['path'] = process['path']
            data['info']['pid'] = process['pid']
            data['info']['user'] = process['user']
            data['stats']['cpu_time'] = process['cpu_time']
            data['stats']['cpu_usage'] = process['cpu_usage']
            data['stats']['mem_size'] = int(process['mem_size'])
            data['stats']['mem_usage'] = process['mem_usage']

    # no wsmgrd daemon means something is really bad - tell the caller
    if not found:
        data['error'] = True
        data['desc'] = 'the wsmgrd daemon is not running'

    # add the processes anyway
    data['info']['processes'] = processes

    # and store as array
    return HttpResponse(json.dumps([data], ensure_ascii=True), content_type='application/json')
def do_reset(self):
    """Restore the default config database, backing up the current one first."""
    db_dir = os.path.join(Paths.var_dir(), "db")
    src_db = os.path.join(db_dir, "config.sqlite.default")
    dst_db = os.path.join(db_dir, "config.sqlite")
    bak_db = os.path.join(db_dir, "config.sqlite.backup")

    # we cannot reset without the pristine default database
    if not os.path.isfile(src_db):
        raise Exception(
            "Default database '%s' not found or not accessible!" % src_db)

    # keep exactly one backup: drop the previous one, save the current db
    if os.path.isfile(bak_db):
        os.unlink(bak_db)
    shutil.copy2(dst_db, bak_db)

    # and replace the live database with the default
    shutil.copy2(src_db, dst_db)
def get_context_data(self, **kwargs):
    """Add monitor database info (engine, size, upload queue size) to context.

    For sqlite the size is the database file size; for mysql the size
    is asked from information_schema (best effort, 0 on failure).
    """
    context = super(View_MonitorInfo, self).get_context_data(**kwargs)

    # set if we use sqlite
    is_sqlite = (DATABASES['monitor']['ENGINE'] == 'django.db.backends.sqlite3')

    # amend context
    context['is_sqlite'] = is_sqlite
    context['db_engine'] = DATABASES['monitor']['ENGINE']
    if is_sqlite:
        context['db_size'] = os.path.getsize(DATABASES['monitor']['NAME'])
    else:
        # for mysql we ask information_schema directly; best effort -> stays 0 on failure
        context['db_size'] = 0
        try:
            cursor = connection.cursor()
            try:
                cursor.execute("SELECT sum(round(((data_length + index_length) / 1024 / 1024 / 1024), 2)) as 'Size in GB' FROM information_schema.TABLES WHERE table_schema = 'websafety_monitor'")
                row = cursor.fetchone()
                context['db_size'] = row[0]
            finally:
                # the original leaked the cursor when execute/fetch raised
                cursor.close()
        except Exception:
            pass

    context['upload_size'] = FolderInfo(os.path.join(Paths.var_dir(), "monitor")).get_size()
    return context
def save_certificate(self, cert_contents):
    """Install a new ldaps.pem, validating the staged file before the swap.

    Raises whatever LdapsDetector().dump() raises when the certificate
    is not usable; in that case the live file is left untouched.
    """
    # stage the new certificate next to the live one
    live = os.path.join(Paths.etc_dir(), "ldaps.pem")
    staged = live + ".new"

    # drop any stale staged file; absence is fine, anything else is fatal
    try:
        os.remove(staged)
    except OSError as e:
        if e.errno != errno.ENOENT:
            raise

    # write the new pem file
    with open(staged, 'wb') as fout:
        fout.write(cert_contents)

    # validate the staged file - dump() may throw on a bad certificate
    detector = LdapsDetector()
    output = detector.dump(staged)

    # swap the validated file into place
    if os.path.isfile(live):
        os.remove(live)
    os.rename(staged, live)
def form_valid(self, form):
    """Install an uploaded users.htpasswd file, staging it next to the live one."""
    try:
        upload = self.request.FILES['file']

        # stage the upload next to the live file
        live = os.path.join(Paths.etc_dir(), "users.htpasswd")
        staged = live + ".new"

        # drop any stale staged file; absence is fine, anything else is fatal
        try:
            os.remove(staged)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise

        # stream the uploaded chunks into the staged file
        with open(staged, 'wb') as fout:
            for chunk in upload.chunks():
                fout.write(chunk)

        # swap the staged file into place
        if os.path.isfile(live):
            os.remove(live)
        os.rename(staged, live)

        # ok if we got here everything is fine
        return super(ViewAuthLocalDbImport, self).form_valid(form)
    except Exception as e:
        form.errors['__all__'] = form.error_class(["%s" % str(e)])
    # failure
    return super(ViewAuthLocalDbImport, self).form_invalid(form)
def post(self, request, *args, **kwargs):
    """Persist cluster client settings; re-render the form when invalid."""
    form = ClusterClientForm(request.POST)
    if not form.is_valid():
        return render(request, self.template_name, {'form': form})

    # form is valid - pull out the settings we persist
    cleaned = form.cleaned_data
    settings = {
        "enabled": cleaned['enabled'],
        "server_port": cleaned['server_port'],
        "server_address": cleaned['server_address'],
        "sync_interval": cleaned['sync_interval']
    }

    # write the json next to the other node settings
    writer = FileWriter(os.path.join(Paths.etc_dir(), "node"))
    writer.write('cluster_client.json', JsonDumper().dumps(settings))

    # mark as needing restart
    messages.info(request, "need_squid_restart")
    return HttpResponseRedirect(self.get_success_url())
def form_valid(self, form):
    """Enable the disk cache, regenerate squid config and reinitialize the cache."""
    try:
        # the user asked for a cache reset; the cache must be enabled first
        cache = DiskCache.objects.first()
        cache.enabled = True
        cache.save()

        # regenerate the squid configuration on disk
        Generator(os.path.join(Paths.etc_dir(), "squid")).generate()

        # elevated helper: stops squid, wipes the cache, starts squid again
        SquidCacheInitializer().initialize()

        # cache is enabled and reinitialized - return nicely
        return super(ViewCacheDiskReset, self).form_valid(form)
    except Exception as e:
        # surface message plus traceback in the form's non-field errors
        form.errors['__all__'] = form.error_class([
            "%s\n%s" % (str(e), traceback.format_exc())
        ])
    # failure
    return super(ViewCacheDiskReset, self).form_invalid(form)
def install_license(self):
    """Validate an uploaded license.pem and install it when it checks out.

    Raises:
        Exception: when the upload looks like an old-style key or when
        the license validator rejects it.
    """
    # collect the license into a temporary folder
    folder = self.collect_license()

    # a real pem is always bigger than an old 4.2 style license.key
    if os.path.getsize(os.path.join(folder, "license.pem")) < 255:
        raise Exception(
            "You might be uploading a 4.2 style license.key file. This version requires license.pem instead. Contact [email protected] to get it."
        )

    # check if the license is fine; any validation failure is re-raised
    # with a uniform "invalid" message
    try:
        info = CommandLicense().run(folder)
        if info['valid'] != "1":
            message = info["error"]
            if "local issuer certificate" in info["error"]:
                message += ". You might be trying to upload a valid license key but for Web Safety version 5. Such keys are not supported. Please contact [email protected] to convert your valid license key to a new format."
            raise Exception(message)
    except Exception as e:
        raise Exception("License key is invalid, error: %s" % str(e))

    # the license key is fine, replace the current one
    shutil.copy(
        os.path.join(folder, "license.pem"),
        os.path.join(Paths.etc_dir(), "license.pem"))

    # if the license is community, activate/deactivate web safety
    self.activate_safety()
def get(self, request, *args, **kwargs):
    """Serve the wpad.dat proxy auto-config file from the etc directory."""
    wpad = os.path.join(Paths.etc_dir(), "wpad.dat")
    with open(wpad, "rb") as fin:
        body = fin.read()
    return HttpResponse(body, content_type="application/x-ns-proxy-autoconfig")
def get_settings(self):
    """Load and return the parsed contents of etc/system.json."""
    path = os.path.join(Paths.etc_dir(), "system.json")
    with open(path) as fin:
        return json.load(fin)
def collect_zip(self):
    """Save the uploaded backup zip into a clean temp folder.

    Returns:
        Path to the written websafety_backup.zip file.
    """
    folder = os.path.join(Paths.var_dir(), "temp", "websafety_restore")

    # start from a clean folder; only swallow filesystem errors - the
    # original bare except also hid KeyboardInterrupt/SystemExit
    try:
        shutil.rmtree(folder)
    except OSError:
        pass
    os.makedirs(folder)

    # collect all uploaded chunks into it
    data = self.request.FILES['file']
    name = os.path.join(folder, "websafety_backup.zip")
    try:
        os.unlink(name)
    except OSError:
        pass
    with open(name, "wb") as fout:
        for chunk in data.chunks():
            fout.write(chunk)

    # fine, zip is there
    return name
def full_path():
    """Return the absolute path of the ldap helper binary (.exe on Windows)."""
    binary = "ldap" + (".exe" if System.name() == System.WS_WINDOWS else "")
    return os.path.join(Paths.bin_dir(), binary)
def test_ldap(self, object):
    """Regenerate the auth JSON configs and run the LDAP connectivity test.

    Returns:
        (ok, details): ok is True when the tester exited with code 0;
        details holds the tester's output.
    """
    # the django model is already updated, but the JSON files in
    # etc/squid must be regenerated before the tester runs
    etc_squid = os.path.join(Paths.etc_dir(), "squid")
    generator = Generator(etc_squid)
    generator.generate_auth(FileWriter(etc_squid), JsonDumper())

    # and run the tests
    result = LdapTester().run()
    if result['exit_code'] == 0:
        return (True, "%s %s" % (result['stdout'], result['stderr']))
    return (False, "Exit code: %s\nSTDOUT: %s\nSTDERR: %s\n" % (
        str(result['exit_code']), result['stdout'], result['stderr']))
def get(self, request, *args, **kwargs):
    """Download the ldaps.pem trusted certificate as an attachment."""
    pem = os.path.join(Paths.etc_dir(), "ldaps.pem")
    with open(pem, "rb") as fin:
        contents = fin.read()
    response = HttpResponse(contents, content_type="application/x-x509-ca-cert")
    response['Content-Disposition'] = "attachment; filename=\"%s\"" % "ldaps.pem"
    return response
def get(self):
    """Return the wssyncd log contents, or an explanatory error string."""
    log_path = os.path.join(Paths.var_dir(), "log", "wssyncd.log")
    try:
        return FileReader(log_path).read()
    except Exception as e:
        # the log may simply not have been created yet - report, don't raise
        return "%s does not exist yet or is not accessible, error - %s" % (log_path, str(e))
def get(self, request, *args, **kwargs):
    """Render the 'blocked by safe browsing' page from the on-disk template."""
    # the page template is stored in the etc directory so admins can edit it
    template_path = os.path.join(Paths.etc_dir(), "blocked_safe_browsing.html")
    try:
        with open(template_path, "r") as fin:
            contents = fin.read()

        # render with version/url/reason pulled from the query string
        variables = {
            "VERSION": Build.version(),
            "URI": request.GET.get('url', ''),
            "RESULT_INFO": request.GET.get('reason', '')
        }
        rendered = Template(contents).render(Context(variables))
        return HttpResponse(rendered, content_type="text/html")
    except Exception as e:
        # surface the error to the browser rather than a generic 500 page
        return HttpResponse(str(e), content_type="text/html")
def generate_linux_debian_manual(self, manual):
    """Render the manual network-interfaces snippet with .bak/.new rotation.

    Writes the rendered template to a .new staging file, then rotates:
    current file becomes .bak, staging becomes current.
    """
    # construct file names
    cur_file = os.path.join(Paths.etc_dir(), "node", "etc_network_interfaces.manual")
    bak_file = cur_file + ".bak"
    new_file = cur_file + ".new"

    # clear leftovers from any previous run
    for stale in (bak_file, new_file):
        if os.path.exists(stale):
            os.unlink(stale)

    # always render the template, even when the manual text is empty;
    # normalize windows line endings first
    template = loader.get_template("node/network/etc_network_interfaces.manual")
    contents = template.render({"manual": manual.replace("\r\n", "\n")})

    # write the rendered data into the staging file
    with open(new_file, "w") as fout:
        fout.write(contents)
        fout.flush()

    # rotate: current -> .bak, .new -> current
    if os.path.exists(cur_file):
        shutil.move(cur_file, bak_file)
    shutil.move(new_file, cur_file)
def __init__(self, dc_addr, port, use_ldaps, base_dn, bind_user, bind_pass):
    """Open and simple-bind an LDAP(S) connection to a domain controller.

    NOTE(review): ldap.set_option without a connection handle changes
    process-global defaults, not just this connection - confirm that is
    acceptable for concurrent callers.
    """
    # set size limit applied to searches
    self.size_limit = 150
    # we do not support referrals
    ldap.set_option(ldap.OPT_REFERRALS, 0)
    ldap.set_option(ldap.OPT_SIZELIMIT, self.size_limit)
    # set schema
    schema = "ldap"
    if use_ldaps:
        schema = "ldaps"
        # it is ldapS so set the path to the trusted cert
        ldap.set_option(ldap.OPT_X_TLS_CACERTFILE, os.path.join(Paths.etc_dir(), "ldaps.pem"))
        # disable certificate checking only on windows to ease development
        if System.name() == System.WS_WINDOWS:
            ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_NEVER)
    # construct uri
    uri = "%s://%s:%d" % (schema, dc_addr, port)
    # init LDAP and bind to it
    self.conn = ldap.initialize(uri)
    # bind to LDAP server (may raise on bad credentials/connectivity)
    self.conn.simple_bind_s(bind_user, bind_pass)
    # and save the base dn for later searches
    self.base_dn = base_dn
def get_context_data(self, **kwargs): context = super(ViewAuthNegotiateSchemeEdit, self).get_context_data(**kwargs) # this is the path to keytab keytab_path = os.path.join(Paths.etc_dir(), "krb5.keytab") # dump contents of keytab try: context['keytab'] = KeyTabDumper().dump(keytab_path) except Exception as e: context['keytab'] = str(e) # verify keytab is valid for spn (result, output) = KeyTabInitializer().initialize(keytab_path, self.object.krb5_spn) context['kinit_result'] = result context['kinit_output'] = output # read krb5.conf try: path = os.path.join(Paths.etc_dir(), "krb5.conf") with open(path, "r") as fin: context['krb5conf'] = fin.read() except Exception as e: context['krb5conf'] = "Error reading %s file: %s" % (path, str(e)) # construct some params use_ldaps = False if self.object.lookup_mode in (AuthAd.LOOKUP_MODE_LDAP, AuthAd.LOOKUP_MODE_GC): use_ldaps = False if self.object.lookup_mode in (AuthAd.LOOKUP_MODE_LDAPS, AuthAd.LOOKUP_MODE_GCS): use_ldaps = True # check kvno (result, output) = KvnoChecker( self.object.dc1addr, self.object.lookup_mode, use_ldaps, self.object.base_dn, self.object.bind_user, self.object.bind_pass ).check(keytab_path, self.object.krb5_spn) context['kvno_result'] = result context['kvno_output'] = output return context
def __init__(self, server1, port1, server2, port2, token):
    """Remember the inspector_auth.py helper path and its connection parameters."""
    self.exe = os.path.join(Paths.bin_dir(), "inspector_auth.py")
    self.server1 = server1
    # ports are stored as strings, ready for command-line arguments
    self.port1 = str(port1)
    self.server2 = server2
    self.port2 = str(port2)
    self.token = token
def get(self, request, *args, **kwargs):
    """Download the users.htpasswd file as an attachment."""
    htpasswd = os.path.join(Paths.etc_dir(), "users.htpasswd")
    with open(htpasswd, "rb") as fin:
        contents = fin.read()
    response = HttpResponse(contents, content_type="application/octet-stream")
    response['Content-Disposition'] = "attachment; filename=\"%s\"" % "users.htpasswd"
    return response
def run(self):
    """Return the used-space percentage of the var partition, e.g. '42%'.

    Runs 'df -k' on the var directory and scans the first data row for
    the percentage column. Returns '0%' when df fails or no percentage
    token is found.
    """
    (exit_code, stdout, stderr) = Command.run(self, ["df", "-k", Paths.var_dir()])
    if exit_code == 0:
        # df prints a header line followed by one data line per filesystem;
        # take the first data line and look for the 'NN%' column.
        # NOTE: the original did filter(None, ...).split(' ') which always
        # raised AttributeError - neither a filter object nor a list has split().
        lines = stdout.split('\n')
        if len(lines) > 1:
            for token in lines[1].split(' '):
                if token.find('%') != -1:
                    return token
    return '0%'
def dump_raw(self, cacert):
    """Return the raw text of the named CA certificate from the etc directory.

    Raises:
        Exception: when the file does not exist or is not a regular file.
    """
    cacert_path = os.path.join(Paths.etc_dir(), cacert)
    if not os.path.isfile(cacert_path):
        raise Exception("File %s does not exist or is not accessible!" % cacert_path)
    with open(cacert_path, 'r') as fin:
        return fin.read()