def get_url_info_v1(host, path, query_string):
    """Look up a URL host in the Redis backend and report its malware status.

    :param host: suspected URL host
    :param path: path of the suspected URL
    :param query_string: query string of the suspected URL
    :return: dict describing the URL with a 'status' of 'blacklisted' or
        'whitelisted', or None when the stored entry is the "ERROR" sentinel
    """
    result = {'host': host, 'path': path, 'status': ''}
    if len(str(query_string)) != 0:
        result['query'] = query_string.decode("utf-8")

    stored = RedisClient().get(key=host)
    if stored is None:
        # No entry for this host: treat it as clean.
        result['status'] = 'whitelisted'
        return result
    if stored == "ERROR":
        # Sentinel value stored by the backend; caller gets no info at all.
        return None
    result['stored_info'] = stored.decode("utf-8")
    result['status'] = 'blacklisted'
    return result
def _get_license_info():
    """Validate the locally stored license signature.

    Reads the signature from Redis, verifies it against the license
    certificate, and checks that it was issued for this device.

    :return: tuple ``(license_id_or_error_message, product_names)``.  On any
        failure the first element is a (Chinese) error message and the
        product list is empty.
    """
    redis_client = RedisClient()
    license_sig = redis_client.get(lc_check.redis_key_license_signature, is_json=False)
    if license_sig is None or license_sig == "":
        # No signature stored at all.
        return "本机并未指定license", []

    license_info = utils.check_license_signature_valid(
        license_sig, CERT_PATH + LICENSE_CERT)
    if license_info != {} and license_info["deviceId"] == utils.get_device_uuid():
        # Product ids are 1-based indexes into the license_product table.
        products = [license_product[product["product"] - 1]
                    for product in license_info["products"]]
        return license_info["licenseId"], products

    # BUG FIX: the original unconditionally indexed license_info["licenseId"]
    # here, which raises KeyError when the signature check returned an empty
    # dict; fall back to an empty id instead.
    return "本机license不合法,license is: " + license_info.get("licenseId", ""), []
def __getitem__(self, index):
    """Fetch one sample from the Redis-backed dataset.

    Args:
        index (int): Index

    Returns:
        tuple: (image, target) where target is index of the target class.
    """
    record = RedisClient().get(self.db_key)[index]
    raw_img, label = record[0], int(record[1])

    # Convert to a PIL Image ('L' = 8-bit grayscale) so the return type is
    # consistent with all the other datasets.
    image = Image.fromarray(raw_img, mode='L')

    if self.transform is not None:
        image = self.transform(image)
    if self.target_transform is not None:
        label = self.target_transform(label)

    return image, label
def __len__(self):
    """Return the number of samples stored under ``self.db_key`` in Redis."""
    records = RedisClient().get(self.db_key)
    return len(records)
def _backup_appliance(tmp_folder, whether_backup_network=False, whether_backup_forensics=False):
    """Stage appliance configuration files in *tmp_folder* and pack them into
    a tarball named ``Appliance-<version>-<timestamp>.tar.gz``.

    :param tmp_folder: working directory where backup files are staged
    :param whether_backup_network: also include networking config files
    :param whether_backup_forensics: also include forensics files
        (NOTE(review): forcibly disabled below by the ``= 0`` override)
    :return: filename of the generated ``.tar.gz`` archive
    """
    version = utils.app_conf_get("device_base_info")["version"]
    # Get current time; the timestamp becomes part of the archive name.
    tnow = int(time.time())
    tnowst = time.localtime(tnow)
    timestamp = time.strftime('%Y%m%d%H%M%S', tnowst)
    current_path = os.getcwd()
    # All staging happens inside tmp_folder; the cwd is restored at the end.
    os.chdir(tmp_folder)
    app_filename_prefix = "Appliance" + "-" + version + "-" + timestamp
    device_type = utils.get_device_type()
    backup_content = {}
    agent_client = AGENT_CLIENT(AGENT_ADDR, AGENT_PORT)
    # Device types 2/4/6/8 carry the gateway-style settings below.
    if device_type == 2 or device_type == 4 or device_type == 6 or device_type == 8:
        if device_type == 2 or device_type == 6:
            # get device_work_mode info
            work_mode = utils.app_conf_get("device_work_mode")
            device_work_mode = {}  # NOTE(review): assigned but never used
            backup_content["device_work_mode"] = work_mode
            if work_mode == 1:
                # backup netObject and globalbypass for pcap mode
                monitor_mode = utils.get_monitor_node()
                if monitor_mode == "pcap":
                    global_bypass = utils.app_conf_get("globalbypass")
                    if global_bypass != {}:
                        backup_content["globalbypass"] = global_bypass
        # get protocol settings
        protocol_settings = utils.app_conf_get("protocol")
        if protocol_settings != {}:
            backup_content["protocol"] = protocol_settings
        # get block_pages setting
        block_page_settings = BlockPageSettings().get_settings()
        backup_content["block_page_settings"] = block_page_settings["data"]
    # get SNMP info; on a stale HTTP connection, rebuild the agent client
    # once and retry (the same retry pattern is used for every agent call).
    try:
        res_json = json.loads(agent_client.get_SNMP_settings())
    except httplib.HTTPException:
        agent_client = AGENT_CLIENT(AGENT_ADDR, AGENT_PORT)
        res_json = json.loads(agent_client.get_SNMP_settings())
    backup_content["snmp_settings"] = res_json["snmp_settings"]
    if device_type == 1:
        # get device_time_info (retry once on a broken connection)
        try:
            res_json = json.loads(agent_client.get_device_time())
        except httplib.HTTPException:
            agent_client = AGENT_CLIENT(AGENT_ADDR, AGENT_PORT)
            res_json = json.loads(agent_client.get_device_time())
        backup_content["device_time_info"] = res_json["device_time_info"]
    # get proxy_settings (retry once on a broken connection)
    try:
        res_json = json.loads(agent_client.get_proxy_settings())
    except httplib.HTTPException:
        agent_client = AGENT_CLIENT(AGENT_ADDR, AGENT_PORT)
        res_json = json.loads(agent_client.get_proxy_settings())
    # NOTE(review): dict.has_key is Python-2 only; presence of "proxyserver"
    # is taken to mean the agent returned a valid settings payload.
    if res_json.has_key("proxyserver"):
        del res_json["responseCode"]
        del res_json["message"]
        backup_content["proxy_settings"] = res_json
    # get backup_settings (retry once on a broken connection)
    try:
        res_json = json.loads(agent_client.get_backup_settings())
    except httplib.HTTPException:
        agent_client = AGENT_CLIENT(AGENT_ADDR, AGENT_PORT)
        res_json = json.loads(agent_client.get_backup_settings())
    if res_json.has_key("locationType"):
        del res_json["responseCode"]
        del res_json["message"]
        backup_content["backup_settings"] = res_json
    # get hostname and desc ("desc" is optional in the device config)
    hostname = utils.app_conf_get("device_base_info")["hostname"]
    if utils.app_conf_get("device_base_info").has_key("desc"):
        desc = utils.app_conf_get("device_base_info")["desc"]
    else:
        desc = ""
    backup_content["device_base_info"] = {"hostname": hostname, "desc": desc}
    # get forensics storage
    backup_content["forensics_storage"] = {
        "capacity": forensics_storage.get_forensics_capacity()
    }
    # get exception list
    backup_content["exception"] = utils.app_conf_get("exception")
    # get synctime setting
    redis_client = RedisClient()
    if redis_client.exist_key("synctime_schedule_task"):
        synctime_schedule_task = redis_client.hgetall("synctime_schedule_task")
        if synctime_schedule_task is not None:
            backup_content["synctime_schedule_task"] = synctime_schedule_task
    # get backup schedule setting
    redis_client = RedisClient()
    if redis_client.exist_key("backup_schedule_task"):
        backup_schedule_task = redis_client.hgetall("backup_schedule_task")
        if backup_schedule_task is not None:
            backup_content["backup_schedule_task"] = backup_schedule_task
    # get ts setting
    redis_client = RedisClient()
    if redis_client.exist_key("ts"):
        ts_res = redis_client.get("ts")
        if ts_res is not None:
            backup_content["ts_account_info"] = ts_res
    # Dump everything gathered so far into the main .conf file.
    with open(app_filename_prefix + ".conf", "w") as bk_file:
        json.dump(backup_content, bk_file, indent=4)
        bk_file.close()  # NOTE(review): redundant inside the with block
    if device_type == 2 or \
            device_type == 4 or device_type == 6 or device_type == 8:
        # Copy the per-appliance config files into the staging dir and build
        # the space-separated file list later handed to tar.
        bk_file_list = ""
        for bk_file in app_backup_filelist:
            # ebtables rules are not applicable to device types 4/8.
            if (device_type == 4 or device_type == 8) and "ebtables" in bk_file:
                continue
            shutil.copy(bk_file, "./")
            bk_file_list += " %s" % os.path.basename(bk_file)
        # backup block pages
        blockpage_zip_files = BlockPageSettings().backup_blockpage_dir()
        for bk_file in blockpage_zip_files:
            shutil.copy(bk_file, "./")
            bk_file_list += " %s" % os.path.basename(bk_file)
        bk_file_list += " " + app_filename_prefix + ".conf"
    else:
        bk_file_list = app_filename_prefix + ".conf"
    # get networking files
    if whether_backup_network:
        networking_list = _get_backup_networking_files(tmp_folder)
        for bk_file in networking_list:
            bk_file_list += " %s" % os.path.basename(bk_file)
    # backup hybrid.conf if exist
    if os.path.isfile("/opt/skyguard/www/app/hybrid.conf"):
        shutil.copy("/opt/skyguard/www/app/hybrid.conf", "./")
        bk_file_list += " hybrid.conf"
    if device_type == 2:
        for bro_cfg in bro_backup_filelist:
            if os.path.isfile(bro_cfg):
                shutil.copy(bro_cfg, "./")
                bk_file_list += " %s" % os.path.basename(bro_cfg)
    # swg pacp mode
    if device_type == 6:
        if "2.2" in version:
            # 2.2 builds do not ship the swg bro configs
            pass
        else:
            for bro_cfg in bro_backup_filelist_swg:
                if os.path.isfile(bro_cfg):
                    shutil.copy(bro_cfg, "./")
                    bk_file_list += " %s" % os.path.basename(bro_cfg)
    # backup collect log
    if os.path.exists(collect_log_dir):
        filelist = os.listdir(collect_log_dir)
        if filelist != []:
            if not os.path.exists("collect_log"):
                os.mkdir("collect_log")
            FileUtil.copyFilesToDir(collect_log_dir, "collect_log/")
            bk_file_list += " %s" % ("collect_log/")
    # Backup hybrid settings
    if device_type == 4 or device_type == 8:
        shutil.copy("/opt/skyguard/www/app/gre_info.conf", "./")
        bk_file_list += " gre_info.conf"
        # NOTE(review): dead code preserved as a discarded triple-quoted
        # string; consider deleting it outright.
        '''
        agent_client = AGENT_CLIENT(AGENT_ADDR,AGENT_PORT)
        network_settings = agent_client.get_hybrid_config()
        ucss_external_ip = utils.get_ucss_address()
        # get ucss internal ip from iptables
        (ret, output) = utils.app_command("iptables-save | grep %s" %
                                          ucss_external_ip)
        if ret == 0:
            kv = [word.split() for word in output[0].split()]
            if "-d" in kv:
                ucss_internal_ip = kv[kv.index("-d") + 1].split("/")[0]
        # get ucsg public ip
        public_ip = utils.get_ip_address("eth1")
        hybrid_settings = {"network_setting" : json.dumps(network_settings),
                           "ucss_external_ip" : ucss_external_ip,
                           "ucss_internal_ip" : ucss_internal_ip,
                           "public_ip" : public_ip}
        with open("hybrid_settings.conf", "w") as hybrid_conf:
            json.dump(hybrid_settings, hybrid_conf, indent=4)
        '''
    # Backup
    # backup forensics
    # NOTE(review): this override forcibly disables the forensics backup,
    # ignoring the caller's whether_backup_forensics argument — confirm
    # whether this is a deliberate kill-switch or leftover debug code.
    whether_backup_forensics = 0
    if whether_backup_forensics:
        if device_type != 1:
            if os.path.exists(forensics_dir):
                filelist = os.listdir(forensics_dir)
                if filelist != []:
                    os.mkdir("forensics")
                    for f in filelist:
                        shutil.copy(forensics_dir + f, "forensics/")
    # generate backup tgz
    utils.app_command_quiet("tar cfvz %s %s" %
                            (app_filename_prefix + ".tar.gz", bk_file_list))
    # remove tmp files staged above
    os.unlink(app_filename_prefix + ".conf")
    if os.path.isfile("hybrid.conf"):
        os.unlink("hybrid.conf")
    if os.path.isfile("gre_info.conf"):
        os.unlink("gre_info.conf")
    if device_type == 2 or device_type == 4 or device_type == 6 or device_type == 8:
        for bk_file in app_backup_filelist:
            if (device_type == 4 or device_type == 8) and "ebtables" in bk_file:
                continue
            os.unlink(os.path.basename(bk_file))
        BlockPageSettings().delete_backup_zip()
        for bk_file in blockpage_zip_files:
            os.unlink(os.path.basename(bk_file))
    if device_type == 2:
        for bro_cfg in bro_backup_filelist:
            if os.path.isfile(os.path.basename(bro_cfg)):
                os.unlink(os.path.basename(bro_cfg))
    if device_type == 6:
        for bro_cfg in bro_backup_filelist_swg:
            if os.path.isfile(os.path.basename(bro_cfg)):
                os.unlink(os.path.basename(bro_cfg))
    if whether_backup_network:
        _remove_backup_networking_files(tmp_folder)
    os.chdir(current_path)
    return app_filename_prefix + ".tar.gz"
class Generator():
    """Drives a headless Chrome browser to log in to supported websites and
    harvest session cookies into a Redis-backed cookie store."""

    def __init__(self, hostname):
        """Connect the Redis cookie/user maps and start a headless browser.

        :param hostname: basename of the ``conf/<hostname>_website.json``
            config file holding ``website_name`` and ``login_url``
        """
        # FIX: use a context manager so the config file handle is closed
        # (the original leaked the open file descriptor).
        with open("conf/%s_website.json" % hostname, "r") as fd:
            data = json.loads(fd.read())
        self.website = data["website_name"]
        self.login_url = data["login_url"]
        self.cookies_db = RedisClient('cookies', self.website)
        self.users_db = RedisClient('users', self.website)
        # NOTE(review): hard-coded test credentials seeded into Redis on every
        # construction — move to config or remove before production use.
        self.users_db.set("15320347357","123456wyq")
        self.users_db.set("15320343017","123456wyq")
        chrome_options = webdriver.ChromeOptions()
        chrome_options.add_argument('--no-sandbox')
        chrome_options.add_argument('--window-size=1980,1980')
        chrome_options.add_argument('--headless')
        chrome_options.add_argument('--disable-gpu')
        self.browser = webdriver.Chrome(chrome_options=chrome_options)
        self.wait = WebDriverWait(self.browser, 20)

    def get_cookie_dict(self, cookie):
        """Convert selenium's list-of-dicts cookie format into {name: value}."""
        res = {}
        for item in cookie:
            res[item["name"]] = item["value"]
        return res

    def open_lianjia(self, username, password):
        """Open the lianjia login page, fill in the credentials and submit."""
        self.browser.get(self.login_url)
        time.sleep(2)
        # click the login entry
        button = self.browser.find_element_by_css_selector("a.btn-login.bounceIn.actLoginBtn")
        button.click()
        time.sleep(2)
        # switch to username/password login
        button = self.browser.find_element_by_css_selector("#con_login_user_tel a.tologin")
        button.click()
        username_input = self.wait.until(EC.presence_of_element_located(
            (By.CSS_SELECTOR, "input.the_input.topSpecial.users")
        ))
        password_input = self.wait.until(EC.presence_of_element_located(
            (By.CSS_SELECTOR, "input.the_input.password")
        ))
        loginSubmit = self.wait.until(EC.presence_of_element_located(
            (By.CSS_SELECTOR, ".li_btn a.login-user-btn")
        ))
        # enter the credentials and log in
        username_input.send_keys(username)
        password_input.send_keys(password)
        loginSubmit.click()
        time.sleep(3)

    def open_qfang(self, username, password):
        """Open the qfang login page, fill in the credentials and submit."""
        self.browser.get(self.login_url)
        time.sleep(2)
        # click the login entry
        button = self.browser.find_element_by_css_selector("#noLoginUser .nav-link a")
        button.click()
        time.sleep(2)
        # switch to username/password login
        button = self.browser.find_element_by_css_selector("#loginTbs a:nth-child(2)")
        button.click()
        username_input = self.wait.until(EC.presence_of_element_located(
            (By.ID, "phone")
        ))
        password_input = self.wait.until(EC.presence_of_element_located(
            (By.ID, "password")
        ))
        loginSubmit = self.wait.until(EC.presence_of_element_located(
            (By.ID, "loginSubmit")
        ))
        # enter the credentials and log in
        username_input.send_keys(username)
        password_input.send_keys(password)
        loginSubmit.click()
        time.sleep(3)

    def new_cookie_qfang(self, username, password):
        """Log in to qfang and return {'code': 1, 'data': cookies} on
        success, or {'code': -1, 'data': 'login failed'} on failure."""
        self.open_qfang(username, password)
        # confirm login succeeded via the user-name element text
        check = self.browser.find_element_by_css_selector("#loginOrUserName a.frontUserName")
        text = check.text
        res = {}
        if text == "我的Q房":
            res["code"] = 1
            res["data"] = self.browser.get_cookies()
        else:
            res["code"] = -1
            res["data"] = "login failed"
        return res

    def new_cookie_lianjia(self, username, password):
        """Log in to lianjia and return {'code': 1, 'data': cookies} on
        success, or {'code': -1, 'data': 'login failed'} on failure."""
        self.open_lianjia(username, password)
        check = self.browser.find_element_by_css_selector(".ti-hover .typeShowUser a:link")
        res = {}
        if "1" in check.text:
            res["code"] = 1
            res["data"] = self.browser.get_cookies()
        else:
            res["code"] = -1
            res["data"] = "login failed"
        return res

    def save_cookies(self):
        """Fetch a cookie for every user that lacks one and store it; delete
        accounts whose login failed."""
        all_users = self.users_db.all_users()
        done_users = self.cookies_db.all_users()
        if len(all_users) == len(done_users):
            print("No users can get cookie")
        for user in all_users:
            if user not in done_users:
                pw = self.users_db.get(user)
                print("get cookie user:%s,website:%s..." % (user, self.website) )
                if self.website == "qfang":
                    result = self.new_cookie_qfang(user, pw)
                elif self.website == "lianjia":
                    result = self.new_cookie_lianjia(user, pw)
                else:
                    print("not support this website")
                    # BUG FIX: the original fell through and read `result`
                    # here, raising NameError for unsupported websites.
                    continue
                if result["code"] == 1:
                    cookie = self.get_cookie_dict(result["data"])
                    self.cookies_db.set(user, json.dumps(cookie))
                    print("save cookie %s succ" % cookie)
                elif result["code"] == -1:
                    print(result["data"])
                    self.users_db.delete(user)
                    print("delete account :%s" % user)
                else:
                    print(result["data"])

    def get_cookie(self):
        """Return a cookie from the Redis cookie store."""
        # BUG FIX: the original referenced the bare name `cookies_db`
        # (NameError at call time); the handle lives on the instance.
        cookie = self.cookies_db.get_cookie()
        return cookie

    def close(self):
        """Quit the browser and kill any leftover chromedriver processes."""
        self.browser.quit()
        os.system('pkill chromedriver')
class RedisCache(object):
    """Function-result cache backed by Redis, exposed as a decorator factory.

    Cache keys are ``str(hash(fn_name + signature))`` where the signature is
    derived from the call arguments; note that ``hash()`` of a str is not
    stable across interpreter runs unless PYTHONHASHSEED is fixed.
    """

    def __init__(self, address, port):
        """Connect a Redis client at *address*:*port*."""
        self.address = address
        self.port = port
        self.redis_client = RedisClient(self.address, self.port)

    def cache(self, **options):
        """Cache decorator factory.

        :param options: optional ``signature_generator`` (callable building
            the cache-key signature), ``expiration`` (TTL for setex); any
            remaining options are forwarded to ``redis_client.set``.
        :raises TypeError: if ``signature_generator`` is not callable
        """
        def cache_inside(fn, **kwargs):
            def wrapper(*args, **kwargs):
                fn_name = fn.__name__
                signature_generator = options.get(
                    'signature_generator', self._get_signature
                )
                if not hasattr(signature_generator, '__call__'):
                    raise TypeError(
                        "signature_generator must be a callable function"
                    )
                signature = signature_generator(args, **kwargs)
                fn_hash = str(hash(fn_name + signature))
                cache_request = self.redis_client.get(fn_hash)
                # BUG FIX: the original tested `cache_request is ''` — an
                # identity comparison against a literal whose outcome depends
                # on string interning (and a SyntaxWarning on CPython 3.8+).
                # An empty string marks a cache miss; compare by equality.
                if cache_request == '':
                    # Cache miss: compute, then store (with TTL if requested).
                    ret = fn(*args, **kwargs)
                    if 'expiration' in options:
                        self.redis_client.setex(
                            fn_hash, ret, options.get('expiration')
                        )
                    else:
                        self.redis_client.set(fn_hash, ret, **options)
                else:
                    # Cache hit
                    return cache_request
                return ret
            return wrapper
        return cache_inside

    def _get_signature(*args, **kwargs):
        """Build the signature of the decorated method's call.

        Written without an explicit ``self``: when called as a bound method,
        ``args[0]`` is the instance and ``args[1]`` is the positional-args
        tuple of the wrapped call.

        :return: ``arg1,...argn,kwarg1=kwarg1,...kwargn=kwargn``
        """
        # Join regular arguments together with commas
        parsed_args = ",".join(map(lambda x: str(x), args[1]))
        # Join keyword arguments together with `=` and commas
        parsed_kwargs = ",".join(
            map(lambda x: '%s=%s' % (x, str(kwargs[x])), kwargs)
        )
        # Filter out empty params so we never emit a dangling comma
        parsed = filter(
            lambda x: x != '', [parsed_args, parsed_kwargs]
        )
        return ','.join(parsed)