def jwt_init(username, password):
    """
    initialize through the user name and password and write the JWT to the local
    configuration file. the token expires after about 12 hours, so initializing
    through the API key is recommended instead.
    :param username: str, ZoomEye account user name
    :param password: str, ZoomEye account password
    :return:
    """
    file.check_exist(zoomeye_dir)
    try:
        zoom = ZoomEye(username=username, password=password)
        access_token = zoom.login()
    except Exception:
        return
    jwt_file = zoomeye_dir + "/jwt"
    if access_token:
        # display the remaining resources of the current account
        user_data = zoom.resources_info()
        show.printf("Role: {}".format(user_data["plan"]))
        show.printf("Quota: {}".format(user_data["resources"].get("search")))
        with open(jwt_file, 'w') as f:
            f.write(access_token)
        show.printf("successfully initialized", color="green")
        # restrict the configuration file permissions to the owner (0o600)
        os.chmod(jwt_file, 0o600)
    else:
        show.printf("initialization failed!", color="red")
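# usage sketch (illustrative, not from the original project): initializing with
# account credentials; the values below are placeholders.
jwt_init("user@example.com", "your-password")
# on success the JWT is written under zoomeye_dir and the account role/quota are
# printed; the token expires after roughly 12 hours, so key_init is usually preferred.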
def get_data(self):
    """
    get the user level and the IP history data
    """
    normal_user = ['user', 'developer']
    api_key, access_token = file.get_auth_key()
    zm = ZoomEye(api_key=api_key, access_token=access_token)
    role = zm.resources_info()
    # permission restrictions
    if role["plan"] in normal_user:
        show.printf("this function is only open to advanced users and VIP users.",
                    color='red')
        exit(0)
    # the user chooses to force the data to come from the API
    if self.force:
        history_data = zm.history_ip(self.ip)
    else:
        # try to get the data from the local cache
        history_data_str = self.get_data_from_cache()
        # the local cache does not exist, get the data from the API
        if history_data_str is None:
            history_data = zm.history_ip(self.ip)
        else:
            history_data = json.loads(history_data_str)
    # cache the data
    self.cache_data(history_data)
    return history_data
def key_init(key):
    """
    initialize through the API key and write it to the local configuration file.
    in theory it never expires unless the API key is regenerated.
    :param key: user input API key
    :return:
    """
    file.check_exist(zoomeye_dir)
    key = key.strip()
    try:
        zoom = ZoomEye(api_key=key)
    except Exception:
        return
    # api key save path
    key_file = zoomeye_dir + "/apikey"
    # display the remaining resources of the current account
    user_data = zoom.resources_info()
    show.printf("Role: {}".format(user_data["plan"]))
    show.printf("Quota: {}".format(user_data["resources"].get("search")))
    # save the api key
    with open(key_file, 'w') as f:
        f.write(key)
    show.printf("successfully initialized", color="green")
    # restrict the configuration file permissions to the owner (0o600)
    os.chmod(key_file, 0o600)
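# usage sketch (illustrative, not from the original project): initializing with an
# API key; the key below is a placeholder.
key_init("01234567-89ab-cdef-0123-456789abcdef")
# the key is written under zoomeye_dir and reused by later commands until it is regenerated.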
def request_data(self):
    """
    get api data
    """
    api_key, access_token = file.get_auth_key()
    zm = ZoomEye(api_key=api_key, access_token=access_token)
    data = zm.dork_search(self.dork)
    return data
def __init__(self, dork, num, facet=None):
    self.dork = dork
    self.num = num
    self.facet = facet
    self.dork_data = []
    self.facet_data = None
    self.total = 0
    self.api_key, self.access_token = file.get_auth_key()
    self.zoomeye = ZoomEye(api_key=self.api_key,
                           access_token=self.access_token)
def zoomeye_engine(api_key, ip, count):
    zm = ZoomEye(api_key=api_key)  # API key for authentication
    pattern = "cidr:" + ip
    data = zm.dork_search(pattern)
    # print(data)
    num = 1
    for datum in data:
        if num > count:
            return format(datum)
        num += 1
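# usage sketch (illustrative, not from the original project): search a CIDR range and
# print the record that follows the first `count` results; key and network are placeholders.
print(zoomeye_engine("your-api-key", "8.8.8.8/24", 0))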
def info(args):
    """
    used to print the current identity of the user and the remaining data quota for the month
    :param args:
    :return:
    """
    api_key, access_token = file.get_auth_key()
    zm = ZoomEye(api_key=api_key, access_token=access_token)
    # get user information
    user_data = zm.resources_info()
    if user_data:
        # show in the terminal
        show.printf("Role: {}".format(user_data["plan"]))
        show.printf("Quota: {}".format(user_data["resources"].get("search")))
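# usage sketch (illustrative, not from the original project): info() does not read its
# argparse namespace argument in the code above, so passing None is enough for a quick quota check.
info(None)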
def __init__(self, dork, num, resource, facet=None, force=False):
    self.dork = dork
    self.num = num
    self.resource = resource
    self.facet = facet
    self.force = force
    self.dork_data = list()
    self.facet_data = None
    self.total = 0
    self.api_key, self.access_token = file.get_auth_key()
    self.zoomeye = ZoomEye(api_key=self.api_key,
                           access_token=self.access_token)
class CliZoomEye:
    """
    the ZoomEye instance in cli mode adds local storage on top of the API ZoomEye
    instance, and decides where the data comes from: the cache or the API.
    note that cli mode currently supports "host" search only; "web" search is not
    supported yet.
    """
    def __init__(self, dork, num, facet=None):
        self.dork = dork
        self.num = num
        self.facet = facet
        self.dork_data = []
        self.facet_data = None
        self.total = 0
        self.api_key, self.access_token = file.get_auth_key()
        self.zoomeye = ZoomEye(api_key=self.api_key,
                               access_token=self.access_token)

    def handle_page(self):
        """
        convert the requested amount of data into a page count, rounding up.
        ex: num = 30 -> page = 2, because the API returns 20 records per page.
        :return:
        """
        num = int(self.num)
        if num % 20 == 0:
            return int(num / 20)
        return int(num / 20) + 1

    def auto_cache(self, data, page):
        """
        cache the data locally
        :param page:
        :param data:
        :return:
        """
        cache = Cache(self.dork, page)
        cache.save(json.dumps(data))

    def from_cache_or_api(self):
        """
        get the data from the cache, or from the API if the cache has no data.
        :return:
        """
        page = self.handle_page()
        for p in range(page):
            cache = Cache(self.dork, page=p)
            # get data from the cache file
            if cache.check():
                # return dork, facet, total data
                dork_data_list, self.facet_data, self.total = cache.load()
                self.dork_data.extend(dork_data_list)
            else:
                # no cache, get the data from the API
                self.facet = [
                    'app', 'device', 'service', 'os', 'port', 'country', 'city'
                ]
                try:
                    dork_data_list = self.zoomeye.dork_search(
                        dork=self.dork,
                        page=p + 1,
                        resource="host",
                        facets=self.facet)
                except ValueError:
                    print("the access token has expired, please re-run the [zoomeye init] command. "
                          "it is recommended to initialize with an API key!")
                    exit(0)
                self.facet_data = self.zoomeye.facet_data
                self.total = self.zoomeye.total
                self.dork_data.extend(dork_data_list)
                if dork_data_list:
                    self.auto_cache(self.zoomeye.raw_data, p)
        # return dork, facet, total data
        return self.dork_data[:self.num], self.facet_data, self.total

    def filter_data(self, keys, data):
        """
        get the data of the corresponding fields
        :param keys: list, user input fields
        :param data: list, zoomeye api data
        :return: list, ex: [[1,2,3...],[1,2,3...],[1,2,3...]...]
        """
        result = []
        for d in data:
            item = []
            zmdict = ZoomEyeDict(d)
            for key in keys:
                if fields_tables_host.get(key.strip()) is None:
                    support_fields = ','.join(list(fields_tables_host.keys()))
                    show.printf("filter command has unsupported field [{}], supported fields are [{}]"
                                .format(key, support_fields),
                                color='red')
                    exit(0)
                res = zmdict.find(fields_tables_host.get(key.strip()))
                item.append(res)
            result.append(item)
        return result

    def regexp_data(self, keys):
        """
        filter based on the fields entered by the user;
        multiple fields are combined with an AND operation
        :param keys: str, user input filter fields
        :return: list, ex: [{...}, {...}, {...}...]
        """
        keys = keys.split(",")
        result = []
        self.zoomeye.data_list = self.dork_data[:self.num]
        data_list = self.zoomeye.data_list
        for key in keys:
            result = []
            for da in data_list:
                zmdict = ZoomEyeDict(da)
                input_key, input_value = key.split("=")
                if fields_tables_host.get(input_key.strip()) is None:
                    # check field validity
                    support_fields = ','.join(list(fields_tables_host.keys()))
                    show.printf("filter command has unsupported field [{}], supported fields are [{}]"
                                .format(input_key, support_fields),
                                color='red')
                    exit(0)
                # the value obtained here is of type int while the user's input
                # is of type str, so it needs to be converted.
                if input_key == "port":
                    input_value = str(input_value)
                find_value = zmdict.find(fields_tables_host.get(input_key.strip()))
                # get the value through regular matching
                try:
                    regexp_result = re.search(str(input_value), str(find_value), re.I)
                except re.error:
                    show.printf('the regular expression you entered is incorrect, please check!',
                                color='red')
                    exit(0)
                except Exception as e:
                    show.printf(e, color='red')
                    exit(0)
                # the matched value is neither None nor empty
                if regexp_result and regexp_result.group(0) != '':
                    result.append(da)
            # AND operation
            data_list = result
        return result

    def cli_filter(self, keys, save=False):
        """
        filter the results.
        :param save:
        :param keys: str, filter condition. ex: 'ip,port,app=xxx'
        :return: None
        """
        has_equal = []
        not_equal = []
        # the ip field is displayed by default and placed first
        key_list = keys.split(',')
        try:
            # move the ip field to the first place
            key_index = key_list.index("ip")
            key_list.pop(key_index)
            key_list.insert(0, 'ip')
        # add ip as the first item when it is missing
        except ValueError:
            key_list.insert(0, 'ip')
        # process the user input fields, separating plain fields
        # from fields with an equal sign.
        for key in key_list:
            res = key.split('=')
            # field with an equal sign
            if len(res) == 2:
                has_equal.append(key)
                not_equal.append(res[0])
            # field without an equal sign
            if len(res) == 1:
                # handle the wildcard character * in the field,
                # that is, query all fields
                if key == "*":
                    not_equal = list(fields_tables_host.keys())
                    continue
                else:
                    not_equal.append(key)
        # the filter condition contains an equal sign,
        # ex: port,banner,app=MySQL
        if len(has_equal) != 0:
            equal = ','.join(has_equal)
            equal_data = self.regexp_data(equal)
        # the filter condition is plain fields only,
        # ex: ip,port,app
        else:
            equal_data = self.dork_data[:self.num]
        # get the result
        result = self.filter_data(not_equal, equal_data)
        equal = ','.join(not_equal)
        if save:
            return equal, result
        show.print_filter(equal, result)

    def save(self, fields):
        """
        save the data to a local json file. the save path cannot be specified,
        but the saved data can be.
        :param fields: str, filter fields, ex: app=xxxx
        :return:
        """
        # -save default, data format ex:
        # {"total":xxx, "matches":[{...}, {...}, {...}...], "facets":{{...}, {...}...}}
        if fields == 'all':
            filename = "{}_{}_{}.json".format(self.dork, self.num, int(time.time()))
            data = {
                'total': self.total,
                'matches': self.dork_data[:self.num],
                'facets': self.facet_data
            }
            file.write_file(filename, json.dumps(data))
            show.printf("save file to {}/{} successful!".format(os.getcwd(), filename),
                        color='green')
        # -save xx=xxxx, save the filtered data. data format ex:
        # {"app":"httpd", .....}
        else:
            key, value = self.cli_filter(fields, save=True)
            filename = "{}_{}_{}.json".format(self.dork, len(value), int(time.time()))
            # parse the data
            for v in value:
                dict_save = {}
                for index in range(len(key.split(','))):
                    dict_key = key.split(',')[index]
                    dict_value = v[index]
                    dict_save[dict_key] = dict_value
                # write to the local file
                file.add_to_file(filename, str(dict_save))
            show.printf("save file to {}/{} successful!".format(os.getcwd(), filename),
                        color='green')

    def load(self):
        """
        load a local file. it must be a json file in the format exported by zoomeye:
        {"total":123123, "matches":[{...}, {...}, {...}...], "facets":{{...}, {...}...}}
        :return:
        """
        data = file.read_file(self.dork)
        json_data = json.loads(data)
        self.total = json_data.get("total", 0)
        self.dork_data = json_data.get("matches", "")
        self.facet_data = json_data.get("facets", "")
        if self.total == 0 and self.dork_data == "" and self.facet_data == "":
            print("file format error!")
            exit(0)
        self.num = len(self.dork_data)
        return self.dork_data, self.facet_data, self.total

    def statistics(self, keys):
        """
        aggregate the currently acquired data locally instead of returning the
        aggregation result of the API directly.
        result ex:
        {'app': {'Gunicorn': 2, 'nginx': 14, 'Apache httpd': 9, '[unknown]': 3, 'Tornado httpd': 2},
         'port': {443: 29, 8443: 1}}
        :param keys: str, user input filter fields
        :return: None
        """
        data = {}
        key_list = keys.split(',')
        # cycle over the keys
        for key in key_list:
            count = {}
            for item in self.dork_data[:self.num]:
                zmdict = ZoomEyeDict(item)
                if stat_host_table.get(key.strip()) is None:
                    # check field validity
                    support_fields = ','.join(list(stat_host_table.keys()))
                    show.printf("filter command has unsupported field [{}], supported fields are [{}]"
                                .format(key, support_fields),
                                color='red')
                    exit(0)
                fields = zmdict.find(stat_host_table.get(key.strip()))
                # the value of the field returned by the API may be empty
                if fields == '':
                    fields = '[unknown]'
                r = count.get(fields)
                if not r:
                    count[fields] = 1
                else:
                    count[fields] = count[fields] + 1
            data[key] = count
        # print the result of the data aggregation
        show.print_stat(keys, data)
class CliZoomEye:
    def __init__(self, dork, num, resource, facet=None, force=False):
        self.dork = dork
        self.num = num
        self.resource = resource
        self.facet = facet
        self.force = force
        self.dork_data = list()
        self.facet_data = None
        self.total = 0
        self.api_key, self.access_token = file.get_auth_key()
        self.zoomeye = ZoomEye(api_key=self.api_key,
                               access_token=self.access_token)

    def handle_page(self):
        try:
            num = int(self.num)
            if num % 20 == 0:
                return int(num / 20)
            return int(num / 20) + 1
        except ValueError:
            if self.num == 'all':
                for i in range(3):
                    user_confirm = input(
                        "The data may exceed your quota. "
                        "If the quota is exceeded, all quota data will be returned. "
                        "Are you sure you want to get all the data?(y/N)\n")
                    if user_confirm == 'y':
                        self.zoomeye.dork_search(dork=self.dork,
                                                 page=1,
                                                 resource=self.resource,
                                                 facets=self.facet)
                        self.num = self.zoomeye.total
                        cache = Cache(self.dork, page=1, resource=self.resource)
                        cache.save(json.dumps(self.zoomeye.raw_data))
                        if self.num % 20 == 0:
                            return int(self.num / 20)
                        return int(self.num / 20) + 1
                    elif user_confirm == "N":
                        user_num = input("Please enter the required amount of data:")
                        return int(user_num)
                    else:
                        continue
                show.printf("too many invalid confirmations!", color='red')
        except Exception as e:
            show.printf(e, color='red')
            exit(0)

    def cache_dork(self, page, data):
        cache_file = Cache(self.dork, page=page, resource=self.resource)
        cache_file.save(json.dumps(data))

    def request_data(self):
        if os.path.exists(self.dork):
            self.load()
        else:
            page_count = self.handle_page()
            for page in range(page_count):
                cache_file = Cache(self.dork, self.resource, page)
                if cache_file.check() and self.force is False:
                    dork_data_list, self.facet_data, self.total = cache_file.load()
                    self.dork_data.extend(dork_data_list)
                else:
                    if self.resource == 'host':
                        self.facet = [
                            'app', 'device', 'service', 'os', 'port', 'country', 'city'
                        ]
                    if self.resource == 'web':
                        self.facet = [
                            'webapp', 'component', 'framework', 'frontend',
                            'server', 'waf', 'os', 'country', 'city'
                        ]
                    try:
                        dork_data_list = self.zoomeye.dork_search(
                            dork=self.dork,
                            page=page + 1,
                            resource=self.resource,
                            facets=self.facet)
                    except ValueError:
                        print("the access token has expired, please re-run the [zoomeye init] command. "
                              "it is recommended to initialize with an API key!")
                        exit(0)
                    self.facet_data = self.zoomeye.facet_data
                    self.total = self.zoomeye.total
                    self.dork_data.extend(dork_data_list)
                    self.cache_dork(page, self.zoomeye.raw_data)

    def default_show(self):
        self.request_data()
        if self.resource == 'host':
            show.show_host_default_data(self.dork_data, self.total)
        if self.resource == 'web':
            show.show_web_default_data(self.dork_data, self.total)

    def filter_data(self, keys, save=False):
        """
        select the filter field table according to web/search or host/search
        """
        if save is not True:
            self.request_data()
        if self.resource == 'host':
            tables = fields_tables_host
        if self.resource == 'web':
            tables = fields_tables_web
        has_equal = []
        not_equal = []
        # the ip field is displayed by default and placed first
        key_list = keys.split(',')
        try:
            # move the ip field to the first place
            key_index = key_list.index("ip")
            key_list.pop(key_index)
            key_list.insert(0, 'ip')
        # add ip as the first item when it is missing
        except ValueError:
            key_list.insert(0, 'ip')
        # process the user input fields, separating plain fields
        # from fields with an equal sign.
        for key in key_list:
            res = key.split('=')
            # field with an equal sign
            if len(res) == 2:
                has_equal.append(key)
                # not_equal.append(res[0])
            # field without an equal sign
            if len(res) == 1:
                # handle the wildcard character * in the field,
                # that is, query all fields
                if key == "*":
                    not_equal = list(tables.keys())
                    continue
                else:
                    not_equal.append(key)
        # the filter condition contains an equal sign,
        # ex: port,banner,app=MySQL
        if len(has_equal) != 0:
            equal_data = regexp(has_equal, tables, self.dork_data)
        # the filter condition is plain fields only,
        # ex: ip,port,app
        else:
            equal_data = self.dork_data
        # get the result
        result = filter_search_data(not_equal, tables, equal_data)
        equal = ','.join(not_equal)
        if save:
            return equal, result
        show.print_filter(equal, result[:self.num], has_equal)

    def facets_data(self, facet, figure):
        """
        select the facet field table according to web/search or host/search
        """
        self.request_data()
        if self.resource == 'host':
            tables = facets_table_host
        if self.resource == 'web':
            tables = facets_table_web
        show.print_facets(facet, self.facet_data, self.total, figure, tables)

    def save(self, fields):
        """
        save the raw API response data
        """
        self.request_data()
        name = re.findall(r"[a-zA-Z0-9_\u4e00-\u9fa5]+", self.dork)
        re_name = '_'.join(name)
        if fields == 'all':
            filename = "{}_{}_{}_{}.json".format(re_name, self.num,
                                                 self.resource, int(time.time()))
            data = {
                'total': self.total,
                'matches': self.dork_data,
                'facets': self.facet_data
            }
            with open(filename, 'w') as f:
                f.write(json.dumps(data))
            show.printf("save file to {}/{} successful!".format(os.getcwd(), filename),
                        color='green')
        # -save xx=xxxx, save the filtered data. data format ex:
        # {"app":"httpd", .....}
        else:
            key, value = self.filter_data(fields, save=True)
            filename = "{}_{}_{}.json".format(re_name, len(value), int(time.time()))
            # parse the data
            for v in value:
                dict_save = {}
                for index in range(len(key.split(','))):
                    dict_key = key.split(',')[index]
                    if isinstance(v[index], dict):
                        v[index] = v[index].get('name', 'unknown')
                    dict_value = v[index]
                    dict_save[dict_key] = dict_value
                # write to the local file
                file.add_to_file(filename, str(dict_save))
            show.printf("save file to {}/{} successful!".format(os.getcwd(), filename),
                        color='green')

    def load(self):
        """
        load a local json file; it must be a file exported by -save
        """
        with open(self.dork, 'r') as f:
            data = f.read()
        try:
            json_data = json.loads(data)
        except json.decoder.JSONDecodeError:
            show.printf('json format error', color='red')
            exit(0)
        self.total = json_data.get("total", 0)
        self.dork_data = json_data.get("matches", "")
        self.facet_data = json_data.get("facets", "")
        self.num = len(self.dork_data)

    def statistics(self, keys, figure):
        """
        local data statistics
        """
        self.request_data()
        if self.resource == 'web':
            tables = stat_web_table
        if self.resource == 'host':
            tables = stat_host_table
        data = {}
        key_list = keys.split(',')
        # cycle over the keys
        for key in key_list:
            count = {}
            for item in self.dork_data[:self.num]:
                zmdict = ZoomEyeDict(item)
                if tables.get(key.strip()) is None:
                    # check field validity
                    support_fields = ','.join(list(tables.keys()))
                    show.printf("filter command has unsupported field [{}], supported fields are [{}]"
                                .format(key, support_fields),
                                color='red')
                    exit(0)
                fields = zmdict.find(tables.get(key.strip()))
                # the value of the field returned by the API may be empty
                if fields == '' or isinstance(fields, list):
                    fields = '[unknown]'
                r = count.get(fields)
                if not r:
                    count[fields] = 1
                else:
                    count[fields] = count[fields] + 1
            data[key] = count
        # print the result of the data aggregation
        show.print_stat(keys, data, self.num, figure)

    def count(self):
        """
        show the dork count number
        """
        self.request_data()
        show.printf(self.total)
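# usage sketch (illustrative, not from the original project): this later CliZoomEye takes a
# resource type and can bypass the cache with force=True; the dork string is a placeholder.
cli = CliZoomEye('app:"Apache httpd"', 40, "host", force=False)
cli.default_show()                        # fetch (or load cached) data and print a summary
cli.filter_data("ip,port,app")            # field filtering for host results
cli.statistics("app,port", figure=None)   # local aggregation; figure=None assumes no chart output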