def _parse_last_cve_results(self, api_data, max_res_counter):
    """
    Normalise the "last 30 CVEs" REST result for Resilient.

    Converts each record's 'Published' date to an epoch-millisecond
    timestamp (mutating the record in place) and returns at most
    ``max_res_counter`` records.

    :param api_data: list of CVE dictionaries from the last-CVEs REST call
    :param max_res_counter: maximum number of vulnerabilities to return
    :return: list of latest CVE dictionaries with 'Published' converted
    :raises NotImplementedError: when api_data is a dict or an unrecognized type
    """
    # Guard clauses: only a list payload is supported.
    if isinstance(api_data, dict):
        raise NotImplementedError("dictionary datatypes are not supported")
    if not isinstance(api_data, list):
        raise NotImplementedError(
            "result last cve call datatypes is not recognized")

    latest_cves = []
    remaining = max_res_counter
    for cve_record in api_data:
        if remaining == 0:
            break
        # Resilient tables store dates as epoch milliseconds.
        cve_record['Published'] = get_gm_epoch_time_stamp(
            date_string=cve_record.get('Published'))
        latest_cves.append(cve_record)
        remaining -= 1
    return latest_cves
def _parse_cve_results(self, api_data):
    """
    Normalise a single CVE record for Resilient.

    Converts the record's 'Published' date to an epoch-millisecond
    timestamp (mutating the record in place) and wraps it in a list.

    :param api_data: dictionary returned by the specific-CVE REST call
    :return: single-element list containing the CVE dictionary
    """
    # Resilient tables store dates as epoch milliseconds.
    api_data['Published'] = get_gm_epoch_time_stamp(
        date_string=api_data.get('Published'))
    return [api_data]
def _parse_search_results(self, api_data, cve_pub_date_from,
                          cve_pub_date_to, max_res_counter):
    """
    Normalise and filter CVE search REST results for Resilient.

    Every record's 'Published' date is converted to an epoch-millisecond
    timestamp (mutating the record in place, even for records that are
    filtered out). Records whose timestamp falls inside the inclusive
    [cve_pub_date_from, cve_pub_date_to] window are collected, up to
    ``max_res_counter`` records; a ``None`` bound is treated as open.

    :param api_data: CVE search REST result — either a list of records or
                     a dict whose 'data' key holds the list
    :param cve_pub_date_from: lower 'Published' bound (epoch ms) or None
    :param cve_pub_date_to: upper 'Published' bound (epoch ms) or None
    :param max_res_counter: maximum number of vulnerabilities to return
    :return: list of matching CVE dictionaries
    :raises NotImplementedError: when api_data is neither list nor dict
    """
    if isinstance(api_data, list):
        search_data = api_data
    elif isinstance(api_data, dict):
        search_data = api_data.get('data')
    else:
        raise NotImplementedError("Search Data type is not recognized")

    tmp_search_data = []
    if search_data is not None:
        for search_data_dict in search_data:
            # Resilient tables store dates as epoch milliseconds; mutate
            # unconditionally (matches original behavior even past the cap).
            pub_ts = get_gm_epoch_time_stamp(
                date_string=search_data_dict.get('Published'))
            search_data_dict['Published'] = pub_ts

            # Single inclusive range predicate replaces the four
            # copy-pasted from/to branch combinations.
            in_range = ((cve_pub_date_from is None or pub_ts >= cve_pub_date_from)
                        and (cve_pub_date_to is None or pub_ts <= cve_pub_date_to))
            if in_range and max_res_counter != 0:
                tmp_search_data.append(search_data_dict)
                max_res_counter -= 1
    return tmp_search_data
def test_get_gm_epoch_time_stamp(self):
    """Verify a known date converts to the expected epoch-millisecond value."""
    # 2018-09-11 00:00:00 UTC expressed as epoch milliseconds.
    expected_ms = 1536624000000
    assert cve.get_gm_epoch_time_stamp("2018-09-11") == expected_ms