def parser_page_fake_data(url):
    """Parse *url* with technews_parser and merge the result into the
    fake-data pickle fixture used by the test suite.

    The existing pickled entries take precedence over the freshly parsed
    one (the old dict is copied over the new entry), matching the
    original merge order.

    Args:
        url: Page URL to parse and record.
    """
    page_data = technews_parser.parser_page(url)
    appending_dict = {url: page_data}

    pickle_path = the_path_of_this_file + '/pickle/parser_page/fake_data_pickle'

    # BUG FIX: pickle is a binary format — the file must be opened in
    # 'rb' mode, not 'r'; text mode makes pickle.load fail on Python 3.
    # Context managers also guarantee both handles are closed (the
    # original leaked the read handle).
    with open(pickle_path, 'rb') as targetfile_read:
        parser_page_target = pickle.load(targetfile_read)

    # Existing fixture entries win over the new one, as before.
    appending_dict.update(parser_page_target.copy())

    with open(pickle_path, 'wb') as targetfile:
        pickle.dump(appending_dict, targetfile)

    print('parser_page_fake_data is done')
def parser_page_fake_data(url):
    """Parse *url* and fold the result into the fake-data pickle fixture.

    NOTE(review): this is a duplicate definition of parser_page_fake_data
    (also defined earlier in this file); the later definition wins at
    import time — consider removing one copy.

    Previously pickled entries override the newly parsed entry, matching
    the original merge order.

    Args:
        url: Page URL to parse and record.
    """
    page_data = technews_parser.parser_page(url)
    appending_dict = {url: page_data}

    pickle_path = the_path_of_this_file + '/pickle/parser_page/fake_data_pickle'

    # BUG FIX: pickle data is binary — open with 'rb', not 'r' (text mode
    # breaks pickle.load on Python 3). 'with' also closes the read handle,
    # which the original leaked.
    with open(pickle_path, 'rb') as targetfile_read:
        parser_page_target = pickle.load(targetfile_read)

    # Existing fixture entries take precedence over the new one.
    appending_dict.update(parser_page_target.copy())

    with open(pickle_path, 'wb') as targetfile:
        pickle.dump(appending_dict, targetfile)

    print('parser_page_fake_data is done')
def test_parser_page(self):
    """parser_page(url) should return the recorded fake data for that url.

    requests.get is patched so the parser consumes the canned response
    from get_fake_request_parser_page instead of hitting the network.
    """
    # FIX: the original bound the mock with `as requests.get`, re-assigning
    # the very attribute patch.object already replaces — redundant and
    # confusing. Bind it to a local name instead; patching behavior is
    # unchanged.
    with patch.object(requests, 'get',
                      side_effect=get_fake_request_parser_page) as mock_get:
        url = get_test_urls_parser_page(1)
        result = technews_parser.parser_page(url)
        target = get_fake_data_parser_page(url)
        self.assertEqual(result, target)