def b_c_execute(self, page_soup):
    """Fetch the change-record (biangeng) frame and collect its table rows."""
    self.params["regno"] = self.regno
    self.params["urlflag"] = '5'
    self.params["ent_id"] = self.params["entId"]
    request_headers = {
        'Host': 'gx.gsxt.gov.cn',
        'User-Agent': ('Mozilla/5.0 (Windows NT 6.1; WOW64) '
                       'AppleWebKit/537.36 (KHTML, like Gecko) '
                       'Chrome/54.0.2840.99 Safari/537.36'),
    }
    response = url_requests.get(
        'http://gx.gsxt.gov.cn/gjjbj/gjjQueryCreditAction!xj_biangengFrame.dhtml',
        headers=request_headers, params=self.params, proxies=proxies)
    document = BeautifulSoup(response.content, 'lxml')
    detail = document.find('div', {'class': 'qyqx-detail'})
    if not detail:
        return
    rows = CreditInfo.parse(
        detail, 'table', {'class': 'table-result'},
        key_list=['xuhao', 'reason', 'before_change',
                  'after_change', 'date_to_change'])
    self.qyxx_b_c.extend(rows)
def black_info_execute(self, page_soup):
    """Fetch the blacklist tab (tabPanel=05) and collect its table rows."""
    target = self.url.split('&')[0] + '&tabPanel=05'
    request_headers = {
        'Host': 'gsxt.ynaic.gov.cn',
        'User-Agent': ('Mozilla/5.0 (Windows NT 6.1; WOW64) '
                       'AppleWebKit/537.36 (KHTML, like Gecko) '
                       'Chrome/54.0.2840.99 Safari/537.36'),
        'Referer': 'http://gsxt.ynaic.gov.cn/notice/search/ent_info_list',
    }
    response = url_requests.get(target, headers=request_headers)
    document = BeautifulSoup(response.content, 'lxml')
    rows = CreditInfo.parse(
        document, 'table', {'class': 'tableG'},
        key_list=['xuhao', 'reason_in', 'date_in',
                  'reason_out', 'date_out', 'authority'])
    # NOTE(review): blacklist rows go into qyxx_adm_punishment — looks like a
    # copy/paste target; confirm a dedicated blacklist container isn't intended.
    self.qyxx_adm_punishment.extend(rows)
def pledge_execute(self, page_soup):
    """Fetch the equity-pledge registration frame and collect its table rows."""
    self.params["urltag"] = '13'
    request_headers = {
        'Host': 'gx.gsxt.gov.cn',
        'User-Agent': ('Mozilla/5.0 (Windows NT 6.1; WOW64) '
                       'AppleWebKit/537.36 (KHTML, like Gecko) '
                       'Chrome/54.0.2840.99 Safari/537.36'),
    }
    response = url_requests.get(
        'http://gx.gsxt.gov.cn/gdczdj/gdczdjAction!gdczdjFrame.dhtml',
        headers=request_headers, params=self.params, proxies=proxies)
    document = BeautifulSoup(response.content, 'lxml')
    detail = document.find('div', {'class': 'qyqx-detail'})
    if not detail:
        return
    rows = CreditInfo.parse(
        detail, 'table', {'class': 'table-result'},
        key_list=['xuhao', 'reg_code', 'pleder', 'id_card', 'plede_amount',
                  'brower', 'brower_id_card', 'reg_date', 'status',
                  'gongshiriqi', 'changes'])
    self.qyxx_pledge.extend(rows)
def abnormal_execute(self, page_soup):
    """Fetch the abnormal-operation (jyyc) list and collect its table rows."""
    self.params["regno"] = self.regno
    self.params["urlflag"] = '8'
    request_headers = {
        'Host': 'gx.gsxt.gov.cn',
        'User-Agent': ('Mozilla/5.0 (Windows NT 6.1; WOW64) '
                       'AppleWebKit/537.36 (KHTML, like Gecko) '
                       'Chrome/54.0.2840.99 Safari/537.36'),
    }
    response = url_requests.get(
        'http://gx.gsxt.gov.cn/gsgs/gsxzcfAction!list_jyycxx.dhtml',
        headers=request_headers, params=self.params, proxies=proxies)
    # Unlike the sibling methods, the whole page is parsed here (no
    # qyqx-detail guard) — kept as-is to preserve behavior.
    document = BeautifulSoup(response.content, 'lxml')
    rows = CreditInfo.parse(
        document, 'table', {'class': 'table-result'},
        key_list=['xuhao', 'reason', 'date_occurred', 'authority_occurred',
                  'reason_out', 'date_out', 'authority'])
    # NOTE(review): abnormal-operation rows go into qyxx_adm_punishment —
    # confirm a dedicated container isn't intended.
    self.qyxx_adm_punishment.extend(rows)
def adm_punishment_execute(self, page_soup):
    """Fetch the administrative-punishment frame and collect its table rows."""
    # NOTE(review): the doubled slash ('//gdgq') is preserved byte-for-byte;
    # the server apparently accepts it — confirm before normalizing.
    self.params["urltag"] = '14'
    request_headers = {
        'Host': 'gx.gsxt.gov.cn',
        'User-Agent': ('Mozilla/5.0 (Windows NT 6.1; WOW64) '
                       'AppleWebKit/537.36 (KHTML, like Gecko) '
                       'Chrome/54.0.2840.99 Safari/537.36'),
    }
    response = url_requests.get(
        'http://gx.gsxt.gov.cn//gdgq/gdgqAction!xj_qyxzcfFrame.dhtml',
        headers=request_headers, params=self.params, proxies=proxies)
    document = BeautifulSoup(response.content, 'lxml')
    detail = document.find('div', {'class': 'qyqx-detail'})
    if not detail:
        return
    rows = CreditInfo.parse(
        detail, 'table', {'class': 'table-result'},
        key_list=['xuhao', 'pun_number', 'reason', 'fines', 'authority',
                  'pun_date', 'gongshiriqi', 'detail'])
    self.qyxx_adm_punishment.extend(rows)
def spot_check_execute(self, page_soup):
    """Fetch the spot-check (ccjc) frame and collect its table rows."""
    self.params["urltag"] = '10'
    self.params["ent_id"] = self.params["entId"]
    request_headers = {
        'Host': 'gx.gsxt.gov.cn',
        'User-Agent': ('Mozilla/5.0 (Windows NT 6.1; WOW64) '
                       'AppleWebKit/537.36 (KHTML, like Gecko) '
                       'Chrome/54.0.2840.99 Safari/537.36'),
    }
    response = url_requests.get(
        'http://gx.gsxt.gov.cn/gsgs/gsxzcfAction!xj_list_ccjcxx.dhtml',
        headers=request_headers, params=self.params, proxies=proxies)
    document = BeautifulSoup(response.content, 'lxml')
    detail = document.find('div', {'class': 'qyqx-detail'})
    if not detail:
        return
    rows = CreditInfo.parse(
        detail, 'table', {'class': 'table-result'},
        key_list=['xuhao', 'authority', 'spot_type', 'spot_date',
                  'spot_result'])
    self.qyxx_spot_check.extend(rows)
def stockholder_change_execute(self, page_soup):
    """Fetch the stockholder-change tab and collect its table rows."""
    self.params["urltag"] = '15'
    self.params["flag_num"] = '2'
    request_headers = {
        'Host': 'gx.gsxt.gov.cn',
        'User-Agent': ('Mozilla/5.0 (Windows NT 6.1; WOW64) '
                       'AppleWebKit/537.36 (KHTML, like Gecko) '
                       'Chrome/54.0.2840.99 Safari/537.36'),
    }
    response = url_requests.get(
        'http://gx.gsxt.gov.cn/newChange/newChangeAction!getTabForNB_new.dhtml',
        headers=request_headers, params=self.params, proxies=proxies)
    document = BeautifulSoup(response.content, 'lxml')
    panel = document.find('div', {'id': 'layout-06_02_01'})
    if not panel:
        return
    rows = CreditInfo.parse(
        panel, 'table', {'class': 'tableG'},
        key_list=['xuhao', 'person', 'stock', 'person_get', 'court',
                  'detail'])
    self.qyxx_stockholder_change.extend(rows)
def mortgage_basic_execute(self, page_soup):
    """Fetch the chattel-mortgage (dcdy) frame and collect its table rows."""
    self.params["urltag"] = '12'
    request_headers = {
        'Host': 'gx.gsxt.gov.cn',
        'User-Agent': ('Mozilla/5.0 (Windows NT 6.1; WOW64) '
                       'AppleWebKit/537.36 (KHTML, like Gecko) '
                       'Chrome/54.0.2840.99 Safari/537.36'),
    }
    response = url_requests.get(
        'http://gx.gsxt.gov.cn/gjjbjTab/gjjTabQueryCreditAction!dcdyFrame.dhtml',
        headers=request_headers, params=self.params, proxies=proxies)
    document = BeautifulSoup(response.content, 'lxml')
    detail = document.find('div', {'class': 'qyqx-detail'})
    if not detail:
        return
    rows = CreditInfo.parse(
        detail, 'table', {'class': 'table-result'},
        key_list=['xuhao', 'mortgage_reg_num', 'date_reg', 'authority',
                  'amount', 'status', 'gsrq', 'xiangqing'])
    self.qyxx_mortgage_basic.extend(rows)
def change_detail(url, params, cookies):
    """Fetch the before/after detail page referenced by a change record.

    :param url: detail-page URL
    :param params: query parameters for the request
    :param cookies: session cookies to send with the request
    :return: parsed info, decoded to unicode (Python 2 ``str.decode``)
    """
    request_headers = {
        'User-Agent': ('Mozilla/5.0 (Windows NT 6.1; WOW64) '
                       'AppleWebKit/537.36 (KHTML, like Gecko) '
                       'Chrome/54.0.2840.71 Safari/537.36'),
        'Host': 'gsxt.gdgs.gov.cn',
    }
    response = url_requests.get(url, params=params,
                                headers=request_headers, cookies=cookies)
    document = BeautifulSoup(response.text, 'lxml')
    parsed = CreditInfo.parse(document, name='div',
                              attrs={'id': 'jibenxinxi'},
                              key_list=['holderName', 'idCard'])
    return str(parsed).decode('utf-8')
def b_c_execute(self, page_soup):
    """Collect change-record rows from the layout-01_01_03 panel."""
    panel = page_soup.find('div', {'id': 'layout-01_01_03'})
    if not panel:
        return
    rows = CreditInfo.parse(
        panel, 'table', {'class': 'tableG'},
        key_list=['reason', 'before_change', 'after_change',
                  'date_to_change'])
    self.qyxx_b_c.extend(rows)
def s_h_execute(self, page_soup):
    """Collect shareholder rows from the layout-01_01_02 panel."""
    panel = page_soup.find('div', {'id': 'layout-01_01_02'})
    if not panel:
        return
    rows = CreditInfo.parse(
        panel, 'table', {'class': 'tableG'},
        key_list=['s_h_name', 's_h_type', 's_h_id_type', 's_h_id',
                  'detail'])
    self.qyxx_s_h.extend(rows)
def spot_check_execute(self, page_soup):
    """Collect spot-check rows from the layout-01_08_01 panel."""
    panel = page_soup.find('div', {'id': 'layout-01_08_01'})
    if not panel:
        return
    rows = CreditInfo.parse(
        panel, 'table', {'class': 'tableG'},
        key_list=['xuhao', 'authority', 'spot_type', 'spot_date',
                  'spot_result'])
    self.qyxx_spot_check.extend(rows)
def stock_freeze_execute(self, page_soup):
    """Collect stock-freeze rows from the layout-06_01_01 panel."""
    panel = page_soup.find('div', {'id': 'layout-06_01_01'})
    if not panel:
        return
    rows = CreditInfo.parse(
        panel, 'table', {'class': 'tableG'},
        key_list=['xuhao', 'person', 'stock', 'court', 'notice_number',
                  'status', 'detail'])
    self.qyxx_stock_freeze.extend(rows)
def stockholder_change_execute(self, page_soup):
    """Collect stockholder-change rows from the layout-06_02_01 panel."""
    panel = page_soup.find('div', {'id': 'layout-06_02_01'})
    if not panel:
        return
    rows = CreditInfo.parse(
        panel, 'table', {'class': 'tableG'},
        key_list=['xuhao', 'person', 'stock', 'person_get', 'court',
                  'detail'])
    self.qyxx_stockholder_change.extend(rows)
def mortgage_basic_execute(self, page_soup):
    """Collect chattel-mortgage rows from the layout-01_04_01 panel."""
    panel = page_soup.find('div', {'id': 'layout-01_04_01'})
    if not panel:
        return
    rows = CreditInfo.parse(
        panel, 'table', {'class': 'tableG'},
        key_list=['xuhao', 'mortgage_reg_num', 'date_reg', 'authority',
                  'amount', 'status', 'gsrq', 'xiangqing'])
    self.qyxx_mortgage_basic.extend(rows)
def pledge_execute(self, page_soup):
    """Collect equity-pledge rows from the layout-01_03_01 panel."""
    panel = page_soup.find('div', {'id': 'layout-01_03_01'})
    if not panel:
        return
    rows = CreditInfo.parse(
        panel, 'table', {'class': 'tableG'},
        key_list=['xuhao', 'reg_code', 'pleder', 'id_card', 'plede_amount',
                  'brower', 'brower_id_card', 'reg_date', 'status',
                  'gongshiriqi', 'changes'])
    self.qyxx_pledge.extend(rows)
def s_h_execute(self, page_soup):
    """Fetch the shareholder (touziren) page and collect its table rows.

    The table sometimes carries a trailing 'detail' column; the six-column
    parse is tried first and the five-column layout used as a fallback.
    """
    url = 'http://gx.gsxt.gov.cn/gjjbj/gjjQueryCreditAction!touzirenInfo.dhtml'
    self.params["regno"] = self.regno
    self.params["urltag"] = '2'
    self.params["ent_id"] = self.params["entId"]
    headers = {
        'Host': 'gx.gsxt.gov.cn',
        'User-Agent': ('Mozilla/5.0 (Windows NT 6.1; WOW64) '
                       'AppleWebKit/537.36 (KHTML, like Gecko) '
                       'Chrome/54.0.2840.99 Safari/537.36')
    }
    resp = url_requests.get(url, headers=headers,
                            params=self.params, proxies=proxies)
    page_soup = BeautifulSoup(resp.content, 'lxml')
    soup = page_soup.find('div', {'class': 'qyqx-detail'})
    if not soup:
        return
    try:
        # Six-column layout (with trailing detail column).
        # Fix: parse the located detail div (as the sibling methods do)
        # instead of re-scanning the whole page.
        info = CreditInfo.parse(
            soup, 'table', {'class': 'table-result'},
            key_list=['xuhao', 's_h_name', 's_h_type', 's_h_id_type',
                      's_h_id', 'detail'])
    except Exception:
        # Fallback: five-column layout without the detail column.
        # (Was a bare `except:`, which also swallowed KeyboardInterrupt.)
        info = CreditInfo.parse(
            soup, 'table', {'class': 'table-result'},
            key_list=['xuhao', 's_h_name', 's_h_type', 's_h_id_type',
                      's_h_id'])
    self.qyxx_s_h.extend(info)