Exemplo n.º 1
0
    def b_c_execute(self, page_soup):
        """Fetch the change-record (biangeng) frame and collect its rows.

        The incoming ``page_soup`` is ignored; the frame page is requested
        directly with the instance's shared ``self.params`` and re-parsed.
        Parsed rows are appended to ``self.qyxx_b_c``.
        """
        frame_url = 'http://gx.gsxt.gov.cn/gjjbj/gjjQueryCreditAction!xj_biangengFrame.dhtml'
        # The shared request-parameter dict is mutated in place.
        self.params.update({
            "regno": self.regno,
            "urlflag": '5',
            "ent_id": self.params["entId"],
        })
        request_headers = {
            'Host': 'gx.gsxt.gov.cn',
            'User-Agent': ('Mozilla/5.0 (Windows NT 6.1; WOW64) '
                           'AppleWebKit/537.36 (KHTML, like Gecko) '
                           'Chrome/54.0.2840.99 Safari/537.36'),
        }
        response = url_requests.get(frame_url,
                                    headers=request_headers,
                                    params=self.params,
                                    proxies=proxies)
        page_soup = BeautifulSoup(response.content, 'lxml')
        fields = ['xuhao', 'reason', 'before_change', 'after_change',
                  'date_to_change']
        detail_div = page_soup.find('div', {'class': 'qyqx-detail'})
        if not detail_div:
            return
        rows = CreditInfo.parse(detail_div,
                                'table', {'class': 'table-result'},
                                key_list=fields)
        self.qyxx_b_c.extend(rows)
Exemplo n.º 2
0
    def black_info_execute(self, page_soup):
        """Fetch the blacklist tab (tabPanel=05) and collect its rows.

        The incoming ``page_soup`` is ignored; the tab URL is derived from
        ``self.url`` and re-requested.

        NOTE(review): rows are appended to ``self.qyxx_adm_punishment``
        rather than a blacklist-specific list — confirm this is intended.
        """
        tab_url = self.url.split('&')[0] + '&tabPanel=05'
        request_headers = {
            'Host': 'gsxt.ynaic.gov.cn',
            'User-Agent': ('Mozilla/5.0 (Windows NT 6.1; WOW64) '
                           'AppleWebKit/537.36 (KHTML, like Gecko) '
                           'Chrome/54.0.2840.99 Safari/537.36'),
            'Referer': 'http://gsxt.ynaic.gov.cn/notice/search/ent_info_list',
        }
        response = url_requests.get(tab_url, headers=request_headers)
        page_soup = BeautifulSoup(response.content, 'lxml')
        fields = ['xuhao', 'reason_in', 'date_in', 'reason_out', 'date_out',
                  'authority']
        rows = CreditInfo.parse(page_soup,
                                'table', {'class': 'tableG'},
                                key_list=fields)
        self.qyxx_adm_punishment.extend(rows)
Exemplo n.º 3
0
    def pledge_execute(self, page_soup):
        """Fetch the equity-pledge registration frame and collect its rows.

        The incoming ``page_soup`` is ignored; the frame page is requested
        with the instance's shared ``self.params``.  Parsed rows are
        appended to ``self.qyxx_pledge``.
        """
        frame_url = 'http://gx.gsxt.gov.cn/gdczdj/gdczdjAction!gdczdjFrame.dhtml'
        self.params["urltag"] = '13'
        request_headers = {
            'Host': 'gx.gsxt.gov.cn',
            'User-Agent': ('Mozilla/5.0 (Windows NT 6.1; WOW64) '
                           'AppleWebKit/537.36 (KHTML, like Gecko) '
                           'Chrome/54.0.2840.99 Safari/537.36'),
        }
        response = url_requests.get(frame_url,
                                    headers=request_headers,
                                    params=self.params,
                                    proxies=proxies)
        page_soup = BeautifulSoup(response.content, 'lxml')
        fields = ['xuhao', 'reg_code', 'pleder', 'id_card', 'plede_amount',
                  'brower', 'brower_id_card', 'reg_date', 'status',
                  'gongshiriqi', 'changes']
        detail_div = page_soup.find('div', {'class': 'qyqx-detail'})
        if not detail_div:
            return
        rows = CreditInfo.parse(detail_div,
                                'table', {'class': 'table-result'},
                                key_list=fields)
        self.qyxx_pledge.extend(rows)
Exemplo n.º 4
0
    def abnormal_execute(self, page_soup):
        """Fetch the abnormal-operation (jyyc) list page and collect rows.

        The incoming ``page_soup`` is ignored; the list page is requested
        with the instance's shared ``self.params``.

        NOTE(review): rows are appended to ``self.qyxx_adm_punishment``
        rather than an abnormal-operation list — confirm this is intended.
        """
        list_url = 'http://gx.gsxt.gov.cn/gsgs/gsxzcfAction!list_jyycxx.dhtml'
        self.params.update({
            "regno": self.regno,
            "urlflag": '8',
        })
        request_headers = {
            'Host': 'gx.gsxt.gov.cn',
            'User-Agent': ('Mozilla/5.0 (Windows NT 6.1; WOW64) '
                           'AppleWebKit/537.36 (KHTML, like Gecko) '
                           'Chrome/54.0.2840.99 Safari/537.36'),
        }
        response = url_requests.get(list_url,
                                    headers=request_headers,
                                    params=self.params,
                                    proxies=proxies)
        page_soup = BeautifulSoup(response.content, 'lxml')
        fields = ['xuhao', 'reason', 'date_occurred', 'authority_occurred',
                  'reason_out', 'date_out', 'authority']
        rows = CreditInfo.parse(page_soup,
                                'table', {'class': 'table-result'},
                                key_list=fields)
        self.qyxx_adm_punishment.extend(rows)
Exemplo n.º 5
0
    def adm_punishment_execute(self, page_soup):
        """Fetch the administrative-punishment frame and collect its rows.

        The incoming ``page_soup`` is ignored; the frame page is requested
        with the instance's shared ``self.params``.  Parsed rows are
        appended to ``self.qyxx_adm_punishment``.

        NOTE(review): the URL contains a double slash ('//gdgq') — kept
        as-is since the server may accept it; confirm against a live call.
        """
        frame_url = 'http://gx.gsxt.gov.cn//gdgq/gdgqAction!xj_qyxzcfFrame.dhtml'
        self.params["urltag"] = '14'
        request_headers = {
            'Host': 'gx.gsxt.gov.cn',
            'User-Agent': ('Mozilla/5.0 (Windows NT 6.1; WOW64) '
                           'AppleWebKit/537.36 (KHTML, like Gecko) '
                           'Chrome/54.0.2840.99 Safari/537.36'),
        }
        response = url_requests.get(frame_url,
                                    headers=request_headers,
                                    params=self.params,
                                    proxies=proxies)
        page_soup = BeautifulSoup(response.content, 'lxml')
        fields = ['xuhao', 'pun_number', 'reason', 'fines', 'authority',
                  'pun_date', 'gongshiriqi', 'detail']
        detail_div = page_soup.find('div', {'class': 'qyqx-detail'})
        if not detail_div:
            return
        rows = CreditInfo.parse(detail_div,
                                'table', {'class': 'table-result'},
                                key_list=fields)
        self.qyxx_adm_punishment.extend(rows)
Exemplo n.º 6
0
    def spot_check_execute(self, page_soup):
        """Fetch the spot-check (ccjc) list page and collect its rows.

        The incoming ``page_soup`` is ignored; the list page is requested
        with the instance's shared ``self.params``.  Parsed rows are
        appended to ``self.qyxx_spot_check``.
        """
        list_url = 'http://gx.gsxt.gov.cn/gsgs/gsxzcfAction!xj_list_ccjcxx.dhtml'
        self.params.update({
            "urltag": '10',
            "ent_id": self.params["entId"],
        })
        request_headers = {
            'Host': 'gx.gsxt.gov.cn',
            'User-Agent': ('Mozilla/5.0 (Windows NT 6.1; WOW64) '
                           'AppleWebKit/537.36 (KHTML, like Gecko) '
                           'Chrome/54.0.2840.99 Safari/537.36'),
        }
        response = url_requests.get(list_url,
                                    headers=request_headers,
                                    params=self.params,
                                    proxies=proxies)
        page_soup = BeautifulSoup(response.content, 'lxml')
        detail_div = page_soup.find('div', {'class': 'qyqx-detail'})
        if not detail_div:
            return
        fields = ['xuhao', 'authority', 'spot_type', 'spot_date',
                  'spot_result']
        rows = CreditInfo.parse(detail_div,
                                'table', {'class': 'table-result'},
                                key_list=fields)
        self.qyxx_spot_check.extend(rows)
Exemplo n.º 7
0
    def stockholder_change_execute(self, page_soup):
        """Fetch the stockholder-change tab and collect its rows.

        The incoming ``page_soup`` is ignored; the tab page is requested
        with the instance's shared ``self.params``.  Parsed rows are
        appended to ``self.qyxx_stockholder_change``.
        """
        tab_url = 'http://gx.gsxt.gov.cn/newChange/newChangeAction!getTabForNB_new.dhtml'
        self.params.update({
            "urltag": '15',
            "flag_num": '2',
        })
        request_headers = {
            'Host': 'gx.gsxt.gov.cn',
            'User-Agent': ('Mozilla/5.0 (Windows NT 6.1; WOW64) '
                           'AppleWebKit/537.36 (KHTML, like Gecko) '
                           'Chrome/54.0.2840.99 Safari/537.36'),
        }
        response = url_requests.get(tab_url,
                                    headers=request_headers,
                                    params=self.params,
                                    proxies=proxies)
        page_soup = BeautifulSoup(response.content, 'lxml')
        fields = ['xuhao', 'person', 'stock', 'person_get', 'court',
                  'detail']
        section = page_soup.find('div', {'id': 'layout-06_02_01'})
        if not section:
            return
        rows = CreditInfo.parse(section,
                                'table', {'class': 'tableG'},
                                key_list=fields)
        self.qyxx_stockholder_change.extend(rows)
Exemplo n.º 8
0
    def mortgage_basic_execute(self, page_soup):
        """Fetch the chattel-mortgage (dcdy) frame and collect its rows.

        The incoming ``page_soup`` is ignored; the frame page is requested
        with the instance's shared ``self.params``.  Parsed rows are
        appended to ``self.qyxx_mortgage_basic``.
        """
        frame_url = 'http://gx.gsxt.gov.cn/gjjbjTab/gjjTabQueryCreditAction!dcdyFrame.dhtml'
        self.params["urltag"] = '12'
        request_headers = {
            'Host': 'gx.gsxt.gov.cn',
            'User-Agent': ('Mozilla/5.0 (Windows NT 6.1; WOW64) '
                           'AppleWebKit/537.36 (KHTML, like Gecko) '
                           'Chrome/54.0.2840.99 Safari/537.36'),
        }
        response = url_requests.get(frame_url,
                                    headers=request_headers,
                                    params=self.params,
                                    proxies=proxies)
        page_soup = BeautifulSoup(response.content, 'lxml')
        detail_div = page_soup.find('div', {'class': 'qyqx-detail'})
        if not detail_div:
            return
        fields = ['xuhao', 'mortgage_reg_num', 'date_reg', 'authority',
                  'amount', 'status', 'gsrq', 'xiangqing']
        rows = CreditInfo.parse(detail_div,
                                'table', {'class': 'table-result'},
                                key_list=fields)
        self.qyxx_mortgage_basic.extend(rows)
Exemplo n.º 9
0
def change_detail(url, params, cookies):
    """Fetch and parse the before/after detail page of a change record.

    :param url: detail-page URL to request.
    :param params: query parameters forwarded to the GET request.
    :param cookies: session cookies forwarded to the GET request.
    :return: the parsed info, stringified and decoded to unicode.
    """

    headers = {
        'User-Agent':('Mozilla/5.0 (Windows NT 6.1; WOW64) '
                      'AppleWebKit/537.36 (KHTML, like Gecko) '
                      'Chrome/54.0.2840.71 Safari/537.36'),
        'Host':'gsxt.gdgs.gov.cn',
    }
    html = url_requests.get(url,
                            params=params,
                            headers=headers,
                            cookies=cookies).text

    soup = BeautifulSoup(html, 'lxml')
    # Only the holder name and ID-card columns are extracted here.
    key_list = ['holderName', 'idCard']

    info = CreditInfo.parse(soup,
                            name='div', attrs={'id': 'jibenxinxi'},
                            key_list=key_list)
    # NOTE(review): str(...).decode('utf-8') is Python 2 only; it will raise
    # AttributeError on Python 3 and UnicodeDecodeError/UnicodeEncodeError on
    # non-ASCII content depending on what CreditInfo.parse returns — confirm
    # the target runtime before changing.
    info = str(info).decode('utf-8')

    return info
Exemplo n.º 10
0
    def b_c_execute(self, page_soup):
        """Collect change-record rows from the already-fetched page soup.

        Rows found under div#layout-01_01_03 are appended to
        ``self.qyxx_b_c``; nothing happens when the section is absent.
        """
        fields = ['reason', 'before_change', 'after_change',
                  'date_to_change']
        section = page_soup.find('div', {'id': 'layout-01_01_03'})
        if not section:
            return
        rows = CreditInfo.parse(section, 'table', {'class': 'tableG'},
                                key_list=fields)
        self.qyxx_b_c.extend(rows)
Exemplo n.º 11
0
    def s_h_execute(self, page_soup):
        """Collect shareholder rows from the already-fetched page soup.

        Rows found under div#layout-01_01_02 are appended to
        ``self.qyxx_s_h``; nothing happens when the section is absent.
        """
        fields = ['s_h_name', 's_h_type', 's_h_id_type', 's_h_id', 'detail']
        section = page_soup.find('div', {'id': 'layout-01_01_02'})
        if not section:
            return
        rows = CreditInfo.parse(section, 'table', {'class': 'tableG'},
                                key_list=fields)
        self.qyxx_s_h.extend(rows)
Exemplo n.º 12
0
    def spot_check_execute(self, page_soup):
        """Collect spot-check rows from the already-fetched page soup.

        Rows found under div#layout-01_08_01 are appended to
        ``self.qyxx_spot_check``; nothing happens when the section is absent.
        """
        section = page_soup.find('div', {'id': 'layout-01_08_01'})
        if not section:
            return
        fields = ['xuhao', 'authority', 'spot_type', 'spot_date',
                  'spot_result']
        rows = CreditInfo.parse(section, 'table', {'class': 'tableG'},
                                key_list=fields)
        self.qyxx_spot_check.extend(rows)
Exemplo n.º 13
0
    def stock_freeze_execute(self, page_soup):
        """Collect stock-freeze rows from the already-fetched page soup.

        Rows found under div#layout-06_01_01 are appended to
        ``self.qyxx_stock_freeze``; nothing happens when the section is
        absent.
        """
        fields = ['xuhao', 'person', 'stock', 'court', 'notice_number',
                  'status', 'detail']
        section = page_soup.find('div', {'id': 'layout-06_01_01'})
        if not section:
            return
        rows = CreditInfo.parse(section, 'table', {'class': 'tableG'},
                                key_list=fields)
        self.qyxx_stock_freeze.extend(rows)
Exemplo n.º 14
0
    def stockholder_change_execute(self, page_soup):
        """Collect stockholder-change rows from the already-fetched soup.

        Rows found under div#layout-06_02_01 are appended to
        ``self.qyxx_stockholder_change``; nothing happens when the section
        is absent.
        """
        fields = ['xuhao', 'person', 'stock', 'person_get', 'court',
                  'detail']
        section = page_soup.find('div', {'id': 'layout-06_02_01'})
        if not section:
            return
        rows = CreditInfo.parse(section, 'table', {'class': 'tableG'},
                                key_list=fields)
        self.qyxx_stockholder_change.extend(rows)
Exemplo n.º 15
0
    def mortgage_basic_execute(self, page_soup):
        """Collect chattel-mortgage rows from the already-fetched soup.

        Rows found under div#layout-01_04_01 are appended to
        ``self.qyxx_mortgage_basic``; nothing happens when the section is
        absent.
        """
        section = page_soup.find('div', {'id': 'layout-01_04_01'})
        if not section:
            return
        fields = ['xuhao', 'mortgage_reg_num', 'date_reg', 'authority',
                  'amount', 'status', 'gsrq', 'xiangqing']
        rows = CreditInfo.parse(section, 'table', {'class': 'tableG'},
                                key_list=fields)
        self.qyxx_mortgage_basic.extend(rows)
Exemplo n.º 16
0
    def pledge_execute(self, page_soup):
        """Collect equity-pledge rows from the already-fetched page soup.

        Rows found under div#layout-01_03_01 are appended to
        ``self.qyxx_pledge``; nothing happens when the section is absent.
        """
        fields = ['xuhao', 'reg_code', 'pleder', 'id_card', 'plede_amount',
                  'brower', 'brower_id_card', 'reg_date', 'status',
                  'gongshiriqi', 'changes']
        section = page_soup.find('div', {'id': 'layout-01_03_01'})
        if not section:
            return
        rows = CreditInfo.parse(section, 'table', {'class': 'tableG'},
                                key_list=fields)
        self.qyxx_pledge.extend(rows)
Exemplo n.º 17
0
    def s_h_execute(self, page_soup):
        """Fetch the investor/shareholder (touziren) page and collect rows.

        The incoming ``page_soup`` is ignored; the page is re-requested
        with the instance's shared ``self.params``.  Parsed rows are
        appended to ``self.qyxx_s_h``.
        """
        url = 'http://gx.gsxt.gov.cn/gjjbj/gjjQueryCreditAction!touzirenInfo.dhtml'
        self.params["regno"] = self.regno
        self.params["urltag"] = '2'
        self.params["ent_id"] = self.params["entId"]
        headers = {
            'Host':
            'gx.gsxt.gov.cn',
            'User-Agent': ('Mozilla/5.0 (Windows NT 6.1; WOW64) '
                           'AppleWebKit/537.36 (KHTML, like Gecko) '
                           'Chrome/54.0.2840.99 Safari/537.36')
        }
        resp = url_requests.get(url,
                                headers=headers,
                                params=self.params,
                                proxies=proxies)
        page_soup = BeautifulSoup(resp.content, 'lxml')

        soup = page_soup.find('div', {'class': 'qyqx-detail'})
        if soup:
            try:
                key_list = [
                    'xuhao', 's_h_name', 's_h_type', 's_h_id_type', 's_h_id',
                    'detail'
                ]
                # BUG FIX: parse the matched detail <div> (``soup``), not the
                # whole ``page_soup`` — the ``if soup`` guard was otherwise
                # pointless, and every sibling *_execute method parses the
                # narrowed element.
                info = CreditInfo.parse(soup,
                                        'table', {'class': 'table-result'},
                                        key_list=key_list)
            except Exception:
                # Fallback for result tables without the 'detail' column.
                # Narrowed from a bare ``except:`` so KeyboardInterrupt and
                # SystemExit are no longer swallowed.
                key_list = [
                    'xuhao', 's_h_name', 's_h_type', 's_h_id_type', 's_h_id'
                ]
                info = CreditInfo.parse(soup,
                                        'table', {'class': 'table-result'},
                                        key_list=key_list)

            self.qyxx_s_h.extend(info)