Example #1
import grequests


def get(url):
    """Fire an async GET and return the grequests response generator."""
    if scrapy_configs.configs('debug'):
        logger.info(url)
    req_list = [
        grequests.request("GET",
                          url=url,
                          headers=scrapy_configs.configs('api_headers'),
                          timeout=10)
    ]
    return grequests.imap(req_list, exception_handler=err_handler)
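grequests.imap returns a lazy generator, so nothing is actually sent until it is iterated. A minimal usage sketch, assuming the surrounding scrapy_configs setup is in place (the endpoint URL below is hypothetical):

# Hypothetical caller: iterating the generator sends the request.
for resp in get('https://example.com/api/stocks'):
    logger.info(resp.status_code)  # failures are routed to err_handler instead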
Example #2
    def start_requests(self, spider_name):
        # Load per-spider settings from the shared config.
        self.allowed_domains = scrapy_configs.configs('allowed_domains',
                                                      spider_name)
        self.start_urls = scrapy_configs.configs('start_urls', spider_name)
        # self.concepts = theme_spider.read_csv()
        # self.text_processing = TextProcessing(scrapy_configs.configs('stopwords_path'),
        #                                       scrapy_configs.configs('dictionary_path'))
        # All the stock names
        self.stocks = self.get_stocks_from_db()
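The configs(key, spider_name) lookups above imply a per-spider config store. A minimal sketch of what such a backing store might look like; the structure and all values here are assumptions for illustration, not the project's actual config:

# Hypothetical backing store for scrapy_configs.configs(key, spider_name).
_CONFIG = {
    'theme_spider': {                                # hypothetical spider name
        'allowed_domains': ['example.com'],          # placeholder domain
        'start_urls': ['https://example.com/list'],  # placeholder URL
    },
    'debug': True,
}

def configs(key, spider_name=None):
    """Return a per-spider value when spider_name is given, else a global one."""
    if spider_name is not None:
        return _CONFIG.get(spider_name, {}).get(key)
    return _CONFIG.get(key)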
Example #3
import json

import grequests


def post(url, item):
    """Fire an async POST with the JSON-serialized item; return the response generator."""
    if scrapy_configs.configs('debug'):
        # logger.info(api_url)
        logger.info(json.dumps(item, ensure_ascii=False))
    req_list = [
        grequests.request("POST",
                          url=url,
                          data=json.dumps(item),
                          headers=scrapy_configs.configs('api_headers'),
                          timeout=10)
    ]
    return grequests.imap(req_list, exception_handler=err_handler)
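Like get(), post() only builds the request; it is sent when the generator is consumed. A hedged usage sketch (the URL and item below are made up):

# Hypothetical usage: the POST fires when the generator is iterated.
item = {'code': '600000', 'name': 'placeholder'}
for resp in post('https://example.com/api/items', item):
    logger.info(resp.status_code)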
Example #4
def err_handler(request, exception):
    """Exception handler for grequests.imap: log failures when debugging."""
    if scrapy_configs.configs('debug'):
        logger.error(exception)
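err_handler matches the (request, exception) callback signature grequests.imap expects; since it returns None, failed requests are simply dropped from the result stream. A small hedged smoke test, assuming the private address below is unreachable from the host:

import grequests

# Hypothetical check: a non-routable address should raise a connection error,
# so grequests.imap calls err_handler and yields nothing for this request.
bad = [grequests.request("GET", url="http://10.255.255.1", timeout=1)]
for resp in grequests.imap(bad, exception_handler=err_handler):
    pass  # never reached for the failed request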
Example #5
    def get_stocks_from_db(self):
        """Fetch all stocks from the DB-backed API."""
        # Start with a dict, not a list, so .get() below is safe even when
        # the request fails and the loop body never runs.
        payload = {}
        for resp in request_utils.get(scrapy_configs.configs('stocks_path')):
            payload = json.loads(resp.content)
        return payload.get('data')
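The method assumes the stocks endpoint returns a JSON object with a 'data' key; the record shape below is an assumption for illustration only:

# Hypothetical response body from the stocks_path endpoint:
# {"data": [{"code": "600000", "name": "..."}, ...]}
# payload.get('data') then yields the list of stock records, or None on failure.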