コード例 #1
0
def install_history_financial_crawler(stock_code):
    """Fetch and persist the historical financial statements of one stock.

    Downloads five report pages from the 163.com quote service — balance
    sheet (zcfzb), cash-flow statement (xjllb), income statement (lrb),
    key financial indicators (zycwzb) and report summary (cwbbzy) — and
    hands each HTTP response to its matching parser/saver helper.

    :param stock_code: stock code string interpolated into each report URL
    :return: None (side effect: each helper parses and stores its report)
    """
    baseurl = 'http://quotes.money.163.com'
    headers = {
        'Accept':
        'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
        'Accept-Encoding': 'gzip, deflate',
        'Accept-Language': 'zh-CN,zh;q=0.8',
        'User-Agent': get_user_agent(),
    }
    # All five reports share the exact same fetch pattern; drive it from a
    # table instead of repeating the GET + handler call five times.
    # Order is preserved from the original implementation.
    report_handlers = (
        ('zcfzb', zcfzb),    # balance sheet
        ('xjllb', xjllb),    # cash-flow statement
        ('lrb', lrb),        # income statement
        ('zycwzb', zycwzb),  # key financial indicators
        ('cwbbzy', cwbbzy),  # financial report summary
    )
    for slug, handler in report_handlers:
        url = '%s/service/%s_%s.html' % (baseurl, slug, stock_code)
        response = requests.get(url=url, headers=headers)
        handler(response, stock_code)

    print('%s save history financial of stock successful...' % stock_code)
コード例 #2
0
def install_history_capttal_crawler(code):
    """Fetch the historical share-capital changes of one SSE-listed stock
    and write them straight into the database, keyed by the stock code.

    Crawls the Shanghai Stock Exchange capital-change endpoint.  Per the
    original (Chinese) note, SSE updates this data too slowly, so this
    path is effectively unused.

    :param code: SSE company code, interpolated into the query URL
    :return: None (side effects: DB write via BaseCrawlerDB and a follow-up
             capttal_xlsx_to_db(code) import step)
    """
    capital_headers = {
        'Accept': '*/*',
        'Connection': 'keep-alive',
        # BUG FIX: the Referer previously started with a stray 'keep-'
        # (pasted in from the 'keep-alive' Connection value), producing an
        # invalid referrer the SSE endpoint may reject; send the real page URL.
        'Referer':
        'http://www.sse.com.cn/assortment/stock/list/info/capital/index.shtml',
        'Accept-Encoding': 'gzip, deflate',
        'Accept-Language':
        'zh-CN,zh;q=0.8,zh-TW;q=0.7,zh-HK;q=0.5,en-US;q=0.3,en;q=0.2',
        'User-Agent': get_user_agent()
    }
    capital_url = ('http://query.sse.com.cn/security/stock/'
                   'queryEquityChangeAndReason.do?companyCode={}').format(code)
    response = requests.get(url=capital_url, headers=capital_headers)
    data_dict = json.loads(response.text)
    stock_code = data_dict['companyCode']
    history_capital_stock = data_dict['result']
    # Removed dead code: a data_dicts/data_list structure was built here only
    # to feed a commented-out write07Excel() call and was never used.
    BaseCrawlerDB.save_set_result('lm_stock_data', {'code': stock_code},
                                  'history_capital_stock',
                                  history_capital_stock)
    capttal_xlsx_to_db(code)
    print('%s save history capital of stock successful...' % code)
コード例 #3
0
    Create  : 2018/1/2 下午8:31
    Author  : Richard Chen
    File    : price_data.py
    Software: IntelliJ IDEA
'''
import requests
import json
from common.db_utils.stock_crawler_inner_db import LmInnerReportDBMgr
from conf.requests_useragent import get_user_agent

# Default browser-like request headers shared by the crawlers in this module.
# The User-Agent is supplied by the project's get_user_agent() helper, chosen
# once at import time (NOTE(review): not re-rotated per request — confirm
# that is intentional).
headers = {
    'Accept':
    'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
    'Accept-Encoding': 'gzip, deflate',
    'Accept-Language': 'zh-CN,zh;q=0.8',
    'User-Agent': get_user_agent(),
}


def is_halt(code):
    url = 'http://mdfm.eastmoney.com/EM_UBG_MinuteApi/Js/Get?style=tail&id={}1&num=1'.format(
        code)
    res = requests.get(url, headers=headers)
    res.encoding = 'utf-8'
    data_json = json.loads(res.text.split('(')[1].split(')')[0])
    if not data_json['result']:
        if data_json['message'] == '暂无数据':
            return 0
        else:
            print('未找到股票代码')
    else: