Example #1
     '001538': '上投摩根科技前沿',
     '161903': '万家行业优选',
     '000336': '农银汇理研究精选'
     }
tm_calculator = TMCalculater()
basic_calculator = ShowCalculater()

Result = namedtuple('Result', [
    'code', 'name', 'start', 'year', 'alpha', 'beta2', 'profit', 'aagr',
    'withdraw'
])
import logging

logger = logging.getLogger(__name__)

init_logger()


def format(results):
    logger.debug(
        "====================================================================================================================="
    )

    # chr(12288) is a full-width (CJK) space, used as the fill character so the
    # name column stays aligned when fund names contain CJK characters.
    logger.debug(
        "| {1:^8} | {2:{0}^15} | {3:^8} | {4:^4} | {5:^6} |  {6:^6} |  {7:^4} |  {8:^4} |  {9:^4} |"
        .format(chr(12288), "Code", "Name", "Start", "Years", "α", "β2",
                "Return", "AAGR", "MaxDD"))

    for r in results:
        # print(r.code,
        #       r.name,
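The listing is cut off inside the loop. A minimal sketch of how the per-row output could continue, reusing the header's column layout; the exact row format is an assumption, not the original code:

        # Assumed continuation: emit one row per Result, using the same
        # format string (and full-width-space fill) as the header above.
        logger.debug(
            "| {1:^8} | {2:{0}^15} | {3:^8} | {4:^4} | {5:^6} |  {6:^6} |  {7:^4} |  {8:^4} |  {9:^4} |"
            .format(chr(12288), r.code, r.name, r.start, r.year, r.alpha,
                    r.beta2, r.profit, r.aagr, r.withdraw))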
Example #2
                        default=None,
                        help="字类型:基金有两种(info:基本信息|trade:交易数据| manager:基金经理)")
    parser.add_argument('--force',
                        '-f',
                        action='store_true',
                        dest="force",
                        default=False,
                        help="是否再次强制爬取,覆盖已有的")
    parser.add_argument('--num',
                        '-n',
                        type=int,
                        default=999999999,
                        help="爬取条数")
    parser.add_argument('--period', '-p', type=str, default='1d')
    args = parser.parse_args()
    utils.init_logger()

    crawler = None
    if args.type == const.FUND and args.sub_type == const.CRAWLER_INFO:
        crawler = JQDataFundCrawler()
    elif args.type == const.FUND and args.sub_type == const.CRAWLER_TRADE:
        crawler = EastmoneyCrawler()
    elif args.type == const.FUND and args.sub_type == const.CRAWLER_MANAGER:
        logger.info("爬取基金经理信息")
        crawler = EastmoneyFundManagerCrawler()
    elif args.type == const.STOCK:
        crawler = JQDataStockCrawler()
    else:
        logger.error("--data 参数必须是'info|trade'")
        exit()
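For reference, an invocation of this entry point might look like the line below; the module path and the --type/--sub_type flag names are assumptions (their parser.add_argument calls are cut off above), and only --force, --num and --period are visible in the snippet:

# Hypothetical command line (module path and --type/--sub_type flags assumed):
# python -m fund_analysis.crawler --type fund --sub_type trade --num 100 --period 1d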
Example #3
    sp500_index_data = data_utils.load_index_data_by_name('标普500')

    # compute the correlation coefficients among the funds
    cov_calculator.calculate(fund_data_list)

    # compute each fund's correlation with the S&P 500 index
    for fund_data, fund in zip(fund_data_list, fund_list):
        merge_dataframe = data_utils.merge_by_date(
            [fund_data, sp500_index_data], [COL_ACCUMULATIVE_NET, 'sp500'])
        coef_value = merge_dataframe.corr().iloc[0, 1]
        logger.info("基金[%s](%s)和标普500的相关系数:%.2f", fund.code, fund.name,
                    coef_value)


def main(args):
    codes = args.codes.split(",")
    fund_list = [data_utils.load_fund(code) for code in codes]
    fund_data_list = [data_utils.load_fund_data(code) for code in codes]
    calculate(fund_data_list, fund_list)


# python -m fund_analysis.projects.etf_sp500_compare --codes 003718,050025
if __name__ == '__main__':
    utils.init_logger(logging.DEBUG)

    parser = argparse.ArgumentParser()
    parser.add_argument('--codes', '-c', type=str, default=None)
    args = parser.parse_args()

    main(args)
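The merge-and-correlate step in Example #3 amounts to aligning two daily series on their date index and reading one off-diagonal entry of the 2x2 correlation matrix. A minimal pandas sketch of that idea with made-up values; the column names and the internals of data_utils.merge_by_date are assumptions, not the project's code:

import pandas as pd

# Stand-ins for a fund's accumulative net value and the S&P 500 level,
# both indexed by trading date (values are illustrative only).
dates = pd.to_datetime(['2021-01-04', '2021-01-05', '2021-01-06', '2021-01-07'])
fund_nav = pd.Series([1.00, 1.02, 1.01, 1.05], index=dates, name='fund_nav')
sp500 = pd.Series([3700.0, 3727.0, 3748.0, 3803.0], index=dates, name='sp500')

# Align the two series on their shared dates (inner join), then take the
# Pearson correlation between the two columns -- the same value that
# iloc[0, 1] reads off the 2x2 matrix in the snippet above.
merged = pd.concat([fund_nav, sp500], axis=1, join='inner')
coef_value = merged.corr().iloc[0, 1]
print("correlation: %.2f" % coef_value)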