Example #1
    def default_value(cls):
        dt_now = datetime.datetime.now()
        yesterday = dt_now - datetime.timedelta(days=1)

        to_day = "{0}-{1}-{2}".format(dt_now.year, dt_now.month, dt_now.day)

        yesterday = "{0}-{1}-{2}".format(yesterday.year, yesterday.month,
                                         yesterday.day)

        last_week_start, last_week_end = last_week()
        last_month_start, last_month_end = last_month()

        # Variables that can be used inside the SQL; more may need to be added, e.g.:
        """
        SELECT id AS ID, name AS 名称 FROM table WHERE dt_created > {today_start} AND dt_created < {today_end};
        """
        SQL_VARIABLE = {
            "today_start": f"{to_day} 00:00:00",
            "today_end": f"{to_day} 23:59:59",
            "yesterday_start": f"{yesterday} 00:00:00",
            "yesterday_end": f"{yesterday} 23:59:59",
            "last_week_start": f"{last_week_start} 00:00:00",
            "last_week_end": f"{last_week_end} 23:59:59",
            "last_month_start": f"{last_month_start} 00:00:00",
            "last_month_end": f"{last_month_end} 23:59:59",
        }
        return SQL_VARIABLE
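The docstring above shows how placeholders such as {today_start} are meant to be filled from this dictionary. As a minimal, illustrative sketch (the template and values below are made up, not taken from the original project), the rendering step could look like this:

# Illustrative only: render a SQL template with the bounds produced by default_value().
SQL_TEMPLATE = (
    "SELECT id AS ID, name AS 名称 FROM table "
    "WHERE dt_created > '{today_start}' AND dt_created < '{today_end}';"
)

sql_variables = {
    "today_start": "2024-1-1 00:00:00",   # in real code: taken from default_value()
    "today_end": "2024-1-1 23:59:59",
}
print(SQL_TEMPLATE.format(**sql_variables))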
Example #2
def get_curr_fund_value(fund_name_list):
    unit_values = {}
    for fund in fund_name_list:
        fund_id = db.get_fund_id(fund)
        data_dict, last_date = get_mf_data(fund_id, utils.last_month(), utils.today())
        unit_values[fund] = data_dict[max(data_dict)]
    return unit_values
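The utils module referenced in this example (and in Example #3 below) is not shown on this page. Purely as an assumed sketch, with names and return types inferred from the call sites, helpers consistent with these usages might look like the following; the real implementations may differ:

import datetime

# Assumed stand-ins for the utils helpers; the real utils module is not shown here.
def today():
    # Current local date.
    return datetime.date.today()

def last_month():
    # Last day of the previous calendar month; its .day attribute then equals the
    # number of days in that month, which matches the `.day * 24` usage in Example #3.
    first_of_this_month = datetime.date.today().replace(day=1)
    return first_of_this_month - datetime.timedelta(days=1)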
Example #3
    def get_timeline(cls, proj, typ, tag=None):
        # TODO: these values should not be hard-coded.
        prv_obj = cls.get_obj(proj, utils.slashify(typ, "hour_by_month", utils.month_tag(utils.last_month())))
        obj = cls.get_obj(proj, utils.slashify(typ, "hour_by_month", utils.month_tag()))

        def fill_entries(lst, ms, hrs):
            # Pad lst with [timestamp, [0, 0]] placeholder entries up to `hrs` hours.
            if len(lst) < hrs:
                lst.extend([str(utils.add_hrs(j, ms)), [0, 0]]
                           for j in range(len(lst), hrs))
            return lst

        last_month_hrs = utils.last_month().day * 24
        ms = utils.month_start(now=utils.last_month())

        def normalize(res, ms, last_month_hrs):
            for i in res:
                res[i] = fill_entries(res[i], ms, last_month_hrs)
            return res

        if not prv_obj and not obj:
            return {}

        if prv_obj:
            res = normalize(json.loads(prv_obj.stat), ms, last_month_hrs)

        if obj:
            nres = json.loads(obj.stat)
            if not prv_obj:
                res = nres
            else:
                for t in nres:
                    if t not in res:
                        res[t] = fill_entries([], ms, last_month_hrs)
                    res[t].extend(nres[t])

        if tag:
            return res[tag]

        return res
def generate_reference_xls():
    existing_rows = []

    #open the reference.xls file
    global date
    excel_fp = os.path.join(REFERENCE_DIR, 'reference_' + date + '.xls')

    #create the xls file with an 'all' sheet if it doesn't exist yet
    if not os.path.exists(excel_fp):
        write_workbook = xlwt.Workbook()
        write_workbook.add_sheet('all')
        write_workbook.save(excel_fp)

    #get the baseline existing_rows from the xls file
    read_workbook = xlrd.open_workbook(excel_fp)
    read_worksheet = read_workbook.sheet_by_name('all')
    rows = read_worksheet.nrows
    for i in range(0, rows):
        existing_rows.append(read_worksheet.row_values(i))
        #existing_rows.sort(comp)
    insert_log(
        LOG_FILE, 'Read ' + str(read_worksheet.nrows) +
        ' rows from file ' + excel_fp)

    write_workbook = copy(read_workbook)
    write_worksheet = write_workbook.get_sheet('all')

    #to skip data that is already present we need the list of IDs currently in the worksheet
    #we already know the format of existing_rows is [[id1, x, y, z, ...], [id2, x, y, z, ...], ...]
    ids_in_worksheet = []
    for i in existing_rows:
        ids_in_worksheet.append(i[0])

    #get all stock info
    stock_info = tushare.get_stock_basics()
    insert_log(
        LOG_FILE, 'There are ' + str(len(stock_info)) +
        ' items from tushare.get_stock_basics()')

    count = 0

    for id in stock_info.index:
        count += 1
        insert_log(LOG_FILE, 'processing ' + str(count) + ': ' + id)

        if id in ids_in_worksheet:
            insert_log(LOG_FILE, 'Already has ' + id + ' skip it')
            continue

        #test code: stop after the first 20 entries
        if count > 20:
            break

        month = utils.last_month()
        try:
            history_data = tushare.get_hist_data(
                id,
                start=utils.first_date_of_last_month(),
                retry_count=5,
                pause=1)
            #print history_data.columns
            #print history_data[0:4]
            close_price = history_data[u'close']
            print(history_data)
        except Exception:
            # nothing to recover here; log the error and move on
            insert_log(LOG_FILE, 'Exception when handling ' + id)
            info = sys.exc_info()
            print(info[0], ':', info[1])
            continue

        continous_up = False
        continous_down = False

        #only need to analyze if we have at least 4 samples
        if (len(close_price) >= 4):
            continous_up = True
            continous_down = True
            for i in range(0, 3):
                if (close_price[i] < close_price[i + 1]):
                    continous_up = False
                    break

            for i in range(0, 3):
                if (close_price[i] > close_price[i + 1]):
                    continous_down = False
                    break

        #row = read_worksheet.nrows
        #read_worksheet.write(row, 0, id)
        try:

            date = close_price.keys()[0]

            three_days_ago = 'NA'
            if len(close_price.keys()) >= 4:
                three_days_ago = close_price.keys()[3]

            open_price = history_data[u'open'][0]
            high = history_data[u'high'][0]
            low = history_data[u'low'][0]
            price_change = history_data[u'price_change'][0]
            volume = history_data[u'volume'][0]
            p_change = history_data[u'p_change'][0]
            ma5 = history_data[u'ma5'][0]
            ma10 = history_data[u'ma10'][0]
            ma20 = history_data[u'ma20'][0]
            v_ma5 = history_data[u'v_ma5'][0]
            v_ma10 = history_data[u'v_ma10'][0]
            v_ma20 = history_data[u'v_ma20'][0]
            turnover = history_data[u'turnover'][0]

            trend = ''
            # Row layout:
            # [id, 3_day_trend, date, three_days_ago, open, close, high, low,
            #  volume, price_change, p_change, ma5, ma10, ma20, v_ma5, v_ma10,
            #  v_ma20, turnover]
            if (continous_up):
                trend = 'up'
            elif (continous_down):
                trend = 'down'
            else:
                trend = 'NA'

            record = [
                id, trend, date, three_days_ago, open_price, close_price[0],
                high, low, volume, price_change, p_change,
                ma5, ma10, ma20, v_ma5, v_ma10, v_ma20, turnover,
            ]

            for col, value in enumerate(record):
                write_worksheet.write(rows, col, value)

            rows += 1

            write_workbook.save(excel_fp)
            insert_log(LOG_FILE, 'written to file ' + excel_fp)

        except Exception:
            insert_log(LOG_FILE, 'Exception when handling id ' + id)
            traceback.print_exc()
            continue

        #existing_rows.append([id, trend, date, open_price, close_price[0], high, low, price_change, ma5, ma10, ma20, v_ma5, v_ma10, v_ma20, turnover])
        insert_log(LOG_FILE, id + ' 3 day trend is ' + trend)
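For reference, the read-copy-append-save flow above is the usual way to append rows to an existing .xls file with xlrd, xlwt and xlutils. A stripped-down sketch of just that pattern (the file name and row values below are placeholders):

import os
import xlrd
import xlwt
from xlutils.copy import copy

excel_fp = 'reference_example.xls'  # placeholder path

# Create the workbook with an 'all' sheet if it does not exist yet.
if not os.path.exists(excel_fp):
    workbook = xlwt.Workbook()
    workbook.add_sheet('all')
    workbook.save(excel_fp)

# Read the current contents, then copy the workbook to get a writable object.
read_workbook = xlrd.open_workbook(excel_fp)
next_row = read_workbook.sheet_by_name('all').nrows

write_workbook = copy(read_workbook)
write_worksheet = write_workbook.get_sheet(0)  # first (and only) sheet

# Append one placeholder row and save.
for col, value in enumerate(['000001', 'up', '2024-01-31']):
    write_worksheet.write(next_row, col, value)
write_workbook.save(excel_fp)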