Example #1
def btconvert(inputfile=None, inputobject=None, outputfile=None, linekey=None):

    if inputobject:
        import collections
        export = collections.OrderedDict(inputobject)
    elif inputfile:
        f = open(inputfile)
        export = json.load(f)
        f.close()
    else:
        raise UnboundLocalError("No input to convert")

    with open(outputfile, "w", newline='') as csv_out:
        writer = csv.writer(csv_out)
        count = 0
        dates = sorted(dates_from_keys(export.keys()))  # ascending, old to new
        for date in dates:
            if count == 0:
                if linekey:
                    header = [linekey]
                else:
                    header = [
                        'Date', 'Open', 'High', 'Low', 'Close', 'Adj Close',
                        'Volume'
                    ]
                # header.extend(export[line].keys())
                writer.writerow(header)
                count += 1
            row = [date]
            row.extend(list(export[date].values())[0:4])
            row.extend([export[date]['4. close'], export[date]['5. volume']])
            # print(date)
            writer.writerow(row)
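Every example on this page leans on a helper, dates_from_keys, whose body is not shown. Here is a minimal sketch of what the call sites appear to assume (date-shaped keys returned in ascending order, non-date entries such as "meta" skipped); the regex and the sorting are assumptions, not the project's actual implementation:

import re

DATE_KEY = re.compile(r"^\d{4}-\d{2}-\d{2}$")  # keys look like "2018-07-16"

def dates_from_keys(keys):
    # return the date-formatted keys in ascending order, ignoring e.g. "meta"
    return sorted(k for k in keys if DATE_KEY.match(k))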
Example #2
def include_fund_specific_meta_data(fundata):
    if fundata:
        dates = dates_from_keys(fundata.keys())
        if dates:
            if too_old(dates[-1]) or "meta" not in fundata:
                fundata["meta"] = {}
                fundata["meta"]["start"] = dates[0]
                fundata["meta"]["starting price"] = fundata[
                    dates[0]]["4. close"]
                fundata["meta"]["end"] = dates[-1]
                fundata["meta"]["ending price"] = fundata[
                    dates[-1]]["4. close"]
                fundata["meta"]["number_of_days"] = (
                    parser.parse(dates[-1]) - parser.parse(dates[0])).days
                fundata["meta"]["market_days"] = len(dates)
                fundata["meta"]["return"] = round(
                    (float(fundata["meta"]["ending price"]) -
                     float(fundata["meta"]["starting price"])) /
                    float(fundata["meta"]["starting price"]), 3)
                fundata["meta"]["averaged_annualized_CAGR"] = round(
                    36500 *
                    (calc_cagr(float(fundata["meta"]["starting price"]),
                               float(fundata["meta"]["ending price"]),
                               fundata["meta"]["number_of_days"])), 4)
        else:
            fundata["meta"] = {}
            fundata["meta"]["note"] = "No data in range for "
        return fundata
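calc_cagr is another helper that is not shown here. The 36500 * scaling at the call site is consistent with calc_cagr returning a per-day fractional growth rate, which the caller turns into an annualized percentage (365 days times 100). A sketch under that assumption; the formula is a guess that matches the call signature, not the project's code:

def calc_cagr(starting_price, ending_price, number_of_days):
    # assumed: compounded per-day growth rate over the holding period
    if starting_price <= 0 or number_of_days <= 0:
        return 0.0
    return (ending_price / starting_price) ** (1.0 / number_of_days) - 1.0

# the caller then annualizes it to a percentage:
# round(36500 * calc_cagr(start, end, days), 4)  # 365 days/year * 100 percent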
Example #3
def build_6_year_returns(fundata, etf, six_year_returns):
    dates = list(reversed(dates_from_keys(fundata.keys())))  # descending order, new to old
    for date in dates:
        if date not in six_year_returns:
            six_year_returns[date] = {}
        six_year_returns[date][etf] = {"return": fundata[date]["6_year_return"]}
    return six_year_returns
Example #4
def append_2_wk_change_to_files():
    for etf in fidelity + list(set(symbols) - set(fidelity)):
        fundata = get_fund_price_data(etf)
        dates = dates_from_keys(fundata.keys())
        if len(dates) > 15:
            for i in range(15, len(dates)):
                delta10 = (float(fundata[dates[i]]["4. close"]) -
                           float(fundata[dates[i - 10]]["4. close"])) / float(
                               fundata[dates[i - 10]]["4. close"])
                fundata[dates[i]]["delta10"] = delta10
        write_prices(fundata, etf)
Example #5
def get_fund_price_data(symbol):
    filename = pricefile(symbol)
    fundata = read_json(filename)
    if fundata:
        if too_old(dates_from_keys(fundata.keys())[-1]):
            fundata = get_prices(symbol, "compact")  # get data
            write_prices(fundata, symbol)
    else:
        fundata = get_prices(symbol)  # get data
        write_prices(fundata, symbol)
    return fundata
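too_old is the staleness check used throughout these examples: it receives the most recent date string in the cached file and decides whether to refresh from the API. A minimal sketch, assuming a plain "more than one calendar day old" rule and that parser is dateutil.parser (as the parser.parse calls in Example #2 suggest); the real cutoff may account for weekends and market holidays:

from datetime import date, timedelta
from dateutil import parser

def too_old(latest_date_string):
    # assumed rule: stale when the newest cached entry is more than a day old
    latest = parser.parse(latest_date_string).date()
    return date.today() - latest > timedelta(days=1)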
Example #6
def get_fund_price_data(etf):
    pricefile = "./json/prices/" + etf + ".json"
    fundprices = read_json(pricefile)
    if fundprices:
        if too_old(dates_from_keys(fundprices.keys())[-1]):
            print("old 62")
            fundata = get_fundata(etf)
            fundprices = build_fundprices(etf, fundata, fundprices)
    else:
        fundata = get_fundata(etf)
        fundprices = build_fundprices(etf, fundata)
    return fundprices
Example #7
def append_delta1825_and_delta14_to_files():
    for etf in fidelity + list(set(symbols) - set(fidelity)):
        fundata = get_fund_price_data(etf)
        dates = dates_from_keys(fundata.keys())
        if len(dates) > 1839:
            for i in range(1825, len(dates)):
                delta1825 = (float(fundata[dates[i]]["4. close"]) - float(
                    fundata[dates[i - 1825]]["4. close"])) / float(
                        fundata[dates[i - 1825]]["4. close"])
                fundata[dates[i]]["delta1825"] = delta1825
            for i in range(len(dates) - 10):
                delta14 = (float(fundata[dates[i + 10]]["4. close"]) -
                           float(fundata[dates[i]]["4. close"])) / float(
                               fundata[dates[i]]["4. close"])
                fundata[dates[i]]["delta14"] = delta14
        write_prices(fundata, etf)
Example #8
def merge_fund_indicator_returns(symbol, strategy, fund_data):
    fundata = {}
    if fund_data:
        dates = dates_from_keys(fund_data.keys())
        # calculate return values by fund using given strategy
        fireturns = calc_returns(fund_data, strategy)
        # merge with api data
        if "meta" in fund_data.keys():
            fundata["meta"] = {**fireturns["meta"], **fund_data["meta"]}
        for date in dates:
            if date in fireturns.keys():
                fundata[date] = {**fireturns[date], **fund_data[date]}
            else:
                fundata[date] = fund_data[date]

    return fundata
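The {**fireturns[date], **fund_data[date]} expressions rely on the rule that the right-hand mapping wins when keys collide, so the API data in fund_data overrides any overlapping fields from the calculated returns. A standalone illustration of that behaviour:

calculated = {"4. close": "100.0", "signal": "buy"}
from_api = {"4. close": "101.5", "5. volume": "12000"}

merged = {**calculated, **from_api}
print(merged)
# {'4. close': '101.5', 'signal': 'buy', '5. volume': '12000'}
# the later mapping (from_api) supplies the value for the shared key "4. close"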
Example #9
def top_100():
    deltas = []
    for etf in fidelity + list(set(symbols) - set(fidelity)):
        fundata = get_fund_price_data(etf)
        dates = dates_from_keys(fundata.keys())
        period = 5 * 365  # from runs of predict_two_weeks_from_now(); the jump at 11 years was due to narrowing the field
        if len(dates) > period:
            delta = (float(fundata[dates[period]]["4. close"]) -
                     float(fundata[dates[0]]["4. close"])) / float(
                         fundata[dates[period]]["4. close"])
            deltas.append({"etf": etf, "delta": delta})
    deltas = sorted(deltas, key=lambda x: x["delta"], reverse=True)  # largest delta first
    convert_list(inputfile=None,
                 inputobject=deltas,
                 outputfile='top_100.csv',
                 linekey=None)
Example #10
def all_indicators_only(fun_data):
    if fun_data:
        dates = dates_from_keys(fun_data.keys())
        # remove all the dates that have incomplete data

        for date in sorted(fun_data.keys()):
            for indicator in all_the_keys:
                if indicator not in fun_data[date]:
                    if date in dates:
                        dates.remove(date)
                        break  # date is incomplete; no need to check the rest

        # copy values to a new object using the dates that remain
        fundata = {}
        for date in dates:
            fundata[date] = fun_data[date]
        return fundata
Example #11
def create_csv_price_files():
    ''' these files get moved to backtrader. maybe link?'''
    # for etf in fidelity + list(set(symbols) - set(fidelity)): #  ["AADR"]: #
    for etf in etfs_to_process:
        #  update fundata files
        fundata = get_fund_price_data(etf)
        #  put the entries in ascending date order; reassigning existing keys
        #  would not reorder an OrderedDict, so rebuild it instead
        import collections
        dates = dates_from_keys(fundata.keys())  #  ascending
        rfundata = collections.OrderedDict((date, fundata[date]) for date in dates)
        pricefile = "./csv/prices/" + etf + ".txt"
        btconvert(inputfile=None,
                  inputobject=rfundata,
                  outputfile=pricefile,
                  linekey=None)
Example #12
def get_fundata(symbol, indicators=[], start=None, end=None):
    filename = "./json/raw/" + symbol + ".json"
    # read from file if there is one; call api if there isn't
    # if symbol == 'ALFA':
    #     import pdb; pdb.set_trace()
    try:
        f = open(filename)
        fundata = json.load(f)
        f.close()
    except Exception as e:
        print(e)
        fundata = no_file(symbol, filename)
    if not fundata:
        fundata = no_file(symbol, filename)
    # update if the newest entry is "too old"
    if fundata and too_old(dates_from_keys(fundata.keys())[-1]):
        api_data = call_api(symbol, 'compact')

        # import pdb; pdb.set_trace()
        fundata = build_data_object(symbol, api_data)
        with open(filename, "w") as writeJSON:
            json.dump(fundata, writeJSON)


    # toss unwanted dates
    ''' 8/27 I'm pulling out fund_data_in_range  because too often I'm seeing it result 
    in empty files. It will need to be replaced where needed. (after processing)
    and all_indicators_only(fundata) 9/24
    fundata = limit_fundata(fundata)  
    fundata = all_indicators_only(fundata) '''
    # include the symbol name for human readability
    if fundata:
        if "meta" in fundata.keys():
            fundata["meta"]["symbol"] = symbol
        # the note says "No data in range for "; fold it into meta
        elif "note" in fundata.keys():
            fundata["meta"] = {"note": fundata["note"] + symbol}
            del fundata["note"]
        with open(filename, "w") as writeJSON:
            json.dump(fundata, writeJSON)
        return fundata
Example #13
def rank_and_value():
    # append_delta1825_and_delta14_to_files()
    # import sys; sys.stdout = open('file.txt', 'w')
    daily_rankings = {}
    summary = {}
    for etf in fidelity + list(
            set(symbols) - set(fidelity)):  #  ['PTH', "QQQ", "FPX", "ONEQ"]:
        # print (etf)
        fundata = get_fund_price_data(etf)
        dates = dates_from_keys(fundata.keys())
        for date in dates:
            if ("delta1825" in fundata[date].keys()
                    and "delta14" in fundata[date].keys()):
                obj = {
                    "etf": etf,
                    "delta1825": fundata[date]["delta1825"],
                    "delta14": fundata[date]["delta14"]
                }
                # print(date, obj)
                # if date == '2014-07-14':
                #     import pdb; pdb.set_trace()
                if date not in daily_rankings or daily_rankings[date] is None:
                    daily_rankings[date] = []
                if obj not in daily_rankings[date]:
                    daily_rankings[date].append(obj)
                # print(daily_rankings[date])
    # import pdb; pdb.set_trace()
    for date in daily_rankings:
        # sort each day's list of funds, largest delta1825 first
        daily_rankings[date] = sorted(daily_rankings[date],
                                      key=lambda x: x["delta1825"],
                                      reverse=True)


# print(daily_rankings[dates[-1][1]])

    with open("rank.json", "w") as writeJSON:
        json.dump(daily_rankings, writeJSON, sort_keys=True)
Example #14
def set_6_year_return(fundata, symbol):
    dates = dates_from_keys(fundata.keys())  # ascending order, old to new
    period = 6 * 261
    for index in range(len(dates) - 1, -1, -1):  # descending order, new to old
        # import pdb; pdb.set_trace()
        '''fundata['2018-07-16'] = 0.0! 
        fundata['2010-07-21'] = dne '''
        if "6_year_return" in fundata[dates[index]].keys():
            return fundata
        if index >= period:
            fundata[dates[index]]["6_year_return"] = (
                (float(fundata[dates[index]]["4. close"]) -
                 float(fundata[dates[index - period]]["4. close"])) /
                float(fundata[dates[index - period]]["4. close"]))
            # now - then / then
        else:  # now - oldest / oldest
            fundata[dates[index]]["6_year_return"] = (
                (float(fundata[dates[index]]["4. close"]) -
                 float(fundata[dates[0]]["4. close"])) /
                float(fundata[dates[0]]["4. close"]))
    write_prices(fundata, symbol)
    return fundata
Example #15
def btconvert_with_rank(inputfile=None,
                        inputobject=None,
                        outputfile=None,
                        linekey=None):

    if inputobject:
        export = inputobject
    elif inputfile:
        f = open(inputfile)
        export = json.load(f)
        f.close()
    else:
        raise UnboundLocalError("No input to convert")

    with open(outputfile, "w", newline='') as csv_out:
        writer = csv.writer(csv_out)
        count = 0
        dates = list(sorted(dates_from_keys(
            export.keys())))  # ascending old to new
        # import pdb; pdb.set_trace()
        for index, date in enumerate(dates):
            if index == 0:
                if linekey:
                    header = [linekey]
                else:
                    header = [
                        'Date', 'Open', 'High', 'Low', 'Close', 'Adj Close',
                        'Volume', 'rank2', 'rank20', 'rank130'
                    ]
                writer.writerow(header)
            else:  # we skip the first date since it has no rank value.
                row = [date]
                row.extend(list(export[date].values())[0:4])
                row.extend([
                    export[date]['4. close'], export[date]['5. volume'],
                    export[date]['rank2'], export[date]['rank20'],
                    export[date]['rank130']
                ])
                writer.writerow(row)
Example #16
def add_more_indicators():
    # import pdb; pdb.set_trace()
    # marshall api data
    for symbol in etfs_to_process:
        filename = "./json/raw/" + symbol + ".json"
        # read from file if there is one; call api if there isn't
        try:
            f = open(filename)
            fundata = json.load(f)
            f.close()
        except Exception as e:
            print(e)
            api_data = call_api(symbol)
            fundata = build_data_object(symbol, api_data)
            with open(filename, "w") as writeJSON:
                json.dump(fundata, writeJSON)
        dates = dates_from_keys(fundata.keys())
        for ma in ["sma", "ema"]:  #  change ma to new_indicator
            if ma.upper() not in fundata[dates[-1]]:
                api_data = add_ma(symbol, ma)  # a response object set
                fundata = update_data_object(fundata, symbol, api_data)
        with open(filename, "w") as writeJSON:
            json.dump(fundata, writeJSON)
Example #17
def predict_two_weeks_from_now():
    results = {}
    summary = {}
    for etf in fidelity + list(set(symbols) - set(fidelity)):
        fundata = get_fund_price_data(etf)
        # why do this separately from rank, etc?
        dates = dates_from_keys(fundata.keys())
        results[etf] = {}
        period_range = range(len(fundata) - 10)
        for period in period_range:  # each performance period
            # over the data range
            deltas = []
            poscount, negcount, possible = 0, 0, 0
            last_index_value = len(fundata) - 10 - period

            if period not in summary:
                summary[period] = {
                    "poscount": 0,
                    "possible": 0,
                    "N": 0,
                    "points": 0
                }
            if period not in results[etf]:
                results[etf][period] = {
                    "poscount": 0,
                    "possible": 0,
                    "points": 0
                }
            summary[period]["N"] += 1

            for i in range(last_index_value):
                return_over_preceding_period = float(
                    fundata[dates[i + period]]["4. close"]) - float(
                        fundata[dates[i]]["4. close"])

                return_in_two_weeks = float(
                    fundata[dates[i + period + 10]]["4. close"]) - float(
                        fundata[dates[i + period]]["4. close"])

                deltas.append({
                    "i": i,
                    "dates": ([dates[i]], [dates[i + period]]),
                    "deltas": (return_over_preceding_period,
                               return_in_two_weeks)
                })
                if return_over_preceding_period >= 0:
                    possible += 1
                    if return_in_two_weeks > 0:
                        poscount += 1
                if return_over_preceding_period < 0 and return_in_two_weeks < 0:
                    negcount += 1

            results[etf][period]["poscount"] += poscount
            results[etf][period]["possible"] += possible
            results[etf][period]["points"] += last_index_value

            summary[period]["poscount"] += results[etf][period]["poscount"]
            summary[period]["possible"] += results[etf][period]["possible"]
            summary[period]["points"] += results[etf][period]["points"]

    for period in summary:
        summary[period]["probability"] = round(
            summary[period]['poscount'] / summary[period]['possible'], 4)

    convert(inputfile=None,
            inputobject=summary,
            outputfile='results.csv',
            linekey="period")
Example #18
def set_6_year_rank(fundata, etf, six_year_returns):
    dates = dates_from_keys(fundata.keys())  # ascending order, old to new
    for index in range(len(dates) - 1, 0, -1):  # descending order, new to old
        fundata[dates[index]]["rank"] = six_year_returns[
            dates[index]][etf]["rank"]
    return fundata