Example #1
def run_tests():
    global test_result
    TEST_ENV = {
            'stopped': False,
            'test': True,
            'send': False,
            }

    TEST_ENV['sender'] = TestSender()
    TEST_ENV['enumerator'] = Enumerator(TEST_ENV)
    TEST_ENV['parser'] = Parser(TEST_ENV)
    TEST_ENV['screener'] = Screener(TEST_ENV)
    TEST_ENV['logger'] = open('/dev/null', 'w')
    TEST_ENV['processor'] = processor = Processor(TEST_ENV)

    num_failures = 0
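    # Feed each test message through the processor, then check that the keys
    # recorded in the global test_result dict match the expected values.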
    for (rcpttos, expected) in tests:
        test_result = {}
        processor.process_message({
            'peer': None,
            'mailfrom': 'mailfrom',
            'rcpttos': rcpttos,
            'data': 'message',
            })
        print('testing sending to', rcpttos, '...')
        for key in expected:
            assert key in test_result
            if isinstance(test_result[key], list):
                assert set(test_result[key]) == set(expected[key])
            else:
                assert test_result[key] == expected[key]
        for key in test_result:
            assert key in expected
Example #2
def screener():
    if request.method == 'GET':
        default_dictionary = {}
        return render_template(
            'screener.html', **default_dictionary)
    elif request.method == 'POST':
        query = request.form.get('query')
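        # Run the screen; any failure falls back to a plain error message below.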
        try:
            results = Screener().run_screen(query)
        except Exception as e:
            print(e)
            return '''Something was wrong with your query. Try again.'''

        output = {
            'query': query,
            'results': results
        }
        return render_template('screener.html', **output)
class Collector:
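    # Fetches screener instruments, pulls the previous session's intraday
    # charts, and derives per-symbol observation features (new highs/lows,
    # tallest candles, news age); observations are either persisted or written
    # to the prediction file depending on the predict flag.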
    def __init__(self, dt=None, predict=False):
        # Avoid evaluating datetime.now() in the signature; a default argument
        # is evaluated once at definition time, not per call.
        dt = dt if dt is not None else datetime.now()
        print('Sequencer initialized...')
        self.predict = predict
        self.observations = Observations()
        print('Observations instance initialized with length:',
              self.observations.length)
        if self.predict:
            self.today = dt + timedelta(days=1)
        else:
            self.today = dt
        self.yesterday_market_times = utils.previous_market_times(self.today)
        print("Getting yesterday's chart for:", datetime.fromtimestamp(
            self.yesterday_market_times[0]))
        self.today_market_times = utils.current_market_times(self.today)
        self.charts = Tickcharts()

    def start(self):
        self.sequence()

    def sequence(self):
        self.getInstruments()
        self.instruments.each(self.getChart)
        self.initialize_observations()
        self.setNews()
        self.setNumNewHighs()
        self.setNumNewLows()
        self.setTallestCandles()
        if not self.predict:
            self.observations.create()
        else:
            self.printObservations()

    def printObservations(self):
        printer = []
        for observation in self.observations.toJSON():
            item = observation
            item['market_open_datetime'] = str(
                observation['market_open_datetime'])
            item['market_close_datetime'] = str(
                observation['market_close_datetime'])
            printer.append(item)
        # Write the predictions through a context manager so the file is
        # flushed and closed.
        with open('./trading-data/' + config.get('predict.filename'),
                  'w+') as prediction_file:
            prediction_file.write(json.dumps(printer))

    def printObservation(self, observation, index):
        print(observation.toJSON())

    def appendObservation(self, model, index):
        self.observations.append(model)

    def initialize_observations(self):
        print('initialize_observations()')
        self.charts.each(self.appendObservation)

    def getInstruments(self):
        print('getInstruments()')
        self.screener = Screener()
        screener_model = self.screener.fetch()
        self.instruments = screener_model.get('DataList')

    def getChart(self, instrument, index):
        print('getChart()')
        symbol = instrument.get('Eqsm')
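        # Skip symbols containing '.' or '/'; no chart is fetched for them.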
        regex = re.compile(r'(\S+[\.\/]\S+$)', re.I)
        search_symbol = regex.search(symbol)
        if search_symbol:
            return {}
        chart_instance = ChartAnalysis({
            'startdate':
            self.yesterday_market_times[0],
            'enddate':
            self.yesterday_market_times[1],
            'ticker':
            symbol
        })
        chart = chart_instance.fetch()
        try:
            results = chart.get('chart').get('result').at(0)
        except Exception:
            return None
        if not results.has('timestamp'):
            return None
        timestamps = results.get('timestamp')
        opens = results.get('indicators').get('quote').at(0).get('open')
        closes = results.get('indicators').get('quote').at(0).get('close')
        highs = results.get('indicators').get('quote').at(0).get('high')
        lows = results.get('indicators').get('quote').at(0).get('low')
        volumes = results.get('indicators').get('quote').at(0).get('volume')
        return_chart = Tickchart({
            'symbol': symbol,
            'timestamps': timestamps,
            'opens': opens,
            'closes': closes,
            'highs': highs,
            'lows': lows,
            'volumes': volumes
        })
        if not self.predict:
            last_close = return_chart.get('closes')[-1]
            tomorrow_open_margin = self.getTodaysOpenMargin(symbol, last_close)
            return_chart.set('next_market_open_margin', tomorrow_open_margin)

        if (self.predict or
                return_chart.get('next_market_open_margin') is not None):
            self.charts.append(return_chart)
        return return_chart

    def getTodaysOpenMargin(self, symbol, last_close):
        print('getTodaysOpenMargin()')
        chart_instance = ChartAnalysis({
            'startdate': self.today_market_times[0],
            'enddate': self.today_market_times[1],
            'ticker': symbol
        })
        chart = chart_instance.fetch()
        try:
            chart.get('chart')
            results = chart.get('chart').get('result').at(0)
        except Exception:
            print('Could not parse results from chart: %s' % symbol)
            return None
        if not results.has('timestamp'):
            return None
        today_timestamps = results.get('timestamp')
        today_opens = results.get('indicators').get('quote').at(0).get('open')
        today_closes = results.get('indicators').get('quote').at(0).get(
            'close')
        today_highs = results.get('indicators').get('quote').at(0).get('high')
        today_lows = results.get('indicators').get('quote').at(0).get('low')
        today_volumes = results.get('indicators').get('quote').at(0).get(
            'volume')
        today_chart = Tickchart({
            'symbol': symbol,
            'timestamps': today_timestamps,
            'opens': today_opens,
            'closes': today_closes,
            'highs': today_highs,
            'lows': today_lows,
            'volumes': today_volumes
        })
        today_first_open = today_chart.get('opens')[0]
        return (today_first_open - last_close) / last_close

    def getNumNewHighs(self, chart, index):
        print('getNumNewHighs()')
        highs = chart.get('highs')
        times = chart.get('timestamps')
        volumes = chart.get('volumes')
        count = 0
        abvVolCount = 0
        blwVolCount = 0
        amCount = 0
        pmCount = 0
        avgVol = chart.get('avg_volume')
        max_high = 0
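        # Count each time the running intraday high is exceeded, split by
        # above/below average volume and by morning vs. afternoon.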
        for key, high in enumerate(highs):
            if high and high > max_high:
                max_high = high
                count += 1
                if volumes[key] > avgVol:
                    abvVolCount += 1
                else:
                    blwVolCount += 1
                if utils.isMorning(times[key]):
                    amCount += 1
                else:
                    pmCount += 1
        self.observations.at(index).set('num_new_highs', count)
        self.observations.at(index).set('num_highs_abv_avg_vol', abvVolCount)
        self.observations.at(index).set('num_highs_blw_avg_vol', blwVolCount)
        self.observations.at(index).set('num_new_highs_am', amCount)
        self.observations.at(index).set('num_new_highs_pm', pmCount)
        return count

    def setNumNewHighs(self):
        print('setNumNewHighs()')
        self.charts.each(self.getNumNewHighs)

    def getNumNewLows(self, chart, index):
        print('getNumNewLows()')
        lows = chart.get('lows')
        times = chart.get('timestamps')
        volumes = chart.get('volumes')
        count = 0
        abvVolCount = 0
        blwVolCount = 0
        amCount = 0
        pmCount = 0
        avgVol = chart.get('avg_volume')
        min_low = float('inf')
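        # Mirror of getNumNewHighs: count each time a new running intraday low is set.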
        for key, low in enumerate(lows):
            if low and low < min_low:
                min_low = low
                count += 1
                if volumes[key] > avgVol:
                    abvVolCount += 1
                else:
                    blwVolCount += 1
                if utils.isMorning(times[key]):
                    amCount += 1
                else:
                    pmCount += 1
        self.observations.at(index).set('num_new_lows', count)
        self.observations.at(index).set('num_lows_abv_avg_vol', abvVolCount)
        self.observations.at(index).set('num_lows_blw_avg_vol', blwVolCount)
        self.observations.at(index).set('num_new_lows_am', amCount)
        self.observations.at(index).set('num_new_lows_pm', pmCount)
        return count

    def setNumNewLows(self):
        print('setNumNewLows()')
        self.charts.each(self.getNumNewLows)

    def getTallestCandles(self, chart, index):
        print('getTallestCandles()')
        candles = chart.get('candlesticks').toJSON()
        tallestGreen = 0.0
        tallestRed = 0.0
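        # Track the tallest green and tallest red candle by relative_height.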
        for candle in candles:
            if candle['color'] == 'green' and candle[
                    'relative_height'] > tallestGreen:
                tallestGreen = candle['relative_height']
            if candle['color'] == 'red' and candle[
                    'relative_height'] > tallestRed:
                tallestRed = candle['relative_height']
        self.observations.at(index).set('tallest_green_candlestick',
                                        tallestGreen)
        self.observations.at(index).set('tallest_red_candlestick', tallestRed)

    def setTallestCandles(self):
        print('setTallestCandles()')
        self.charts.each(self.getTallestCandles)

    def fetchNewsitems(self, chart, index):
        print('fetchNewsitems()')
        newsitems = News({'symbol': chart.get('symbol')})
        news = newsitems.fetch()
        chart.set('news', newsitems.get('data'))
        self.observations.at(index).set('age_recent_news',
                                        chart.get('news_age'))

    def setNews(self):
        print('setNews()')
        self.charts.each(self.fetchNewsitems)
Example #5
def get_industry_data(company):
    screener = Screener(company)
    related = screener.get_related_companies()
    industry = screener.industry
    if company not in related:
        related.append(company)
    industry_dict = {}
    indicator_list = [
        "MCAP",
        "EPS5",
        "ROA",
        "CROCI",
        "DIVYLD",
        "EPS1",
        "PEG",
        "PE",
        "PBV",
        "LTDE",
        "ROE",
        "ROCE",
        "NETPCT",
        "CRATIO",
        "SOLRATIO",
        "LTDE",
        "IC",
        "DEBT_ASSETS",
    ]
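    # Build one row per related company, preferring consolidated values and
    # falling back to standalone; missing values become NaN.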
    for comp in related:
        comp_dict = {}
        indicator_set = get_indicator_set(comp, indicator_list)
        if indicator_set:
            std = indicator_set["standalone"]
            con = indicator_set["consolidated"]
            for i in indicator_list:
                val = con[i] if con[i] else std[i]
                if val:
                    comp_dict.update({i: val})
                else:
                    comp_dict.update({i: np.nan})
            industry_dict.update({comp: comp_dict})

    adf = pd.DataFrame.from_dict(industry_dict, orient="index")

    adf["Weights"] = adf.MCAP / adf.MCAP.sum() * 100
    indicators = adf.columns.tolist()
    indicators.remove("Weights")
    indicators.remove("MCAP")

    industry_dict = {"companies": related}
    industry_dict.update({"industry": industry})
    for i in indicators:
        c = adf[i].loc[company]
        a = adf[i].mean()
        wa = (adf[i] * adf["Weights"]).sum() / adf["Weights"].sum()

        if not c or np.isnan(c):
            c = 0

        if not a or np.isnan(a):
            a = 0

        if not wa or np.isnan(wa):
            wa = 0

        industry_dict.update({i: {"company": c, "average": a, "weighed_average": wa}})

    nifty_pe_str = r_full.get("NIFTY_PE").decode("UTF-8")
    industry_dict.update({"nifty_pe": float(nifty_pe_str)})
    industry_dict.update({"cash_saving_rate": 5.0})

    return industry_dict
Example #6
def get_indicator_set(comp, indicator_list):
    return_set = {}
    obj_consolidated = r_full.get("{}_A_CONSOLIDATED".format(comp))
    obj_standalone = r_full.get("{}_A_STANDALONE".format(comp))

    if not obj_consolidated and not obj_standalone:
        print("Company {} not found in the database.".format(comp))
        return return_set

    # Default to empty frames so the .empty checks below do not raise a
    # NameError when only one of the two objects exists in the store.
    df_c = pd.DataFrame()
    df_s = pd.DataFrame()
    if obj_consolidated:
        df_c = pd.read_json(obj_consolidated)
    if obj_standalone:
        df_s = pd.read_json(obj_standalone)

    # Initialize the result dicts before the try block so they still exist if
    # the Screener lookup fails.
    indicator_set_consolidated = {}
    indicator_set_standalone = {}

    screenerFailed = False
    try:
        screener = Screener(comp)

        # get PE and PBV from screener
        pe = screener.get_pe()
        pe_standalone = screener.get_pe(standalone=True)

        indicator_set_standalone.update({"PE": pe_standalone})
        indicator_set_consolidated.update({"PE": pe})

        bv = screener.get_book_value()
        bv_standalone = screener.get_book_value(standalone=True)

        current_price = screener.get_current_price()

        pbv = np.nan
        pbv_standalone = np.nan
        if bv:
            pbv = current_price / bv

        if bv_standalone:
            pbv_standalone = current_price / bv_standalone

        indicator_set_standalone.update({"PBV": pbv_standalone})
        indicator_set_consolidated.update({"PBV": pbv})

    except Exception:
        print("Error while getting data from screener")
        traceback.print_exc(file=sys.stdout)
        screenerFailed = True

    # Copy so the caller's indicator_list is not mutated when PE/PBV are added.
    other_indicators = list(indicator_list)

    if screenerFailed:
        other_indicators += ["PE", "PBV"]

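    # Take each indicator's most recent non-null value from the consolidated
    # and standalone frames; anything missing becomes NaN.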
    for ind in other_indicators:
        try:
            if not df_c.empty:
                result_c = df_c[ind].loc[df_c[ind].last_valid_index()]
            else:
                result_c = np.nan

            indicator_set_consolidated.update({ind: result_c})
        except Exception:
            indicator_set_consolidated.update({ind: np.nan})

        try:
            if not df_s.empty:
                result_s = df_s[ind].loc[df_s[ind].last_valid_index()]
            else:
                result_s = np.nan
            indicator_set_standalone.update({ind: result_s})
        except Exception:
            indicator_set_standalone.update({ind: np.nan})

    return_set.update(
        {"company": comp, "standalone": indicator_set_standalone, "consolidated": indicator_set_consolidated}
    )
    return return_set
Example #7
def get_data(company, indicator, get_quarterly=False):
    print("Company : {}, Indicator : {}".format(company, indicator))
    if not get_quarterly:
        df_standalone_full = pd.read_json(r_full.get(company + "_A_STANDALONE"))
        df_consolidated_full = pd.read_json(r_full.get(company + "_A_CONSOLIDATED"))
    else:
        df_standalone_full = pd.read_json(r_full.get(company + "_Q_STANDALONE"))
        df_consolidated_full = pd.read_json(r_full.get(company + "_Q_CONSOLIDATED"))
    indicatorList = []

    if isinstance(indicator, str):
        indicatorList.append(indicator)
    else:
        indicatorList = indicator

    seriesStandalone = []
    seriesConsolidated = []

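    # Build a standalone and a consolidated series payload per indicator;
    # failures are reported per indicator instead of aborting the whole call.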
    for indicator in indicatorList:
        try:
            df_standalone = df_standalone_full[indicator].dropna()
            df_consolidated = df_consolidated_full[indicator].dropna()
            if indicator == "PE":
                pe_standalone = None
                pe_consolidated = None
                screener = None
                try:
                    screener = Screener(company)
                except Exception:
                    print("Error while initializing screener")

                if screener:
                    try:
                        pe_standalone = screener.get_pe(standalone=True)
                    except Exception:
                        print("Error while getting standalone PE from screener")

                    try:
                        pe_consolidated = screener.get_pe()
                    except Exception:
                        print("Error while getting consolidated PE from screener")

                if pe_standalone:
                    new_df_standalone = pd.Series([pe_standalone], index=[datetime.datetime.now().date()])
                    # Series.append was removed in pandas 2.0; use concat instead.
                    df_standalone = pd.concat([df_standalone, new_df_standalone])

                if pe_consolidated:
                    new_df_consolidated = pd.Series([pe_consolidated], index=[datetime.datetime.now().date()])
                    df_consolidated = pd.concat([df_consolidated, new_df_consolidated])

            seriesStandalone.append(
                {
                    "Metric": indicator,
                    "Index": df_standalone.index.tolist(),
                    "Data": df_standalone.fillna(0).tolist(),
                    "PctChange1Y": getPercentChange(df_standalone),
                    "PctChange5Y": getPercentChange(df_standalone, step=5),
                    "Mean": getMean(df_standalone),
                    "Success": "True",
                    "Message": "",
                    "Mean_5_YR": getMean(df_standalone, tail=5),
                }
            )
            seriesConsolidated.append(
                {
                    "Metric": indicator,
                    "Index": df_consolidated.index.tolist(),
                    "Data": df_consolidated.fillna(0).tolist(),
                    "PctChange1Y": getPercentChange(df_consolidated),
                    "PctChange5Y": getPercentChange(df_consolidated, step=5),
                    "Mean": getMean(df_consolidated),
                    "Mean_5_YR": getMean(df_consolidated, tail=5),
                    "Success": "True",
                    "Message": "",
                }
            )
        except Exception:
            seriesStandalone.append(
                {"Metric": indicator, "Success": "False", "Message": "Error while getting indicator : " + indicator}
            )
            seriesConsolidated.append(
                {"Metric": indicator, "Success": "False", "Message": "Error while getting indicator : " + indicator}
            )

    my_dict = {}
    my_dict.update({"Company": "{}".format(company)})
    my_dict.update({"Standalone": seriesStandalone})
    my_dict.update({"Consolidated": seriesConsolidated})
    return my_dict
Example #8
            'peer': peer,
            'mailfrom': mailfrom,
            'rcpttos': rcpttos,
            'data': data,
            'status': 'UNVERIFIED',
        })


if __name__ == '__main__':
    # Initialize components
    GLOBAL_ENV['db'] = MongoClient().emails

    sender = Sender(GLOBAL_ENV)
    enumerator = Enumerator(GLOBAL_ENV)
    parser = Parser(GLOBAL_ENV)
    screener = Screener(GLOBAL_ENV)
    processor = Processor(GLOBAL_ENV)
    sendlist_server = SendlistServer(GLOBAL_ENV, IP_ADDRESS)

    # Initialize global environment
    GLOBAL_ENV['sender'] = sender
    GLOBAL_ENV['enumerator'] = enumerator
    GLOBAL_ENV['parser'] = parser
    GLOBAL_ENV['screener'] = screener
    # buffering=0 is only valid for binary-mode files on Python 3; use line buffering.
    GLOBAL_ENV['logger'] = open(LOG_FILE, 'a', 1)
    GLOBAL_ENV['processor'] = processor
    GLOBAL_ENV['sendlist_server'] = sendlist_server

    server = SimpleSMTPServer((IP_ADDRESS, SMTP_PORT), None)

    # Start new thread for the SMTP server.