Example 1
def daily_briefings_intent(companies, sectors, attributes, days):
    """
    :param companies: A comma-separated string specifying which companies the daily briefing should contain
    :param sectors: A comma-separated string specifying which sectors the daily briefing should contain
    :param attributes: A list containing the attributes that the daily briefing should contain
    :param days: An integer specifying the user's preference for how many days of news they want to see
    :return: A dictionary containing the layout of the cards to be displayed
    """
    scraper = Scraper.Scraper()

    companies_data = []

    if companies:
        for company in companies.split(", "):
            companies_data.append(scraper.get_company_data(company))

    sectors_data = []
    if sectors:
        for sector in sectors.split(", "):
            sectors_data.append(scraper.get_sector_data(sector))

    response = replies.daily_briefings(companies_data, sectors_data,
                                       attributes, days)

    return response
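A hypothetical call sketch for this handler; the module path and the company/sector values below are illustrative assumptions, not taken from the project:

from intents import daily_briefings_intent  # hypothetical module path

briefing = daily_briefings_intent(
    companies="BARC, HSBA",        # comma-separated company codes (illustrative)
    sectors="Banks",               # comma-separated sector names (illustrative)
    attributes=["price", "news"],  # attributes each briefing card should show
    days=7,                        # days of news to include
)
# briefing is the dictionary of cards to render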
Example 2
def top_risers_intent(r):
    """
    :param r: A JSON object received from Dialogflow
    :return: A dictionary containing the layout of the card to be displayed
    """
    response = {}

    if r['result']['parameters']['rise_fall'] == '':
        reply = {}
        reply['text'] = r['result']['fulfillment']['speech']
        reply['speech'] = r['result']['fulfillment']['speech']
        reply['type'] = 'incomplete'
        return reply
    else:
        scraper = Scraper.Scraper()
        if r['result']['parameters']['rise_fall'] == "risers":
            risers = scraper.get_top5()
            response = replies.big_movers_card(risers)
        elif r['result']['parameters']['rise_fall'] == "fallers":
            fallers = scraper.get_top5(False)
            response = replies.big_movers_card(fallers, False)
        else:  # get both
            risers = scraper.get_top5()
            fallers = scraper.get_top5(False)
            risers_response = replies.big_movers_card(risers)
            fallers_response = replies.big_movers_card(fallers, False)
            response['speech'] = risers_response[
                'speech'] + ' ' + fallers_response['speech']
            response['type'] = 'risers&fallers'
            response['text'] = {
                'risers': risers_response['text'],
                'fallers': fallers_response['text']
            }

    return response
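The handler only reads a few keys from the Dialogflow payload; a minimal sketch of that shape, where only the key paths come from the code above and the values are illustrative:

# Minimal Dialogflow-style payload for top_risers_intent; values are illustrative.
r = {
    'result': {
        'parameters': {'rise_fall': 'risers'},  # '', 'risers', 'fallers', or anything else for both
        'fulfillment': {'speech': 'Do you want risers or fallers?'},
    }
}
# card = top_risers_intent(r)  # requires the project's Scraper and replies modules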
Example 3
def sector_query_intent(r, is_sector, days):
    """
    :param r: The JSON object received from Dialogflow where the intent is either a SectorQuery or a SubSectorQuery
    :param is_sector: Boolean value specifying whether the query relates to a sector (True) or a sub-sector (False)
    :param days: An integer specifying the user's preference for how many days of news they want to see
    :return: A dictionary containing the layout of the card to be displayed
    """
    scraper = Scraper.Scraper()
    sector = None
    # if required entities have been specified get sector/sub-sector data
    if is_sector:  # is a SectorQuery
        if r['result']['parameters']['sector'] == '' or r['result'][
                'parameters']['sector_attribute'] == '':
            reply = {}
            reply['text'] = r['result']['fulfillment']['speech']
            reply['speech'] = r['result']['fulfillment']['speech']
            reply['type'] = 'incomplete'
            return reply
        else:
            sector_name = r['result']['parameters']['sector']
            sector_attribute = r['result']['parameters']['sector_attribute']
            sector = scraper.get_sector_data(sector_name)
    else:  # is a SubSectorQuery
        if r['result']['parameters']['subsector'] == '' or r['result'][
                'parameters']['sector_attribute'] == '':
            reply = {}
            reply['text'] = r['result']['fulfillment']['speech']
            reply['speech'] = r['result']['fulfillment']['speech']
            reply['type'] = 'incomplete'
            return reply
        else:
            sector_name = r['result']['parameters']['subsector']
            sector_attribute = r['result']['parameters']['sector_attribute']
            sector = scraper.get_sub_sector_data(sector_name)
    # if query asks for news
    if sector_attribute == "news":
        date_period = r['result']['parameters']['date-period']
        # if the user specified a news timeframe
        if date_period:
            # calculate number of days of news that was requested
            start, end = date_period.split('/')
            start_date = datetime.strptime(start, '%Y-%m-%d')
            end_date = datetime.strptime(end, '%Y-%m-%d')
            difference = end_date.date() - start_date.date()
            return replies.news_reply(sector.news, difference.days, '')
        else:
            return replies.news_reply(sector.news, days, '')
    else:
        return replies.sector_reply(sector, sector_attribute)
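Dialogflow delivers the date-period parameter as a 'start/end' string in ISO format; a self-contained sketch of the day-count calculation used above, with an illustrative value:

from datetime import datetime

date_period = "2018-03-01/2018-03-08"  # illustrative 'start/end' value
start, end = date_period.split('/')
difference = (datetime.strptime(end, '%Y-%m-%d').date()
              - datetime.strptime(start, '%Y-%m-%d').date())
print(difference.days)  # 7 -> number of days of news requested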
Example 4
def comparison_intent(r):
    """
    :param r: The JSON object received from Dialogflow where the intent is a comparison between companies
    :return: A dictionary containing the layout of the card to be displayed
    """
    if r['result']['actionIncomplete']:
        reply = {}
        reply['text'] = r['result']['fulfillment']['speech']
        reply['speech'] = r['result']['fulfillment']['speech']
        reply['type'] = 'incomplete'
        return reply
    else:
        companies = r['result']['parameters']['company']
        scraper = Scraper.Scraper()

        companies = list(set(companies))

        company_data = []
        for company in companies:
            company_data.append(scraper.get_company_data(company))

        return replies.comparison_reply(company_data)
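A minimal sketch of the payload this handler expects once the intent is complete; only the key paths are taken from the code, the values are made up:

r = {
    'result': {
        'actionIncomplete': False,
        'parameters': {'company': ['BARC', 'HSBA', 'BARC']},  # duplicates are dropped by the handler
        'fulfillment': {'speech': ''},
    }
}
# card = comparison_intent(r)  # requires the project's Scraper and replies modules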
Example 5
def footsie_intent(r, days):
    """
    :param r: The JSON object received from Dialogflow where the intent is a 'Footsie Intent'
    :param days: An integer specifying the user's preference for how many days of news they want to see
    :return: A dictionary containing the layout of the card to be displayed
    """
    # Check whether all required entities have been specified
    if r['result']['actionIncomplete']:
        reply = {}
        reply['text'] = r['result']['fulfillment']['speech']
        reply['speech'] = r['result']['fulfillment']['speech']
        reply['type'] = 'incomplete'
        return reply
    else:
        company_code = r['result']['parameters']['company']
        attribute = r['result']['parameters']['attribute']
        scraper = Scraper.Scraper()

        # if query asks for news use replies.news_reply
        if attribute == "news":
            date_period = r['result']['parameters']['date-period']
            # if user specified a news timeframe
            positive_negative = r['result']['parameters']['positive_negative']
            if date_period:
                # calculate how many days of news were requested
                start, end = date_period.split('/')
                start_date = datetime.strptime(start, '%Y-%m-%d')
                end_date = datetime.strptime(end, '%Y-%m-%d')
                difference = end_date.date() - start_date.date()
                return replies.news_reply(
                    scraper.get_financial_news_data(company_code),
                    difference.days, positive_negative)
            else:
                return replies.news_reply(
                    scraper.get_financial_news_data(company_code), days,
                    positive_negative)
        elif attribute == "revenue":
            company = scraper.get_company_data(company_code)
            return replies.revenue_reply(
                company, r['result']['parameters']['date-period'])
        else:
            company = scraper.get_company_data(company_code)
            return replies.get_company_reply(company, attribute)
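A sketch of a completed Footsie payload for the news branch; the key paths mirror the lookups above, the values are illustrative:

r = {
    'result': {
        'actionIncomplete': False,
        'parameters': {
            'company': 'BARC',                       # company code passed to the scraper
            'attribute': 'news',                     # 'news', 'revenue', or another attribute
            'date-period': '2018-03-01/2018-03-08',  # optional 'start/end' timeframe
            'positive_negative': '',                 # optional sentiment filter
        },
        'fulfillment': {'speech': ''},
    }
}
# card = footsie_intent(r, days=7)  # requires the project's Scraper and replies modules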
Example 6
def interests(request):
    """
    :param request: A HTTP request
    :return: A rendering of interests.html
    """
    # objects.all().first() returns None rather than raising when no row
    # exists yet, so create default preferences in that case
    preferences = UserPreferences.objects.all().first()
    if preferences is None:
        preferences = UserPreferences.objects.create()

    news_timeframe = preferences.days_old
    companies = preferences.companies
    sectors = preferences.sectors
    scraper = Scraper.Scraper()
    company_news_data = defaultdict()
    company_news_data['LSE'] = list()
    company_news_data['YAHOO'] = list()
    #get news, data for tracked companies
    company_data = list()
    updated = ""

    for company in companies.split(", "):
        if len(company) > 0:
            financial_news_data = scraper.get_financial_news_data(company)
            company_data.append(scraper.get_company_data(company))
            updated = "Last updated: " + company_data[-1].date
            company_news_data['LSE'] += financial_news_data['LSE']
            company_news_data['YAHOO'] += financial_news_data['YAHOO']

    sector_news_data = defaultdict()
    sector_news_data['LSE'] = list()
    sector_news_data['YAHOO'] = list()
    #get news, data for tracked sectors

    for sector in sectors.split(", "):
        if len(sector) > 0:
            sector_data = scraper.get_sector_data(sector)
            company_data = company_data + sector_data.companies
            sector_news_data['LSE'] += sector_data.news['LSE']
            sector_news_data['YAHOO'] += sector_data.news['YAHOO']
    #merge company and sector news, sort by date and remove duplicates

    all_news_data = defaultdict()
    all_news_data['LSE'] = company_news_data['LSE'] + sector_news_data['LSE']
    all_news_data['YAHOO'] = company_news_data['YAHOO'] + sector_news_data['YAHOO']
    all_news_data['LSE'].sort(
        key=lambda x: datetime.strptime(x.date, '%H:%M %d-%b-%Y'),
        reverse=True)
    all_news_data['YAHOO'].sort(
        key=lambda x: datetime.strptime(x.date, '%H:%M %d-%b-%Y'),
        reverse=True)
    all_news_data['LSE'] = remove_duplicates(all_news_data['LSE'])
    all_news_data['YAHOO'] = remove_duplicates(all_news_data['YAHOO'])
    all_news = replies.news_reply(all_news_data, news_timeframe, '')
    # remove duplicates from company_data (sort by code, keep the first of each)
    company_data.sort(key=lambda x: x.code)
    deduped = []
    last_code = None
    for c in company_data:
        if c.code != last_code:
            deduped.append(c)
            last_code = c.code
    company_data = deduped

    #pass data to interests template
    data = {
        'companies': company_data,
        'all_news': all_news,
        'updated': updated,
        'colour_scheme': preferences.colour_scheme
    }
    return render(request, 'interests.html', data)
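A minimal sketch of how this Django view might be routed; the module path and URL name are assumptions, not taken from the project:

# urls.py (hypothetical wiring for the interests view)
from django.urls import path

from . import views  # hypothetical: the module containing interests()

urlpatterns = [
    path('interests/', views.interests, name='interests'),
]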
Example 7
    def setUp(self):
        self.scraper = Scraper.Scraper()
        self.company = self.scraper.get_company_data('BARC')
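A sketch of the surrounding test case, assuming the project's Scraper module is importable; the test method and its assertion are illustrative, not taken from the source:

import unittest

import Scraper  # assumed project module


class CompanyDataTest(unittest.TestCase):
    def setUp(self):
        self.scraper = Scraper.Scraper()
        self.company = self.scraper.get_company_data('BARC')

    def test_company_code(self):
        # hypothetical check: company objects expose the ticker code
        self.assertEqual(self.company.code, 'BARC')


if __name__ == '__main__':
    unittest.main()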
Example 8
def sector_members(r):
    """
    :param r: The JSON object received from Dialogflow where the intent asks for the members of a sector
    :return: A dictionary containing the layout of the card to be displayed
    """
    scraper = Scraper.Scraper()
    sector_name = r['result']['parameters']['sector']
    sector = scraper.get_sector_data(sector_name)
    return replies.members_reply(sector)
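An illustrative call; only the 'sector' key path is taken from the handler above, and the project's Scraper and replies modules are needed to actually run it:

r = {'result': {'parameters': {'sector': 'Banks'}}}  # illustrative sector name
# card = sector_members(r)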