Example no. 1
0
def run_report_via_rest(host, slug, namespace, title, authfile, verify,
                        **kwargs):
    """Mimic the front-end javascript to run a whole report.

    This also has the added benefit of adding an entry to Report History, and
    adding the resulting data to the Widget Cache if those functions
    are enabled.

    :param host: hostname of the server to run the report against
    :param slug: report slug identifying the report
    :param namespace: report namespace
    :param title: report title, used only in log messages
    :param authfile: path to the credentials file passed to ``get_auth``
    :param verify: whether to verify SSL certificates
    :param kwargs: raw criteria, normalized via ``process_criteria``
    """

    report_url = '/report/%s/%s/' % (namespace, slug)
    post_header = {'Content-Type': 'application/x-www-form-urlencoded'}

    criteria, options = process_criteria(kwargs)

    # since we are posting in place of a criteria form, we need to split time
    # into the two separate fields the HTML form would have submitted
    endtime = criteria.pop('endtime')
    endtime_date, endtime_time = endtime.split('T')
    criteria['endtime_0'] = endtime_date
    criteria['endtime_1'] = endtime_time

    conn = Connection(host, auth=get_auth(authfile), verify=verify)

    logger.debug('Posting report criteria for report %s: %s' %
                 (title, criteria))
    r = conn.request('POST',
                     report_url,
                     extra_headers=post_header,
                     body=criteria)

    # parse the response body once instead of re-parsing it for every use
    report = r.json() if r.ok else {}

    if 'widgets' not in report:
        logger.error('Error getting Widgets for Report url %s. Aborting.' %
                     report_url)
        return

    logger.debug('Got widgets for Report url %s' % report_url)

    jobs = []

    # create the widget jobs for each widget found
    for w in report['widgets']:
        data = {'criteria': json.dumps(w['criteria'])}

        w_response = conn.request('POST',
                                  w['posturl'],
                                  extra_headers=post_header,
                                  body=data)
        jobs.append(w_response.json()['joburl'])

    # check until all jobs are done
    timeout = options['delta']
    interval = 1

    wait_for_complete(conn, interval, timeout, jobs)
Example no. 2
0
def run_report_via_rest(host, slug, namespace,
                        title, authfile, verify, **kwargs):
    """Run a full report by replaying the front-end's REST calls.

    As a side effect the server records an entry in Report History and,
    when those features are enabled, stores the resulting data in the
    Widget Cache.
    """

    form_headers = {'Content-Type': 'application/x-www-form-urlencoded'}
    report_url = '/report/%s/%s/' % (namespace, slug)

    criteria, options = process_criteria(kwargs)

    # The criteria form submits the end time as two separate fields, so
    # split the ISO timestamp on its 'T' separator before posting.
    date_part, time_part = criteria.pop('endtime').split('T')
    criteria['endtime_0'] = date_part
    criteria['endtime_1'] = time_part

    conn = Connection(host, auth=get_auth(authfile), verify=verify)

    logger.debug('Posting report criteria for report %s: %s'
                 % (title, criteria))
    response = conn.request('POST', report_url, extra_headers=form_headers,
                            body=criteria)

    if not (response.ok and 'widgets' in response.json()):
        logger.error('Error getting Widgets for Report url %s. Aborting.'
                     % report_url)
        return

    logger.debug('Got widgets for Report url %s' % report_url)

    # Kick off one job per widget and remember each job's status URL.
    job_urls = []
    for widget in response.json()['widgets']:
        payload = {'criteria': json.dumps(widget['criteria'])}
        widget_resp = conn.request('POST', widget['posturl'],
                                   extra_headers=form_headers, body=payload)
        job_urls.append(widget_resp.json()['joburl'])

    # Poll until every widget job has finished.
    wait_for_complete(conn, 1, options['delta'], job_urls)
Example no. 3
0
def get_historical_prices(begin, end, symbol, measures,
                          resolution='day', date_obj=False):
    """Get historical prices for the given ticker symbol.
    Returns a list of dicts keyed by 'date' and measures

    :param string begin: begin date of the inquire interval
      in the format of YYYY-MM-DD
    :param string end: end date of the inquire interval
      in the format of YYYY-MM-DD
    :param string symbol: symbol of one stock to query
    :param list measures: a list of prices that needs to be queried,
      should be a subset of ["open", "high", "low", "close", "volume"]
    :param string resolution: '1 day' or '5 days'
    :param boolean date_obj: dates are converted to datetime objects
      from date strings if True. Otherwise, dates are stored as strings

    :raises StockApiException: if the symbol is invalid or the stock was
      not traded during the requested interval
    """
    try:
        conn = Connection('http://ichart.finance.yahoo.com')

        # parse each boundary date once instead of three times apiece
        begin_date = parse_date(begin)
        end_date = parse_date(end)

        ret = []
        # Yahoo's chart API expects zero-based months, hence the "- 1"
        params = {'s': symbol,
                  'a': begin_date.month - 1,
                  'b': begin_date.day,
                  'c': begin_date.year,
                  'd': end_date.month - 1,
                  'e': end_date.day,
                  'f': end_date.year,
                  'g': resolution[0],
                  'ignore': '.csv'}

        resp = conn.request(method='POST', path='/table.csv', params=params)
        # extract data and skip first row with column titles
        data = list(resp.iter_lines())[1:]

        # iterate over the data backwards as the daily prices are sorted
        # backwards by the dates
        for day in reversed(data):
            # day is a string with date, prices, volume separated by commas,
            # '<date>,<open>,<high>,<low>,<close>,<volume>,<adj_close>'
            # as '2014-02-19,20.22,20.55,20.11,20.50,1599600,20.50'
            day = day.split(',')
            date = parse_date(day[0]) if date_obj else day[0]
            daily_prices = {'date': date}
            for m in measures:
                # 'mapping' translates a measure name to its CSV column index
                if m in mapping:
                    daily_prices[m] = float(day[mapping[m]])
            ret.append(daily_prices)
    except RvbdHTTPException:
        raise StockApiException("Symbol '%s' is invalid or Stock '%s' was"
                                " not on market on %s" % (symbol, symbol,
                                                          end))
    return ret
Example no. 4
0
def run_table_via_rest(host, url, authfile, verify, **kwargs):
    """Create a table job on the server via REST and optionally wait for
    its results.

    :param host: hostname of the server to post the job to
    :param url: REST endpoint that accepts new job requests
    :param authfile: path to the credentials file passed to ``get_auth``
    :param verify: whether to verify SSL certificates
    :param kwargs: raw criteria, normalized via ``process_criteria``; if
      the resulting options include 'output-file', this polls until the
      job completes and writes the data to that file.
    """
    criteria, options = process_criteria(kwargs)

    conn = Connection(host, auth=get_auth(authfile), verify=verify)

    logger.debug('POSTing new job with criteria: %s' % criteria)
    r = conn.request('POST', url, body=criteria)
    if not r.ok:
        # Without a created job there is no 'Location' header to poll,
        # so abort instead of raising a KeyError below.
        logger.error('Error creating Job: %s' % r.content)
        return

    logger.debug('Job creation successful.')

    if options.get('output-file', None):
        # check periodically until data is ready and write to file
        url = r.headers['Location']
        timeout = options['delta']
        interval = 1

        wait_for_complete(conn, interval, timeout,
                          [url], [options['output-file']])
    else:
        # we aren't interested in results
        pass
Example no. 5
0
def run_table_via_rest(host, url, authfile, verify, **kwargs):
    """POST a new table job built from *kwargs* criteria and, when an
    'output-file' option is present, poll until the results are ready
    and save them to that file.
    """
    criteria, options = process_criteria(kwargs)

    conn = Connection(host, auth=get_auth(authfile), verify=verify)

    logger.debug('POSTing new job with criteria: %s' % criteria)
    response = conn.request('POST', url, body=criteria)
    if response.ok:
        logger.debug('Job creation successful.')
    else:
        logger.error('Error creating Job: %s' % response.content)

    output_file = options.get('output-file', None)
    if not output_file:
        # nobody asked for the results, so we are done
        return

    # Poll the job's status URL until the data is ready, then write it out.
    job_url = response.headers['Location']
    wait_for_complete(conn, 1, options['delta'], [job_url], [output_file])