import sys
import datetime as dt
from datetime import datetime

import feedparser
import requests
from sqlalchemy.orm.exc import NoResultFound

# DBSession, the model classes (Article, CurrencyPair, NamedEntity,
# NamedEntityFrequency, TradeRate) and the helpers environment() and
# data_point_for_article() are assumed to be defined elsewhere in the
# project and imported here.


def named_entities_in_time_range(currency_pair, start_time, epsilon):
    """Named entities mentioned more than once in articles about
    currency_pair published between start_time and start_time + epsilon."""
    entities = DBSession.query(NamedEntity).join(NamedEntityFrequency).join(
        Article).filter(
            (Article.effective_date >= start_time) &
            (Article.effective_date <= start_time + epsilon) &
            (Article.currency_pair == currency_pair) &
            (NamedEntityFrequency.frequency > 1))
    return entities


def currency_price_at_time(currency_pair, time):
    """Average trade rate for currency_pair within 60 seconds of time.

    Raises KeyError if no trades fall inside that window."""
    rates = DBSession.query(TradeRate).filter(
        (TradeRate.effective_date >= time - dt.timedelta(seconds=60)) &
        (TradeRate.effective_date <= time + dt.timedelta(seconds=60)) &
        (TradeRate.currency_pair == currency_pair))
    n = rates.count()
    if n == 0:
        raise KeyError("No trade information available for that time.")
    rate = sum(r.rate for r in rates) / n
    return rate


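# A minimal sketch (not part of the original module) of how
# currency_price_at_time() can be combined to measure a relative price move
# over a window after a given time. The function name and its use here are
# illustrative assumptions, not the project's actual feature extraction.
def price_move_after(currency_pair, time, horizon):
    """Relative change in the average trade rate over `horizon` after `time`.

    Propagates the KeyError raised by currency_price_at_time() when no
    trade data exists at either end of the window."""
    before = currency_price_at_time(currency_pair, time)
    after = currency_price_at_time(currency_pair, time + horizon)
    return (after - before) / before

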
def FetchArticles(currency_pair):
    """Yield Article objects for feed entries not yet stored in the database."""
    url = currency_pair.article_feed
    articles = feedparser.parse(url)
    for entry in articles['entries']:
        link = entry['link']
        try:
            # Skip entries whose URL is already in the database.
            DBSession.query(Article).filter(Article.url == link).one()
            continue
        except NoResultFound:
            pass
        body = requests.get(link).content.decode("utf-8", "replace")
        effective_date = datetime(*entry.updated_parsed[:6])
        yield Article(currency_pair, link, effective_date, body)


def currency_pair_stats(outfile, currency_pair):
    """Write one training line per unique named-entity set for currency_pair.

    Each line is the article's data point followed by sparse
    "<named_entity_id>:1" features; articles whose entity set has already
    been written are skipped."""
    old_nes = set()
    for article in DBSession.query(Article).filter(
            Article.currency_pair == currency_pair):
        try:
            ne, dp = data_point_for_article(article, dt.timedelta(minutes=5))
            ne = tuple(sorted(id for id, name in ne))
            if ne in old_nes:
                continue
            old_nes.add(ne)
            outfile.write("%s %s\n" % (dp, " ".join(
                "%d:1" % id for id in ne)))
        except KeyError:
            # data_point_for_article() failed, e.g. no trade data near the
            # article's effective date; skip this article.
            pass


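# The lines written by currency_pair_stats() follow the sparse
# "label feature:value" layout used by tools such as SVMlight or Vowpal
# Wabbit. The helper below is a hypothetical reader for those files, added
# only as a sketch to document the format; it is not part of the original
# module.
def read_stats_file(path):
    """Yield (data_point, [named_entity_id, ...]) tuples from a stats file."""
    with open(path) as infile:
        for line in infile:
            parts = line.split()
            if not parts:
                continue
            data_point = parts[0]
            entity_ids = [int(feature.split(":")[0]) for feature in parts[1:]]
            yield data_point, entity_ids

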
def main():
    with environment(sys.argv) as env:
        # Write one training file per currency pair, named "SOURCE-TARGET.txt".
        for currency_pair in DBSession.query(CurrencyPair):
            print currency_pair.source, currency_pair.target
            with open("%s-%s.txt" % (currency_pair.source,
                                     currency_pair.target), "w") as outfile:
                currency_pair_stats(outfile, currency_pair)


if __name__ == "__main__":
    main()