Example #1
0
    def generate_feeds_from_new_mail(cls) -> None:
        """Fetch new mail for every configured feed name and regenerate each feed.

        Returns schedule.CancelJob on any handled failure so the scheduler
        stops re-running a broken job; returns None on success.
        """
        logging.info('Mail fetch and feed generation job starting')
        for feed_name in config.ParseFeed.names():
            logging.info('feed_name_tasks: {}'.format(feed_name))
            with feed.Feed(feed_name) as f:
                try:
                    # IMAP doesn't specify TZ for 'INTERNALDATE', so 2 days is the smallest value I'm happy with.
                    new_mail = mail.MailFetch.new_mail(feed_name, 2)
                    # BUG FIX: the original tested `mail is None` — `mail` is the
                    # imported module and is never None, so this branch was dead.
                    # The fetch result `new_mail` is what must be checked.
                    if new_mail is None:
                        logging.warning('mail.MailFetch.new_mail returned None. Either there have been no emails recieved at server matching filter for 2 days or an unhandled error occured. Ending feed generation job for {}'.format(feed_name) )
                        return schedule.CancelJob
                except (mail.ConfigError, mail.GenericHandledException):
                    logging.exception('Handled exception raised in mail.MailFetch.new_mail({f}). Ending feed generation job for {f}'.format(f=feed_name))
                    return schedule.CancelJob

                try:
                    f.add_entries_from_dict_if_new(new_mail)
                except Exception:
                    logging.exception('Unknown error occured in feed.Feed.add_entries_from_dict_if_new(). Skipping feed generation for {}'.format(feed_name))
                    return schedule.CancelJob

                try:
                    f.generate_feed()
                except Exception:
                    logging.exception('Unknown error occured in feed.Feed.generate_feed(). Skipping feed generation for {}'.format(feed_name))
                    return schedule.CancelJob

            logging.info('Feed Generated')
Example #2
0
 def test_make_zip(self):
   """make_zip writes exactly the expected set of GTFS member files."""
   f = feed.Feed()
   outfile = test_outfile()
   f.make_zip(
     outfile,
     path=os.path.dirname(util.example_feed()),
     clone=util.example_feed()
   )
   expect = [
     'agency.txt',
     'calendar.txt',
     'calendar_dates.txt',
     'fare_attributes.txt',
     'fare_rules.txt',
     'frequencies.txt',
     'routes.txt',
     'shapes.txt',
     'stop_times.txt',
     'trips.txt',
     'stops.txt'
   ]
   zf = zipfile.ZipFile(outfile)
   try:
     # Compare the whole sorted lists: the original zip()-based loop
     # truncated to the shorter list and so silently passed when the
     # archive had extra or missing members.
     assert sorted(zf.namelist()) == sorted(expect)
   finally:
     zf.close()
   os.unlink(outfile)
Example #3
0
def preload_agency(**kw):
    """Build a preloaded Feed from the example data and return one agency.

    Any 'agency_id' keyword selects the agency (default 'DTA'); remaining
    keywords are forwarded to example_feed().
    """
    import feed
    wanted_id = kw.pop('agency_id', 'DTA')
    gtfs_feed = feed.Feed(example_feed(**kw))
    gtfs_feed.preload()
    return gtfs_feed.agency(wanted_id)
Example #4
0
    def setUp(self):
        # Fresh in-memory SQLite engine per test; nothing persists between tests.
        engine = create_engine('sqlite:///:memory:')
        self.target_url = 'http://www.hatena.ne.jp/'
        # Object under test, bound to the same throwaway engine.
        self.obj = feed.Feed(engine, self.target_url)

        def _create_test_data():
            """Create database for test.

            Creates the `feed` table and inserts one seed row via raw SQL.
            TODO: Finally I'm thinking to convert ORM function.
            """
            create_sql = """
                CREATE TABLE `feed` (
                  `id`    INTEGER PRIMARY KEY AUTOINCREMENT,
                  `url`   TEXT UNIQUE,
                  `title` TEXT
                );
             """
            insert_sql = """
                INSERT INTO `feed` (`url`, `title`)
                  VALUES ('http://test', 'test');
                """
            # NOTE(review): the connection is never explicitly closed; it is
            # released when the in-memory engine is garbage-collected.
            c = engine.connect()
            c.execute(create_sql)
            c.execute(insert_sql)

        _create_test_data()
Example #5
0
 def test_read_missing(self):
   """Reading a table that does not exist must raise."""
   f = feed.Feed(util.example_feed(), path=os.path.dirname(util.example_feed()))
   with self.assertRaises(Exception):
     f.read('missing')
Example #6
0
 def test_write_exists(self):
   """A second write() to an existing file raises IOError."""
   agencies = [entities.Agency(**self.agency_expect)]
   target = test_outfile()
   f = feed.Feed()
   f.write(target, agencies, sortkey='agency_id')
   # Writing again to the same path must refuse to overwrite.
   with self.assertRaises(IOError):
     f.write(target, agencies, sortkey='agency_id')
   os.unlink(target)
Example #7
0
 def test_read_padding(self):
   """stop_times.txt in the Google example feed is missing columns;
   verify the padding mechanism fills every entity out to 9 elements."""
   f = feed.Feed(util.example_feed())
   # The original read the table twice and ignored the first result;
   # a single read is sufficient.
   for entity in f.read('stop_times'):
     assert len(entity) == 9
Example #8
0
 def test_read(self):
   """A basic read returns every entity and primes the by_id cache."""
   f = feed.Feed(util.example_feed())
   stops = f.read('stops')
   # All 9 entities come back.
   assert len(stops) == 9
   # The table is now cached under its name.
   assert 'stops' in f.by_id
Example #9
0
 def test_cache(self):
   """Repeated reads return consistent data and populate the cache."""
   f = feed.Feed(util.example_feed())
   first = f.read('stops')
   second = f.read('stops')
   # Both reads agree, and the cache holds the same number of entries.
   assert len(first) == len(second)
   assert 'stops' in f.by_id
   assert len(f.by_id['stops']) == len(first)
Example #10
0
 def test_read_path(self):
   """The path overlay shadows the base feed: 'TEST' resolves, the
   base-only 'FUR_CREEK_RES' does not."""
   f = feed.Feed(util.example_feed(), path=os.path.dirname(util.example_feed()))
   assert f.stop('TEST')
   with self.assertRaises(Exception):
     f.stop('FUR_CREEK_RES')
Example #11
0
 def test_make_zip_exists(self):
   """make_zip refuses to overwrite an existing archive."""
   f = feed.Feed()
   target = test_outfile()
   src = util.example_feed()
   src_dir = os.path.dirname(src)
   f.make_zip(target, path=src_dir, clone=src)
   # A second build to the same path must raise.
   with self.assertRaises(IOError):
     f.make_zip(target, path=src_dir, clone=src)
   os.unlink(target)
Example #12
0
def main():
    """main function: parse args, fetch/parse the feed, and emit output.

    Known configuration/network errors are caught and printed rather than
    propagated.
    """
    try:
        args = arguments()
        rss = args.source

        # --verbose lifts the log level from WARNING to INFO.
        logging.basicConfig(level='INFO' if args.verbose else 'WARNING')
        logging.info('Started')

        if args.date:
            # Cached mode: no network fetch, feed comes from the database.
            parsed = {}
        else:
            if not check.internet_connection_check():
                raise ex.NoInternetConnection("No internet connection")
            parsed = feedparser.parse(rss)
            if parsed.bozo > 0:
                raise ex.InvalidURLAddress("Invalid RSS URL address")
            logging.info('parsed url: %s', rss)

        feed_obj = feed.Feed(parsed, args)

        if not args.date:
            feed_obj.save_feed_to_database()

        # Conversions suppress the console output.
        converted = False
        if args.to_html:
            feed_obj.save_feed_to_html()
            converted = True
        if args.to_pdf:
            feed_obj.save_feed_to_pdf()
            converted = True

        if not converted:
            if args.json:
                feed_obj.print_json_feed(args.colorize)
            else:
                feed_obj.print_readable_feed(args.colorize)

    except (
        ex.InvalidURLAddress,
        ex.NoInternetConnection,
        ex.EmptyDataBase,
        ex.DateNotInDatabase
    ) as E:
        print(E)
Example #13
0
def load(feed_filename, db_filename=":memory:", verbose=True):
    """Load a GTFS feed file into a Schedule database and return it.

    Iterates every known GTFS entity class, reads its table from the feed,
    and bulk-inserts the records, committing every 500 rows.  Tables absent
    from the feed (KeyError from get_table) are skipped.
    """
    schedule = Schedule(db_filename)
    schedule.create_tables()

    fd = feed.Feed(feed_filename)

    for gtfs_class in (
            Agency,
            Route,
            Stop,
            Trip,
            StopTime,
            ServicePeriod,
            ServiceException,
            Fare,
            FareRule,
            ShapePoint,
            Frequency,
            Transfer,
    ):

        if verbose:
            print "loading %s" % gtfs_class

        try:
            for i, record in enumerate(
                    fd.get_table(gtfs_class.TABLENAME + ".txt")):
                # Commit in batches of 500 to bound memory; the dot is a
                # progress indicator.
                if i % 500 == 0:
                    if verbose:
                        sys.stdout.write(".")
                        sys.stdout.flush()
                    schedule.session.commit()

                instance = gtfs_class(**record.to_dict())
                schedule.session.add(instance)
            print
        except KeyError:
            # Table not present in this feed; skip it.
            # TODO: check if the table is required
            continue

    # Final commit for any rows added since the last batch boundary.
    schedule.session.commit()

    return schedule
Example #14
0
  def test_make_zip_compression(self):
    """An archive built with compress=False is larger than a compressed one."""
    f = feed.Feed()
    src = util.example_feed()
    src_dir = os.path.dirname(src)
    plain = test_outfile()
    f.make_zip(plain, path=src_dir, clone=src, compress=False)
    packed = test_outfile()
    f.make_zip(packed, path=src_dir, clone=src)
    # Stored (uncompressed) output must be strictly bigger.
    assert os.stat(plain).st_size > os.stat(packed).st_size

    os.unlink(plain)
    os.unlink(packed)
Example #15
0
 def test_write(self):
   """Round-trip an Agency through write() and verify the CSV contents."""
   f = feed.Feed()
   data = [entities.Agency(**self.agency_expect)]
   outfile = test_outfile()
   f.write(outfile, data, sortkey='agency_id')
   # Check the output...
   with open(outfile) as csvfile:
     reader = csv.reader(csvfile)
     # next(reader) works on Python 2.6+ and 3; the original reader.next()
     # is a Python-2-only method and breaks under Python 3.
     headers = next(reader)
     assert len(self.agency_expect.keys()) == len(headers)
     for header in headers:
       assert header in self.agency_expect
     rows = list(reader)
     # Exactly one data row, and every column matches the fixture.
     assert len(rows) == 1
     for k, v in zip(headers, rows[0]):
       assert self.agency_expect[k] == v
   # Delete temp file
   os.unlink(outfile)
Example #16
0
 def test_agencies(self):
   """The example feed defines exactly one agency."""
   agencies = feed.Feed(util.example_feed()).agencies()
   assert len(agencies) == 1
Example #17
0
 def __init__(self, name):
     """Wire up the gateway, feed, book, and board for a new session."""
     # Both the gateway and the feed call back into this object (listeners).
     self.gateway = gateway.Gateway(name=name, listeners=[self])
     self.feed = feed.Feed(listeners=[self])
     self.book = book.Book()
     self.board = ChessBoard.ChessBoard()
     # No game assigned until one starts.
     self.gameId = None
Example #18
0
 def feed(self):
     """Return self._feed wrapped in a new feed.Feed instance."""
     return feed.Feed(self._feed)
Example #19
0
 def test_init(self):
   """Constructing a Feed from the example file must not raise."""
   f = feed.Feed(util.example_feed())
Example #20
0
 def test_stop(self):
   """Looking up a stop by id returns every expected field value."""
   f = feed.Feed(util.example_feed())
   actual = f.stop(self.stop_expect['stop_id'])
   for key in self.stop_expect:
     assert actual[key] == self.stop_expect[key]
Example #21
0
 def test_stops(self):
   """The example feed contains 9 stops."""
   stops = feed.Feed(util.example_feed()).stops()
   assert len(stops) == 9
Example #22
0
 def test_route(self):
   """Looking up a route by id returns every expected field value."""
   f = feed.Feed(util.example_feed())
   actual = f.route(self.route_expect['route_id'])
   for key in self.route_expect:
     assert actual[key] == self.route_expect[key]
Example #23
0
 def test_routes(self):
   """The example feed contains 5 routes."""
   routes = feed.Feed(util.example_feed()).routes()
   assert len(routes) == 5
Example #24
0
 def test_agency(self):
   """Looking up an agency by id returns every expected field value."""
   f = feed.Feed(util.example_feed())
   actual = f.agency(self.agency_expect['agency_id'])
   for key in self.agency_expect:
     assert actual[key] == self.agency_expect[key]
Example #25
0
if __name__ == "__main__":
    # Replay a recorded chess game over the feed, feeding a matching book
    # and P&L events.  argv[1] = game lines file, argv[2] = pnl file.
    pnlFilename = sys.argv[2]

    # NOTE(review): the file handle is never closed; acceptable for a
    # one-shot script but worth fixing.
    gamelines = open(sys.argv[1], 'r').readlines()

    # Seconds between replayed moves.
    speed = 1
    nextMove = time.time() + speed
    ##game = chessgame.ChessGame(gameId = 150, result = "1/2-1/2", moves="e4 c5 Nf3 e6 d4 cxd4 Nxd4 a6 Bd3 Nf6 O-O Qc7 Qe2 d6 c4 g6 Nc3 Bg7 Nf3 O-O Bf4 Nc6 Rac1 e5 Bg5 h6 Be3 Bg4 Nd5 Qd8 h3 Nxd5 cxd5 Nd4 Bxd4 Bxf3 Qxf3 exd4 Rc4 Rc8 Rfc1 Rxc4 Rxc4 h5 Qd1 Be5 Qc1 Qf6 Rc7 Rb8 a4 Kg7 b4 h4 Kf1 Bf4 Qd1 Qd8 Rc4 Rc8 a5 Rxc4 Bxc4 Qf6 Be2 Be5 Bf3 Qd8 Qc2 b6 axb6 Qxb6 Qc4 d3".split(" "))
    game = chessgame.ChessGame.fromstr(gamelines.pop(0))
    # Index of the next move to send.
    n = 0

    # When True, a second Book consumes the same messages for debugging.
    debugFeedBook = True

    gateways = GatewayCollection()
    f = feed.Feed(send=True)
    b = MatchingBook()
    # Announce the new game on the feed before replaying moves.
    newMsg = game.newMessage()
    f.send(newMsg)
    if debugFeedBook:
        fb = Book()
        fb.processMessage(newMsg)
    r = RecoveryBuilder(b, f)
    pnlEvents = pnl.PnlEvents(pnlFilename)
    oldMark = None

    newoid = 1
    # Main replay loop: send one move each `speed` seconds.
    # NOTE(review): the loop body continues beyond this excerpt.
    while True:
        if time.time() > nextMove:
            if n < len(game.moves):
                moveMessage = game.moveMessage(n)
import feed
import time

# Toggle between listener-callback mode and synchronous polling.
useThread = True

class FeedPrinter(feed.Listener):
  """Listener that prints each feed message with its sequence number."""
  def onFeedMessage(self, rawMessage, seq, drop, message):
    # '*' marks messages flagged as dropped.
    print "%08d %1s: %s" % (seq, "*" if drop else " ", message)

if useThread:
  # Threaded mode: the Feed delivers messages via the listener callback;
  # the main thread just stays alive.
  L = FeedPrinter()
  f = feed.Feed(listeners=[L])
  while True:
    time.sleep(1)
else:
  # Polling mode: receive and print messages synchronously.
  f = feed.Feed()
  while True:
    msg, seq, drop, m = f.recv()
    print "%08d %1s: %s" % (seq, "*" if drop else " ", str(m))
Example #27
0
 def test_read_invalidfile(self):
   """Reading an unknown table name raises KeyError."""
   f = feed.Feed(util.example_feed())
   with self.assertRaises(KeyError):
     f.read('invalidfile')
Example #28
0
    for y, rank in enumerate(board):
        for x, p in enumerate(rank):
            pygameScreen.blit(pieces[(x+y)%2][p],(x*60,y*60))
    if os.path.isfile("pnl.png"):
        pnl = pygame.image.load("pnl.png")
        pygameScreen.blit(pnl, (0,480))

    pygame.display.flip()

# Script-level state for a curses-based feed viewer.
chess = ChessBoard()

b = book.Book()
# prevSeq/drops presumably track the last seen sequence number and the
# dropped-message count — confirm against the (not shown) receive loop.
prevSeq = None
drops = 0

f = feed.Feed()

##p = pnl.Pnl(sys.argv[1])

# Curses setup: raw unbuffered key input, no echo, hidden cursor.
stdscr = curses.initscr()

curses.noecho()
curses.cbreak()
curses.curs_set(0)
stdscr.keypad(1)

gameId = ""
chessResult=""
messages = []

try:
Example #29
0
"""Provide useful information about a GTFS file and export to JSON."""
import argparse
import json

import feed

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='GTFS Info and JSON export')
    parser.add_argument('filename', help='GTFS File')
    parser.add_argument('--debug',
                        help='Show helpful debugging information',
                        action='store_true')

    args = parser.parse_args()
    g = feed.Feed(args.filename)
    # presumably loads all tables up front so the per-agency calls below
    # are cheap — verify against feed.Feed.preload.
    g.preload()
    print "===== GTFS: %s =====" % g.filename
    # Summarize each agency; --debug additionally dumps the raw records.
    for agency in g.agencies():
        print "Agency:", agency['agency_name']
        print "  Routes:", len(agency.routes())
        if args.debug:
            for route in agency.routes():
                print route.data
        print "  Stops:", len(agency.stops())
        if args.debug:
            for stop in agency.stops():
                print stop.data
        print "  Trips:", len(agency.trips())
        if args.debug:
            for trip in agency.trips():
                print trip.data
Example #30
0
"""Validate a GTFS file."""
import argparse
import json

import feed
import validation

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Validate a GTFS feed.')
    parser.add_argument('filename', help='GTFS File')
    parser.add_argument('--debug',
                        help='Show helpful debugging information',
                        action='store_true')
    args = parser.parse_args()

    validator = validation.ValidationReport()
    # Renamed from `feed` to `gtfs_feed`: the original assignment shadowed
    # the imported `feed` module, which would break any later module access.
    gtfs_feed = feed.Feed(args.filename, debug=args.debug)
    gtfs_feed.validate(validator=validator)
    validator.report()