# Example 1
    def test_update_facebook_no_work(self):
        """update_facebook returns False when the find query yields no events."""
        coll_fake = fudge.Fake("events_coll")
        coll_fake.remember_order()

        # Build the query update_facebook is expected to issue: events whose
        # lookup has not completed, or whose lookup failed for a retryable reason.
        null_reason = OrderedDict([("ubernear.lookup_failed.reason", "Null response")])
        validating_reason = OrderedDict([
            (
                "ubernear.lookup_failed.reason",
                OrderedDict([("$regex", "OAuthException error on get.*: Error validating application..")]),
            )
        ])
        retry_reason = OrderedDict([
            (
                "ubernear.lookup_failed.reason",
                OrderedDict([
                    (
                        "$regex",
                        "OAuthException error on get.*: "
                        "An unexpected error has occurred. "
                        "Please retry your request later..",
                    )
                ]),
            )
        ])
        failed_any = OrderedDict([("$or", [null_reason, validating_reason, retry_reason])])
        not_completed = OrderedDict([("ubernear.lookup_completed", OrderedDict([("$exists", False)]))])
        failed_and_pending = OrderedDict([("$and", [not_completed, failed_any])])
        expected_query = OrderedDict([("$or", [not_completed, failed_and_pending])])

        find_call = coll_fake.expects("find")
        find_call.with_args(expected_query, sort=[("ubernear.fetched", 1)])
        # An empty cursor simulates "no pending work".
        find_call.returns(FakeCursor([]))

        datetime_fake = fudge.Fake("datetime")
        datetime_fake.remember_order()

        now_call = datetime_fake.expects("utcnow")
        now_call.with_arg_count(0)
        now_call.returns(datetime(2011, 10, 16, 2, 50, 32))

        graph_fake = fudge.Fake("graph")
        log_fake = fudge.Fake("log")

        found_work = facebook_event.update_facebook(
            events_coll=coll_fake, graph=graph_fake, _log=log_fake, _datetime=datetime_fake
        )

        eq(found_work, False)
# Example 2
    def test_update_facebook_process_all(self):
        """With process_all=True, update_facebook fetches every event unconditionally."""
        coll_fake = fudge.Fake("events_coll")
        coll_fake.remember_order()

        find_call = coll_fake.expects("find")
        # process_all skips the lookup-status filter, so find() takes no arguments.
        find_call.with_arg_count(0)
        pending = [
            OrderedDict([
                ("_id", "226680217397995"),
                ("ubernear", OrderedDict([("fetched", datetime(2011, 11, 14, 1, 15, 53))])),
            ]),
            OrderedDict([
                ("_id", "267558763278075"),
                ("ubernear", OrderedDict([("fetched", datetime(2011, 11, 15, 2, 36, 21))])),
            ]),
        ]
        find_call.returns(FakeCursor(pending))

        log_fake = fudge.Fake("log")
        log_fake.remember_order()

        log_fake.expects("info").with_args("Fetching 2 events")

        self._create_save_events_simple_fakes(events_coll=coll_fake, fake_log=log_fake)

        datetime_fake = fudge.Fake("datetime")
        datetime_fake.remember_order()

        now_call = datetime_fake.expects("utcnow")
        now_call.with_arg_count(0)
        now_call.returns(datetime(2011, 11, 16, 2, 50, 32))

        # NOTE(review): the call below uses self._events_coll/_fake_graph/_fake_log
        # rather than the local fakes — presumably _create_save_events_simple_fakes
        # assigns them; confirm against the helper's definition.
        found_work = facebook_event.update_facebook(
            events_coll=self._events_coll,
            graph=self._fake_graph,
            process_all=True,
            _log=self._fake_log,
            _datetime=datetime_fake,
        )

        eq(found_work, True)
# Example 3
def main():
    """Entry point: sync Facebook event, venue and coordinate data into MongoDB.

    Parses command-line options, configures logging, ensures indices,
    expires old events, then runs the three update passes.  Sleeps when
    the geocoding rate limit is reached, or for about a day when no pass
    found any work.
    """
    parser = optparse.OptionParser(
        usage='%prog [OPTS]',
        )
    parser.add_option(
        '-v', '--verbose',
        help='Verbose mode [default %default]',
        action="store_true", dest="verbose"
        )
    parser.add_option(
        '--config',
        help=('Path to the file with information on how to '
              'configure facebook-event'
              ),
        metavar='PATH',
        )
    parser.add_option(
        '--db-config',
        help=('Path to the file with information on how to '
              'retrieve and store data in the database'
              ),
        metavar='PATH',
        )
    parser.add_option(
        '-a', '--process-all',
        help=('Process all events that have not expired '
              'instead of just those that have not been '
              'processed [default %default]'
              ),
        action="store_true", dest="process_all"
        )
    parser.set_defaults(
        verbose=False,
        process_all=False,
        )

    options, args = parser.parse_args()
    # This tool takes options only; positional arguments are an error.
    if args:
        parser.error('Wrong number of arguments.')

    # Both config files are mandatory.
    if options.config is None:
        parser.error('Missing option --config=.')
    if options.db_config is None:
        parser.error('Missing option --db-config=.')

    logging.basicConfig(
        level=logging.DEBUG if options.verbose else logging.INFO,
        format='%(asctime)s.%(msecs)03d %(name)s: %(levelname)s: %(message)s',
        datefmt='%Y-%m-%dT%H:%M:%S',
        )

    # Facebook Graph API client plus third-party geocoding credentials.
    config = config_parser(options.config)
    access_token = config.get('facebook', 'access_token')
    graph = GraphAPI(access_token)

    usps_id = config.get('usps', 'user_id')
    yahoo_id = config.get('yahoo', 'app_id')

    coll = collections(options.db_config)
    events_coll = coll['events-collection']
    expired_coll = coll['expired-collection']

    # Ensure the indices the update/expire queries rely on.
    indices = [
        {'facebook.end_time': pymongo.ASCENDING},
        {'ubernear.fetched': pymongo.ASCENDING},
        ]
    mongo.create_indices(
        collection=events_coll,
        indices=indices,
        )
    indices = [
        {'facebook.end_time': pymongo.ASCENDING},
        ]
    mongo.create_indices(
        collection=expired_coll,
        indices=indices,
        )

    log.info('Start...')

    log.info('Moving expired events...')
    facebook_event.expire(
        events_coll=events_coll,
        expired_coll=expired_coll,
        )

    log.info('Updating event data...')
    facebook_work = facebook_event.update_facebook(
        events_coll=events_coll,
        graph=graph,
        process_all=options.process_all,
        )

    log.info('Updating venue data...')
    venue_work = facebook_event.update_venue(
        events_coll=events_coll,
        usps_id=usps_id,
        process_all=options.process_all,
        )

    log.info('Updating coordinate data...')
    coord_work = facebook_event.update_coordinate(
        events_coll=events_coll,
        yahoo_id=yahoo_id,
        process_all=options.process_all,
        )
    if coord_work['sleep'] is not None:
        delay = coord_work['sleep']
        # BUGFIX: the format field was '{sleep}' while the keyword argument is
        # 'delay', which raised KeyError whenever the rate limit was reached.
        # NOTE(review): the message says "hours" but time.sleep() takes
        # seconds — confirm the unit of coord_work['sleep'].
        log.info(
            'Geocoding rate limit reached. '
            'Sleeping {delay} hours...'.format(
                delay=delay,
                )
            )
        time.sleep(delay)
    else:
        found_work = (
            facebook_work
            or
            venue_work
            or
            coord_work['found_work']
            )
        if not found_work:
            hours = 24
            # NOTE(review): this range is only one second wide, so the sleep
            # is effectively exactly `hours` hours despite "about" in the
            # message — possibly 60*60*(hours+1) was intended; confirm.
            delay = random.randint(60*60*hours, 60*60*hours+1)
            log.info(
                'Did not find any work. '
                'Sleeping about {hours} hours...'.format(
                    hours=hours,
                    )
                )
            time.sleep(delay)

    log.info('End')
# Example 4
    def test_update_facebook_more_than_50(self):
        """105 pending events are fetched in Graph API batches of at most 50
        requests (50 + 50 + 5), each result is stored, and True is returned.
        """
        fake_graph = fudge.Fake("graph")
        fake_graph.remember_order()

        # Register one ordered batch() expectation covering ids [start, end):
        # the expected request list and the canned responses for those ids.
        def batch_fakes(fake_graph, start, end):
            # fudge: first expectation uses expects(), later ones next_call().
            if start == 1:
                batch = fake_graph.expects("batch")
            else:
                batch = fake_graph.next_call("batch")
            request = [
                OrderedDict([("method", "GET"), ("relative_url", "{i}?date_format=c".format(i=i))])
                for i in xrange(start, end)
            ]
            batch.with_args(request)

            batch_response = []
            for i in xrange(start, end):
                # Derive a distinct minute/second per event id; seconds past 59
                # roll over into minutes (Python 2 integer division via '/').
                min_ = 15
                sec = i
                if sec > 59:
                    min_ += sec / 59
                    sec %= 59
                response = OrderedDict(
                    [
                        ("title", "event title {i}".format(i=i)),
                        ("id", "{i}".format(i=i)),
                        ("start_time", "2012-02-26T08:{:02}:{:02}+00:00".format(min_, sec)),
                        ("end_time", "2012-02-26T11:{:02}:{:02}+00:00".format(min_, sec)),
                    ]
                )
                batch_response.append(response)

            batch.returns(batch_response)

        # Three batches: the 105 events must be chunked at the 50-request limit.
        batch_fakes(fake_graph, 1, 50 + 1)
        batch_fakes(fake_graph, 51, 100 + 1)
        batch_fakes(fake_graph, 101, 105 + 1)

        events_coll = fudge.Fake("events_coll")
        events_coll.remember_order()

        # Expected selection query: lookup not completed, or lookup failed
        # for one of the retryable reasons (null / validating / retry-later).
        find = events_coll.expects("find")
        null_query = OrderedDict([("ubernear.lookup_failed.reason", "Null response")])
        validating_query = OrderedDict(
            [
                (
                    "ubernear.lookup_failed.reason",
                    OrderedDict([("$regex", "OAuthException error on get.*: " "Error validating application..")]),
                )
            ]
        )
        retry_query = OrderedDict(
            [
                (
                    "ubernear.lookup_failed.reason",
                    OrderedDict(
                        [
                            (
                                "$regex",
                                "OAuthException error on get.*: "
                                "An unexpected error has occurred. "
                                "Please retry your request later..",
                            )
                        ]
                    ),
                )
            ]
        )
        or_query = OrderedDict([("$or", [null_query, validating_query, retry_query])])
        match_query = OrderedDict([("ubernear.lookup_completed", OrderedDict([("$exists", False)]))])
        and_query = OrderedDict([("$and", [match_query, or_query])])
        query = OrderedDict([("$or", [match_query, and_query])])
        find.with_args(query, sort=[("ubernear.fetched", 1)])

        # 105 pending events, each with a distinct fetched timestamp built the
        # same way as the batch responses above.
        events = []
        for i in xrange(1, 105 + 1):
            min_ = 15
            sec = i
            if sec > 59:
                min_ += sec / 59
                sec %= 59
            ubernear = OrderedDict([("fetched", datetime(2011, 11, 14, 1, min_, sec))])
            events.append(OrderedDict([("_id", "{i}".format(i=i)), ("ubernear", ubernear)]))
        fake_cursor = FakeCursor(events)
        find.returns(fake_cursor)

        fake_log = fudge.Fake("log")
        fake_log.remember_order()

        info = fake_log.expects("info")
        info.with_args("Fetching 105 events")

        # One ordered debug("Storing event i") expectation per event.
        def debug_fakes(fake_log, i):
            if i == 1:
                debug = fake_log.expects("debug")
            else:
                debug = fake_log.next_call("debug")
            debug.with_args("Storing event {i}".format(i=i))

        [debug_fakes(fake_log, i) for i in xrange(1, 105 + 1)]

        # One ordered upsert expectation per event: the $set document mirrors
        # the batch response, with times parsed to datetimes and the frozen
        # utcnow stamped as ubernear.lookup_completed.
        def update_fakes(events_coll, i):
            if i == 1:
                update = events_coll.expects("update")
            else:
                update = events_coll.next_call("update")

            min_ = 15
            sec = i
            if sec > 59:
                min_ += sec / 59
                sec %= 59

            save = OrderedDict(
                [
                    ("facebook.title", "event title {i}".format(i=i)),
                    ("facebook.id", "{i}".format(i=i)),
                    ("facebook.start_time", datetime(2012, 2, 26, 8, min_, sec)),
                    ("facebook.end_time", datetime(2012, 2, 26, 11, min_, sec)),
                    ("ubernear.source", "facebook"),
                    ("ubernear.lookup_completed", datetime(2011, 10, 16, 2, 50, 32)),
                ]
            )
            update.with_args(
                OrderedDict([("_id", "{i}".format(i=i))]), OrderedDict([("$set", save)]), upsert=True, safe=True
            )

        [update_fakes(events_coll, i) for i in xrange(1, 105 + 1)]

        # Freeze utcnow so the lookup_completed stamps above match exactly.
        fake_datetime = fudge.Fake("datetime")
        fake_datetime.remember_order()

        utcnow = fake_datetime.expects("utcnow")
        utcnow.with_arg_count(0)
        utcnow.returns(datetime(2011, 10, 16, 2, 50, 32))

        found_work = facebook_event.update_facebook(
            events_coll=events_coll, graph=fake_graph, _log=fake_log, _datetime=fake_datetime
        )

        eq(found_work, True)
# Example 5
    def test_update_facebook_simple(self):
        """Two pending events are fetched and saved; update_facebook returns True."""
        coll_fake = fudge.Fake("events_coll")
        coll_fake.remember_order()

        # Expected selection: lookup not completed, or failed for a retryable reason.
        null_reason = OrderedDict([("ubernear.lookup_failed.reason", "Null response")])
        validating_reason = OrderedDict([
            (
                "ubernear.lookup_failed.reason",
                OrderedDict([("$regex", "OAuthException error on get.*: Error validating application..")]),
            )
        ])
        retry_reason = OrderedDict([
            (
                "ubernear.lookup_failed.reason",
                OrderedDict([
                    (
                        "$regex",
                        "OAuthException error on get.*: "
                        "An unexpected error has occurred. "
                        "Please retry your request later..",
                    )
                ]),
            )
        ])
        failed_any = OrderedDict([("$or", [null_reason, validating_reason, retry_reason])])
        not_completed = OrderedDict([("ubernear.lookup_completed", OrderedDict([("$exists", False)]))])
        failed_and_pending = OrderedDict([("$and", [not_completed, failed_any])])
        expected_query = OrderedDict([("$or", [not_completed, failed_and_pending])])

        find_call = coll_fake.expects("find")
        find_call.with_args(expected_query, sort=[("ubernear.fetched", 1)])
        pending = [
            OrderedDict([
                ("_id", "226680217397995"),
                ("ubernear", OrderedDict([("fetched", datetime(2011, 11, 14, 1, 15, 53))])),
            ]),
            OrderedDict([
                ("_id", "267558763278075"),
                ("ubernear", OrderedDict([("fetched", datetime(2011, 11, 15, 2, 36, 21))])),
            ]),
        ]
        find_call.returns(FakeCursor(pending))

        log_fake = fudge.Fake("log")
        log_fake.remember_order()

        log_fake.expects("info").with_args("Fetching 2 events")

        self._create_save_events_simple_fakes(events_coll=coll_fake, fake_log=log_fake)

        datetime_fake = fudge.Fake("datetime")
        datetime_fake.remember_order()

        now_call = datetime_fake.expects("utcnow")
        now_call.with_arg_count(0)
        now_call.returns(datetime(2011, 11, 16, 2, 50, 32))

        # NOTE(review): uses self._events_coll/_fake_graph/_fake_log — presumably
        # assigned by _create_save_events_simple_fakes; confirm.
        found_work = facebook_event.update_facebook(
            events_coll=self._events_coll, graph=self._fake_graph, _log=self._fake_log, _datetime=datetime_fake
        )

        eq(found_work, True)