Example #1
def get_remote_tweets(sources, limit=None, timeout=5.0, use_cache=True):
    conn = aiohttp.TCPConnector(conn_timeout=timeout, use_dns_cache=True)
    headers = generate_user_agent()

    with aiohttp.ClientSession(connector=conn, headers=headers) as client:
        loop = asyncio.get_event_loop()

        def start_loop(client, sources, limit, cache=None):
            return loop.run_until_complete(process_sources_for_file(client, sources, limit, cache))

        if use_cache:
            try:
                with Cache.discover() as cache:
                    tweets = start_loop(client, sources, limit, cache)
            except OSError as e:
                logger.debug(e)
                tweets = start_loop(client, sources, limit)
        else:
            tweets = start_loop(client, sources, limit)

        # COMP490: fall back to the backup retrieval path. This must run
        # inside the with block, while the session is still open, because
        # client is closed once the block exits.
        if tweets is None:
            return backup_get_tweets(client, sources, limit)

    return tweets
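
backup_get_tweets is not defined anywhere in these examples. A minimal sketch of what such a fallback could look like, assuming it simply retries the same sources without a cache (purely an illustration, not the original implementation):

def backup_get_tweets(client, sources, limit):
    # Hypothetical fallback: retry the fetch once, bypassing the cache.
    # The real backup_get_tweets is not shown in these examples.
    loop = asyncio.get_event_loop()
    tweets = loop.run_until_complete(
        process_sources_for_file(client, sources, limit, cache=None))
    return tweets if tweets is not None else []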
Example #2
def step_impl(context):
    with aiohttp.ClientSession() as client:
        loop = asyncio.get_event_loop()

        def start_loop(client, sources, limit, cache=None):
            return loop.run_until_complete(twtxt.twhttp.process_sources_for_file(client, sources, limit, cache))

        try:
            with Cache.discover() as cache:
                context.tweets = start_loop(client, [context.source], 7, cache)
        except Exception as caught_exception:
            context.error = caught_exception
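
The step above stores its result and any caught error on the behave context. The decorator binding it to a feature line is not shown; a minimal sketch of such a binding, with a purely hypothetical step text:

from behave import when

@when("tweets are fetched from the source")  # hypothetical step text
def step_impl(context):
    ...  # body as in Example #2 above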
Example #3
def timeline(ctx, pager, limit, twtfile, sorting, timeout, porcelain, source,
             cache, force_update):
    """Retrieve your personal timeline."""
    if source:
        source_obj = ctx.obj["conf"].get_source_by_nick(source)
        if not source_obj:
            logger.debug("Not following {0}, trying as URL".format(source))
            source_obj = Source(source, source)
        sources = [source_obj]
    else:
        sources = ctx.obj["conf"].following

    tweets = []

    if cache:
        try:
            with Cache.discover(
                    update_interval=ctx.obj["conf"].timeline_update_interval) as cache:
                force_update = force_update or not cache.is_valid
                if force_update:
                    tweets = get_remote_tweets(sources, limit, timeout, cache)
                else:
                    logger.debug(
                        "Multiple calls to 'timeline' within {0} seconds. Skipping update"
                        .format(cache.update_interval))
                    # Behold, almighty list comprehensions! (I might have gone overboard here…)
                    tweets = list(
                        chain.from_iterable([
                            cache.get_tweets(source.url) for source in sources
                        ]))
        except OSError as e:
            logger.debug(e)
            tweets = get_remote_tweets(sources, limit, timeout)
    else:
        tweets = get_remote_tweets(sources, limit, timeout)

    if twtfile and not source:
        source = Source(ctx.obj["conf"].nick,
                        ctx.obj["conf"].twturl,
                        file=twtfile)
        tweets.extend(get_local_tweets(source, limit))

    if not tweets:
        return

    tweets = sort_and_truncate_tweets(tweets, sorting, limit)

    if pager:
        click.echo_via_pager(style_timeline(tweets, porcelain))
    else:
        click.echo(style_timeline(tweets, porcelain))
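
timeline receives its parameters from click option decorators that are not shown in this excerpt. A plausible decorator stack, assuming option names that mirror the signature (the exact flags, defaults, and help strings in twtxt may differ):

@cli.command()
@click.option("--pager/--no-pager", help="Use a pager to display content.")
@click.option("--limit", "-l", type=click.INT, help="Limit the number of tweets shown.")
@click.option("--twtfile", "-f", type=click.File("r"), help="Include tweets from this twtxt file.")
@click.option("--ascending", "sorting", flag_value="ascending", help="Sort the timeline in ascending order.")
@click.option("--descending", "sorting", flag_value="descending", help="Sort the timeline in descending order.")
@click.option("--timeout", type=click.FLOAT, help="Maximum time requests are allowed to take.")
@click.option("--porcelain", is_flag=True, help="Style output in an easy-to-parse format.")
@click.option("--source", "-s", help="Only show the feed of the given source.")
@click.option("--cache/--no-cache", help="Cache remote twtxt files locally.")
@click.option("--force-update", is_flag=True, help="Update even if the cache is still valid.")
@click.pass_context
def timeline(ctx, pager, limit, twtfile, sorting, timeout, porcelain, source, cache, force_update):
    ...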
Example #4
def unfollow(ctx, nick):
    """Remove an existing source from your followings."""
    source = ctx.obj['conf'].get_source_by_nick(nick)

    # get_source_by_nick returns None for unknown nicks, so guard the
    # cache cleanup to avoid an AttributeError on source.url.
    if source:
        try:
            with Cache.discover() as cache:
                cache.remove_tweets(source.url)
        except OSError as e:
            logger.debug(e)

    ret_val = ctx.obj['conf'].remove_source_by_nick(nick)
    if ret_val:
        click.echo("✓ You’ve unfollowed {0}.".format(
            click.style(source.nick, bold=True)))
    else:
        click.echo("✗ You’re not following {0}.".format(
            click.style(nick, bold=True)))
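
A quick way to exercise a click command like this is click's test runner. The name of the top-level command group (cli here) is an assumption based on how click commands are usually registered:

from click.testing import CliRunner

runner = CliRunner()
result = runner.invoke(cli, ["unfollow", "bob"])  # cli group name is assumed
print(result.output)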
Example #5
def timeline(ctx, pager, limit, twtfile, sorting, timeout, porcelain, source, cache, force_update):
    """Retrieve your personal timeline."""
    if source:
        source_obj = ctx.obj["conf"].get_source_by_nick(source)
        if not source_obj:
            logger.debug("Not following {0}, trying as URL".format(source))
            source_obj = Source(source, source)
        sources = [source_obj]
    else:
        sources = ctx.obj["conf"].following

    if cache:
        try:
            with Cache.discover(update_interval=ctx.obj["conf"].timeline_update_interval) as cache:
                force_update = force_update or not cache.is_valid
                if force_update:
                    tweets = get_remote_tweets(sources, limit, timeout, cache)
                else:
                    logger.debug("Multiple calls to 'timeline' within {0} seconds. Skipping update".format(
                        cache.update_interval))
                    # Behold, almighty list comprehensions! (I might have gone overboard here…)
                    tweets = list(chain.from_iterable([cache.get_tweets(source.url) for source in sources]))
        except OSError as e:
            logger.debug(e)
            tweets = get_remote_tweets(sources, limit, timeout)
    else:
        tweets = get_remote_tweets(sources, limit, timeout)

    if twtfile and not source:
        source = Source(ctx.obj["conf"].nick, ctx.obj["conf"].twturl, file=twtfile)
        tweets.extend(get_local_tweets(source, limit))

    tweets = sort_and_truncate_tweets(tweets, sorting, limit)

    if not tweets:
        return

    if pager:
        click.echo_via_pager(style_timeline(tweets, porcelain))
    else:
        click.echo(style_timeline(tweets, porcelain))
Example #6
def get_remote_tweets(sources, limit=None, timeout=5.0, use_cache=True):
    conn = aiohttp.TCPConnector(conn_timeout=timeout, use_dns_cache=True)
    headers = generate_user_agent()
    with aiohttp.ClientSession(connector=conn, headers=headers) as client:
        loop = asyncio.get_event_loop()

        def start_loop(client, sources, limit, cache=None):
            return loop.run_until_complete(
                process_sources_for_file(client, sources, limit, cache))

        if use_cache:
            try:
                with Cache.discover() as cache:
                    tweets = start_loop(client, sources, limit, cache)
            except OSError as e:
                logger.debug(e)
                tweets = start_loop(client, sources, limit)
        else:
            tweets = start_loop(client, sources, limit)

    return tweets
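
A minimal driver for this function, reusing the Config object the timeline examples pull from ctx.obj["conf"] (the Config.discover() call mirrors the one in Example #7's modified code):

conf = Config.discover()
tweets = get_remote_tweets(conf.following, limit=20, timeout=5.0, use_cache=True)
for tweet in tweets[:5]:
    print(tweet)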
Example #7
def retrieve_file(client, source, limit, cache):
    is_cached = cache.is_cached(source.url) if cache else None
    headers = {"If-Modified-Since": cache.last_modified(source.url)} if is_cached else {}

    try:
        response = yield from client.request(
            "get", source.url, headers=headers, allow_redirects=False)
        content = yield from response.text()
    except Exception as e:
        if is_cached:
            logger.debug("{}: {} - using cached content".format(source.url, e))
            return cache.get_tweets(source.url, limit)
        # COMP490: warn about unsafe sources instead of failing silently.
        # isinstance() is needed here; comparing the exception instance to
        # the class with == never matches.
        elif isinstance(e, ssl.CertificateError):
            click.echo("Warning: the source {} is unsafe: hostname does not "
                       "match the name on the SSL certificate".format(source.nick))
            return []
        elif isinstance(e, aiohttp.errors.ClientOSError):
            if "[SSL: CERTIFICATE_VERIFY_FAILED]" in str(e):
                click.echo("Warning: the source {} is unsafe: the SSL "
                           "certificate has expired".format(source.nick))
            elif "[SSL: EXCESSIVE_MESSAGE_SIZE]" in str(e):
                click.echo("Warning: the source {} is unsafe: the source has "
                           "sent an invalid response".format(source.nick))
            return []
        else:
            logger.debug(e)
            return []

    if response.status == 200:
        tweets = parse_tweets(content.splitlines(), source)

        if cache:
            last_modified_header = response.headers.get("Last-Modified")
            if last_modified_header:
                logger.debug("{} returned 200 and Last-Modified header - adding content to cache".format(source.url))
                cache.add_tweets(source.url, last_modified_header, tweets)
            else:
                logger.debug("{} returned 200 but no Last-Modified header - can’t cache content".format(source.url))
        else:
            logger.debug("{} returned 200".format(source.url))

        return sorted(tweets, reverse=True)[:limit]
    # COMP490: on a permanent redirect, point the config at the new URL
    # and carry any cached tweets over to it.
    elif response.status == 301:
        new_url = response.headers["Location"]
        tweets = cache.get_tweets(source.url) if cache else []

        conf = Config.discover()
        conf.remove_source_by_nick(source.nick)
        conf.add_source(Source(source.nick, new_url))

        if cache:
            # add_tweets matches the cache API used in the 200 branch above.
            cache.add_tweets(new_url, 0, tweets)
        # The original fell through and returned None here; return the
        # cached tweets so callers always receive a list.
        return sorted(tweets, reverse=True)[:limit]
    elif response.status == 410 and is_cached:
        # 410 Gone:
        # The resource requested is no longer available,
        # and will not be available again.
        logger.debug("{} returned 410 - deleting cached content".format(source.url))
        cache.remove_tweets(source.url)
        return []

    elif is_cached:
        logger.debug("{} returned {} - using cached content".format(source.url, response.status))
        return cache.get_tweets(source.url, limit)

    else:
        logger.debug("{} returned {}".format(source.url, response.status))
        return []
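
retrieve_file is an old-style (yield from) coroutine; the other examples drive it through process_sources_for_file, which is not shown. A minimal sketch of such a driver, assuming the same aiohttp 1.x-era API used above (not necessarily twtxt's actual implementation):

import asyncio

@asyncio.coroutine
def process_sources_for_file(client, sources, limit, cache=None):
    # Fetch every source concurrently and merge the per-source results.
    results = yield from asyncio.gather(
        *[retrieve_file(client, source, limit, cache) for source in sources])
    all_tweets = [tweet for tweets in results if tweets for tweet in tweets]
    return sorted(all_tweets, reverse=True)[:limit]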