Example #1
File: http.py  Project: Morillo9/twtxt
def retrieve_status(source):
    status = None
    try:
        response = yield from aiohttp.head(source.url)
        status = response.status
        yield from response.release()
    except Exception as e:
        logger.debug(e)
    finally:
        return source, status
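Note: the module-level aiohttp.head() helper used in these examples was removed in later aiohttp releases. Below is a minimal sketch of the same status check with the current aiohttp.ClientSession API; as in the original, it only assumes that source has a url attribute, and the logger setup is illustrative.

import logging

import aiohttp

logger = logging.getLogger(__name__)

async def retrieve_status(source):
    # Return (source, HTTP status code), or (source, None) if the request fails.
    status = None
    try:
        # ClientSession replaces the removed module-level aiohttp.head() helper;
        # releasing the response is handled by the async with blocks.
        async with aiohttp.ClientSession() as session:
            async with session.head(source.url) as response:
                status = response.status
    except Exception as e:
        logger.debug(e)
    return source, status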
Example #3
async def url_code(self):
    while True:
        await asyncio.sleep(self.sleep)
        try:
            async with aiohttp.head(self.url) as head:
                if head.status not in STATUS_OK:
                    self.__request_log(head.status)
                    self.__request_sleep_inc()
                else:
                    self.sleep = SLEEP_NORMAL
        except aiohttp.errors.ClientOSError as e:
            self.__request_log(e)
            self.__request_sleep_inc()
Example #4
def process_links():
    while True:
        link = yield from links.get()
        if urlparse(link[2]).netloc in DOMAINS_WHITELIST:
            continue
        request = None
        try:
            request = yield from aiohttp.head(link[2])
            logging.info('%s returned a %d', link[2], request.status)
            if (request.status // 100) not in (4, 5):
                continue
            if request.status in (405, 501):
                continue  # Method Not Allowed / Not Implemented
            yield from broken_links.put(link + (request.status,))
            logging.warning('Found broken link: %s', link[2])
        except GeneratorExit:
            return  # exit cleanly when the coroutine is closed
        except Exception:
            pass
        finally:
            if request is not None:
                request.close()
Example #6
async def download(url, parts=16):

    print("URL: {}".format(url))

    async def get_partial_content(_url, _part, start, end):

        print("Part {}/{} (Bytes {} to {})".format(_part, parts, start, end))

        h = {"Range": "bytes={}-{}".format(start, end - 1 if end else "")}
        async with aiohttp.get(_url, headers=h) as resp:
            return _part, await resp.read()

    async with aiohttp.head(url) as resp:
        size = int(resp.headers["Content-Length"])

    ranges = list(range(0, size, size // parts))
    res, _ = await asyncio.wait(
        [get_partial_content(url, i, start, end) for i, (start, end) in
         enumerate(itertools.zip_longest(ranges, ranges[1:], fillvalue=""))])
    sorted_result = sorted(task.result() for task in res)

    return b"".join(data for _, data in sorted_result)
Example #7
async def download(url, parts):
    async def get_partial_content(u, i, start, end):
        print(i, start, end)
        headers = {'Range': 'bytes={}-{}'.format(start, end-1 if end else '')}
        async with aiohttp.get(u, headers=headers) as _resp:
            return(i, await _resp.read())

    async with aiohttp.head(url) as resp:
        size = int(resp.headers['content-length'])
    print('size: {}'.format(size))

    ranges = list(range(0, size, size // parts))

    res, _ = await asyncio.wait(
        [get_partial_content(url, i, start, end) for i, (start, end) in
         enumerate(itertools.zip_longest(ranges, ranges[1:], fillvalue=''))])

    sorted_result = sorted(task.result() for task in res)

    result_data = b''.join(data for _, data in sorted_result)

    with open('{}'.format(url.split('/')[-1]), 'wb') as f:
        f.write(result_data)
Example #8
async def get_image(url, parts):
    async def get_while_active(u, i, start, end):
        #print(i, start, end)

        # if available get bytes, otherwise move on to next partition
        async with aiohttp.get(u, headers={"Range": "bytes={}-{}".format(start, end - 1 if end else "")}) as _resp:
            return i, await _resp.read()

    # get the total size from the Content-Length header
    async with aiohttp.head(url) as resp:
        size = int(resp.headers["Content-Length"])

    # split the byte range into `parts` partitions
    ranges = list(range(0, size, size // parts))

    # iterate over the partitions until all of them have been completed
    res, _ = await asyncio.wait([get_while_active(url, i, start, end) for i, (start, end) in
         enumerate(itertools.zip_longest(ranges, ranges[1:], fillvalue=""))])

    sorted_result = sorted(task.result() for task in res)
    
    # join the partitions together and return the completed data
    return b"".join(data for _, data in sorted_result)
Example #9
File: pdl.py  Project: kopchik/pdl
def downloader(loop=None, num_workers=3, chunksize=5 * MEG, url=None, out=None):
    # calculate download filename
    r = urlparse(url)                       # request object from urllib
    outfile = out or basename(r.path)       # download file name
    statusfile = outfile + ".download"      # track downloaded chunks
    log.info("url: '%s'" % url)
    if exists(outfile) and not exists(statusfile):
        log.info("It seems file already downloaded as '%s'" % outfile)
        return
    log.info("saving to '%s'" % outfile)

    # check for stalled status file
    if not isfile(outfile) and isfile(statusfile):
        raise Exception("There is a progress file (\"%s\"),"
                        "but no output file (\"%s\"). "
                        "Please remove stalled status file." % (statusfile, outfile))

    # get file size
    r = yield from aiohttp.head(url)
    rawsize = r.headers.get('Content-Length', None)
    r.close()
    assert rawsize, "No Content-Length header"
    size = int(rawsize)
    assert size < 20000 * MEG, "very large file, are you sure?"
    log.info("download size: %s bytes" % size)

    # load progress from file or create a new one
    try:
        status = pickle.load(open(statusfile, "rb"))
        log.debug("progress restored from %s" % statusfile)
        assert status.size == size,  \
            "cannot resume download: " \
            "original file had %s size, this one is %s" \
            % (status.size, size)
        if chunksize != status.chunksize:
            log.info("chunk size: %s => %s" % (chunksize, status.chunksize))
            status.rechunkize(chunksize)
    except FileNotFoundError:
        status = Status(size, chunksize)
    except Exception as err:
        log.error("error unpickling db: %s" % err)
        return False
    status.url = url

    # save download progress when interrupted
    def save_status():
        with status.lock:
            log.info("\n\nsaving state to %s\n" % statusfile)
            with open(statusfile, "wb") as fd:
                pickle.dump(status, fd)
    atexit.register(save_status)

    # open file for writing and launch workers
    # open() does not support O_CREAT
    mode = "rb+" if isfile(outfile) else "wb"
    status.fd = open(outfile, mode)
    status.fd.truncate(size)

    # start workers
    status_worker = loop.create_task(output_status(status))
    tasks = []
    for i in range(num_workers):
        t = loop.create_task(worker(status))
        tasks.append(t)

    while True:
        done, pending = yield from asyncio.wait(tasks)
        print(done, pending)
        # TODO("check download complete")
        break

    status_worker.cancel()
    log.info("\ndownload finished")
    atexit.unregister(save_status)
    try:
        unlink(statusfile)
    except FileNotFoundError:
        pass

    return True
Example #10
async def wildcard(context):
    global is_playing_game
    global is_statement_announced
    global current_player_roster
    global current_judge
    global high_score_roster
    global player_retorts
    global general_channel_id


    if not context.message.channel.id == general_channel_id:

        if is_playing_game:

            if is_statement_announced:

                if context.message.author.name in current_player_roster:

                    if not context.message.author.name == current_judge:

                        if context.message.author.name in high_score_roster:

                            if high_score_roster[context.message.author.name] >= 1:

                                async with aiohttp.head('https://imgur.com/random', allow_redirects=False) as response:

                                    if 'Location' in response.headers:

                                        player_retorts[context.message.author.name] = response.headers['Location']


                                        url = urlparse.urlparse(os.environ['DATABASE_URL'])
                                        dbname = url.path[1:]
                                        user = url.username
                                        password = url.password
                                        host = url.hostname
                                        port = url.port

                                        con2 = psycopg2.connect(dbname=dbname, user=user, password=password, host=host, port=port)
                                        cur2 = con2.cursor()

                                        
                                        high_score_roster[context.message.author.name] -= 1

                                        cur2.execute("""UPDATE high_scores SET score = %s WHERE name = %s;""", (high_score_roster[context.message.author.name], context.message.author.name))
                                        con2.commit()
                                        cur2.close()
                                        con2.close()

                                        high_score_roster = {k:high_score_roster[k] for k in sorted(high_score_roster, key=high_score_roster.get, reverse=True)}

                                        await client.say(":credit_card: You just purchased a wildcard for 1 BrawndoCoin: " + response.headers['Location'])

                                        await client.say(":warning: BE ADVISED: saying !wildcard again will pick a different wildcard at the nominal fee of 1 additional BrawndoCoin!")

                                    else:

                                        await client.say(":sos: Fatal Error getting a wildcard from imgur.com! But don't worry, I didn't touch your precious BrawndoCoin. Try again, maybe?")

                            else:

                                await client.say(":money_with_wings: You don't even have 1 BrawndoCoin to your name, dude! wildcards always cost 1 BrawndoCoin to play *after* getting back on your feet!")
                        else:

                            await client.say(":syringe: Our records indicate you've *never even owned 1 BrawndoCoin* so I'll let you get this first hit for free, BUT...wildcards WON'T be free after you win a round and get back on your feet, dude.")

                    else:

                        await client.say(":interrobang: Judges can't use wildcards! That wouldn't even make sense!")

                else:

                    await client.say(":no_entry: DUDE! You're not even playing this round, breh! Say !putmeincoach to join the game!")

            else:

                await client.say(":no_entry: You have to wait for the next round to start *before* trying to play a wildcard!")

        else:

            await client.say(":pouting_cat: No game in progress! DM me with !putmeincoach to join and start the game!")

    else:

        await client.say(":interrobang: DUDE, I told you not to call me on this channel! Prank caller! DM me with !putmeincoach to join the game, or your retort, or !wildcard to play a wildcard!")