Example #1
def _cache_images(new_images):
    for image in new_images:
        # Derive a local filename from the image URL and download the file.
        new_filename = _get_filename_from_url(image['href'])
        request = Request(image['href'], headers=HEADERS)
        response = open_url(request)
        extracted_jpeg_data = response.read()
        # The context manager closes the file even if the write fails.
        # FIXME: network errors from open_url()/read() are still unhandled.
        with open(IMAGES + new_filename, 'wb') as f:
            f.write(extracted_jpeg_data)
        _scale_image(new_filename)
    # Drop any previously cached files that are not in the current image set.
    _purge_everything_but(new_images)
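The names Request, open_url, HEADERS, and IMAGES, as well as the helpers _get_filename_from_url, _scale_image, and _purge_everything_but, are defined elsewhere in the original module and are not shown here. A minimal sketch of the surrounding setup, assuming open_url aliases urllib's urlopen and IMAGES points at a local cache directory (illustrative stand-ins, not the original project's values):

import os
from urllib.request import Request, urlopen as open_url  # assumed: open_url aliases urlopen

HEADERS = {'User-Agent': 'Mozilla/5.0'}       # assumed: simple browser-like request headers
IMAGES = os.path.join('cache', 'images', '')  # assumed: cache directory; trailing separator so IMAGES + filename works

def _get_filename_from_url(url):
    # assumed helper: use the last path component of the image URL as the filename
    return url.rstrip('/').rsplit('/', 1)[-1]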
Example #2
def _fetch():
    # Download the source page and return it as a parsed BeautifulSoup tree.
    request = Request(SOURCE_URL, headers=HEADERS)
    response = open_url(request)
    page_content = response.read()
    soup = BS(page_content)
    return soup
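SOURCE_URL, HEADERS, and BS also come from the original module; BS is presumably the BeautifulSoup class. A minimal usage sketch under those assumptions, with a hypothetical placeholder URL:

from urllib.request import Request, urlopen as open_url  # assumed: open_url aliases urlopen
from bs4 import BeautifulSoup as BS                      # assumed: BS aliases BeautifulSoup

SOURCE_URL = 'https://example.com/gallery'  # hypothetical placeholder, not the original project's URL
HEADERS = {'User-Agent': 'Mozilla/5.0'}     # assumed request headers

soup = _fetch()
for link in soup.find_all('a'):  # e.g. list candidate image links found on the page
    print(link.get('href'))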