def cache_images(html, session):
    """Find every ``<img>`` in *html*, cache it locally, and rewrite its URL.

    Parameters
    ----------
    html : str
        Raw HTML to scan for image tags.
    session : object
        Session passed through to ``files.cache_image`` for downloading.

    Returns
    -------
    str
        The HTML with each cached image URL replaced by its local filename.
    """
    soup = BeautifulSoup(html, "html.parser")
    seen = set()
    # findAll() is a deprecated bs4 alias; find_all() is the modern spelling.
    for image in soup.find_all('img'):
        src = image.get('src')
        # Skip tags without a src (the original raised KeyError on them)
        # and URLs already processed (str.replace handles all occurrences).
        if not src or src in seen:
            continue
        seen.add(src)
        replacement = files.cache_image(src, session)
        html = html.replace(src, replacement)
    return html
def get_images(conn, logid, session=None):
    """Fetch image rows for a log entry from the DB and cache each image locally.

    Parameters
    ----------
    conn : DB-API connection
        Open database connection (query uses ``?`` placeholders).
    logid : int | str
        Log identifier used to filter the ``cacheimages`` table.
    session : object, optional
        Session used to download images; defaults to the module-level
        ``SESSION`` for backward compatibility with existing callers.

    Returns
    -------
    list[dict]
        One dict per row with keys: cacheid, accountid, imageid, logid,
        filename (local cached path), created, name, descr.
    """
    if session is None:
        session = SESSION
    cursor = conn.cursor()
    try:
        cursor.execute(
            "SELECT * FROM cacheimages where logid = ?", (logid, ))
        arr = []
        # NOTE(review): indices below assume the cacheimages column order
        # (id fields, image name at [4], then created/name/descr) — confirm
        # against the schema; SELECT * is order-dependent.
        for row in cursor:
            url = "https://img.geocaching.com/cache/log/large/" + row[4]
            arr.append({
                'cacheid': row[0],
                'accountid': row[1],
                'imageid': row[2],
                'logid': row[3],
                # Download (or reuse) the local copy of the image.
                'filename': files.cache_image(url, session),
                'created': row[5],
                'name': row[6],
                'descr': row[7],
            })
        return arr
    finally:
        # The original leaked the cursor; always release it.
        cursor.close()