# Standard-library imports used by the snippets below; project helpers such as
# BACKUP_DIR, create_folder, WorkerPool, Worker, dump_photo_set, photo_exists,
# download_media, write_image_data and logger are defined elsewhere in the project.
import os
import re
import json
import Queue
import urlparse


def dump_albums(data):
    """Queue downloads for every Facebook album photo that is not already on disk."""
    path = create_folder(BACKUP_DIR, 'facebook_app', 'albums')
    photo_queue = Queue.Queue(1500)
    workers = WorkerPool(Worker, photo_queue)
    workers.start()
    for album_id, album in data['albums'].iteritems():
        album_path = create_folder(path, album['id'])
        downloaded_photos = os.listdir(album_path)
        for photo_id, photo in album['photos'].iteritems():
            # Derive the file extension from the source URL, if it has one.
            source_url = urlparse.urlparse(photo['source'])
            ext = re.match(r"^.*/([^/]+?(\.[^/]+)?)$", source_url.path).groups()[1]
            if not ext:
                ext = ''
            if photo['id'] not in downloaded_photos:
                photo_path = create_folder(album_path, photo['id'])
                photo_queue.put((dump_photo_set, (photo_path, photo, ext), {}))
            elif not photo_exists(album_path, photo):
                # Either the thumbnail or the photo is missing; re-download the set.
                photo_path = create_folder(album_path, photo['id'])
                photo_queue.put((dump_photo_set, (photo_path, photo, ext), {}))
            # otherwise the photo is already in the filesystem
    workers.stop()
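# NOTE: WorkerPool and Worker are not shown in these snippets. Based on how they
# are used (tuples of (func, args, kwargs) pushed onto a Queue.Queue, then
# start()/stop() around the producer loop), a minimal thread-pool sketch might
# look like the following. The pool size and the sentinel-based shutdown are
# assumptions, not the project's actual implementation.
import threading
import Queue


class Worker(threading.Thread):
    """Pulls (func, args, kwargs) tuples off a queue and runs them."""

    def __init__(self, queue):
        threading.Thread.__init__(self)
        self.queue = queue
        self.daemon = True

    def run(self):
        while True:
            task = self.queue.get()
            if task is None:          # sentinel: time to shut down
                self.queue.task_done()
                break
            func, args, kwargs = task
            try:
                func(*args, **kwargs)
            except Exception:
                pass                  # a failed task must not kill the worker thread
            self.queue.task_done()


class WorkerPool(object):
    """Starts a fixed number of Worker threads over a shared queue."""

    def __init__(self, worker_cls, queue, size=10):   # pool size is a guess
        self.queue = queue
        self.workers = [worker_cls(queue) for _ in range(size)]

    def start(self):
        for w in self.workers:
            w.start()

    def stop(self):
        # Unblock each worker with a sentinel, then wait for the queue to drain.
        for _ in self.workers:
            self.queue.put(None)
        self.queue.join()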
def dump_photos(data):
    """Queue downloads for the photos in data['photos'] that are not already on disk."""
    path = create_folder(BACKUP_DIR, 'facebook_app', 'photos')
    photo_queue = Queue.Queue(1500)
    workers = WorkerPool(Worker, photo_queue)
    workers.start()
    downloaded_photos = os.listdir(path)
    for photo in data['photos']:
        photo_path = create_folder(path, photo['id'])
        if photo['id'] not in downloaded_photos:
            photo_queue.put((dump_photo_set, (photo_path, photo, ".jpg"), {}))
        elif not photo_exists(path, photo):
            photo_queue.put((dump_photo_set, (photo_path, photo, ".jpg"), {}))
        # otherwise the photo is already in the filesystem
    workers.stop()
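# NOTE: create_folder is another helper that is not shown here. From its call
# sites it joins path components under BACKUP_DIR, creates the directory if
# needed, and returns the resulting path. A possible implementation (the exact
# behaviour is an assumption):
import os

def create_folder(*parts):
    """Join the given path components, create the directory if missing, return the path."""
    path = os.path.join(*parts)
    if not os.path.isdir(path):
        os.makedirs(path)
    return path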
def dump_albums(data):
    """Queue downloads for every Picasa Web album photo that is not already on disk."""
    path = create_folder(BACKUP_DIR, 'picasaweb_app', 'albums')
    photo_queue = Queue.Queue(1500)
    workers = WorkerPool(Worker, photo_queue)
    workers.start()
    for album_id, album in data['albums'].iteritems():
        album_path = create_folder(path, album['id'])
        downloaded_photos = os.listdir(album_path)
        for photo_id, photo in album['photos'].iteritems():
            if photo_id not in downloaded_photos:
                photo_path = create_folder(album_path, photo_id)
                photo_queue.put((dump_photo_set, (photo_path, photo), {}))
            elif not photo_exists(album_path, photo):
                photo_path = create_folder(album_path, photo_id)
                photo_queue.put((dump_photo_set, (photo_path, photo), {}))
            # otherwise the photo is already in the filesystem
    workers.stop()
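# NOTE: dump_photo_set and photo_exists are project helpers whose source is not
# included here. Judging by the call sites, dump_photo_set downloads one photo
# (and, per the comment in the Facebook dump_albums, probably its thumbnail too)
# into photo_path, and photo_exists reports whether those files are already on
# disk. The sketch below is an assumption: it covers only the full-size image,
# uses the Facebook-style signature with an optional extension (the Picasa
# variant passes no extension), and relies on the download_media /
# write_image_data helpers used by the profile-picture code further below.
import os

def dump_photo_set(photo_path, photo, ext=''):
    """Download the full-size image for `photo` into `photo_path`.
    The real helper presumably also fetches the thumbnail."""
    filename = '%s%s' % (photo['id'], ext)
    img = download_media(photo['source'])
    write_image_data(photo_path, filename, img)

def photo_exists(parent_path, photo):
    """Return True if the photo's folder already holds a file.
    The real check presumably verifies both the photo and its thumbnail."""
    photo_dir = os.path.join(parent_path, photo['id'])
    return os.path.isdir(photo_dir) and len(os.listdir(photo_dir)) > 0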
def dump_json(data, backup_time):
    path = create_folder(BACKUP_DIR, 'picasaweb_app')
    filename = '%s.json' % backup_time.strftime('%Y%m%d%H%M%S')
    with open(os.path.join(path, filename), 'w') as fd:
        fd.write(json.dumps(data))
def dump_json(data, backup_time):
    path = create_folder(BACKUP_DIR, 'facebook_app')
    filename = "%s.json" % backup_time.strftime("%Y%m%d%H%M%S")
    with open(os.path.join(path, filename), "w") as fd:
        fd.write(json.dumps(data))
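# NOTE: a hypothetical driver, only to show roughly how the dump functions fit
# together. The shape of `data` is inferred from the field accesses above
# (data['photos'] entries with 'id' and 'source'); in the real tool the data
# comes from the Facebook/Picasa APIs, and BACKUP_DIR must be configured.
import datetime

if __name__ == '__main__':
    data = {
        'photos': [
            {'id': '12345', 'source': 'http://example.com/12345_n.jpg'},
        ],
    }
    backup_time = datetime.datetime.now()
    dump_json(data, backup_time)   # snapshot the metadata
    dump_photos(data)              # download the actual images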
# NOTE: the queueing code below references `data`, so in the original module it
# presumably lives inside a dump function that receives `data`, like the ones above.

def func(photo_path, friend_id):
    """Download a friend's profile picture, retrying once on failure."""
    url = "http://graph.facebook.com/%s/picture" % friend_id
    filename = "%s.jpg" % friend_id
    try:
        img = download_media(url)
        write_image_data(photo_path, filename, img)
    except Exception:
        # One retry; if that also fails, log and move on.
        try:
            img = download_media(url)
            write_image_data(photo_path, filename, img)
        except Exception:
            logger.debug("an exception occurred while downloading media %s. ignoring...", url)

path = create_folder(BACKUP_DIR, 'facebook_app', 'profile_pics')
photo_queue = Queue.Queue(1500)
workers = WorkerPool(Worker, photo_queue)
workers.start()
downloaded_photos = os.listdir(path)
for friend in data['friends']:
    photo = "%s.jpg" % friend['id']
    if photo not in downloaded_photos:
        photo_queue.put((func, (path, friend['id']), {}))
workers.stop()
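# NOTE: download_media and write_image_data are also defined elsewhere in the
# project. Given how they are used above (download_media returns raw image
# bytes, write_image_data stores them under a filename), a minimal Python 2
# sketch could be the following; the timeout and error behaviour are assumptions.
import os
import urllib2

def download_media(url, timeout=30):
    """Fetch a URL and return the raw response body (image bytes)."""
    response = urllib2.urlopen(url, timeout=timeout)
    try:
        return response.read()
    finally:
        response.close()

def write_image_data(path, filename, img):
    """Write raw image bytes to path/filename."""
    fd = open(os.path.join(path, filename), 'wb')
    try:
        fd.write(img)
    finally:
        fd.close()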