def unpickle_save(data, default, type_=dict):
    """Unpickle a list of `type_` subclasses, dropping entries whose class
    can no longer be imported.

    Any failure other than the class lookup makes this return ``default``.
    """

    class _Placeholder(type_):
        # Stand-in substituted for every global that fails to resolve.
        pass

    class _LenientUnpickler(Unpickler):

        def find_class(self, module, name):
            try:
                return Unpickler.find_class(self, module, name)
            except (ImportError, AttributeError):
                return _Placeholder

    try:
        loaded = _LenientUnpickler(cBytesIO(data)).load()
    except Exception:
        return default
    else:
        return [item for item in loaded
                if not isinstance(item, _Placeholder)]
def get_url(url, post=None, get=None):
    """Fetch *url*, optionally sending POST data and GET query parameters.

    Returns the response body: raw bytes when the Content-Type is an
    image, otherwise text decoded with the encoding detected from the
    response headers.
    """
    post_params = urlencode(post or {})
    get_params = urlencode(get or {})
    if get:
        get_params = '?' + get_params

    # add post, get data and headers
    url = '%s%s' % (url, get_params)
    if post_params:
        request = Request(url, post_params)
    else:
        request = Request(url)

    # for discogs
    request.add_header('Accept-Encoding', 'gzip')
    request.add_header('User-Agent', USER_AGENT)
    url_sock = urlopen(request)
    enc = get_encoding_from_socket(url_sock)

    # unzip the response if needed
    data = url_sock.read()
    if url_sock.headers.get("content-encoding", "") == "gzip":
        data = gzip.GzipFile(fileobj=cBytesIO(data)).read()
    url_sock.close()
    content_type = url_sock.headers.get('Content-Type', '').split(';', 1)[0]
    # FIX: raw string avoids the invalid "\w" escape (a warning today, a
    # SyntaxError in future Pythons); the trailing "/" is no longer
    # required, so bare domain URLs don't crash with AttributeError when
    # search() returns None.
    match = re.compile(r'\w+://([^/]+)').search(url)
    domain = match.group(1) if match else url
    print_d("Got %s data from %s" % (content_type, domain))
    return (data if content_type.startswith('image')
            else data.decode(enc))
def get_url(url, post=None, get=None):
    """Fetch *url*, optionally sending POST data and GET query parameters.

    Returns a tuple ``(data, enc)``: the (gunzipped if needed) response
    body bytes and the encoding detected from the response headers.
    """
    # FIX: mutable default arguments ({}) are shared across calls; use
    # None sentinels and substitute fresh empty mappings instead.
    post_params = urlencode(post or {})
    get_params = urlencode(get or {})
    if get:
        get_params = "?" + get_params

    # add post, get data and headers
    url = "%s%s" % (url, get_params)
    if post_params:
        request = Request(url, post_params)
    else:
        request = Request(url)

    # for discogs
    request.add_header("Accept-Encoding", "gzip")
    request.add_header("User-Agent", USER_AGENT)
    url_sock = urlopen(request)
    enc = get_encoding_from_socket(url_sock)

    # unzip the response if needed
    data = url_sock.read()
    if url_sock.headers.get("content-encoding", "") == "gzip":
        data = gzip.GzipFile(fileobj=cBytesIO(data)).read()
    url_sock.close()

    return data, enc
def setstringlist(self, section, option, values):
    """Store a list of unicode strings as one CSV-encoded config value."""
    buf = cBytesIO()
    encoded = [unicode(value).encode('utf-8') for value in values]
    csv.writer(buf, lineterminator='\n',
               quoting=csv.QUOTE_MINIMAL).writerow(encoded)
    self.set(section, option, buf.getvalue())
def test_set_image_no_tag(self):
    """Setting an image must work even after the APEv2 tag is deleted."""
    tags = mutagen.apev2.APEv2(self.f)
    tags.delete()
    embedded = EmbeddedImage(cBytesIO(b"foo"), "image/jpeg", 10, 10, 8)
    self.s.set_image(embedded)
    self.assertEqual(len(self.s.get_images()), 1)
def test_set_image(self):
    """A freshly set image becomes the primary image of the FLAC file."""
    embedded = EmbeddedImage(cBytesIO(b"foo"), "image/jpeg", 10, 10, 8)
    track = FLACFile(self.filename)
    self.assertFalse(track.get_primary_image())
    track.set_image(embedded)
    self.assertEqual(track.get_primary_image().width, 10)
def pickle_load(file, lookup_func=None):
    """Allows unpickling with manual control over class lookup on both
    Python 2 and Python 3.

    Will unpickle from the current position to the final stop marker.

    lookup_func gets passed a function for global lookup, the module name
    to import and the attribute name to return from the module.  The
    lookup function passed to the callback can raise ImportError or
    AttributeError.

    Args:
        file (fileobj)
        lookup_func (callable or None)
    Returns:
        The unpickled objects
    Raises:
        pickle.UnpicklingError
    """

    if not PY2:
        if lookup_func is None:
            unpickler_cls = pickle.Unpickler
        else:
            class _LookupUnpickler(pickle.Unpickler):

                def find_class(self, module, name):
                    default = super(_LookupUnpickler, self).find_class
                    return lookup_func(default, module, name)

            unpickler_cls = _LookupUnpickler

        # wrapping in a BufferedReader helps a lot, but only on py3
        if isinstance(file, BytesIO):
            file = BufferedReader(file)
        inst = unpickler_cls(file, encoding="bytes")
    else:
        inst = cPickle.Unpickler(file)
        if lookup_func is not None:
            # dummy pure-python unpickler used only for its class lookup
            fallback = pickle.Unpickler(cBytesIO())

            def find_global(mod, name):
                return lookup_func(fallback.find_class, mod, name)

            inst.find_global = find_global

    try:
        return inst.load()
    except UnpicklingError:
        raise
    except Exception as e:
        # unpickling can fail in many different ways
        raise UnpicklingError(e)
def test_set_image(self):
    """Setting an image keeps has_images true and stores the payload."""
    self.assertTrue(self.song.has_images)
    fileobj = cBytesIO(b"foo")
    image = EmbeddedImage(fileobj, "image/jpeg", 10, 10, 8)
    self.song.set_image(image)
    image = self.song.get_primary_image()
    self.assertTrue(image)
    # FIX: the image was created from bytes; compare against bytes, not
    # str — "foo" == b"foo" only holds on Python 2, never on Python 3.
    self.assertEqual(image.file.read(), b"foo")
    self.assertTrue(self.song.has_images)
def test_set_image(self):
    """Setting an image keeps has_images true and stores the payload."""
    self.assertTrue(self.song.has_images)
    replacement = EmbeddedImage(cBytesIO(b"foo"), "image/jpeg", 10, 10, 8)
    self.song.set_image(replacement)
    primary = self.song.get_primary_image()
    self.assertTrue(primary)
    self.assertEqual(primary.read(), b"foo")
    self.assertTrue(self.song.has_images)
def test_set_image_no_tag(self):
    """set_image must create the tag from scratch after deletion."""
    tag = mutagen.File(self.filename)
    tag.delete()
    song = MP3File(self.filename)
    song.set_image(EmbeddedImage(cBytesIO(b"foo"), "image/jpeg", 10, 10, 8))
    reloaded = MP3File(self.filename)
    self.assertTrue(reloaded.has_images)
def test_set_image(self):
    """Exactly one image is stored and its payload round-trips."""
    fileobj = cBytesIO(b"foo")
    image = EmbeddedImage(fileobj, "image/jpeg", 10, 10, 8)
    self.s.set_image(image)
    self.assertTrue(self.s.has_images)
    images = self.s.get_images()
    self.assertEqual(len(images), 1)
    self.assertEqual(images[0].mime_type, "image/")
    # FIX: the image payload is bytes; compare against bytes, not str —
    # "foo" == b"foo" only holds on Python 2, never on Python 3.
    self.assertEqual(images[0].file.read(), b"foo")
def test_set_image(self):
    """Exactly one image is stored and its payload round-trips."""
    art = EmbeddedImage(cBytesIO(b"foo"), "image/jpeg", 10, 10, 8)
    self.s.set_image(art)
    self.assertTrue(self.s.has_images)
    stored = self.s.get_images()
    self.assertEqual(len(stored), 1)
    self.assertEqual(stored[0].mime_type, "image/")
    self.assertEqual(stored[0].read(), b"foo")
def test_set_image(self):
    """An image set on a bare MP3 persists across a reload."""
    fileobj = cBytesIO(b"foo")
    image = EmbeddedImage(fileobj, "image/jpeg", 10, 10, 8)
    song = MP3File(self.filename)
    # FIX: failIf is a long-deprecated unittest alias (removed in
    # Python 3.12); use assertFalse.
    self.assertFalse(song.has_images)
    song.set_image(image)
    self.assertTrue(song.has_images)
    song = MP3File(self.filename)
    self.assertTrue(song.has_images)
    self.assertEqual(song.get_primary_image().mime_type, "image/jpeg")
def __process(self, results):
    """Send a gzipped batch lookup of chromaprint fingerprints to the
    AcoustID web service and yield one LookupResult per input result.

    Yields:
        LookupResult for every entry of ``results``, carrying the parsed
        releases for that index (empty on failure) plus any error text.
    """
    req_data = []
    # Common request parameters; "batch" enables multi-fingerprint lookup.
    req_data.append(
        urlencode({
            "format": "json",
            "client": APP_KEY,
            "batch": "1",
        }))

    # Each fingerprint gets ".N"-suffixed parameter names, matched back
    # up below via the "index" field of the response.
    for i, result in enumerate(results):
        postfix = ".%d" % i
        req_data.append(
            urlencode({
                "duration" + postfix: str(int(round(result.length))),
                "fingerprint" + postfix: result.chromaprint,
            }))

    req_data.append("meta=releases+recordings+tracks+sources")

    urldata = "&".join(req_data)
    # gzip the form body; the server accepts it (see Content-Encoding).
    obj = cBytesIO()
    gzip.GzipFile(fileobj=obj, mode="wb").write(urldata.encode())
    urldata = obj.getvalue()

    headers = {
        "Content-Encoding": "gzip",
        "Content-type": "application/x-www-form-urlencoded"
    }
    req = Request(self.URL, urldata, headers)

    releases = {}
    error = ""
    try:
        response = urlopen(req, timeout=self.TIMEOUT)
    except EnvironmentError as e:
        error = "urllib error: " + str(e)
    else:
        try:
            data = response.read()
            data = json.loads(data.decode())
        except ValueError as e:
            error = str(e)
        else:
            if data["status"] == "ok":
                for result_data in data.get("fingerprints", []):
                    # entries without an index can't be matched to an input
                    if "index" not in result_data:
                        continue
                    index = result_data["index"]
                    releases[index] = parse_acoustid_response(result_data)

    # Always yield one result per input, even if the lookup failed.
    for i, result in enumerate(results):
        yield LookupResult(result, releases.get(str(i), []), error)
def pickle_loads(data, lookup_func=None):
    """Like pickle_load() but takes bytes instead of a file-like.

    Args:
        data (bytes)
        lookup_func (callable or None)
    Returns:
        The unpickled objects
    Raises:
        pickle.UnpicklingError
    """

    fileobj = cBytesIO(data)
    return pickle_load(fileobj, lookup_func=lookup_func)
def __process(self, results):
    """Send a gzipped batch lookup of chromaprint fingerprints to the
    AcoustID web service and yield one LookupResult per input result.

    NOTE(review): this variant writes a str to a binary GzipFile and
    json.loads() the raw response bytes — that only works on Python 2;
    confirm this code path is py2-only.
    """
    req_data = []
    # Common request parameters; "batch" enables multi-fingerprint lookup.
    req_data.append(urlencode({
        "format": "json",
        "client": APP_KEY,
        "batch": "1",
    }))

    # Each fingerprint gets ".N"-suffixed parameter names, matched back
    # up below via the "index" field of the response.
    for i, result in enumerate(results):
        postfix = ".%d" % i
        req_data.append(urlencode({
            "duration" + postfix: str(int(round(result.length))),
            "fingerprint" + postfix: result.chromaprint,
        }))

    req_data.append("meta=releases+recordings+tracks+sources")

    urldata = "&".join(req_data)
    # gzip the form body; the server accepts it (see Content-Encoding).
    obj = cBytesIO()
    gzip.GzipFile(fileobj=obj, mode="wb").write(urldata)
    urldata = obj.getvalue()

    headers = {
        "Content-Encoding": "gzip",
        "Content-type": "application/x-www-form-urlencoded"
    }
    req = Request(self.URL, urldata, headers)

    releases = {}
    error = ""
    try:
        response = urlopen(req, timeout=self.TIMEOUT)
    except EnvironmentError as e:
        error = "urllib error: " + str(e)
    else:
        try:
            data = response.read()
            data = json.loads(data)
        except ValueError as e:
            error = str(e)
        else:
            if data["status"] == "ok":
                for result_data in data.get("fingerprints", []):
                    # entries without an index can't be matched to an input
                    if "index" not in result_data:
                        continue
                    index = result_data["index"]
                    releases[index] = parse_acoustid_response(result_data)

    # Always yield one result per input, even if the lookup failed.
    for i, result in enumerate(results):
        yield LookupResult(result, releases.get(str(i), []), error)
def __send(self, urldata):
    """Submit one batch of fingerprints to the AcoustID service and
    report progress via the configured callback.

    NOTE(review): ``[basedata] + map(urlencode, urldata)`` relies on
    Python 2's map() returning a list, and the str is written straight
    into a binary GzipFile — confirm this variant is py2-only.
    """
    if self.__stopped:
        return

    # Rate limiting / API-key gate shared with other submitters.
    gatekeeper.wait()
    self.__done += len(urldata)

    basedata = urlencode({
        "format": "xml",
        "client": APP_KEY,
        "user": get_api_key(),
    })

    urldata = "&".join([basedata] + map(urlencode, urldata))
    # gzip the form body; matched by the Content-Encoding header below.
    obj = cBytesIO()
    gzip.GzipFile(fileobj=obj, mode="wb").write(urldata)
    urldata = obj.getvalue()

    headers = {
        "Content-Encoding": "gzip",
        "Content-type": "application/x-www-form-urlencoded"
    }
    req = Request(self.URL, urldata, headers)

    error = None
    try:
        response = urlopen(req, timeout=self.TIMEOUT)
    except EnvironmentError as e:
        error = "urllib error: " + str(e)
    else:
        xml = response.read()
        try:
            dom = parseString(xml)
        # NOTE(review): bare except also catches KeyboardInterrupt /
        # SystemExit — consider narrowing to Exception.
        except:
            error = "xml error"
        else:
            status = dom.getElementsByTagName("status")
            if not status or not status[0].childNodes or not \
                    status[0].childNodes[0].nodeValue == "ok":
                error = "response status error"

    if error:
        print_w("[fingerprint] Submission failed: " + error)

    # emit progress
    self.__idle(self.__progress_cb,
                float(self.__done) / len(self.__results))
def __send(self, urldata):
    """Submit one batch of fingerprints to the AcoustID service and
    report progress via the configured callback.

    Args:
        urldata: list of parameter mappings, one per fingerprint.
    """
    if self.__stopped:
        return

    # Rate limiting / API-key gate shared with other submitters.
    gatekeeper.wait()
    self.__done += len(urldata)

    basedata = urlencode({
        "format": "xml",
        "client": APP_KEY,
        "user": get_api_key(),
    })

    urldata = "&".join([basedata] + list(map(urlencode, urldata)))
    # gzip the form body; matched by the Content-Encoding header below.
    obj = cBytesIO()
    gzip.GzipFile(fileobj=obj, mode="wb").write(urldata.encode())
    urldata = obj.getvalue()

    headers = {
        "Content-Encoding": "gzip",
        "Content-type": "application/x-www-form-urlencoded"
    }
    req = Request(self.URL, urldata, headers)

    error = None
    try:
        response = urlopen(req, timeout=self.TIMEOUT)
    except EnvironmentError as e:
        error = "urllib error: " + str(e)
    else:
        xml = response.read()
        try:
            dom = parseString(xml)
        # FIX: a bare "except:" also swallows KeyboardInterrupt and
        # SystemExit; only parse failures should be treated as errors.
        except Exception:
            error = "xml error"
        else:
            status = dom.getElementsByTagName("status")
            if not status or not status[0].childNodes or not \
                    status[0].childNodes[0].nodeValue == "ok":
                error = "response status error"

    if error:
        print_w("[fingerprint] Submission failed: " + error)

    # emit progress
    self.__idle(self.__progress_cb,
                float(self.__done) / len(self.__results))
def test_set_image(self):
    """Replacing a legacy base64 coverart clears the old tag fields."""
    jpeg_data = _get_jpeg()
    raw = self.MutagenType(self.filename)
    raw["coverart"] = base64.b64encode(jpeg_data).decode("ascii")
    raw["coverartmime"] = "image/jpeg"
    raw.save()

    replacement = EmbeddedImage(cBytesIO(b"foo"), "image/jpeg", 10, 10, 8)
    song = self.QLType(self.filename)
    self.assertTrue(song.has_images)
    self.assertTrue(song.get_primary_image())
    self.assertTrue(song.has_images)
    song.set_image(replacement)
    self.assertTrue(song.has_images)
    self.assertEqual(song.get_primary_image().width, 10)

    raw = self.MutagenType(self.filename)
    self.assertTrue("coverart" not in raw)
    self.assertTrue("coverartmime" not in raw)
def test_pickle_load(self):
    """Round-trips mixed str/bytes keys across pickle protocols 0-2."""
    data = {b"foo": u"bar", u"quux": b"baz"}
    for protocol in [0, 1, 2]:
        # FIX: the loop variable was never used, so every iteration
        # exercised the default protocol; pass it through explicitly.
        assert pickle_loads(pickle_dumps(data, protocol)) == data
        assert pickle_load(cBytesIO(pickle_dumps(data, protocol))) == data
def __set_async(self, url):
    """Manages various things: Fast switching of covers (aborting
    old HTTP requests), The image cache, etc."""
    # Bump the job counter so any older in-flight download can detect
    # that it has been superseded.
    self.current_job += 1
    job = self.current_job

    # Ask the currently running loader (if any) to stop, and busy-wait
    # until it has finished.
    self.stop_loading = True
    while self.loading:
        time.sleep(0.05)
    self.stop_loading = False

    # A newer request arrived while we waited; abandon this one.
    if job != self.current_job:
        return

    self.loading = True

    GLib.idle_add(self.button.set_sensitive, False)
    self.current_pixbuf = None

    pbloader = GdkPixbuf.PixbufLoader()
    pbloader.connect('closed', self.__close)

    # Look for cached images
    raw_data = None
    for entry in self.data_cache:
        if entry[0] == url:
            raw_data = entry[1]
            break

    if not raw_data:
        pbloader.connect('area-updated', self.__update)

        data_store = cBytesIO()

        try:
            request = Request(url)
            request.add_header('User-Agent', USER_AGENT)
            url_sock = urlopen(request)
        except EnvironmentError:
            print_w(_("[albumart] HTTP Error: %s") % url)
        else:
            # Stream in ~10 KiB chunks so a stale job aborts quickly.
            while not self.stop_loading:
                tmp = url_sock.read(1024 * 10)
                if not tmp:
                    break
                pbloader.write(tmp)
                data_store.write(tmp)

            url_sock.close()

            if not self.stop_loading:
                raw_data = data_store.getvalue()

                # Newest entry first; evict from the tail until the
                # total cached size fits under the cap again.
                self.data_cache.insert(0, (url, raw_data))
                while 1:
                    cache_sizes = [
                        len(data[1]) for data in self.data_cache
                    ]
                    if sum(cache_sizes) > self.max_cache_size:
                        del self.data_cache[-1]
                    else:
                        break

        data_store.close()
    else:
        # Sleep for fast switching of cached images
        time.sleep(0.05)
        if not self.stop_loading:
            pbloader.write(raw_data)

    try:
        pbloader.close()
    except GLib.GError:
        pass

    self.current_data = raw_data

    if not self.stop_loading:
        GLib.idle_add(self.button.set_sensitive, True)

    self.loading = False
def test_pickle_dump(self):
    """pickle_dump writes to a file object; round-trip via pickle_loads."""
    buf = cBytesIO()
    pickle_dump(42, buf)
    assert pickle_loads(buf.getvalue()) == 42
def __set_async(self, url):
    """Manages various things: Fast switching of covers (aborting
    old HTTP requests), The image cache, etc."""
    # Bump the job counter so any older in-flight download can detect
    # that it has been superseded.
    self.current_job += 1
    job = self.current_job

    # Ask the currently running loader (if any) to stop, and busy-wait
    # until it has finished.
    self.stop_loading = True
    while self.loading:
        time.sleep(0.05)
    self.stop_loading = False

    # A newer request arrived while we waited; abandon this one.
    if job != self.current_job:
        return

    self.loading = True

    GLib.idle_add(self.button.set_sensitive, False)
    self.current_pixbuf = None

    pbloader = GdkPixbuf.PixbufLoader()
    pbloader.connect('closed', self.__close)

    # Look for cached images
    raw_data = None
    for entry in self.data_cache:
        if entry[0] == url:
            raw_data = entry[1]
            break

    if not raw_data:
        pbloader.connect('area-updated', self.__update)

        data_store = cBytesIO()

        try:
            request = Request(url)
            request.add_header('User-Agent', USER_AGENT)
            url_sock = urlopen(request)
        except EnvironmentError:
            print_w(_("[albumart] HTTP Error: %s") % url)
        else:
            # Stream in ~10 KiB chunks so a stale job aborts quickly.
            while not self.stop_loading:
                tmp = url_sock.read(1024 * 10)
                if not tmp:
                    break
                pbloader.write(tmp)
                data_store.write(tmp)

            url_sock.close()

            if not self.stop_loading:
                raw_data = data_store.getvalue()

                # Newest entry first; evict from the tail until the
                # total cached size fits under the cap again.
                self.data_cache.insert(0, (url, raw_data))
                while 1:
                    cache_sizes = [len(data[1]) for data in self.data_cache]
                    if sum(cache_sizes) > self.max_cache_size:
                        del self.data_cache[-1]
                    else:
                        break

        data_store.close()
    else:
        # Sleep for fast switching of cached images
        time.sleep(0.05)
        if not self.stop_loading:
            pbloader.write(raw_data)

    try:
        pbloader.close()
    except GLib.GError:
        pass

    self.current_data = raw_data

    if not self.stop_loading:
        GLib.idle_add(self.button.set_sensitive, True)

    self.loading = False
def test_invalid(self):
    """Empty input is not a valid pickle and must raise UnpicklingError."""
    for load in (pickle_loads,
                 lambda raw: pickle_load(cBytesIO(raw))):
        with self.assertRaises(UnpicklingError):
            load(b"")