def upnp_ImportResource(self, *args, **kwargs):
    """Handle the ContentDirectory ImportResource action.

    Downloads the resource at ``SourceURI`` into a temporary file and,
    once complete, moves it into place for the item addressed by
    ``DestinationURI`` (which must end in ``?import``).

    Returns a dict carrying the TransferID (currently always 0), or a
    ``failure.Failure(errorCode(718))`` when the destination is invalid
    or unknown.
    """
    SourceURI = kwargs['SourceURI']
    DestinationURI = kwargs['DestinationURI']

    # DestinationURI must look like .../<id>?import
    if DestinationURI.endswith('?import'):
        item_id = DestinationURI.split('/')[-1]
        item_id = item_id[:-7]  # remove the ?import
    else:
        return failure.Failure(errorCode(718))

    item = self.get_by_id(item_id)
    if item is None:
        return failure.Failure(errorCode(718))

    def gotPage(headers):
        # print('gotPage', headers)
        content_type = headers.get('content-type', [])
        if not isinstance(content_type, list):
            # BUGFIX: ``list(content_type)`` would explode a plain string
            # into single characters; wrap the single value instead.
            content_type = [content_type]
        if len(content_type) > 0:
            extension = mimetypes.guess_extension(content_type[0],
                                                  strict=False)
            item.set_path(None, extension)
        shutil.move(tmp_path, item.get_path())
        item.rebuild(self.urlbase)
        # Bump the system-wide update id and notify subscribers, if the
        # backend tracks one.
        if hasattr(self, 'update_id'):
            self.update_id += 1
            if self.server:
                if hasattr(self.server, 'content_directory_server'):
                    self.server.content_directory_server.set_variable(
                        0, 'SystemUpdateID', self.update_id)
            if item.parent is not None:
                value = (item.parent.get_id(), item.parent.get_update_id())
                if self.server:
                    if hasattr(self.server, 'content_directory_server'):
                        self.server.content_directory_server.set_variable(
                            0, 'ContainerUpdateIDs', value)

    def gotError(error, url):
        self.warning(f'error requesting {url}')
        self.info(error)
        # Drop the partial download before reporting failure.
        os.unlink(tmp_path)
        return failure.Failure(errorCode(718))

    tmp_fp, tmp_path = tempfile.mkstemp()
    os.close(tmp_fp)
    utils.downloadPage(SourceURI, tmp_path).addCallbacks(
        gotPage, gotError, None, None, [SourceURI], None)

    transfer_id = 0  # FIXME
    return {'TransferID': transfer_id}
def upnp_ImportResource(self, *args, **kwargs):
    """Handle the ContentDirectory ImportResource action.

    Downloads the resource at ``SourceURI`` into a temporary file and,
    once complete, moves it into place for the item addressed by
    ``DestinationURI`` (which must end in ``?import``).

    Returns a dict carrying the TransferID (currently always 0), or a
    ``failure.Failure(errorCode(718))`` when the destination is invalid
    or unknown.
    """
    SourceURI = kwargs['SourceURI']
    DestinationURI = kwargs['DestinationURI']

    # DestinationURI must look like .../<id>?import
    if DestinationURI.endswith('?import'):
        item_id = DestinationURI.split('/')[-1]
        item_id = item_id[:-7]  # remove the ?import
    else:
        return failure.Failure(errorCode(718))

    item = self.get_by_id(item_id)
    # BUGFIX: use identity test instead of '== None'
    if item is None:
        return failure.Failure(errorCode(718))

    def gotPage(headers):
        #print "gotPage", headers
        content_type = headers.get('content-type', [])
        if not isinstance(content_type, list):
            # BUGFIX: ``list(content_type)`` would explode a plain string
            # into single characters; wrap the single value instead.
            content_type = [content_type]
        if len(content_type) > 0:
            extension = mimetypes.guess_extension(content_type[0],
                                                  strict=False)
            item.set_path(None, extension)
        shutil.move(tmp_path, item.get_path())
        item.rebuild(self.urlbase)
        # Bump the system-wide update id and notify subscribers, if the
        # backend tracks one.
        if hasattr(self, 'update_id'):
            self.update_id += 1
            if self.server:
                if hasattr(self.server, 'content_directory_server'):
                    self.server.content_directory_server.set_variable(
                        0, 'SystemUpdateID', self.update_id)
            if item.parent is not None:
                value = (item.parent.get_id(), item.parent.get_update_id())
                if self.server:
                    if hasattr(self.server, 'content_directory_server'):
                        self.server.content_directory_server.set_variable(
                            0, 'ContainerUpdateIDs', value)

    def gotError(error, url):
        self.warning("error requesting", url)
        self.info(error)
        # Drop the partial download before reporting failure.
        os.unlink(tmp_path)
        return failure.Failure(errorCode(718))

    tmp_fp, tmp_path = tempfile.mkstemp()
    os.close(tmp_fp)
    utils.downloadPage(SourceURI, tmp_path).addCallbacks(
        gotPage, gotError, None, None, [SourceURI], None)

    transfer_id = 0  #FIXME
    return {'TransferID': transfer_id}
def getFile(self, request):
    """Serve this track's file, downloading it first when not cached.

    When the id is not in the cache, kicks off an async download and
    returns ``server.NOT_DONE_YET``; otherwise renders the cached file
    into ``request`` directly (finishing the request unless the static
    renderer returned a status code int).
    """
    # Idiom fix: 'x not in y' instead of 'not x in y'.
    if self.id not in cache:
        real_url = self.store.api.get_stream_url(self.id)
        (tmpfile, tmpfilename) = tempfile.mkstemp()
        res = utils.downloadPage(real_url, tmpfilename, supportPartial=1)
        res.addCallback(self.gotFile, (request, tmpfile, tmpfilename))
        res.addErrback(self.gotDownloadError, request)
        self.info("Started download")
        return server.NOT_DONE_YET

    downloadedFile = utils.StaticFile(cache[self.id][1],
                                      self.parent.mimetype)
    # mark as recent
    recent_cache[self.id] = cache[self.id]
    downloadedFile.type = self.parent.mimetype
    self.filesize = downloadedFile.getFileSize()
    self.parent.item.size = self.filesize
    self.mimetype = self.parent.mimetype
    downloadedFile.encoding = None
    self.info("File downloaded")
    # Renamed from 'file' to avoid shadowing the builtin.
    content = downloadedFile.render(request)
    self.info("File rendered")
    # An int result is an HTTP status; hand it back instead of writing.
    if isinstance(content, int):
        return content
    request.write(content)
    request.finish()
def device_extract(workdevice, workpath): tmp_dir = workpath.child(workdevice.get_uuid()) if tmp_dir.exists(): tmp_dir.remove() tmp_dir.createDirectory() target = tmp_dir.child('device-description.xml') print "d",target,target.path d = downloadPage(workdevice.get_location(),target.path) l.append(d) for service in workdevice.services: target = tmp_dir.child('%s-description.xml'%service.service_type.split(':',3)[3]) print "s",target,target.path d = downloadPage(service.get_scpd_url(),target.path) l.append(d) for ed in workdevice.devices: device_extract(ed, tmp_dir)
def downloadFile(self, request, filepath, callback, *args):
    """Start (or reuse) the cache download of self.stream_url.

    The first call creates the downloader deferred and wires up the
    finished/error handlers; subsequent calls reuse it. An optional
    ``callback`` is chained with ``(request, filepath, *args)``.

    Returns the shared downloader deferred.
    """
    # Idiom fix: dropped the redundant parentheses around conditions.
    if self.downloader is None:
        self.info("Proxy: download data to cache file %s" % filepath)
        self.checkCacheSize()
        self.downloader = utils.downloadPage(self.stream_url, filepath,
                                             supportPartial=1)
        self.downloader.addCallback(self.downloadFinished)
        self.downloader.addErrback(self.gotDownloadError, request)
    if callback is not None:
        self.downloader.addCallback(callback, request, filepath, *args)
    return self.downloader
def device_extract(workdevice, workpath): tmp_dir = workpath.child(workdevice.get_uuid()) if tmp_dir.exists(): tmp_dir.remove() tmp_dir.createDirectory() target = tmp_dir.child('device-description.xml') print "device", target.path d = downloadPage(workdevice.get_location(), target.path) l.append(d) for service in workdevice.services: target = tmp_dir.child('%s-description.xml' % service.service_type.split(':', 3)[3]) print "service", target.path d = downloadPage(service.get_scpd_url(), target.path) l.append(d) for ed in workdevice.devices: device_extract(ed, tmp_dir) return tmp_dir
def downloadFile(self, request, filepath, callback, *args):
    """Begin (or join) the proxy download of ``self.stream_url``.

    On the first call the download into ``filepath`` is started and the
    finished/error handlers attached; later calls reuse the existing
    downloader. When ``callback`` is given it is chained onto the
    deferred with ``(request, filepath, *args)``.

    Returns the shared downloader deferred.
    """
    downloader = self.downloader
    if downloader is None:
        self.info(f'Proxy: download data to cache file {filepath}')
        self.checkCacheSize()
        downloader = utils.downloadPage(self.stream_url, filepath,
                                        supportPartial=1)
        downloader.addCallback(self.downloadFinished)
        downloader.addErrback(self.gotDownloadError, request)
        self.downloader = downloader
    if callback is not None:
        downloader.addCallback(callback, request, filepath, *args)
    return downloader