def get_item(self, path):
    """Build a lazy placeholder Item for *path*.

    The item id is the last path segment ("a/b/c" -> "c").  The item is
    marked expandable and streamable and gets its routes attached via
    add_routes; the actual listing happens later through those routes.

    Returns the new Item.
    """
    # Fix: removed stale commented-out route-wiring code that duplicated
    # what add_routes() already does.
    item = Item(id=path.strip().split("/")[-1], router=router)
    item.expandable = True
    item.streamable = True
    # NOTE(review): skip=True semantics are defined by add_routes — verify
    # against its implementation before relying on them here.
    self.add_routes(item, path, skip=True)
    return item
def thomas_list(self, item, path, depth=0, modified_since=None):
    """Fetch a remote listing for *path* and return it as an Item.

    Sends an If-Modified-Since header when *modified_since* is given.

    Raises NotModifiedException on HTTP 304 and PathNotFoundException on
    403/404.  Any other non-200 status is logged and None is returned.
    """
    logger.info(f"Listing path {path!r} with depth {depth}")
    original_id = item.id
    request_headers = self.get_headers()
    if modified_since:
        request_headers["If-Modified-Since"] = modified_since.strftime(
            "%a, %d %b %Y %H:%M:%S GMT")

    base_url = self.config["url"].strip("/") + "/"
    response = requests.get(
        urljoin(base_url, path),
        params={"depth": depth},
        headers=request_headers,
    )
    status = response.status_code

    if status == 200:
        # Rebuild the item from the payload but keep the caller's id.
        listed_item = Item.unserialize(response.json(), router=router)
        listed_item.id = original_id
        self.add_routes(listed_item, path, skip=True)
        return listed_item
    if status == 304:
        raise NotModifiedException()
    if status in (403, 404):
        raise PathNotFoundException()
    logger.warning(
        f"Unknown status code {status} while listing {self.name}/{path}"
    )
def list_file(self, path):
    """Return a readable Item for the file stored at *path*.

    Raises PathNotFoundException when the file is not in the database.
    Fires the file_route_needed and on_item events for the new item.
    """
    path = cleanup_path(path)
    db_key = keyify(DatabaseType.FILE, path)
    if db_key not in self.db:
        raise PathNotFoundException()

    metadata = self.db[db_key].copy()
    item_path = metadata["path"]
    file_item = Item(id=item_path.split("/")[-1], attributes=metadata)
    file_item.readable = True
    for event_name in ("file_route_needed", "on_item"):
        self._trigger_event(event_name, item=file_item, path=item_path)
    return file_item
def list_torrent(self, item, torrent_file):
    """Populate *item* with the file tree described by a torrent.

    Handles both multi-file and single-file torrents.  Paths rejected by
    is_legal_path are skipped with a warning.  Availability metadata on
    *item*, when present, is copied onto every file item.

    Returns the (mutated) *item*.
    """
    torrent = torrent_file.get_torrent_data()
    availability_metadata = item.get("metadata:availability")

    def get_folder(folders, item, path):
        # Return (creating on demand) the folder Item for *path*, building
        # intermediate folders recursively; *folders* caches them keyed by
        # path tuple so siblings share ancestors.
        if not path:
            return item
        if tuple(path) in folders:
            return folders[tuple(path)]
        parent_item = get_folder(folders, item, path[:-1])
        folders[tuple(path)] = folder_item = Item(id=path[-1])
        parent_item.add_item(folder_item)
        return folder_item

    if b"files" in torrent[b"info"]:  # multifile torrent
        folders = {}
        for f in torrent[b"info"][b"files"]:
            logger.trace("Handling torrent file %r" % (f, ))
            # Drop empty path fragments before decoding.
            path = [self.try_decode(x) for x in f[b"path"] if x]
            if not self.is_legal_path(path):
                logger.warning(f"Dangerous path {path!r} found, skipping")
                continue
            name = path.pop()
            torrent_item = Item(id=name, attributes={"size": f[b"length"]})
            if availability_metadata:
                torrent_item["metadata:availability"] = availability_metadata
            folder_item = get_folder(folders, item, path)
            folder_item.add_item(torrent_item)
    else:
        name = self.try_decode(torrent[b"info"][b"name"])
        if self.is_legal_path(name):
            torrent_item = Item(
                id=name, attributes={"size": torrent[b"info"][b"length"]})
            if availability_metadata:
                torrent_item["metadata:availability"] = availability_metadata
            item.add_item(torrent_item)
        else:
            # Fix: the multi-file branch warns on illegal paths; previously
            # an illegal single-file name was dropped silently.
            logger.warning(f"Dangerous path {name!r} found, skipping")
    return item
def get(self, request, path):
    """Serve the serialized listing for *path*.

    An empty path yields the root listing: one entry per configured input
    plugin.  For any other path the matching plugin is resolved and its
    item listed to the requested depth; honours If-Modified-Since with a
    304 response.  Raises Http404 for unknown plugins or unusable items.
    """
    path = path.strip("/")

    if not path:
        # Root listing: one child item per configured input plugin.
        root = Item(id="")
        for input_config in self.service.config.get("inputs", []):
            if not input_config or not input_config["input"]:
                continue
            plugin = input_config["input"]
            plugin_item = plugin.get_item("")
            plugin_item.id = plugin.name
            root.add_item(plugin_item)
        return Response(root.serialize())

    # Parse If-Modified-Since into an aware datetime; any parse failure
    # leaves it falsy and the check below is skipped.
    if_modified_since = request.META.get("HTTP_IF_MODIFIED_SINCE")
    if if_modified_since:
        if_modified_since = parse_http_date_safe(if_modified_since)
    if if_modified_since:
        if_modified_since = datetime.fromtimestamp(if_modified_since, pytz.UTC)

    depth = int(request.GET.get("depth", 0))
    plugin, plugin_path = self.get_plugin_path(path)
    if not plugin:
        raise Http404
    item = InputPluginManager.get_item(plugin, plugin_path)
    logger.info(
        f"Trying to create listing for {plugin!r} - path:{plugin_path} - last_modified:{if_modified_since}"
    )
    item.list(depth=depth)
    if not item.is_readable and not item.is_listable:
        raise Http404
    if if_modified_since and item.modified <= if_modified_since:
        return HttpResponseNotModified()
    return Response(item.serialize())
def get_cache_results(self, search_token, path):
    """Return the cached listing Item for (search_token, path).

    Returns None when no cache entry exists.
    """
    try:
        cache_entry = ListingCache.objects.get(app=self.name,
                                               search_token=search_token,
                                               path=path)
    except ListingCache.DoesNotExist:
        logger.info(
            f"Unable to find listing for search_token:{search_token}")
        return
    return Item.unserialize(cache_entry.listing)
def get_folder(folders, item, path):
    """Return the folder Item for *path*, creating ancestors as needed.

    *folders* caches already-created folders keyed by path tuple; *item*
    is the root, returned directly for an empty path.  Newly created
    folders are attached to their parent via add_item.
    """
    current = item
    for length in range(1, len(path) + 1):
        prefix = tuple(path[:length])
        cached = folders.get(prefix)
        if cached is not None:
            current = cached
            continue
        new_folder = Item(id=prefix[-1])
        current.add_item(new_folder)
        folders[prefix] = new_folder
        current = new_folder
    return current
def list_dir(self, path, depth=0, show_deleted=False):
    """Return an Item for the directory at *path*.

    A non-negative *depth* recurses into the directory via _list_dir;
    a negative depth only marks the item expandable for lazy listing.
    Raises PathNotFoundException for unknown directories and fires the
    relevant routing events.
    """
    path = cleanup_path(path)
    dir_key = keyify(DatabaseType.DIRECTORY, path)
    if dir_key not in self.db:
        raise PathNotFoundException()

    metadata = self.db[dir_key].copy()
    item_path = metadata["path"]
    dir_item = Item(id=item_path.split("/")[-1], attributes=metadata)
    if depth < 0:
        # Lazy: defer listing until the item is expanded.
        dir_item.expandable = True
        self._trigger_event("list_route_needed", item=dir_item, path=item_path)
    else:
        dir_item.initiate_nested_items()
        self._list_dir(dir_item, path, depth, show_deleted)
    self._trigger_event("on_item", item=dir_item, path=item_path)
    return dir_item
def _list_dir(self, parent_folder, path, depth, show_deleted):
    """Fill *parent_folder* with the entries listed under *path*.

    Directories recurse while *depth* is truthy (decrementing per level),
    otherwise they are marked expandable for lazy listing.  Deleted
    entries are skipped unless *show_deleted*.  Children end up sorted
    by id.
    """
    entries_key = keyify(DatabaseType.FILELIST, path)
    for entry_key in self.db[entries_key]:
        entry_type = entry_key[0]
        metadata = self.db[entry_key].copy()
        if metadata.get("deleted", False) and not show_deleted:
            continue
        entry_path = metadata["path"]
        child = Item(id=entry_path.split("/")[-1],
                     attributes=metadata,
                     router=router)
        parent_folder.add_item(child)
        if entry_type == DatabaseType.DIRECTORY:
            if depth:
                child.initiate_nested_items()
                self._list_dir(child, entry_path, depth - 1, show_deleted)
            else:
                child.expandable = True
                self._trigger_event("list_route_needed",
                                    item=child,
                                    path=entry_path)
        elif entry_type == DatabaseType.FILE:
            child.readable = True
            self._trigger_event("file_route_needed",
                                item=child,
                                path=entry_path)
        self._trigger_event("on_item", item=child, path=entry_path)
    parent_folder.nested_items.sort(key=lambda entry: entry.id)
def parse_result(self, search_token, session, response, response_type):
    """Rebuild the Item carried in *response* and wire up its routes.

    Attaches stream routes keyed by the response's search token and path,
    then expand routes pointing at the remote URL for that path.

    Returns the reconstructed Item.
    """
    item = Item.unserialize(response["item"])
    self.add_stream_routes(
        item, {
            "search_token": response["search_token"],
            "path": response["path"]
        },
        ensure_already_streamable=True,
    )
    # Fix: urljoin() takes only two URL arguments — the previous
    # urljoin(base, token, path) call passed the path as the
    # allow_fragments flag, so it never became part of the URL.
    url = urljoin(
        self.base_url.rstrip("/") + "/",
        f'{response["search_token"].strip("/")}/{response["path"].lstrip("/")}',
    )
    self.add_expand_routes(item, search_token, url)
    return item
def get_item(self, searcher_name, search_token, path, sub_path):
    """Resolve *sub_path* inside a cached search listing.

    Looks up the ListingCache row for the searcher/token/path triple and
    navigates into its unserialized listing.  Raises Http404 when no
    cache entry exists.
    """
    lookup = {
        "app": self.service.name,
        "searcher_name": searcher_name,
        "search_token": search_token,
        "path": path,
    }
    try:
        cache_entry = ListingCache.objects.get(**lookup)
    except ListingCache.DoesNotExist:
        logger.warning(
            f"Unable to find cache for search_token:{search_token} path:{path}"
        )
        raise Http404
    root_item = Item.unserialize(cache_entry.listing)
    return root_item.get_item_from_path(sub_path)
def get_filesystem(self, infohash):
    """Build an Item tree mirroring the torrent's on-disk filesystem.

    Pass 1 creates a readable/streamable Item per torrent file, routed to
    the finished file on disk or through the torrent handler while still
    downloading.  Pass 2 creates the folder Items and links parents to
    children.  Returns the single top-level item of the tree.
    """
    torrent = get_torrent(infohash)
    status = torrent.get_status(['files', 'file_progress', 'save_path'])
    save_path = status['save_path']
    found_rar = False
    # Maps folder path ("a/b") -> list of file Items directly inside it.
    path_item_mapping = {}
    for f, progress in zip(status['files'], status['file_progress']):
        full_path = os.path.join(save_path, f['path'])
        # Split the torrent-relative path into folder part and filename.
        if '/' in f['path']:
            path, fn = f['path'].rsplit('/', 1)
        else:
            fn = f['path']
            path = ''
        item = Item(fn, attributes={'size': f['size']})
        item.readable = True
        item.streamable = True
        path_item_mapping.setdefault(path, []).append(item)
        if progress == 1.0:
            # Fully downloaded: serve straight from disk.
            item.add_route('file', True, False, False, kwargs={'path': full_path})
        else:
            # Still downloading: route reads through the torrent handler.
            item.add_route('torrent_file', True, False, False, kwargs={
                'torrent_handler': self,
                'infohash': infohash,
                'offset': f['offset'],
                'path': full_path,
            })
        item.add_route('direct', False, False, True)
        if not found_rar and fn.split('.')[-1].lower() == 'rar':
            found_rar = True
    # Maps folder path -> folder Item; '' is the synthetic root.
    path_mapping = {}
    for path, items in path_item_mapping.items():
        combined_path = []
        # The trailing '/' makes the final iteration visit the full path
        # itself, ensuring a folder Item exists for every prefix.
        for path_part in (path + '/').split('/'):
            partial_path = '/'.join(combined_path)
            if partial_path not in path_mapping:
                item = path_mapping[partial_path] = Item(
                    partial_path.split('/')[-1])
                item.streamable = True
                item.add_route(
                    'direct',
                    False,
                    False,
                    True,
                    kwargs={'allowed_extensions': STREAMABLE_EXTENSIONS})
                if found_rar:
                    # Lazy rar route on folders when any member looks like
                    # a rar archive.
                    item.add_route('rar', False, False, True, kwargs={'lazy': True})
                if combined_path:
                    # Link the new folder under its (already created) parent.
                    parent_path = '/'.join(combined_path[:-1])
                    path_mapping[parent_path].add_item(item)
            combined_path.append(path_part)
        # Attach the folder's files once all ancestor folders exist.
        for item in items:
            path_mapping[path].add_item(item)
    item = path_mapping[''].list()[0]  # TODO: make not use an empty item
    item.parent_item = None
    return item
def get_original_item(self):
    """Rebuild the Item originally stored under "original_item" in config."""
    serialized_item = self.config["original_item"]
    return Item.unserialize(serialized_item, router=router)
def get_listing(self, config, path, use_background_recheck=True, do_not_rebuild=False):
    """ Returns listing if it is good and available """
    # Flow: fetch any cached root -> maybe short-circuit on background
    # recheck -> re-list the source item -> rebuild under a lock when the
    # listing changed.  Returns a ListingItem root or None.
    path = path.strip("/")
    last_modified = None
    try:
        listing_item_root = ListingItem.objects.get(app=self.service.name,
                                                    path=path)
        if listing_item_root.is_root:
            last_modified = listing_item_root.last_updated
        if settings.DEBUG_CLEAR_CACHE:
            # Debug mode: throw the cached tree away and rebuild from scratch.
            logger.debug("Clear cache enabled, removing old listing")
            last_modified = None
            ListingItem.objects.filter(
                parent=listing_item_root).delete()
            listing_item_root.delete()
            listing_item_root = None
    except ListingItem.DoesNotExist:
        listing_item_root = None
    # Recently-checked cached root plus background rechecking enabled:
    # return it immediately and (optionally) refresh it asynchronously.
    if (config["level"].get("background_recheck") and use_background_recheck
            and listing_item_root and listing_item_root.last_checked
            and listing_item_root.last_checked >
            now() - timedelta(minutes=self.service.automatic_rebuild_timer)):
        logger.info(
            "We run updates to this listing in the background and it is sufficiently recent to be used"
        )
        if not do_not_rebuild:
            self.service.rebuild_listing(config, path, delay=True)
        return listing_item_root
    # Reconstruct the source item: from the cached root when available,
    # otherwise freshly from the service.
    if listing_item_root:
        item = Item.unserialize(listing_item_root.config["original_item"])
    else:
        item = self.service.get_item(config, path)
    try:
        if not item:
            raise PathNotFoundException()
        item.list(depth=config["level"]["listing_depth"])
        # Raise internally to reuse the not-modified handling below.
        if last_modified and item.modified and item.modified <= last_modified:
            raise NotModifiedException()
    except NotModifiedException:
        logger.info(
            "Our current listing is up-to-date, no need to do anything")
        # Only reachable when a cached root existed (last_modified was set).
        listing_item_root.last_checked = now()
        listing_item_root.save()
        return listing_item_root
    except PathNotFoundException:
        logger.info(
            "We got an all paths not found, just returning empty for now and hope it will be better next time"
        )
        return None
    if not item.is_listable:
        if last_modified is None:
            logger.info(
                "Empty listing and no last_modified, this is an unknown path"
            )
            return None
        else:
            logger.info(
                "Empty listing and last_modified, returning already built root"
            )
            return listing_item_root
    # Serialize builders: whoever wins the lock builds; everyone who had
    # to wait just re-reads the result from the database.
    lock_path = "%s/%s" % (self.service.name, path)
    with self.locktracker.get_path(lock_path) as lt:
        if lt.waited:
            logger.info(
                "Seems like someome built this view just before us, no need to build"
            )
            try:
                listing_item_root = ListingItem.objects.get(
                    app=self.service.name, path=path)
            except ListingItem.DoesNotExist:
                listing_item_root = None
        else:
            logger.info("Got lock, building listview")
            listing_item_root = self.build_listing(item, config, path)
    return listing_item_root