def parse_what(self, what):
    """Given an imdb id or movie title, looks up from imdb and returns a dict with imdb_id and title keys"""
    imdb_id = extract_id(what)
    title = what
    if not imdb_id:
        # No id could be extracted from the input; fall back to a title search.
        console('Searching imdb for %s' % what)
        match = ImdbSearch().smart_match(what)
        if not match:
            raise QueueError('ERROR: Unable to find any such movie from imdb, use imdb url instead.')
        imdb_id = extract_id(match['url'])
        title = match['name']
    else:
        # We already have an id; fetch the title page to resolve the canonical name.
        page = ImdbParser()
        try:
            page.parse('http://www.imdb.com/title/%s' % imdb_id)
        except Exception:
            raise QueueError('Error parsing info from imdb for %s' % imdb_id)
        if page.name:
            title = page.name
    # Remember the resolved values on the instance as well as returning them.
    self.options['imdb_id'] = imdb_id
    self.options['title'] = title
    return {'title': title, 'imdb_id': imdb_id}
def search_imdb(request):
    """Ajax autocomplete endpoint: search imdb for the `term` GET parameter and return JSON."""
    mimetype = 'application/json'
    if not request.is_ajax():
        # Non-ajax callers get a plain failure marker.
        return HttpResponse('fail', mimetype)
    term = request.GET.get('term', '')
    matches = ImdbSearch().search(term)
    results = []
    if not matches:
        # Placeholder entry so the autocomplete widget still shows feedback.
        results.append({
            'id': "NO MOVIES FOUND",
            'label': "NO MOVIES FOUND",
            'value': "NO MOVIES FOUND",
        })
    else:
        for movie in matches:
            # NOTE(review): lstrip('tt') strips *characters*, not a prefix — it works
            # here because imdb ids are 'tt' followed by digits only.
            label = movie['name']
            if 'year' in movie:
                label += " (%s)" % movie['year']
            results.append({
                'id': int(movie['imdb_id'].lstrip('tt')),
                'label': label,
                'value': label,
            })
    return HttpResponse(json.dumps(results), mimetype)
def get(self, title, session=None):
    """Get a list of IMDB search results by name or ID.

    :param title: Movie title or imdb id to search for
    :param session: Database session (unused here; kept for signature compatibility)
    :return: JSON response containing a (possibly empty) list of search results
    """
    raw_movies = ImdbSearch().smart_match(title, single_match=False)
    if not raw_movies:
        # Bug fix: return a jsonified empty list so the response type is
        # consistent with the non-empty branch (previously returned a bare
        # Python list instead of an HTTP JSON response).
        return jsonify([])
    # smart_match may return a single dict; normalize to a list to preserve
    # a consistent reply shape for callers.
    if not isinstance(raw_movies, list):
        raw_movies = [raw_movies]
    return jsonify(raw_movies)
def lookup(self, entry, search_allowed=True, session=None):
    """
    Perform imdb lookup for entry.

    Resolution order: use entry's imdb_id/imdb_url if present, then a cached
    SearchResult row, then (if allowed) a live imdb search. Parsed movie
    details are cached in the Movie table and copied onto the entry via
    ``self.field_map``.

    :param entry: Entry instance
    :param search_allowed: Allow fallback to search
    :param session: Database session used for the SearchResult/Movie caches
    :raises PluginError: Failure reason
    """
    from flexget.manager import manager
    if entry.get('imdb_id', eval_lazy=False):
        log.debug('No title passed. Lookup for %s' % entry['imdb_id'])
    elif entry.get('imdb_url', eval_lazy=False):
        log.debug('No title passed. Lookup for %s' % entry['imdb_url'])
    elif entry.get('title', eval_lazy=False):
        log.debug('lookup for %s' % entry['title'])
    else:
        raise plugin.PluginError(
            'looking up IMDB for entry failed, no title, imdb_url or imdb_id passed.'
        )
    # if imdb_id is included, build the url.
    if entry.get('imdb_id', eval_lazy=False) and not entry.get(
            'imdb_url', eval_lazy=False):
        entry['imdb_url'] = make_url(entry['imdb_id'])
    # make sure imdb url is valid: normalize to the canonical form, or drop it
    if entry.get('imdb_url', eval_lazy=False):
        imdb_id = extract_id(entry['imdb_url'])
        if imdb_id:
            entry['imdb_url'] = make_url(imdb_id)
        else:
            log.debug('imdb url %s is invalid, removing it' % entry['imdb_url'])
            del (entry['imdb_url'])
    # no imdb_url, check if there is cached result for it or if the
    # search is known to fail
    if not entry.get('imdb_url', eval_lazy=False):
        result = session.query(SearchResult).filter(
            SearchResult.title == entry['title']).first()
        if result:
            # TODO: 1.2 this should really be checking task.options.retry
            if result.fails and not manager.options.execute.retry:
                # this movie cannot be found, not worth trying again ...
                log.debug('%s will fail lookup' % entry['title'])
                raise plugin.PluginError('IMDB lookup failed for %s' % entry['title'])
            else:
                if result.url:
                    log.trace('Setting imdb url for %s from db' % entry['title'])
                    entry['imdb_id'] = result.imdb_id
                    entry['imdb_url'] = result.url
    movie = None
    # no imdb url, but information required, try searching
    if not entry.get('imdb_url', eval_lazy=False) and search_allowed:
        log.verbose('Searching from imdb `%s`' % entry['title'])
        search = ImdbSearch()
        search_name = entry.get('movie_name', entry['title'], eval_lazy=False)
        search_result = search.smart_match(search_name)
        if search_result:
            entry['imdb_url'] = search_result['url']
            # store url for this movie, so we don't have to search on every run
            result = SearchResult(entry['title'], entry['imdb_url'])
            session.add(result)
            session.commit()
            log.verbose('Found %s' % (entry['imdb_url']))
        else:
            log_once('IMDB lookup failed for %s' % entry['title'], log,
                     logging.WARN, session=session)
            # store FAIL for this title so future runs skip the search
            result = SearchResult(entry['title'])
            result.fails = True
            session.add(result)
            session.commit()
            raise plugin.PluginError('Title `%s` lookup failed' % entry['title'])
    # check if this imdb page has been parsed & cached
    movie = session.query(Movie).filter(
        Movie.url == entry['imdb_url']).first()
    # If we have a movie from cache, we are done
    if movie and not movie.expired:
        entry.update_using_map(self.field_map, movie)
        return
    # Movie was not found in cache, or was expired
    if movie is not None:
        if movie.expired:
            log.verbose('Movie `%s` details expired, refreshing ...' % movie.title)
        # Remove the old movie, we'll store another one later.
        session.query(MovieLanguage).filter(
            MovieLanguage.movie_id == movie.id).delete()
        session.query(Movie).filter(
            Movie.url == entry['imdb_url']).delete()
        session.commit()
    # search and store to cache
    if 'title' in entry:
        log.verbose('Parsing imdb for `%s`' % entry['title'])
    else:
        log.verbose('Parsing imdb for `%s`' % entry['imdb_id'])
    try:
        movie = self._parse_new_movie(entry['imdb_url'], session)
    except UnicodeDecodeError:
        log.error(
            'Unable to determine encoding for %s. Installing chardet library may help.'
            % entry['imdb_url'])
        # store cache so this will not be tried again
        movie = Movie()
        movie.url = entry['imdb_url']
        session.add(movie)
        session.commit()
        raise plugin.PluginError('UnicodeDecodeError')
    except ValueError as e:
        # TODO: might be a little too broad catch, what was this for anyway? ;P
        if manager.options.debug:
            log.exception(e)
        raise plugin.PluginError(
            'Invalid parameter: %s' % entry['imdb_url'], log)
    for att in [
            'title', 'score', 'votes', 'year', 'genres', 'languages',
            'actors', 'directors', 'mpaa_rating'
    ]:
        log.trace('movie.%s: %s' % (att, getattr(movie, att)))
    # Update the entry fields
    entry.update_using_map(self.field_map, movie)
def lookup(self, entry, search_allowed=True):
    """
    Perform imdb lookup for entry.

    Opens its own database session and commits it on exit (success or
    failure). Resolution order: entry's imdb_url/imdb_id, then the cached
    SearchResult table, then (if allowed) a live imdb search; parsed details
    are cached in the Movie table and mapped onto the entry.

    :param entry: Entry instance
    :param search_allowed: Allow fallback to search
    :raises PluginError: Failure reason
    """
    from flexget.manager import manager
    if entry.get('imdb_url', eval_lazy=False):
        log.debug('No title passed. Lookup for %s' % entry['imdb_url'])
    elif entry.get('imdb_id', eval_lazy=False):
        log.debug('No title passed. Lookup for %s' % entry['imdb_id'])
    elif entry.get('title', eval_lazy=False):
        log.debug('lookup for %s' % entry['title'])
    else:
        raise PluginError('looking up IMDB for entry failed, no title, imdb_url or imdb_id passed.')
    session = Session()
    try:
        # entry sanity checks
        for field in ['imdb_votes', 'imdb_score']:
            if entry.get(field, eval_lazy=False):
                value = entry[field]
                if not isinstance(value, (int, float)):
                    raise PluginError('Entry field %s should be a number!' % field)
        # if imdb_id is included, build the url.
        if entry.get('imdb_id', eval_lazy=False) and not entry.get('imdb_url', eval_lazy=False):
            entry['imdb_url'] = make_url(entry['imdb_id'])
        # make sure imdb url is valid: normalize to canonical form, or drop it
        if entry.get('imdb_url', eval_lazy=False):
            imdb_id = extract_id(entry['imdb_url'])
            if imdb_id:
                entry['imdb_url'] = make_url(imdb_id)
            else:
                log.debug('imdb url %s is invalid, removing it' % entry['imdb_url'])
                del(entry['imdb_url'])
        # no imdb_url, check if there is cached result for it or if the
        # search is known to fail
        if not entry.get('imdb_url', eval_lazy=False):
            result = session.query(SearchResult).\
                filter(SearchResult.title == entry['title']).first()
            if result:
                if result.fails and not manager.options.retry:
                    # this movie cannot be found, not worth trying again ...
                    log.debug('%s will fail lookup' % entry['title'])
                    raise PluginError('Title `%s` lookup fails' % entry['title'])
                else:
                    if result.url:
                        log.trace('Setting imdb url for %s from db' % entry['title'])
                        entry['imdb_url'] = result.url
        # no imdb url, but information required, try searching
        if not entry.get('imdb_url', eval_lazy=False) and search_allowed:
            log.verbose('Searching from imdb `%s`' % entry['title'])
            search = ImdbSearch()
            search_name = entry.get('movie_name', entry['title'], eval_lazy=False)
            search_result = search.smart_match(search_name)
            if search_result:
                entry['imdb_url'] = search_result['url']
                # store url for this movie, so we don't have to search on
                # every run
                result = SearchResult(entry['title'], entry['imdb_url'])
                session.add(result)
                log.verbose('Found %s' % (entry['imdb_url']))
            else:
                log_once('Imdb lookup failed for %s' % entry['title'], log)
                # store FAIL for this title so future runs skip the search
                result = SearchResult(entry['title'])
                result.fails = True
                session.add(result)
                raise PluginError('Title `%s` lookup failed' % entry['title'])
        # check if this imdb page has been parsed & cached
        # (eager-load relations so attribute access doesn't hit the db later)
        movie = session.query(Movie).\
            options(joinedload_all(Movie.genres),
                    joinedload_all(Movie.languages),
                    joinedload_all(Movie.actors),
                    joinedload_all(Movie.directors)).\
            filter(Movie.url == entry['imdb_url']).first()
        # determine whether or not movie details needs to be parsed
        req_parse = False
        if not movie:
            req_parse = True
        elif movie.expired:
            req_parse = True
        if req_parse:
            if movie is not None:
                if movie.expired:
                    log.verbose('Movie `%s` details expired, refreshing ...' % movie.title)
                # Remove the old movie, we'll store another one later.
                session.query(MovieLanguage).filter(MovieLanguage.movie_id == movie.id).delete()
                session.query(Movie).filter(Movie.url == entry['imdb_url']).delete()
            # search and store to cache
            if 'title' in entry:
                log.verbose('Parsing imdb for `%s`' % entry['title'])
            else:
                log.verbose('Parsing imdb for `%s`' % entry['imdb_id'])
            try:
                movie = self._parse_new_movie(entry['imdb_url'], session)
            except UnicodeDecodeError:
                log.error('Unable to determine encoding for %s. Installing chardet library may help.' % entry['imdb_url'])
                # store cache so this will not be tried again
                movie = Movie()
                movie.url = entry['imdb_url']
                session.add(movie)
                raise PluginError('UnicodeDecodeError')
            except ValueError as e:
                # TODO: might be a little too broad catch, what was this for anyway? ;P
                if manager.options.debug:
                    log.exception(e)
                raise PluginError('Invalid parameter: %s' % entry['imdb_url'], log)
        for att in ['title', 'score', 'votes', 'year', 'genres', 'languages', 'actors', 'directors', 'mpaa_rating']:
            log.trace('movie.%s: %s' % (att, getattr(movie, att)))
        # store to entry
        entry.update_using_map(self.field_map, movie)
    finally:
        # Commit even on failure so stored SearchResult/FAIL rows persist.
        log.trace('committing session')
        session.commit()
def lookup(self, entry, search_allowed=True):
    """
    Perform imdb lookup for entry.

    :param entry: Entry instance
    :param search_allowed: Allow fallback to search
    :raises PluginError: Failure reason
    """
    from flexget.manager import manager
    if entry.get("imdb_url", eval_lazy=False):
        log.debug("No title passed. Lookup for %s" % entry["imdb_url"])
    elif entry.get("imdb_id", eval_lazy=False):
        log.debug("No title passed. Lookup for %s" % entry["imdb_id"])
    elif entry.get("title", eval_lazy=False):
        log.debug("lookup for %s" % entry["title"])
    else:
        raise PluginError("looking up IMDB for entry failed, no title, imdb_url or imdb_id passed.")
    # True once we hit imdb over the network (search or page parse); used to
    # pause briefly at the end so we don't hammer the site.
    take_a_break = False
    session = Session()
    try:
        # entry sanity checks
        for field in ["imdb_votes", "imdb_score"]:
            if entry.get(field, eval_lazy=False):
                value = entry[field]
                if not isinstance(value, (int, float)):
                    raise PluginError("Entry field %s should be a number!" % field)
        # if imdb_id is included, build the url.
        if entry.get("imdb_id", eval_lazy=False) and not entry.get("imdb_url", eval_lazy=False):
            entry["imdb_url"] = make_url(entry["imdb_id"])
        # make sure imdb url is valid: normalize to canonical form, or drop it
        if entry.get("imdb_url", eval_lazy=False):
            imdb_id = extract_id(entry["imdb_url"])
            if imdb_id:
                entry["imdb_url"] = make_url(imdb_id)
            else:
                log.debug("imdb url %s is invalid, removing it" % entry["imdb_url"])
                del entry["imdb_url"]
        # no imdb_url, check if there is cached result for it or if the
        # search is known to fail
        if not entry.get("imdb_url", eval_lazy=False):
            result = session.query(SearchResult).filter(SearchResult.title == entry["title"]).first()
            if result:
                if result.fails and not manager.options.retry:
                    # this movie cannot be found, not worth trying again ...
                    log.debug("%s will fail lookup" % entry["title"])
                    raise PluginError("Title `%s` lookup fails" % entry["title"])
                else:
                    if result.url:
                        log.trace("Setting imdb url for %s from db" % entry["title"])
                        entry["imdb_url"] = result.url
        # no imdb url, but information required, try searching
        if not entry.get("imdb_url", eval_lazy=False) and search_allowed:
            log.verbose("Searching from imdb `%s`" % entry["title"])
            take_a_break = True
            search = ImdbSearch()
            search_result = search.smart_match(entry["title"])
            if search_result:
                entry["imdb_url"] = search_result["url"]
                # store url for this movie, so we don't have to search on
                # every run
                result = SearchResult(entry["title"], entry["imdb_url"])
                session.add(result)
                log.verbose("Found %s" % (entry["imdb_url"]))
            else:
                log_once("Imdb lookup failed for %s" % entry["title"], log)
                # store FAIL for this title so future runs skip the search
                result = SearchResult(entry["title"])
                result.fails = True
                session.add(result)
                raise PluginError("Title `%s` lookup failed" % entry["title"])
        # check if this imdb page has been parsed & cached
        # (eager-load relations so attribute access doesn't hit the db later)
        movie = (
            session.query(Movie)
            .options(
                joinedload_all(Movie.genres),
                joinedload_all(Movie.languages),
                joinedload_all(Movie.actors),
                joinedload_all(Movie.directors),
            )
            .filter(Movie.url == entry["imdb_url"])
            .first()
        )
        # determine whether or not movie details needs to be parsed
        req_parse = not movie or movie.expired
        if req_parse:
            if movie is not None:
                if movie.expired:
                    log.verbose("Movie `%s` details expired, refreshing ..." % movie.title)
                # Remove the old movie, we'll store another one later.
                session.query(Movie).filter(Movie.url == entry["imdb_url"]).delete()
            # search and store to cache
            if "title" in entry:
                log.verbose("Parsing imdb for `%s`" % entry["title"])
            else:
                log.verbose("Parsing imdb for `%s`" % entry["imdb_id"])
            try:
                take_a_break = True
                movie = self._parse_new_movie(entry["imdb_url"], session)
            except UnicodeDecodeError:
                # Bug fix: this message was one string literal broken in two
                # (a syntax error in the checked-in text); rejoined.
                log.error("Unable to determine encoding for %s. Installing chardet library may help." % entry["imdb_url"])
                # store cache so this will not be tried again
                movie = Movie()
                movie.url = entry["imdb_url"]
                session.add(movie)
                raise PluginError("UnicodeDecodeError")
            except ValueError as e:  # was py2-only `except ValueError, e`
                # TODO: might be a little too broad catch, what was this for anyway? ;P
                if manager.options.debug:
                    log.exception(e)
                raise PluginError("Invalid parameter: %s" % entry["imdb_url"], log)
        for att in ["title", "score", "votes", "year", "genres", "languages", "actors", "directors", "mpaa_rating"]:
            log.trace("movie.%s: %s" % (att, getattr(movie, att)))
        # store to entry
        entry.update_using_map(self.field_map, movie)
    finally:
        # NOTE(review): the `try:` in this revision had no closing clause
        # (syntax error); restored the commit-on-exit used by the sibling
        # implementation so cached rows persist — confirm against history.
        log.trace("committing session")
        session.commit()
    # give imdb a little break between requests (see: http://flexget.com/ticket/129#comment:1)
    if take_a_break and not manager.options.debug and not manager.unit_test:
        import time
        time.sleep(3)
def lookup(self, entry, search_allowed=True):
    """Perform imdb lookup for entry. Raises PluginError with failure reason."""
    from flexget.manager import manager
    if entry.get('imdb_url', lazy=False):
        log.debug('No title passed. Lookup for %s' % entry['imdb_url'])
    elif entry.get('imdb_id', lazy=False):
        log.debug('No title passed. Lookup for %s' % entry['imdb_id'])
    elif entry.get('title', lazy=False):
        log.debug('lookup for %s' % entry['title'])
    else:
        raise PluginError('looking up IMDB for entry failed, no title, imdb_url or imdb_id passed.')
    # True once we hit imdb over the network; used to pause at the end.
    take_a_break = False
    session = Session()
    try:
        # entry sanity checks
        for field in ['imdb_votes', 'imdb_score']:
            if entry.get(field, lazy=False):
                value = entry[field]
                if not isinstance(value, (int, float)):
                    raise PluginError('Entry field %s should be a number!' % field)
        # if imdb_id is included, build the url.
        if entry.get('imdb_id', lazy=False) and not entry.get('imdb_url', lazy=False):
            entry['imdb_url'] = make_url(entry['imdb_id'])
        # make sure imdb url is valid: normalize to canonical form, or drop it
        if entry.get('imdb_url', lazy=False):
            imdb_id = extract_id(entry['imdb_url'])
            if imdb_id:
                entry['imdb_url'] = make_url(imdb_id)
            else:
                log.debug('imdb url %s is invalid, removing it' % entry['imdb_url'])
                del entry['imdb_url']
        # no imdb_url, check if there is cached result for it or if the
        # search is known to fail
        if not entry.get('imdb_url', lazy=False):
            result = session.query(SearchResult).\
                filter(SearchResult.title == entry['title']).first()
            if result:
                if result.fails and not manager.options.retry:
                    # this movie cannot be found, not worth trying again ...
                    log.debug('%s will fail lookup' % entry['title'])
                    raise PluginError('Title `%s` lookup fails' % entry['title'])
                else:
                    if result.url:
                        log.trace('Setting imdb url for %s from db' % entry['title'])
                        entry['imdb_url'] = result.url
        # no imdb url, but information required, try searching
        if not entry.get('imdb_url', lazy=False) and search_allowed:
            log.verbose('Searching from imdb `%s`' % entry['title'])
            take_a_break = True
            search = ImdbSearch()
            search_result = search.smart_match(entry['title'])
            if search_result:
                entry['imdb_url'] = search_result['url']
                # store url for this movie, so we don't have to search on
                # every run
                result = SearchResult(entry['title'], entry['imdb_url'])
                session.add(result)
                log.verbose('Found %s' % (entry['imdb_url']))
            else:
                log_once('Imdb lookup failed for %s' % entry['title'], log)
                # store FAIL for this title so future runs skip the search
                result = SearchResult(entry['title'])
                result.fails = True
                session.add(result)
                raise PluginError('Title `%s` lookup failed' % entry['title'])
        # check if this imdb page has been parsed & cached
        movie = session.query(Movie).\
            options(joinedload_all(Movie.genres, Movie.languages,
                                   Movie.actors, Movie.directors)).\
            filter(Movie.url == entry['imdb_url']).first()
        # Refresh interval scales with movie age: details of old movies
        # rarely change, so re-parse them less often.
        refresh_interval = 2
        if movie:
            if movie.year:
                age = (datetime.now().year - movie.year)
                refresh_interval += age * 5
                log.debug('cached movie `%s` age %i refresh interval %i days' %
                          (movie.title, age, refresh_interval))
        if not movie or movie.updated is None or \
           movie.updated < datetime.now() - timedelta(days=refresh_interval):
            # Remove the old movie, we'll store another one later.
            session.query(Movie).filter(Movie.url == entry['imdb_url']).delete()
            # search and store to cache
            if 'title' in entry:
                log.verbose('Parsing imdb for `%s`' % entry['title'])
            else:
                log.verbose('Parsing imdb for `%s`' % entry['imdb_id'])
            try:
                take_a_break = True
                imdb = ImdbParser()
                imdb.parse(entry['imdb_url'])
                # store to database
                movie = Movie()
                movie.photo = imdb.photo
                movie.title = imdb.name
                movie.score = imdb.score
                movie.votes = imdb.votes
                movie.year = imdb.year
                movie.mpaa_rating = imdb.mpaa_rating
                movie.plot_outline = imdb.plot_outline
                movie.url = entry['imdb_url']
                for name in imdb.genres:
                    genre = session.query(Genre).\
                        filter(Genre.name == name).first()
                    if not genre:
                        genre = Genre(name)
                    movie.genres.append(genre)  # pylint:disable=E1101
                for name in imdb.languages:
                    language = session.query(Language).\
                        filter(Language.name == name).first()
                    if not language:
                        language = Language(name)
                    movie.languages.append(language)  # pylint:disable=E1101
                for imdb_id, name in imdb.actors.items():  # was py2-only .iteritems()
                    actor = session.query(Actor).\
                        filter(Actor.imdb_id == imdb_id).first()
                    if not actor:
                        actor = Actor(imdb_id, name)
                    movie.actors.append(actor)  # pylint:disable=E1101
                for imdb_id, name in imdb.directors.items():  # was py2-only .iteritems()
                    director = session.query(Director).\
                        filter(Director.imdb_id == imdb_id).first()
                    if not director:
                        director = Director(imdb_id, name)
                    movie.directors.append(director)  # pylint:disable=E1101
                # so that we can track how long since we've updated the info later
                movie.updated = datetime.now()
                session.add(movie)
            except UnicodeDecodeError:
                log.error('Unable to determine encoding for %s. Installing chardet library may help.' % entry['imdb_url'])
                # store cache so this will not be tried again
                movie = Movie()
                movie.url = entry['imdb_url']
                session.add(movie)
                raise PluginError('UnicodeDecodeError')
            except ValueError as e:  # was py2-only `except ValueError, e`
                # TODO: might be a little too broad catch, what was this for anyway? ;P
                if manager.options.debug:
                    log.exception(e)
                raise PluginError('Invalid parameter: %s' % entry['imdb_url'], log)
        for att in ['title', 'score', 'votes', 'year', 'genres', 'languages', 'actors', 'directors', 'mpaa_rating']:
            log.trace('movie.%s: %s' % (att, getattr(movie, att)))
        # store to entry
        entry.update_using_map(self.field_map, movie)
    finally:
        # NOTE(review): the `try:` in this revision had no closing clause
        # (syntax error); restored the commit-on-exit used by the sibling
        # implementation so cached rows persist — confirm against history.
        log.trace('committing session')
        session.commit()
    # give imdb a little break between requests (see: http://flexget.com/ticket/129#comment:1)
    if (take_a_break and not manager.options.debug and not manager.unit_test):
        import time
        time.sleep(3)
def lookup(self, entry, search_allowed=True, session=None):
    """
    Perform imdb lookup for entry.

    Resolution order: use entry's imdb_id/imdb_url if present, then a cached
    SearchResult row, then (if allowed) a live imdb search. Parsed movie
    details are cached in the Movie table and copied onto the entry via
    ``self.field_map``.

    :param entry: Entry instance
    :param search_allowed: Allow fallback to search
    :param session: Database session used for the SearchResult/Movie caches
    :raises PluginError: Failure reason
    """
    from flexget.manager import manager
    if entry.get("imdb_id", eval_lazy=False):
        log.debug("No title passed. Lookup for %s" % entry["imdb_id"])
    elif entry.get("imdb_url", eval_lazy=False):
        log.debug("No title passed. Lookup for %s" % entry["imdb_url"])
    elif entry.get("title", eval_lazy=False):
        log.debug("lookup for %s" % entry["title"])
    else:
        raise plugin.PluginError("looking up IMDB for entry failed, no title, imdb_url or imdb_id passed.")
    # if imdb_id is included, build the url.
    if entry.get("imdb_id", eval_lazy=False) and not entry.get("imdb_url", eval_lazy=False):
        entry["imdb_url"] = make_url(entry["imdb_id"])
    # make sure imdb url is valid: normalize to canonical form, or drop it
    if entry.get("imdb_url", eval_lazy=False):
        imdb_id = extract_id(entry["imdb_url"])
        if imdb_id:
            entry["imdb_url"] = make_url(imdb_id)
        else:
            log.debug("imdb url %s is invalid, removing it" % entry["imdb_url"])
            del (entry["imdb_url"])
    # no imdb_url, check if there is cached result for it or if the
    # search is known to fail
    if not entry.get("imdb_url", eval_lazy=False):
        result = session.query(SearchResult).filter(SearchResult.title == entry["title"]).first()
        if result:
            # TODO: 1.2 this should really be checking task.options.retry
            if result.fails and not manager.options.execute.retry:
                # this movie cannot be found, not worth trying again ...
                log.debug("%s will fail lookup" % entry["title"])
                raise plugin.PluginError("IMDB lookup failed for %s" % entry["title"])
            else:
                if result.url:
                    log.trace("Setting imdb url for %s from db" % entry["title"])
                    entry["imdb_id"] = result.imdb_id
                    entry["imdb_url"] = result.url
    # no imdb url, but information required, try searching
    if not entry.get("imdb_url", eval_lazy=False) and search_allowed:
        log.verbose("Searching from imdb `%s`" % entry["title"])
        search = ImdbSearch()
        search_name = entry.get("movie_name", entry["title"], eval_lazy=False)
        search_result = search.smart_match(search_name)
        if search_result:
            entry["imdb_url"] = search_result["url"]
            # store url for this movie, so we don't have to search on every run
            result = SearchResult(entry["title"], entry["imdb_url"])
            session.add(result)
            session.commit()
            log.verbose("Found %s" % (entry["imdb_url"]))
        else:
            log_once("IMDB lookup failed for %s" % entry["title"], log, logging.WARN, session=session)
            # store FAIL for this title so future runs skip the search
            result = SearchResult(entry["title"])
            result.fails = True
            session.add(result)
            session.commit()
            raise plugin.PluginError("Title `%s` lookup failed" % entry["title"])
    # check if this imdb page has been parsed & cached
    movie = session.query(Movie).filter(Movie.url == entry["imdb_url"]).first()
    # If we have a movie from cache, we are done
    if movie and not movie.expired:
        entry.update_using_map(self.field_map, movie)
        return
    # Movie was not found in cache, or was expired
    if movie is not None:
        if movie.expired:
            log.verbose("Movie `%s` details expired, refreshing ..." % movie.title)
        # Remove the old movie, we'll store another one later.
        session.query(MovieLanguage).filter(MovieLanguage.movie_id == movie.id).delete()
        session.query(Movie).filter(Movie.url == entry["imdb_url"]).delete()
        session.commit()
    # search and store to cache
    if "title" in entry:
        log.verbose("Parsing imdb for `%s`" % entry["title"])
    else:
        log.verbose("Parsing imdb for `%s`" % entry["imdb_id"])
    try:
        movie = self._parse_new_movie(entry["imdb_url"], session)
    except UnicodeDecodeError:
        log.error("Unable to determine encoding for %s. Installing chardet library may help." % entry["imdb_url"])
        # store cache so this will not be tried again
        movie = Movie()
        movie.url = entry["imdb_url"]
        session.add(movie)
        session.commit()
        raise plugin.PluginError("UnicodeDecodeError")
    except ValueError as e:
        # TODO: might be a little too broad catch, what was this for anyway? ;P
        if manager.options.debug:
            log.exception(e)
        raise plugin.PluginError("Invalid parameter: %s" % entry["imdb_url"], log)
    for att in [
        "title",
        "score",
        "votes",
        "year",
        "genres",
        "languages",
        "actors",
        "directors",
        "writers",
        "mpaa_rating",
    ]:
        log.trace("movie.%s: %s" % (att, getattr(movie, att)))
    # Update the entry fields
    entry.update_using_map(self.field_map, movie)
def add_new_downloaditem_pre(sender, instance, **kwargs):
    """Pre-save signal handler for DownloadItem.

    For a brand-new item (no primary key yet) this: de-duplicates against
    existing records, derives section/title/localpath from the ftp path,
    resolves tvdb/imdb identifiers via the metaparser, refreshes stale
    tvdb/imdb cache rows, and finally compares against queued items of the
    same title, keeping whichever has the higher quality.

    Raises AlradyExists / AlradyExists_Updated / Ignored as control flow to
    abort the save (caller is expected to handle these).

    NOTE(review): indentation below was reconstructed from flattened text —
    nesting of the duplicate-update branch should be confirmed against history.
    """
    if instance.id is None:
        from lazy_common import metaparser
        logger.debug("Adding a new download %s" % instance.ftppath)
        instance.ftppath = instance.ftppath.strip()
        #Check if it exists already..
        try:
            existing_obj = DownloadItem.objects.get(ftppath=instance.ftppath)
            if existing_obj:
                logger.info("Found existing record %s" % instance.ftppath)
                if existing_obj.status == DownloadItem.COMPLETE:
                    #its complete... maybe delete it so we can re-add if its older then 2 weeks?
                    curTime = timezone.now()
                    hours = 0
                    if existing_obj.dateadded is None:
                        # no timestamp: treat as very old so it gets re-added
                        hours = 300
                    else:
                        diff = curTime - existing_obj.dateadded
                        hours = diff.total_seconds() / 60 / 60
                    if hours > 288:
                        existing_obj.delete()
                    else:
                        raise AlradyExists()
                else:
                    #lets update it with the new downloaded eps
                    if instance.onlyget is not None:
                        for get_season, get_eps in instance.onlyget.iteritems():
                            for get_ep in get_eps:
                                existing_obj.add_download(get_season, get_ep)
                        existing_obj.reset()
                        existing_obj.save()
                        raise AlradyExists_Updated(existing_obj)
                    raise AlradyExists_Updated(existing_obj)
        except ObjectDoesNotExist:
            # no existing record: proceed with normal insertion
            pass
        #Set default status as download queue
        if instance.status is None:
            instance.status = 1
        #Get section and title from the ftp path (/SECTION/.../TITLE)
        if instance.section is None:
            split = instance.ftppath.split("/")
            try:
                section = split[1]
                title = split[-1]
            except:
                raise Exception("Unable to determine section from path %s" % instance.ftppath)
            if section:
                instance.section = section
            else:
                raise Exception("Unable to determine section from path %s" % instance.ftppath)
            if title:
                instance.title = title
            else:
                raise Exception("Unable to determine title from path %s" % instance.ftppath)
        #Figure out the local path from the section's configured temp dir
        if instance.localpath is None:
            if section == "XVID" or section == "HD":
                path = settings.MOVIE_PATH_TEMP
            elif section == "TVHD" or section == "TV":
                path = settings.TV_PATH_TEMP
            elif section == "REQUESTS":
                path = settings.REQUESTS_PATH_TEMP
            else:
                raise Exception("Unable to find section path in config: %s" % section)
            instance.localpath = os.path.join(path, instance.title)
        instance.parse_title()
        parser = instance.metaparser()
        title = None
        if 'title' in parser.details:
            title = parser.details['title']
        if 'series' in parser.details:
            # series name takes precedence over the parsed movie title
            title = TVShow.clean_title(parser.details['series'])
        #Ok now we know its a valid downloaditem lets add it to the db
        tvdbapi = Tvdb()
        type = instance.get_type()
        from lazy_common import metaparser
        #must be a tvshow
        if type == metaparser.TYPE_TVSHOW:
            if instance.tvdbid_id is None:
                logger.debug("Looks like we are working with a TVShow, lets try find the tvdb object")
                #We need to try find the series info
                parser = instance.metaparser()
                if parser.details and 'series' in parser.details:
                    series_name = TVShow.clean_title(parser.details['series'])
                    #search via database first
                    found = TVShow.find_by_title(series_name)
                    if found:
                        instance.tvdbid_id = found.id
                    else:
                        try:
                            match = tvdbapi[series_name]
                            logger.debug("Show found")
                            instance.tvdbid_id = int(match['id'])
                            if match['imdb_id'] is not None:
                                logger.debug("also found imdbid %s from thetvdb" % match['imdb_id'])
                                instance.imdbid_id = int(match['imdb_id'].lstrip("tt"))
                        except tvdb_shownotfound:
                            logger.exception("Error finding show on thetvdb %s" % series_name)
                        except Exception as e:
                            logger.exception("Error finding : %s via thetvdb.com due to %s" % (series_name, e.message))
                else:
                    logger.exception("Unable to parse series info")
        else:
            #must be a movie!
            if instance.imdbid_id is None:
                logger.debug("Looks like we are working with a Movie")
                #Lets try find the movie details
                parser = instance.metaparser()
                movie_title = parser.details['title']
                if 'year' in parser.details:
                    movie_year = parser.details['year']
                else:
                    movie_year = None
                imdbs = ImdbSearch()
                results = imdbs.best_match(movie_title, movie_year)
                # only trust matches above a 0.70 confidence score
                if results and results['match'] > 0.70:
                    movieObj = ImdbParser()
                    movieObj.parse(results['url'])
                    logger.debug("Found imdb movie id %s" % movieObj.imdb_id)
                    instance.imdbid_id = int(movieObj.imdb_id.lstrip("tt"))
                else:
                    logger.debug("Didnt find a good enough match on imdb")
        #Now we have sorted both imdbid and thetvdbid lets sort it all out
        #If we have a tvdbid do we need to add it to the db or does it exist or ignored?
        if instance.tvdbid_id is not None and instance.tvdbid_id != "":
            #Does it already exist?
            try:
                if instance.tvdbid:
                    #Do we need to update it
                    curTime = timezone.now()
                    hours = 0
                    if instance.tvdbid.updated is None:
                        hours = 50
                    else:
                        diff = curTime - instance.tvdbid.updated
                        hours = diff.total_seconds() / 60 / 60
                    if hours > 24:
                        try:
                            instance.tvdbid.update_from_tvdb()
                            instance.tvdbid.save()
                        except Exception as e:
                            logger.exception("Error updating TVDB info %s" % e.message)
            except ObjectDoesNotExist as e:
                logger.debug("Getting tvdb data for release")
                new_tvdb_item = TVShow()
                new_tvdb_item.id = instance.tvdbid_id
                try:
                    new_tvdb_item.save()
                except:
                    instance.tvdbid = None
                    pass
            if instance.tvdbid and instance.tvdbid.ignored:
                logger.info("Show wont be added as it is marked as ignored")
                raise Ignored("Show wont be added as it is marked as ignored")
        #If we have a imdbid do we need to add it to the db or does it exist
        if instance.imdbid_id is not None and instance.imdbid_id != "":
            try:
                if instance.imdbid:
                    #Do we need to update it
                    curTime = timezone.now()
                    imdb_date = instance.imdbid.updated
                    try:
                        if imdb_date:
                            diff = curTime - instance.imdbid.updated
                            hours = diff.total_seconds() / 60 / 60
                            if hours > 24:
                                instance.imdbid.update_from_imdb()
                        else:
                            instance.imdbid.update_from_imdb()
                    except ObjectDoesNotExist as e:
                        logger.info("Error updating IMDB info as it was not found")
            except ObjectDoesNotExist as e:
                logger.debug("Getting IMDB data for release")
                new_imdb = Movie()
                new_imdb.id = instance.imdbid_id
                try:
                    new_imdb.save()
                except ObjectDoesNotExist:
                    instance.imdbid_id = None
            if instance.imdbid.ignored:
                logger.info("Movie wont be added as it is marked as ignored")
                raise Ignored("Movie cannot be added as it is marked as ignored")
        if title:
            logger.info("Looking for existing %s in the queue" % title)
            type = instance.get_type()
            if instance.tvdbid_id:
                #Check if already in queue (maybe this is higher quality or proper).
                existing_items = [dlitem for dlitem in DownloadItem.objects.all().filter(Q(status=DownloadItem.QUEUE) | Q(status=DownloadItem.DOWNLOADING) | Q(status=DownloadItem.PENDING) | Q(tvdbid_id=instance.tvdbid_id))]
            elif instance.imdbid_id:
                #Check if already in queue (maybe this is higher quality or proper).
                existing_items = [dlitem for dlitem in DownloadItem.objects.all().filter(Q(status=DownloadItem.QUEUE) | Q(status=DownloadItem.DOWNLOADING) | Q(status=DownloadItem.PENDING) | Q(imdbid_id=instance.imdbid_id))]
            else:
                existing_items = [dlitem for dlitem in DownloadItem.objects.all().filter(Q(status=DownloadItem.QUEUE) | Q(status=DownloadItem.DOWNLOADING) | Q(status=DownloadItem.PENDING))]
            for dlitem in existing_items:
                #If its a tvshow and the tvdbid does not match then skip
                if type == metaparser.TYPE_TVSHOW and dlitem.tvdbid_id and instance.tvdbid_id:
                    if instance.tvdbid_id != dlitem.tvdbid_id:
                        continue
                if type == metaparser.TYPE_MOVIE and dlitem.imdbid_id and instance.imdbid_id:
                    if instance.imdbid_id != dlitem.imdbid_id:
                        continue
                dlitem_title = None
                dlitem_parser = dlitem.metaparser()
                if 'title' in dlitem_parser.details:
                    dlitem_title = dlitem_parser.details['title']
                if 'series' in dlitem_parser.details:
                    dlitem_title = TVShow.clean_title(dlitem_parser.details['series'])
                if dlitem_title and dlitem_title.lower() == title.lower():
                    check = False
                    if parser.type == metaparser.TYPE_TVSHOW:
                        # TV shows only clash when season AND episode match
                        if 'season' in parser.details and 'episode_number' in parser.details and 'season' in dlitem_parser.details and 'episode_number' in dlitem_parser.details:
                            if parser.details['season'] == dlitem_parser.details['season'] and parser.details['episode_number'] == dlitem_parser.details['episode_number']:
                                check = True
                    else:
                        check = True
                    if check:
                        logger.info("Found %s already in queue, lets see what is better quality" % dlitem.title)
                        if dlitem_parser.quality > parser.quality:
                            logger.info("Download already existsin queue with better quality will ignore this one")
                            raise AlradyExists_Updated(dlitem)
                        else:
                            logger.info("Deleting %s from queue as it has a lower quality" % dlitem.title)
                            dlitem.delete()