def notify(self, message = '', data = None, listener = None):
    """Push a note to every linked Pushbullet device and channel.

    Returns True only when every device push succeeded; channel pushes
    are best-effort and do not affect the result.
    """
    if not data: data = {}

    body_text = toUnicode(message)

    # Push to each device linked to this account (None targets all devices)
    targets = self.getDevices() or [None]
    pushed = 0
    for target in targets:
        result = self.request(
            'pushes',
            device_iden = target,
            type = 'note',
            title = self.default_title,
            body = body_text
        )
        if not result:
            log.error('Unable to push notification to Pushbullet device with ID %s' % target)
        else:
            pushed += 1

    # Also push to every subscribed channel
    for channel_tag in self.getChannels():
        self.request(
            'pushes',
            channel_tag = channel_tag,
            type = 'note',
            title = self.default_title,
            body = body_text
        )

    return pushed == len(targets)
def notify(self, message = '', data = None, listener = None):
    """Send a Boxcar notification.

    Test pushes get a fixed long message; known movies get an IMDb link.
    Returns True on success, False when the push fails.
    """
    if not data: data = {}

    try:
        message = message.strip()

        # Optional long message: fixed text for tests, an IMDb link when
        # the notification is about a specific movie.
        if listener == 'test':
            long_message = 'This is a test message'
        elif data.get('identifier'):
            long_message = 'More movie info <a href="http://www.imdb.com/title/%s/">on IMDB</a>' % data['identifier']
        else:
            long_message = ''

        payload = {
            'user_credentials': self.conf('token'),
            'notification[title]': toUnicode('%s - %s' % (self.default_title, message)),
            'notification[long_message]': toUnicode(long_message),
        }

        self.urlopen(self.url, data = payload)
    except:
        log.error('Make sure the token provided is for the correct device')
        return False

    log.info('Boxcar notification successful.')
    return True
def notify(self, message = '', data = None, listener = None):
    """Send a Pushover notification, linking through to IMDb when media info is known."""
    if not data: data = {}

    payload = {
        'user': self.conf('user_key'),
        'token': self.conf('api_token'),
        'message': toUnicode(message),
        'priority': self.conf('priority'),
        'sound': self.conf('sound'),
    }

    # Attach an IMDb link when we know which movie this is about
    imdb_id = getIdentifier(data) if data else None
    if imdb_id:
        payload['url'] = toUnicode('http://www.imdb.com/title/%s/' % imdb_id)
        payload['url_title'] = toUnicode('%s on IMDb' % getTitle(data))

    try:
        response = self.urlopen('%s/%s' % (self.api_url, '1/messages.json'),
            headers = {'Content-type': 'application/x-www-form-urlencoded'},
            data = payload)
        log.info2('Pushover responded with: %s', response)
        return True
    except:
        return False
def notify(self, message = '', data = None, listener = None):
    """Send the notification message as an e-mail via SMTP.

    Connection parameters (server, port, SSL/StartTLS, credentials) all
    come from the plugin settings. Returns True when the mail was sent,
    False on any failure (error is logged).
    """
    if not data: data = {}

    # Extract all the settings from settings
    from_address = self.conf('from')
    to_address = self.conf('to')
    ssl = self.conf('ssl')
    smtp_server = self.conf('smtp_server')
    smtp_user = self.conf('smtp_user')
    smtp_pass = self.conf('smtp_pass')
    smtp_port = self.conf('smtp_port')
    starttls = self.conf('starttls')

    # Make the basic message; subject repeats the message so it is
    # readable from the inbox list without opening the mail.
    email = MIMEText(toUnicode(message), _charset = Env.get('encoding'))
    email['Subject'] = '%s: %s' % (self.default_title, toUnicode(message))
    email['From'] = from_address
    email['To'] = to_address
    email['Date'] = formatdate(localtime = 1)
    email['Message-ID'] = make_msgid()

    try:
        # Open the SMTP connection, via SSL if requested
        log.debug("Connecting to host %s on port %s" % (smtp_server, smtp_port))
        log.debug("SMTP over SSL %s", ("enabled" if ssl == 1 else "disabled"))
        mailserver = smtplib.SMTP_SSL(smtp_server, smtp_port) if ssl == 1 else smtplib.SMTP(smtp_server, smtp_port)

        if starttls:
            log.debug("Using StartTLS to initiate the connection with the SMTP server")
            mailserver.starttls()

        # Say hello to the server
        mailserver.ehlo()

        # Check too see if an login attempt should be attempted
        if len(smtp_user) > 0:
            log.debug("Logging on to SMTP server using username \'%s\'%s", (smtp_user, " and a password" if len(smtp_pass) > 0 else ""))
            mailserver.login(smtp_user, smtp_pass)

        # Send the e-mail; recipients may be a comma-separated list
        log.debug("Sending the email")
        mailserver.sendmail(from_address, splitString(to_address), email.as_string())

        # Close the SMTP connection
        mailserver.quit()

        log.info('Email notification sent')

        return True
    except:
        log.error('E-mail failed: %s', traceback.format_exc())

    return False
def calculate(self, nzb, movie):
    """Calculate the score of a NZB, used for sorting later"""

    # Combine globally configured preferred words with the category's own
    preferred_words = splitString(Env.setting('preferred_words', section = 'searcher').lower())
    try:
        preferred_words = removeDuplicate(preferred_words + splitString(movie['category']['preferred'].lower()))
    except:
        pass

    release_name = toUnicode(nzb['name'])
    score = nameScore(release_name, movie['info']['year'], preferred_words)

    # Every known title contributes ratio and position scores
    for movie_title in movie['info']['titles']:
        unicode_title = toUnicode(movie_title)
        score += nameRatioScore(release_name, unicode_title)
        score += namePositionScore(release_name, unicode_title)

    score += sizeScore(nzb['size'])

    # Torrents only: reward well-seeded releases
    if nzb.get('seeders'):
        try:
            score += nzb.get('seeders') * 100 / 15
            score += nzb.get('leechers') * 100 / 30
        except:
            pass

    # Provider score
    score += providerScore(nzb['provider'])

    # Duplicates in name
    score += duplicateScore(nzb['name'], getTitle(movie))

    # Combine globally configured ignored words with the category's own
    ignored_words = splitString(Env.setting('ignored_words', section = 'searcher').lower())
    try:
        ignored_words = removeDuplicate(ignored_words + splitString(movie['category']['ignored'].lower()))
    except:
        pass

    # Partial ignored words
    score += partialIgnoredScore(nzb['name'], getTitle(movie), ignored_words)

    # Ignore single downloads from multipart
    score += halfMultipartScore(nzb['name'])

    # Extra provider specific check
    extra_score = nzb.get('extra_score')
    if extra_score:
        score += extra_score(nzb)

    # Scene / Nuke scoring
    score += sceneScore(nzb['name'])

    return score
def searchSingle(self, group):
    """Download missing subtitles for one release group's movie files.

    For every configured language that is not already available, queries
    subliminal and records each downloaded subtitle on the group (file
    lists and per-file language map). Returns True on success, False when
    the search raised; None when the plugin is disabled.
    """
    if self.isDisabled(): return

    try:
        # Languages we already have subtitles for, flattened across files
        available_languages = sum(group['subtitle_language'].values(), [])
        downloaded = []
        files = [toUnicode(x) for x in group['files']['movie']]
        log.debug('Searching for subtitles for: %s', files)

        for lang in self.getLanguages():
            if lang not in available_languages:
                # One language per call; `multi` keeps per-language files apart
                download = subliminal.download_subtitles(files, multi = True, force = self.conf('force'), languages = [lang], services = self.services, cache_dir = Env.get('cache_dir'))
                for subtitle in download:
                    downloaded.extend(download[subtitle])

        # Register every downloaded subtitle on the group so the renamer
        # moves it along with the movie files.
        for d_sub in downloaded:
            log.info('Found subtitle (%s): %s', (d_sub.language.alpha2, files))
            group['files']['subtitle'].append(sp(d_sub.path))
            group['before_rename'].append(sp(d_sub.path))
            group['subtitle_language'][sp(d_sub.path)] = [d_sub.language.alpha2]

        return True
    except:
        log.error('Failed searching for subtitle: %s', (traceback.format_exc()))

    return False
def _search(self, media, quality, results):
    """Parse the NZBClub RSS search feed and append result dicts to `results`.

    Each result gets an `extra_check` callback that inspects the full
    detail page for signs of passworded/corrupted uploads.
    """
    nzbs = self.getRSSData(self.urls['search'] % self.buildUrl(media))

    for nzb in nzbs:

        nzbclub_id = tryInt(self.getTextElement(nzb, "link").split('/nzb_view/')[1].split('/')[0])
        enclosure = self.getElement(nzb, "enclosure").attrib
        size = enclosure['length']
        date = self.getTextElement(nzb, "pubDate")

        # Bind nzbclub_id as a default argument: without it, every stored
        # callback would late-bind to the LAST loop iteration's id and all
        # checks would share one (wrong) cache key.
        def extra_check(item, nzbclub_id = nzbclub_id):
            full_description = self.getCache('nzbclub.%s' % nzbclub_id, item['detail_url'], cache_timeout = 25920000)

            for ignored in ['ARCHIVE inside ARCHIVE', 'Incomplete', 'repair impossible']:
                if ignored in full_description:
                    log.info('Wrong: Seems to be passworded or corrupted files: %s', item['name'])
                    return False

            return True

        results.append({
            'id': nzbclub_id,
            'name': toUnicode(self.getTextElement(nzb, "title")),
            'age': self.calculateAge(int(time.mktime(parse(date).timetuple()))),
            'size': tryInt(size) / 1024 / 1024,  # bytes -> MB
            'url': enclosure['url'].replace(' ', '_'),
            'detail_url': self.getTextElement(nzb, "link"),
            'get_more_info': self.getMoreInfo,
            'extra_check': extra_check
        })
def toList(self, log_content = ''):
    """Parse raw ANSI-colored log text into a list of entry dicts.

    Each entry has 'time', 'type' and 'message' keys; unparsable
    timestamps fall back to 'UNKNOWN'.
    """
    # Entries are terminated by the ANSI reset sequence + newline
    raw_entries = re.split(r'\[0m\n', toUnicode(log_content))
    entries = []
    escape_pattern = r'\x1b'

    for raw in raw_entries:
        parts = re.split(escape_pattern, raw)

        # A well-formed entry splits into header / message / color-tail
        if parts and len(parts) == 3:
            try:
                date, time, log_type = splitString(parts[0], ' ')
                timestamp = '%s %s' % (date, time)
            except:
                timestamp = 'UNKNOWN'
                log_type = 'UNKNOWN'

            message = ''.join(parts[1]) if len(parts) > 1 else parts[0]
            # Strip leftover color codes like "[31m["
            message = re.sub(r'\[\d+m\[', '[', message)

            entries.append({
                'time': timestamp,
                'type': log_type,
                'message': message
            })

    return entries
def _searchOnTitle(self, title, movie, quality, results):
    """Query the provider's JSON search API and append parsed NZB results."""
    query = '%s %s' % (title, movie['info']['year'])
    params = tryUrlencode({
        'search': query,
        'catid': ','.join([str(x) for x in self.getCatId(quality)]),
        'user': self.conf('username', default = ''),
        'api': self.conf('api_key', default = ''),
    })

    # Append the user's custom tag to the query string when configured
    custom_tag = self.conf('custom_tag')
    if len(custom_tag) > 0:
        params = '%s&%s' % (params, custom_tag)

    nzbs = self.getJsonData(self.urls['search'] % params)

    # The API returns a list of release dicts on success
    if isinstance(nzbs, list):
        for nzb in nzbs:
            results.append({
                'id': nzb.get('nzbid'),
                'name': toUnicode(nzb.get('release')),
                'age': self.calculateAge(tryInt(nzb.get('usenetage'))),
                'size': tryInt(nzb.get('sizebytes')) / 1024 / 1024,
                'url': nzb.get('getnzb'),
                'detail_url': nzb.get('details'),
                'description': nzb.get('weblink')
            })
def _searchOnHost(self, host, media, quality, results):
    """Query one configured torrent host's JSON API and append its results.

    `host` is a dict with at least 'host', 'extra_score', 'seed_ratio'
    and 'seed_time'. Parse failures are logged, never raised.
    """
    torrents = self.getJsonData(self.buildUrl(media, host), cache_timeout = 1800)

    if torrents:
        try:
            if torrents.get('error'):
                log.error('%s: %s', (torrents.get('error'), host['host']))
            elif torrents.get('results'):
                for torrent in torrents.get('results', []):
                    results.append({
                        'id': torrent.get('torrent_id'),
                        # A plain URL means a downloadable .torrent; anything else is treated as a magnet
                        'protocol': 'torrent' if re.match('^(http|https|ftp)://.*$', torrent.get('download_url')) else 'torrent_magnet',
                        'provider_extra': urlparse(host['host']).hostname or host['host'],
                        'name': toUnicode(torrent.get('release_name')),
                        'url': torrent.get('download_url'),
                        'detail_url': torrent.get('details_url'),
                        'size': torrent.get('size'),
                        'score': host['extra_score'],
                        'seeders': torrent.get('seeders'),
                        'leechers': torrent.get('leechers'),
                        'seed_ratio': host['seed_ratio'],
                        'seed_time': host['seed_time'],
                    })
        except:
            log.error('Failed getting results from %s: %s', (host['host'], traceback.format_exc()))
def notify(self, message = '', data = None, listener = None):
    """Store a notification in the database and push it to the frontend.

    Returns True on success, False on failure (previously the failure
    path fell through and implicitly returned None; an explicit False
    keeps the contract consistent with the other notifiers).
    """
    if not data: data = {}

    n = {
        '_t': 'notification',
        'time': int(time.time()),
    }

    try:
        db = get_db()

        n['message'] = toUnicode(message)

        # Optional display flags
        if data.get('sticky'):
            n['sticky'] = True
        if data.get('important'):
            n['important'] = True

        db.insert(n)

        # Live-update any connected frontend
        self.frontend(type = listener, data = n)

        return True
    except:
        log.error('Failed notify "%s": %s', (n, traceback.format_exc()))

    return False
def safeMessage(self, msg, replace_tuple = ()):
    """Interpolate and sanitize a log message.

    Byte-encodes the message and its substitutions, applies %-formatting,
    and — outside develop mode — redacts private query parameters and the
    API key. Always returns unicode.
    """

    from whatpotato.core.helpers.encoding import ss, toUnicode

    msg = ss(msg)

    try:
        # Encode every non-numeric substitution the same way as the message
        if isinstance(replace_tuple, tuple):
            msg = msg % tuple([ss(x) if not isinstance(x, (int, float)) else x for x in list(replace_tuple)])
        elif isinstance(replace_tuple, dict):
            # NOTE: .iteritems() — this code path is Python 2 only
            msg = msg % dict((k, ss(v) if not isinstance(v, (int, float)) else v) for k, v in replace_tuple.iteritems())
        else:
            msg = msg % ss(replace_tuple)
    except Exception as e:
        self.logger.error('Failed encoding stuff to log "%s": %s' % (msg, e))

    self.setup()
    if not self.is_develop:

        # Redact the value of every configured private query parameter,
        # whether it appears first (?key=) or later (&key=) in a URL.
        for replace in self.replace_private:
            msg = re.sub('(\?%s=)[^\&]+' % replace, '?%s=xxx' % replace, msg)
            msg = re.sub('(&%s=)[^\&]+' % replace, '&%s=xxx' % replace, msg)

        # Replace api key
        try:
            api_key = self.Env.setting('api_key')
            if api_key:
                msg = msg.replace(api_key, 'API_KEY')
        except:
            pass

    return toUnicode(msg)
def getDefaultTitle(self, info, default_title = None):
    """Choose the display title for a media item.

    Prefers the known title matching `default_title` (falling back to
    info['title']); otherwise the first known title; 'UNKNOWN' as a last
    resort.
    """
    # Fall back to the media's own title when no default was passed in
    default_title = default_title if default_title else toUnicode(info.get('title'))
    titles = info.get('titles', [])

    chosen = None
    for index, title in enumerate(titles):
        # Condition order preserved from the original to keep short-circuit behavior
        if (len(default_title) == 0 and index == 0) \
                or len(titles) == 1 \
                or title.lower() == toUnicode(default_title.lower()) \
                or (toUnicode(default_title) == six.u('') and toUnicode(titles[0]) == title):
            chosen = toUnicode(title)
            break

    # No match found: fall back to the first known title
    if not chosen and titles and len(titles) > 0:
        chosen = toUnicode(titles[0])

    return chosen or 'UNKNOWN'
def notify(self, message = '', data = None, listener = None):
    """Send a Toasty push notification. Returns True on success."""
    if not data: data = {}

    payload = {
        'title': self.default_title,
        'text': toUnicode(message),
        'sender': toUnicode("CouchPotato"),
        'image': 'https://raw.github.com/RuudBurger/CouchPotatoServer/master/whatpotato/static/images/homescreen.png',
    }

    try:
        # API key is part of the URL; the payload travels as the query string
        self.urlopen(self.urls['api'] % (self.conf('api_key'), tryUrlencode(payload)), show_error = False)
        return True
    except:
        log.error('Toasty failed: %s', traceback.format_exc())
        return False
def getMoreInfo(self, item):
    """Fill item['description'] from the (cached) BiT-HDTV detail page."""
    full_description = self.getCache('bithdtv.%s' % item['id'], item['detail_url'], cache_timeout = 25920000)
    html = BeautifulSoup(full_description)

    detail_table = html.find('table', attrs = {'class': 'detail'})
    item['description'] = toUnicode(detail_table.text) if detail_table else ''
    return item
def setProperty(self, identifier, value = ''):
    """Store or update a key/value property document in the database."""
    from whatpotato import get_db

    db = get_db()

    try:
        # Update the existing property document when one exists
        existing = db.get('property', identifier, with_doc = True)
        existing['doc'].update({
            'identifier': identifier,
            'value': toUnicode(value),
        })
        db.update(existing['doc'])
    except:
        # No such property yet: create it
        db.insert({
            '_t': 'property',
            'identifier': identifier,
            'value': toUnicode(value),
        })
def fill(self):
    """Insert the default quality profiles into the database.

    Returns True when all profiles were created, False on failure.
    """

    try:
        db = get_db()

        profiles = [{
            'label': 'Best',
            'qualities': ['720p', '1080p', 'brrip', 'dvdrip']
        }, {
            'label': 'HD',
            'qualities': ['720p', '1080p']
        }, {
            'label': 'SD',
            'qualities': ['dvdrip', 'dvdr']
        }, {
            'label': 'Prefer 3D HD',
            'qualities': ['1080p', '720p', '720p', '1080p'],
            '3d': [True, True]
        }, {
            'label': '3D HD',
            'qualities': ['1080p', '720p'],
            '3d': [True, True]
        }]

        # Create default quality profile
        order = 0
        for profile in profiles:
            log.info('Creating default profile: %s', profile.get('label'))

            pro = {
                '_t': 'profile',
                'label': toUnicode(profile.get('label')),
                'order': order,
                'qualities': profile.get('qualities'),
                'minimum_score': 1,
                'finish': [],
                'wait_for': [],
                'stop_after': [],
                '3d': []
            }

            # Per-quality flags; the '3d' list is consumed from the END
            # (pop()), so the first qualities in the list get the True
            # flags — NOTE(review): confirm this pairing is intentional.
            threed = profile.get('3d', [])
            for q in profile.get('qualities'):
                pro['finish'].append(True)
                pro['wait_for'].append(0)
                pro['stop_after'].append(0)
                pro['3d'].append(threed.pop() if threed else False)

            db.insert(pro)
            order += 1

        return True
    except:
        log.error('Failed: %s', traceback.format_exc())

    return False
def first(self, title):
    """Return the lowercase first letter of a simplified title; '#' when it
    is empty or does not start with an ASCII letter."""
    simplified = simplifyString(toUnicode(title))

    # Strip a single leading English article, if present
    for article in ['the ', 'an ', 'a ']:
        if simplified.startswith(article):
            simplified = simplified[len(article):]
            break

    if simplified and len(simplified) > 0 and simplified[0] in ascii_letters:
        return str(simplified[0]).lower()
    return str('#').lower()
def getMoreInfo(self, item):
    """Fill item['description'] from the cached TPB detail page's NFO block."""
    full_description = self.getCache('tpb.%s' % item['id'], item['detail_url'], cache_timeout = 25920000)
    html = BeautifulSoup(full_description)
    nfo_block = html.find('div', attrs = {'class': 'nfo'})

    try:
        item['description'] = toUnicode(nfo_block.text)
    except:
        # Page has no NFO block (or it is malformed): leave description empty
        item['description'] = ''

    return item
def simplify(self, title):
    """Return a fixed-width (32 char) sort key for a title.

    Titles not starting with an ASCII letter are prefixed with '#' so
    they group together; a single leading English article is dropped.
    """
    title = toUnicode(title)

    # Decide the '#' bucket from the ORIGINAL title, before simplification
    prefix = '' if title and len(title) > 0 and title[0] in ascii_letters else '#'

    simplified = simplifyString(title)

    # Drop a single leading English article
    for article in ['the ', 'an ', 'a ']:
        if simplified.startswith(article):
            simplified = simplified[len(article):]
            break

    return str(prefix + simplified).ljust(32, ' ')[:32]
def createNzbName(self, data, media, unique_tag = False):
    """Build the filename for an NZB: sanitized release name plus the CP tag.

    A password found in the release name (or supplied separately) is
    carried inside the tag as {{password}}.
    """
    name = data.get('name')
    suffix = self.cpTag(media, unique_tag = unique_tag)

    # A password may be embedded in the release name or supplied separately
    found = scanForPassword(data.get('name'))
    if found:
        name, password = found
        suffix += '{{%s}}' % password
    elif data.get('password'):
        suffix += '{{%s}}' % data.get('password')

    # Some filesystems don't support 128+ long filenames
    limit = 127 - len(suffix)
    safe_name = toSafeString(toUnicode(name)[:limit])
    return '%s%s' % (safe_name, suffix)
def getParams(params): reg = re.compile('^[a-z0-9_\.]+$') # Sort keys param_keys = params.keys() param_keys.sort(key = natsortKey) temp = {} for param in param_keys: value = params[param] nest = re.split("([\[\]]+)", param) if len(nest) > 1: nested = [] for key in nest: if reg.match(key): nested.append(key) current = temp for item in nested: if item is nested[-1]: current[item] = toUnicode(unquote(value)) else: try: current[item] except: current[item] = {} current = current[item] else: temp[param] = toUnicode(unquote(value)) if temp[param].lower() in ['true', 'false']: temp[param] = temp[param].lower() != 'false' return dictToList(temp)
def save(self, **kwargs):
    """Create or update a category document.

    Returns {'success': bool, 'category': doc-or-None}.
    """
    try:
        db = get_db()

        category = {
            '_t': 'category',
            'order': kwargs.get('order', 999),
            'label': toUnicode(kwargs.get('label', '')),
            'ignored': toUnicode(kwargs.get('ignored', '')),
            'preferred': toUnicode(kwargs.get('preferred', '')),
            'required': toUnicode(kwargs.get('required', '')),
            'destination': toUnicode(kwargs.get('destination', '')),
        }

        try:
            # Existing category: its stored order wins over the submitted one
            existing = db.get('id', kwargs.get('id'))
            category['order'] = existing.get('order', category['order'])
            existing.update(category)
            db.update(existing)
        except:
            # New category
            existing = db.insert(category)
            existing.update(category)

        return {
            'success': True,
            'category': existing
        }
    except:
        log.error('Failed: %s', traceback.format_exc())

    return {
        'success': False,
        'category': None
    }
def notify(self, message = '', data = None, listener = None):
    """Send a PushAlot notification. Returns True on success."""
    if not data: data = {}

    payload = {
        'AuthorizationToken': self.conf('auth_token'),
        'Title': self.default_title,
        'Body': toUnicode(message),
        'IsImportant': self.conf('important'),
        'IsSilent': self.conf('silent'),
        'Image': toUnicode(self.getNotificationImage('medium') + '?1'),
        'Source': toUnicode(self.default_title)
    }

    try:
        self.urlopen(self.urls['api'], headers = {
            'Content-type': 'application/x-www-form-urlencoded'
        }, data = payload, show_error = False)
        return True
    except:
        log.error('PushAlot failed: %s', traceback.format_exc())

    return False
def save(self, **kwargs):
    """Create or update a quality profile document.

    Expects a 'types' list of per-quality dicts plus profile-level
    fields. Returns {'success': bool} with the saved profile on success.
    """
    try:
        db = get_db()

        profile = {
            '_t': 'profile',
            'label': toUnicode(kwargs.get('label')),
            'order': tryInt(kwargs.get('order', 999)),
            'core': kwargs.get('core', False),
            'minimum_score': tryInt(kwargs.get('minimum_score', 1)),
            'qualities': [],
            'wait_for': [],
            'stop_after': [],
            'finish': [],
            '3d': []
        }

        # Update types
        order = 0
        for type in kwargs.get('types', []):
            profile['qualities'].append(type.get('quality'))
            # NOTE(review): wait_for/stop_after read from kwargs, not from
            # `type`, so every quality gets the same profile-level value —
            # confirm this is intended rather than type.get(...).
            profile['wait_for'].append(tryInt(kwargs.get('wait_for', 0)))
            profile['stop_after'].append(tryInt(kwargs.get('stop_after', 0)))
            # The first (preferred) quality always finishes the movie
            profile['finish'].append((tryInt(type.get('finish')) == 1) if order > 0 else True)
            profile['3d'].append(tryInt(type.get('3d')))
            order += 1

        id = kwargs.get('id')
        try:
            # Existing profile: keep its stored order unless one was submitted
            p = db.get('id', id)
            profile['order'] = tryInt(kwargs.get('order', p.get('order', 999)))
        except:
            p = db.insert(profile)

        p.update(profile)
        db.update(p)

        return {
            'success': True,
            'profile': p
        }
    except:
        log.error('Failed: %s', traceback.format_exc())

    return {
        'success': False
    }
def getDirectories(self, path = '/', show_hidden = True):
    """List the subdirectories of `path`, sorted, each with a trailing separator.

    An empty/root path returns drive letters on Windows, or the
    filesystem root's directories elsewhere. Hidden directories are
    filtered out unless `show_hidden` is truthy.
    """
    # Return driveletters or root if path is empty
    if path == '/' or not path or path == '\\':
        if os.name == 'nt':
            return self.getDriveLetters()
        path = '/'

    path = sp(path)
    found = []

    for entry in os.listdir(path):
        entry_path = sp(os.path.join(path, entry))

        if not os.path.isdir(entry_path):
            continue
        # Skip hidden directories unless explicitly requested
        if self.is_hidden(entry_path) and not bool(int(show_hidden)):
            continue

        found.append(toUnicode('%s%s' % (entry_path, os.path.sep)))

    return sorted(found)
def getPoster(self, media, image_urls):
    """Ensure the media has a downloaded poster image on file.

    Cleans up the stored poster file list, then tries candidate URLs
    (preferring 'SX300'/'tmdb' ones) until one downloads successfully.
    Mutates media['files'] in place; returns None.
    """
    if 'files' not in media:
        media['files'] = {}

    existing_files = media['files']

    image_type = 'poster'
    file_type = 'image_%s' % image_type

    # Make existing unique
    unique_files = list(set(existing_files.get(file_type, [])))

    # Remove files that can't be found
    # NOTE(review): removing from the list while iterating it can skip
    # the element following each removal — verify.
    for ef in unique_files:
        if not os.path.isfile(ef):
            unique_files.remove(ef)

    # Replace new files list
    existing_files[file_type] = unique_files
    # NOTE(review): this condition checks the dict we just assigned into,
    # so it can never be empty here — presumably `unique_files` was meant.
    if len(existing_files) == 0:
        del existing_files[file_type]

    # Prefer URLs containing these markers by moving them to the front.
    # NOTE(review): `images[:-1] = ...` keeps only the LAST original entry
    # after the preferred ones — confirm this slice is intentional.
    images = image_urls.get(image_type, [])
    for y in ['SX300', 'tmdb']:
        initially_try = [x for x in images if y in x]
        images[:-1] = initially_try

    # Loop over type
    for image in images:
        if not isinstance(image, (str, unicode)):
            continue

        # Check if it has top image
        filename = '%s.%s' % (md5(image), getExt(image))
        existing = existing_files.get(file_type, [])
        has_latest = False
        for x in existing:
            if filename in x:
                has_latest = True

        if not has_latest or file_type not in existing_files or len(existing_files.get(file_type, [])) == 0:
            # Download the candidate; stop at the first success
            file_path = fireEvent('file.download', url = image, single = True)
            if file_path:
                existing_files[file_type] = [toUnicode(file_path)]
                break
        else:
            break
def getMoreInfo(self, item):
    """Fill item['description'], fetching and caching the detail page on a cache miss."""
    cache_key = 'ilt.%s' % item['id']
    description = self.getCache(cache_key)

    if not description:
        try:
            detail_page = self.getHTMLData(item['detail_url'])
            html = BeautifulSoup(detail_page)
            # The NFO lives in the second table inside the main cell
            nfo_block = html.find('td', attrs = {'class': 'main'}).findAll('table')[1]
            description = toUnicode(nfo_block.text) if nfo_block else ''
        except:
            log.error('Failed getting more info for %s', item['name'])
            description = ''

        self.setCache(cache_key, description, timeout = 25920000)

    item['description'] = description
    return item
def notify(self, message="", data=None, listener=None):
    """Send a Prowl notification. Returns True on success."""
    if not data:
        data = {}

    payload = {
        "apikey": self.conf("api_key"),
        "application": self.default_title,
        "description": toUnicode(message),
        "priority": self.conf("priority"),
    }

    try:
        self.urlopen(
            self.urls["api"],
            headers={"Content-type": "application/x-www-form-urlencoded"},
            data=payload,
            show_error=False,
        )
        log.info("Prowl notifications sent.")
        return True
    except:
        log.error("Prowl failed: %s", traceback.format_exc())

    return False
def _searchOnTitle(self, title, movie, quality, results):
    """Scrape the provider's HTML search results and append torrent entries.

    Colons are stripped from the title before searching; results are
    parsed positionally from the first bordered results table. Any parse
    failure is logged, never raised.
    """
    url = self.urls["search"] % (
        tryUrlencode("%s %s" % (title.replace(":", ""), movie["info"]["year"])),
        self.getCatId(quality)[0],
    )

    data = self.getHTMLData(url)

    if data:
        html = BeautifulSoup(data)
        try:
            result_table = html.find("table", attrs={"border": "1"})
            if not result_table:
                return

            # First row is the header
            entries = result_table.find_all("tr")
            for result in entries[1:]:
                cells = result.find_all("td")
                link = cells[1].find("a", attrs={"class": "index"})
                full_id = link["href"].replace("details.php?id=", "")
                # Site ids are at most 6 digits; the href may carry extras
                torrent_id = full_id[:6]
                # Site pages are ISO-8859-1 encoded; re-decode the title
                name = toUnicode(link.get("title", link.contents[0]).encode("ISO-8859-1")).strip()

                results.append(
                    {
                        "id": torrent_id,
                        "name": name,
                        "url": self.urls["download"] % (torrent_id, name),
                        "detail_url": self.urls["detail"] % torrent_id,
                        # Size value and unit sit in separate text nodes
                        "size": self.parseSize(cells[6].contents[0] + cells[6].contents[2]),
                        "seeders": tryInt(cells[8].find("span").contents[0]),
                        "leechers": tryInt(cells[9].find("span").contents[0]),
                    }
                )
        except:
            log.error("Failed to parsing %s: %s", (self.getName(), traceback.format_exc()))