def fetch(self, query, args=None):
    """Execute a read query under the global db lock, retrying up to 5
    times when sqlite reports the database as locked or unopenable.

    Returns the cursor returned by execute(), or None when query is None
    or every attempt failed.  Re-raises any other sqlite error.
    """
    with db_lock:
        if query is None:
            return

        sqlResult = None
        attempt = 0

        while attempt < 5:
            try:
                cursor = self.connection.cursor()
                if args is None:
                    #logger.fdebug("[FETCH] : " + query)
                    sqlResult = cursor.execute(query)
                else:
                    #logger.fdebug("[FETCH] : " + query + " with args " + str(args))
                    sqlResult = cursor.execute(query, args)
                # get out of the connection attempt loop since we were successful
                break
            except sqlite3.OperationalError as e:
                # fixed: Python 2-only 'except X, e' syntax
                if "unable to open database file" in e.args[0] or "database is locked" in e.args[0]:
                    logger.warn('Database Error: %s' % e)
                    attempt += 1
                    time.sleep(1)
                else:
                    logger.warn('DB error: %s' % e)
                    raise
            except sqlite3.DatabaseError as e:
                logger.error('Fatal error executing query: %s' % e)
                raise

        # fixed: the result was computed but never returned to the caller
        return sqlResult
def panicbutton(self):
    # Assemble the "carepackage" zip: the environment report (self.filename),
    # the live database, the cleaned config, optionally the last-release
    # file, and copies of every mylar.log* with api keys redacted.
    dbpath = os.path.join(mylar.DATA_DIR, 'mylar.db')
    with zipfile.ZipFile(self.panicfile, 'w') as zip:
        zip.write(self.filename, os.path.basename(self.filename))
        zip.write(dbpath, os.path.basename(dbpath))
        zip.write(self.cleanpath, os.path.basename(self.cleanpath))
        if os.path.exists(self.lastrelpath):
            zip.write(self.lastrelpath, os.path.basename(self.lastrelpath))
        files = []
        try:
            # scratch directory for the redacted log copies
            caredir = os.path.join(mylar.CONFIG.LOG_DIR, 'carepackage')
            os.mkdir(caredir)
        except Exception as e:
            # directory probably exists already; any failure here is non-fatal
            pass
        for file in glob(os.path.join(mylar.CONFIG.LOG_DIR, 'mylar.log*')):
            #files.append(pathlib.Path(pathlib.PurePath(mylar.CONFIG.LOG_DIR).joinpath(os.path.basename(file)))) #os.path.join(mylar.CONFIG.LOG_DIR, os.path.basename(file)))
            files.append(os.path.join(mylar.CONFIG.LOG_DIR, os.path.basename(file)))
        if len(files) > 0:
            for fname in files:
                logger.fdebug('analyzing %s' % fname)
                cnt = 0
                wrote = False
                #remove the apikeys first.
                filename = os.path.join(caredir, os.path.basename(fname))
                # NOTE(review): 'output' is never explicitly closed before
                # being zipped below - relies on interpreter buffering /
                # GC flushing it. Confirm whether a close() is needed here.
                output = open(filename, 'w')
                #output = pathlib.Path(filename) #open(filename, 'w')
                with open(fname, 'r') as f:
                    line = f.readline()
                    while line:
                        # write a redacted copy of the line for every key it
                        # contains; only write the line verbatim if no key matched
                        for keyed in self.keylist:
                            if keyed in line and len(keyed) > 0:
                                cnt += 1
                                output.write(line.replace(keyed, '-REDACTED-'))
                                wrote = True
                        if wrote is False:
                            output.write(line)
                        line = f.readline()
                        wrote = False
                logger.fdebug('removed %s keys from %s' % (cnt, fname))
                try:
                    zip.write(filename, os.path.basename(fname), zipfile.ZIP_DEFLATED)
                except RuntimeError:
                    #if zlib isn't available, will throw RuntimeError, then just use default compression
                    zip.write(filename, os.path.basename(fname))
                except Exception as e:
                    logger.warn(e)
                else:
                    # redacted copy was zipped successfully - remove the scratch file
                    os.unlink(filename)
    # clean up the scratch dir and the temporary source files
    try:
        os.rmdir(os.path.join(mylar.CONFIG.LOG_DIR, 'carepackage'))
    except:
        pass
    os.unlink(self.filename)
    os.unlink(self.cleanpath)
def downloadfile(self, payload, filepath):
    """Download a torrent from 32P into filepath.

    Returns True on success, False on any request/HTTP failure.
    """
    url = 'https://32pag.es/torrents.php'
    try:
        r = self.session.get(url, params=payload, verify=True, stream=True, allow_redirects=True)
    except Exception as e:
        logger.error('%s [%s] Could not POST URL %s' % ('[32P-DOWNLOADER]', e, url))
        return False

    if str(r.status_code) != '200':
        logger.warn(
            'Unable to download torrent from 32P [Status Code returned: %s]'
            % r.status_code)
        # NOTE(review): 'site' and 'linkit' are not defined in this scope -
        # this branch raises NameError if a 404 is ever returned. Looks
        # copied from a module-level function; confirm and fix the source.
        if str(r.status_code) == '404' and site == '32P':
            logger.warn(
                '[32P-CACHED_ENTRY] Entry found in 32P cache - incorrect. Torrent has probably been merged into a pack, or another series id. Removing from cache.'
            )
            helpers.delete_cache_entry(linkit)
        else:
            logger.info('content: %s' % r.content)
        return False

    # stream the response body to disk in 1KB chunks
    with open(filepath, 'wb') as f:
        for chunk in r.iter_content(chunk_size=1024):
            if chunk: # filter out keep-alive new chunks
                f.write(chunk)
                f.flush()
    return True
def storyarcinfo(xmlid):
    """Retrieve story-arc metadata for the given ComicVine arc id.

    Returns 'apireached' when the API rate cap has been hit, or None on a
    retrieval failure (visible portion of the function).
    """
    comicLibrary = listLibrary()
    arcinfo = {}
    if mylar.COMICVINE_API == 'None' or mylar.COMICVINE_API is None or mylar.COMICVINE_API == mylar.DEFAULT_CVAPI:
        logger.warn('You have not specified your own ComicVine API key - alot of things will be limited. Get your own @ http://api.comicvine.com.')
        comicapi = mylar.DEFAULT_CVAPI
    else:
        comicapi = mylar.COMICVINE_API

    #respawn to the exact id for the story arc and count the # of issues present.
    ARCPULL_URL = mylar.CVURL + 'story_arc/4045-' + str(xmlid) + '/?api_key=' + str(comicapi) + '&field_list=issues,name,first_appeared_in_issue,deck,image&format=xml&offset=0'
    logger.fdebug('arcpull_url:' + str(ARCPULL_URL))

    # bail out early when the configured CV API request cap is exhausted
    if mylar.CVAPI_COUNT == 0 or mylar.CVAPI_COUNT >= mylar.CVAPI_MAX:
        chkit = cvapi_check()
        if chkit == False:
            return 'apireached'

    try:
        file = urllib2.urlopen(ARCPULL_URL)
    except urllib2.HTTPError as err:
        # fixed: Python 2-only 'except X, e' syntax
        logger.error('err : ' + str(err))
        logger.error('There was a major problem retrieving data from ComicVine - on their end.')
        return
def writeconfig(self, values=None):
    """Serialize the current configuration state to the ini file on disk.

    values: optional dict of overrides applied via process_kwargs() just
    before writing.
    """
    logger.fdebug("Writing configuration to file")
    self.provider_sequence()
    config.set('Newznab', 'extra_newznabs', ', '.join(self.write_extras(self.EXTRA_NEWZNABS)))
    config.set('Torznab', 'extra_torznabs', ', '.join(self.write_extras(self.EXTRA_TORZNABS)))
    ###this should be moved elsewhere...
    # fixed: 'type(x) != list' -> isinstance (idiomatic; accepts list subclasses)
    if not isinstance(self.BLACKLISTED_PUBLISHERS, list):
        if self.BLACKLISTED_PUBLISHERS is None:
            bp = 'None'
        else:
            bp = ', '.join(self.write_extras(self.BLACKLISTED_PUBLISHERS))
        config.set('CV', 'blacklisted_publishers', bp)
    else:
        config.set('CV', 'blacklisted_publishers', ', '.join(self.BLACKLISTED_PUBLISHERS))
    ###
    config.set('General', 'dynamic_update', str(self.DYNAMIC_UPDATE))

    if values is not None:
        self.process_kwargs(values)

    try:
        with codecs.open(self._config_file, encoding='utf8', mode='w+') as configfile:
            config.write(configfile)
            logger.fdebug('Configuration written to disk.')
    except IOError as e:
        logger.warn("Error writing configuration file: %s", e)
def addfile(self, filepath=None, filename=None, bytes=None):
    """Upload a .torrent file to the uTorrent webui.

    Returns the torrent's info-hash on success, 'fail' otherwise.
    """
    params = {'action': 'add-file', 'token': self.token}
    try:
        # fixed: handle was opened without a context manager and leaked on
        # a read error
        with open(filepath, 'rb') as fh:
            tordata = fh.read()
    except Exception:
        logger.warn('Unable to load torrent file. Aborting at this time.')
        return 'fail'

    files = {'torrent_file': tordata}
    try:
        r = requests.post(url=self.utorrent_url, auth=self.auth, cookies=self.cookies, params=params, files=files)
    except requests.exceptions.RequestException as err:
        logger.debug('URL: ' + str(self.utorrent_url))
        logger.debug('Error sending to uTorrent Client. uTorrent responded with error: ' + str(err))
        return 'fail'

    # (to-do) verify the hash in order to ensure it's loaded here
    if str(r.status_code) != '200':
        return 'fail'

    logger.info('Successfully added torrent to uTorrent client.')
    # renamed local: 'hash' shadowed the builtin
    torrent_hash = self.calculate_torrent_hash(data=tordata)
    if mylar.UTORRENT_LABEL:
        try:
            self.setlabel(torrent_hash)
        except Exception:
            # narrowed from a bare except
            logger.warn('Unable to set label for torrent.')
    return torrent_hash
def pullsearch(comicapi, comicquery, offset, type):
    """Build a ComicVine name-filter query from the search terms and issue
    the rate-limited request.

    Returns the requests response; returns None on a request failure.
    """
    # join the query terms into a single name: filter expression
    cnt = 1
    for x in comicquery:
        if cnt == 1:
            filterline = '%s' % x
        else:
            filterline += ',name:%s' % x
        cnt += 1

    PULLURL = mylar.CVURL + str(type) + 's?api_key=' + str(comicapi) + '&filter=name:' + filterline + '&field_list=id,name,start_year,site_detail_url,count_of_issues,image,publisher,deck,description,first_issue,last_issue&format=xml&sort=date_last_updated:desc&offset=' + str(offset) # 2012/22/02 - CVAPI flipped back to offset instead of page

    #all these imports are standard on most modern python implementations
    #logger.info('MB.PULLURL:' + PULLURL)

    #new CV API restriction - one api request / second.
    if mylar.CONFIG.CVAPI_RATE is None or mylar.CONFIG.CVAPI_RATE < 2:
        time.sleep(2)
    else:
        time.sleep(mylar.CONFIG.CVAPI_RATE)

    #download the file:
    payload = None
    try:
        r = requests.get(PULLURL, params=payload, verify=mylar.CONFIG.CV_VERIFY, headers=mylar.CV_HEADERS)
    except Exception as e:
        # fixed: Python 2-only 'except X, e' syntax
        logger.warn('Error fetching data from ComicVine: %s' % (e))
        return
def shutdown(restart=False, update=False):
    """Stop the webserver and background threads, optionally self-update
    and/or relaunch the process, then hard-exit."""
    cherrypy.engine.exit()
    halt()

    if not (restart or update):
        logger.info('Mylar is shutting down...')

    if update:
        logger.info('Mylar is updating...')
        try:
            versioncheck.update()
        except Exception as err:
            logger.warn('Mylar failed to update: %s. Restarting.' % err)

    if CREATEPID:
        logger.info('Removing pidfile %s' % PIDFILE)
        os.remove(PIDFILE)

    if restart:
        logger.info('Mylar is restarting...')
        relaunch_cmd = [sys.executable, FULL_PATH] + ARGS
        # if '--nolaunch' not in relaunch_cmd:
        #     relaunch_cmd += ['--nolaunch']
        logger.info('Restarting Mylar with ' + str(relaunch_cmd))
        subprocess.Popen(relaunch_cmd, cwd=os.getcwd())

    os._exit(0)
def addurl(self, url):
    """Add a torrent to uTorrent by URL / magnet link.

    Returns the torrent's info-hash on success, 'fail' otherwise.
    """
    params = {'action': 'add-url', 'token': self.token, 's': url}
    try:
        r = requests.post(url=self.utorrent_url, auth=self.auth, cookies=self.cookies, params=params)
    except requests.exceptions.RequestException as err:
        logger.debug('URL: ' + str(self.utorrent_url))
        logger.debug('Error sending to uTorrent Client. uTorrent responded with error: ' + str(err))
        return 'fail'

    # (to-do) verify the hash in order to ensure it's loaded here
    if str(r.status_code) != '200':
        return 'fail'

    logger.info('Successfully added torrent to uTorrent client.')
    # renamed local: 'hash' shadowed the builtin
    torrent_hash = self.calculate_torrent_hash(link=url)
    if mylar.UTORRENT_LABEL:
        try:
            self.setlabel(torrent_hash)
        except Exception:
            # narrowed from a bare except
            logger.warn('Unable to set label for torrent.')
    return torrent_hash
def retrieve_image(url):
    """Fetch a ComicVine cover image and return it as a base64-encoded
    JPEG string (scaled to width 600), or None on any failure."""
    ComicImage = None
    try:
        response = requests.get(url, params=None, verify=mylar.CONFIG.CV_VERIFY,
                                headers=mylar.CV_HEADERS)
    except Exception as e:
        logger.warn(
            '[ERROR: %s] Unable to download image from CV URL link: %s'
            % (e, url))
    else:
        statuscode = str(response.status_code)
        if statuscode != '200':
            logger.warn(
                'Unable to download image from CV URL link: %s [Status Code returned: %s]'
                % (url, statuscode))
            coversize = 0
        else:
            image = Image.open(BytesIO(response.content))
            scaled = scale_image(image, "JPEG", 600)
            ComicImage = str(base64.b64encode(scaled), 'utf-8')
    return ComicImage
def torsend2client(seriesname, linkit, site):
    """Download the torrent file for seriesname from the given site into
    the local or seedbox watch directory.

    Returns "fail" on any error (visible portion of the function).
    """
    logger.info('matched on ' + str(seriesname))
    # strip characters that are unsafe in a filename
    filename = re.sub('[\'\!\@\#\$\%\:\;\/\\=\?\.]', '', seriesname)

    if site == 'ComicBT':
        logger.info(linkit)
        linkit = str(linkit) + '&passkey=' + str(mylar.CBT_PASSKEY)

    if linkit[-7:] != "torrent":
        filename += ".torrent"

    request = urllib2.Request(linkit)
    request.add_header('User-Agent', str(mylar.USER_AGENT))

    if mylar.TORRENT_LOCAL and mylar.LOCAL_WATCHDIR is not None:
        filepath = os.path.join(mylar.LOCAL_WATCHDIR, filename)
        logger.fdebug('filename for torrent set to : ' + filepath)
    elif mylar.TORRENT_SEEDBOX and mylar.SEEDBOX_WATCHDIR is not None:
        # seedbox transfers stage through the cache dir first
        filepath = os.path.join(mylar.CACHE_DIR, filename)
        logger.fdebug('filename for torrent set to : ' + filepath)
    else:
        logger.error('No Local Watch Directory or Seedbox Watch Directory specified. Set it and try again.')
        return "fail"

    try:
        opener = helpers.urlretrieve(urllib2.urlopen(request), filepath)
    except Exception as e:
        # fixed: Python 2-only 'except X, e' syntax
        logger.warn('Error fetching data from %s: %s' % (site, e))
        return "fail"
def __init__(self):
    # Build the NZBGet XML-RPC endpoint URL from the configured
    # host/port/credentials and create the ServerProxy.
    # NZBGet accepts credentials in the URL path form:
    #   protocol://host:port/username:password/xmlrpc
    if mylar.CONFIG.NZBGET_HOST[:5] == 'https':
        protocol = "https"
        nzbget_host = mylar.CONFIG.NZBGET_HOST[8:]  # strip 'https://'
    elif mylar.CONFIG.NZBGET_HOST[:4] == 'http':
        protocol = "http"
        nzbget_host = mylar.CONFIG.NZBGET_HOST[7:]  # strip 'http://'
    else:
        logger.warn(
            '[NZB-GET] You need to specify the protocol for your nzbget instance (ie. http:// or https://). You provided: %s'
            % (mylar.CONFIG.NZBGET_HOST))
        # NOTE(review): returning a non-None value from __init__ raises
        # "TypeError: __init__() should return None" at runtime - confirm
        # intent; this probably should log and plain-return (or raise).
        return {'status': False}
    # url is a % template; val collects the substitution values in order
    url = '%s://%s:%s'
    val = (protocol, nzbget_host, mylar.CONFIG.NZBGET_PORT)
    logon_info = ''
    if mylar.CONFIG.NZBGET_USERNAME is not None:
        logon_info = '%s:'
        val = val + (mylar.CONFIG.NZBGET_USERNAME, )
    if mylar.CONFIG.NZBGET_PASSWORD is not None:
        if logon_info == '':
            logon_info = ':%s'
        else:
            logon_info += '%s'
        val = val + (mylar.CONFIG.NZBGET_PASSWORD, )
    if logon_info != '':
        url = url + '/' + logon_info
    url = url + '/xmlrpc'
    #val = val + (nzbget_host,mylar.CONFIG.NZBGET_PORT,)
    # display_url intentionally omits the credentials for logging/UI use
    self.display_url = '%s://%s:%s/xmlrpc' % (protocol, nzbget_host, mylar.CONFIG.NZBGET_PORT)
    self.nzb_url = (url % val)
    self.server = xmlrpc.client.ServerProxy(self.nzb_url) #,allow_none=True)
def action(self, query, args=None):
    """Execute a write query under the global db lock, committing on
    success and retrying up to 5 times on lock/open errors.

    Returns the execute() result, or None when query is None or every
    attempt failed.  Re-raises any other sqlite error.
    """
    with db_lock:
        if query is None:
            return

        sqlResult = None
        attempt = 0

        while attempt < 5:
            try:
                if args is None:
                    #logger.debug(self.filename+": "+query)
                    sqlResult = self.connection.execute(query)
                else:
                    #logger.debug(self.filename+": "+query+" with args "+str(args))
                    sqlResult = self.connection.execute(query, args)
                self.connection.commit()
                break
            except sqlite3.OperationalError as e:
                # fixed: Python 2-only 'except X, e' syntax and the
                # removed-in-py3 'e.message' attribute (use e.args[0],
                # consistent with fetch()).
                if "unable to open database file" in e.args[0] or "database is locked" in e.args[0]:
                    logger.warn('Database Error: %s' % e)
                    logger.warn('sqlresult: %s' % query)
                    attempt += 1
                    time.sleep(1)
                else:
                    logger.error('Database error executing %s :: %s' % (query, e))
                    raise
            except sqlite3.DatabaseError as e:
                logger.error('Fatal Error executing %s :: %s' % (query, e))
                raise

        # fixed: the result was computed but never returned to the caller
        return sqlResult
def __init__(self, test_apikey=None, test_userkey=None, test_device=None):
    """Configure the Pushover notifier.

    When any test_* argument is supplied, the validate endpoint is used
    and self.test is set (the notifier is in test mode).
    """
    if all([test_apikey is None, test_userkey is None, test_device is None]):
        self.PUSHOVER_URL = 'https://api.pushover.net/1/messages.json'
        self.test = False
    else:
        self.PUSHOVER_URL = 'https://api.pushover.net/1/users/validate.json'
        self.test = True
    self.enabled = mylar.CONFIG.PUSHOVER_ENABLED
    if test_apikey is None:
        if mylar.CONFIG.PUSHOVER_APIKEY is None or mylar.CONFIG.PUSHOVER_APIKEY == 'None':
            logger.warn('No Pushover Apikey is present. Fix it')
            # fixed: 'return False' from __init__ raises
            # "TypeError: __init__() should return None" - bail out without
            # a value instead (self.apikey stays unset in this case).
            return
        else:
            self.apikey = mylar.CONFIG.PUSHOVER_APIKEY
    else:
        self.apikey = test_apikey
    if test_device is None:
        self.device = mylar.CONFIG.PUSHOVER_DEVICE
    else:
        self.device = test_device
    if test_userkey is None:
        self.userkey = mylar.CONFIG.PUSHOVER_USERKEY
    else:
        self.userkey = test_userkey
    self.priority = mylar.CONFIG.PUSHOVER_PRIORITY
    self._session = requests.Session()
    self._session.headers = {'Content-type': "application/x-www-form-urlencoded"}
def login(self):
    '''
    This is generally the only method you'll want to call: it tries the
    saved session key first (test_skey_valid) and only falls back to a
    credential-based login (test_login).

    Returns: {'ses': ..., 'status': True/False}
    Side effects: the methods called handle saving the cookies to disk
    and setting self.authkey, self.passkey, and self.uid.
    '''
    login_paths = (
        (self.test_skey_valid, '%s Session key-based login was good.', 'Session Cookie retrieved OK.'),
        (self.test_login, '%s Credential-based login was good.', 'Credential-based login OK.'),
    )
    for checker, debug_fmt, method_msg in login_paths:
        if checker():
            logger.fdebug(debug_fmt % self.module)
            self.method = method_msg
            return {'ses': self.ses, 'status': True}

    logger.warn('%s Both session key and credential-based logins failed.' % self.module)
    self.method = 'Both session key & credential login failed.'
    return {'ses': self.ses, 'status': False}
def downloadfile(self, payload, filepath):
    """Download a torrent from 32P into filepath.

    Returns True on success, False on any request/HTTP failure.
    """
    url = 'https://32pag.es/torrents.php'
    try:
        r = self.session.get(url, params=payload, verify=True, stream=True, allow_redirects=True)
    except Exception as e:
        logger.error('%s [%s] Could not POST URL %s' % ('[32P-DOWNLOADER]', e, url))
        return False

    if str(r.status_code) != '200':
        logger.warn('Unable to download torrent from 32P [Status Code returned: %s]' % r.status_code)
        # NOTE(review): 'site' and 'linkit' are not defined in this scope -
        # this branch raises NameError if a 404 is ever returned. Confirm
        # against the module-level function this appears copied from.
        if str(r.status_code) == '404' and site == '32P':
            logger.warn('[32P-CACHED_ENTRY] Entry found in 32P cache - incorrect. Torrent has probably been merged into a pack, or another series id. Removing from cache.')
            helpers.delete_cache_entry(linkit)
        else:
            logger.info('content: %s' % r.content)
        return False

    # stream the response body to disk in 1KB chunks
    with open(filepath, 'wb') as f:
        for chunk in r.iter_content(chunk_size=1024):
            if chunk: # filter out keep-alive new chunks
                f.write(chunk)
                f.flush()
    return True
def notify(self, message, subject, module=None):
    """Send `message` with `subject` via the configured SMTP server.

    Returns True when the message was handed off successfully, False
    otherwise (consistent with the other email notifier in this file).
    """
    if module is None:
        module = ''
    module += '[NOTIFIER]'
    sent_successfully = False
    try:
        logger.debug(module + u' Sending email notification. From: [%s] - To: [%s] - Server: [%s] - Port: [%s] - Username: [%s] - Password: [********] - Encryption: [%s] - Message: [%s]' % (self.emailfrom, self.emailto, self.emailsvr, self.emailport, self.emailuser, self.emailenc, message))
        msg = MIMEMultipart()
        msg['From'] = str(self.emailfrom)
        msg['To'] = str(self.emailto)
        msg['Subject'] = subject
        msg.attach(MIMEText(message, 'plain'))

        # fixed: 'is 1' / 'is 2' compared object identity, not value -
        # encryption mode 1 = implicit SSL, 2 = STARTTLS upgrade
        if self.emailenc == 1:
            sock = smtplib.SMTP_SSL(self.emailsvr, str(self.emailport))
        else:
            sock = smtplib.SMTP(self.emailsvr, str(self.emailport))
        if self.emailenc == 2:
            sock.starttls()

        if self.emailuser or self.emailpass:
            sock.login(str(self.emailuser), str(self.emailpass))
        sock.sendmail(str(self.emailfrom), str(self.emailto), msg.as_string())
        sock.quit()
        sent_successfully = True
    except Exception as e:
        # fixed: Python 2-only 'except X, e' syntax
        logger.warn(module + u' Oh no!! Email notification failed: ' + str(e))
    # fixed: the success flag was tracked but never returned
    return sent_successfully
def login(self):
    '''
    This is generally the only method you'll want to call: it tries the
    saved session key first (test_skey_valid) and only falls back to a
    credential-based login (test_login).

    Returns: {'ses': ..., 'status': True/False}
    Side effects: the methods called handle saving the cookies to disk
    and setting self.authkey, self.passkey, and self.uid.
    '''
    success = self.test_skey_valid()
    if success:
        logger.fdebug('%s Session key-based login was good.' % self.module)
        self.method = 'Session Cookie retrieved OK.'
    elif self.test_login():
        success = True
        logger.fdebug('%s Credential-based login was good.' % self.module)
        self.method = 'Credential-based login OK.'
    else:
        logger.warn(
            '%s Both session key and credential-based logins failed.'
            % self.module)
        self.method = 'Both session key & credential login failed.'
    return {'ses': self.ses, 'status': success}
def storyarcinfo(xmlid):
    """Retrieve story-arc metadata for the given ComicVine arc id
    (rate-limited). Returns None on a retrieval failure (visible portion
    of the function)."""
    comicLibrary = listStoryArcs()
    arcinfo = {}
    if mylar.COMICVINE_API == 'None' or mylar.COMICVINE_API is None or mylar.COMICVINE_API == mylar.DEFAULT_CVAPI:
        logger.warn('You have not specified your own ComicVine API key - alot of things will be limited. Get your own @ http://api.comicvine.com.')
        comicapi = mylar.DEFAULT_CVAPI
    else:
        comicapi = mylar.COMICVINE_API

    #respawn to the exact id for the story arc and count the # of issues present.
    ARCPULL_URL = mylar.CVURL + 'story_arc/4045-' + str(xmlid) + '/?api_key=' + str(comicapi) + '&field_list=issues,publisher,name,first_appeared_in_issue,deck,image&format=xml&offset=0'
    #logger.fdebug('arcpull_url:' + str(ARCPULL_URL))

    #new CV API restriction - one api request / second.
    if mylar.CVAPI_RATE is None or mylar.CVAPI_RATE < 2:
        time.sleep(2)
    else:
        time.sleep(mylar.CVAPI_RATE)

    #download the file:
    payload = None
    try:
        r = requests.get(ARCPULL_URL, params=payload, verify=mylar.CV_VERIFY, headers=mylar.CV_HEADERS)
    except Exception as e:
        # fixed: Python 2-only 'except X, e' syntax
        logger.warn('Error fetching data from ComicVine: %s' % (e))
        return
def pullsearch(comicapi, comicquery, offset, explicit, type):
    """Build and issue a rate-limited ComicVine search request.

    explicit 'all'/'loose' uses the free-text search endpoint; anything
    else uses the name-filtered resource listing. Returns the requests
    response, or None on a request failure.
    """
    # NOTE(review): urllib.quote is the Python 2 API (py3 is
    # urllib.parse.quote) - confirm which interpreter runs this path.
    u_comicquery = urllib.quote(comicquery.encode('utf-8').strip())
    u_comicquery = u_comicquery.replace(" ", "%20")

    if explicit == 'all' or explicit == 'loose':
        PULLURL = mylar.CVURL + 'search?api_key=' + str(comicapi) + '&resources=' + str(type) + '&query=' + u_comicquery + '&field_list=id,name,start_year,first_issue,site_detail_url,count_of_issues,image,publisher,deck,description,last_issue&format=xml&limit=100&page=' + str(offset)
    else:
        # 02/22/2014 use the volume filter label to get the right results.
        # add the 's' to the end of type to pluralize the caption (it's needed)
        if type == 'story_arc':
            u_comicquery = re.sub("%20AND%20", "%20", u_comicquery)
        PULLURL = mylar.CVURL + str(type) + 's?api_key=' + str(comicapi) + '&filter=name:' + u_comicquery + '&field_list=id,name,start_year,site_detail_url,count_of_issues,image,publisher,deck,description&format=xml&offset=' + str(offset) # 2012/22/02 - CVAPI flipped back to offset instead of page

    #all these imports are standard on most modern python implementations
    #logger.info('MB.PULLURL:' + PULLURL)

    #new CV API restriction - one api request / second.
    if mylar.CVAPI_RATE is None or mylar.CVAPI_RATE < 2:
        time.sleep(2)
    else:
        time.sleep(mylar.CVAPI_RATE)

    #download the file:
    payload = None
    try:
        r = requests.get(PULLURL, params=payload, verify=mylar.CV_VERIFY, headers=mylar.CV_HEADERS)
    except Exception as e:
        # fixed: Python 2-only 'except X, e' syntax
        logger.warn('Error fetching data from ComicVine: %s' % (e))
        return
def notify(self, ComicName=None, Year=None, Issue=None, snatched_nzb=None):
    """Push a snatch / post-process notification to NotifyMyAndroid."""
    if snatched_nzb:
        event = '%s snatched!' % snatched_nzb
        description = 'Mylar has snatched: %s and has sent it to SABnzbd+' % snatched_nzb
    else:
        event = '%s (%s) - Issue #%s complete!' % (ComicName, Year, Issue)
        description = 'Mylar has downloaded and postprocessed: %s (%s) #%s' % (ComicName, Year, Issue)

    payload = {'apikey': self.apikey,
               'application': 'Mylar',
               'event': event,
               'description': description,
               'priority': self.priority}

    logger.info('Sending notification request to NotifyMyAndroid')
    if not self._send(payload):
        logger.warn('Error sending notification request to NotifyMyAndroid')
def __init__(self, test_apikey=None, test_userkey=None, test_device=None):
    """Configure the Pushover notifier.

    When any test_* argument is supplied, the validate endpoint is used
    and self.test is set (the notifier is in test mode).
    """
    if all([test_apikey is None, test_userkey is None, test_device is None]):
        self.PUSHOVER_URL = 'https://api.pushover.net/1/messages.json'
        self.test = False
    else:
        self.PUSHOVER_URL = 'https://api.pushover.net/1/users/validate.json'
        self.test = True
    self.enabled = mylar.CONFIG.PUSHOVER_ENABLED
    if test_apikey is None:
        if mylar.CONFIG.PUSHOVER_APIKEY is None or mylar.CONFIG.PUSHOVER_APIKEY == 'None':
            logger.warn('No Pushover Apikey is present. Fix it')
            # fixed: 'return False' from __init__ raises
            # "TypeError: __init__() should return None" - bail out without
            # a value instead (self.apikey stays unset in this case).
            return
        else:
            self.apikey = mylar.CONFIG.PUSHOVER_APIKEY
    else:
        self.apikey = test_apikey
    if test_device is None:
        self.device = mylar.CONFIG.PUSHOVER_DEVICE
    else:
        self.device = test_device
    if test_userkey is None:
        self.userkey = mylar.CONFIG.PUSHOVER_USERKEY
    else:
        self.userkey = test_userkey
    self.priority = mylar.CONFIG.PUSHOVER_PRIORITY
    self._session = requests.Session()
    self._session.headers = {'Content-type': "application/x-www-form-urlencoded"}
def shutdown(restart=False, update=False, maintenance=False):
    """Stop the webserver/background threads (unless in maintenance mode),
    optionally self-update and/or relaunch the process, then hard-exit."""
    if maintenance is False:
        cherrypy.engine.exit()
        halt()

    if not restart and not update:
        logger.info('Mylar is shutting down...')

    if update:
        logger.info('Mylar is updating...')
        try:
            versioncheck.update()
        except Exception as e:
            logger.warn('Mylar failed to update: %s. Restarting.' % e)

    if CREATEPID:
        logger.info('Removing pidfile %s' % PIDFILE)
        os.remove(PIDFILE)

    if restart:
        logger.info('Mylar is restarting...')
        popen_list = [sys.executable, FULL_PATH]
        if 'maintenance' not in ARGS:
            popen_list += ARGS
        else:
            # drop the maintenance/-u flags from the relaunch command
            for x in ARGS:
                if all([x != 'maintenance', x != '-u']):
                    # fixed: 'popen_list += x' extended the list with the
                    # string's individual characters
                    popen_list.append(x)
        logger.info('Restarting Mylar with ' + str(popen_list))
        subprocess.Popen(popen_list, cwd=os.getcwd())

    os._exit(0)
def test_login(self):
    '''
    This is the method to call if you JUST want to login using
    self.un & self.pw.

    Note that this will generate a new session on 32pag.es every time
    you login successfully! This is why the "keeplogged" option is only
    for when you persist cookies to disk.

    Note that after a successful login it tests the session key, which
    has the side effect of getting the authkey, passkey & uid.

    Returns: True (login success) / False (login failure)
    Side Effects:
        On success: sets the authkey, uid, passkey and saves the cookies
        to disk. On failure: clears the cookies and saves that to disk.
    '''
    if self.valid_login_attempt(self.un, self.pw) and self.cookie_exists('session'):
        self.ses.cookies.save(ignore_discard=True)
        if not self.test_skey_valid():
            logger.error('Bad error: The attempt to get your attributes after successful login failed!')
            self.error = {'status': 'Bad error', 'message': 'Attempt to get attributes after successful login failed.'}
            return False
        return True

    logger.warn('Missing session cookie after successful login: %s' % self.ses.cookies)
    self.ses.cookies.clear()
    self.ses.cookies.save()
    return False
def pullsearch(comicapi, comicquery, offset, type):
    """Build a ComicVine name-filter query from the search terms and issue
    the rate-limited request.

    Returns the requests response; returns None on a request failure.
    """
    # join the query terms into a single name: filter expression
    cnt = 1
    for x in comicquery:
        if cnt == 1:
            filterline = '%s' % x
        else:
            filterline += ',name:%s' % x
        cnt += 1

    PULLURL = mylar.CVURL + str(type) + 's?api_key=' + str(comicapi) + '&filter=name:' + filterline + '&field_list=id,name,start_year,site_detail_url,count_of_issues,image,publisher,deck,description,first_issue,last_issue&format=xml&sort=date_last_updated:desc&offset=' + str(offset) # 2012/22/02 - CVAPI flipped back to offset instead of page

    #all these imports are standard on most modern python implementations
    #logger.info('MB.PULLURL:' + PULLURL)

    #new CV API restriction - one api request / second.
    if mylar.CONFIG.CVAPI_RATE is None or mylar.CONFIG.CVAPI_RATE < 2:
        time.sleep(2)
    else:
        time.sleep(mylar.CONFIG.CVAPI_RATE)

    #download the file:
    payload = None
    try:
        r = requests.get(PULLURL, params=payload, verify=mylar.CONFIG.CV_VERIFY, headers=mylar.CV_HEADERS)
    except Exception as e:
        # fixed: Python 2-only 'except X, e' syntax
        logger.warn('Error fetching data from ComicVine: %s' % (e))
        return
def retrieve_image(url):
    """Fetch a ComicVine cover image, scale it to width 600 preserving the
    aspect ratio, and return it as a base64-encoded JPEG string.
    Returns None on any failure."""
    ComicImage = None
    try:
        response = requests.get(url, params=None, verify=mylar.CONFIG.CV_VERIFY,
                                headers=mylar.CV_HEADERS)
    except Exception as e:
        logger.warn('[ERROR: %s] Unable to download image from CV URL link: %s' % (e, url))
        return ComicImage

    statuscode = str(response.status_code)
    if statuscode != '200':
        logger.warn('Unable to download image from CV URL link: %s [Status Code returned: %s]' % (url, statuscode))
        coversize = 0
        return ComicImage

    img = Image.open(BytesIO(response.content))
    # scale to a fixed 600px width, keeping the aspect ratio
    wpercent = (600 / float(img.size[0]))
    hsize = int((float(img.size[1]) * float(wpercent)))
    img = img.resize((600, hsize), Image.ANTIALIAS)
    buffer = BytesIO()
    img.save(buffer, format="JPEG")
    ComicImage = str(base64.b64encode(buffer.getvalue()), 'utf-8')
    buffer.close()
    return ComicImage
def fix_slashes(self):
    """Maintenance task: find comic locations stored with doubled path
    separators and rewrite them with correctly-joined paths."""
    self.sql_attachmylar()
    # NOTE(review): os.sep.encode('unicode-escape') returns bytes - the
    # str/bytes concatenation below looks Python-2 era; confirm whether
    # this maintenance path still runs and under which interpreter.
    for ct in self.dbmylar.execute("SELECT ComicID, ComicLocation FROM comics WHERE ComicLocation like ?", ['%' + os.sep.encode('unicode-escape') + os.sep.encode('unicode-escape') + '%']):
        st = ct[1].find(os.sep.encode('unicode-escape') + os.sep.encode('unicode-escape'))
        if st != -1:
            rootloc = ct[1][:st]
            clocation = ct[1][st + 2:]
            if clocation[0] != os.sep.encode('unicode-escape'):
                new_path = os.path.join(rootloc, clocation)
                logger.info('[Incorrect slashes in path detected for OS] %s' % os.path.join(rootloc, ct[1]))
                logger.info('[PATH CORRECTION] %s' % new_path)
                self.comiclist.append({'ComicLocation': new_path, 'ComicID': ct[0]})

    for cm in self.comiclist:
        try:
            self.dbmylar.execute("UPDATE comics SET ComicLocation=? WHERE ComicID=?", (cm['ComicLocation'], cm['ComicID']))
        except Exception as e:
            # fixed: the format string used '%e' (float conversion) on the
            # exception and had ComicID/ComicLocation swapped - it raised
            # TypeError instead of logging the failure.
            logger.warn('Unable to correct entry: [ComicID:%s] %s [%s]' % (cm['ComicID'], cm['ComicLocation'], e))

    self.sql_closemylar()

    if len(self.comiclist) > 0:
        logger.info('[MAINTENANCE-MODE][%s] Successfully fixed the path slashes for %s series' % (self.mode.upper(), len(self.comiclist)))
    else:
        logger.info('[MAINTENANCE-MODE][%s] No series found with incorrect slashes in the path' % self.mode.upper())
def notify(self, message, subject, module=None):
    """Email `message` with `subject` via the configured SMTP server.

    Returns True when the mail was handed off successfully."""
    if module is None:
        module = ''
    module += '[NOTIFIER]'
    sent_successfully = False
    try:
        logger.debug(module + ' Sending email notification. From: [%s] - To: [%s] - Server: [%s] - Port: [%s] - Username: [%s] - Password: [********] - Encryption: [%s] - Message: [%s]' % (self.emailfrom, self.emailto, self.emailsvr, self.emailport, self.emailuser, self.emailenc, message))
        mail = MIMEMultipart()
        mail['From'] = str(self.emailfrom)
        mail['To'] = str(self.emailto)
        mail['Subject'] = subject
        mail['Date'] = formatdate()
        mail['Message-ID'] = make_msgid('mylar')
        mail.attach(MIMEText(message, 'plain'))

        # encryption mode 1 = implicit SSL, 2 = STARTTLS upgrade
        if self.emailenc == 1:
            conn = smtplib.SMTP_SSL(self.emailsvr, str(self.emailport))
        else:
            conn = smtplib.SMTP(self.emailsvr, str(self.emailport))
        if self.emailenc == 2:
            conn.starttls()

        if self.emailuser or self.emailpass:
            conn.login(str(self.emailuser), str(self.emailpass))
        conn.sendmail(str(self.emailfrom), str(self.emailto), mail.as_string())
        conn.quit()
        sent_successfully = True
    except Exception as e:
        logger.warn(module + ' Oh no!! Email notification failed: ' + str(e))
    return sent_successfully
def notify(self, ComicName=None, Year=None, Issue=None, snatched_nzb=None, sent_to=None):
    """Push a snatch / post-process notification to NotifyMyAndroid."""
    if snatched_nzb:
        event = '%s snatched!' % snatched_nzb
        description = 'Mylar has snatched: %s and has sent it to %s' % (snatched_nzb, sent_to)
    else:
        event = '%s (%s) - Issue #%s complete!' % (ComicName, Year, Issue)
        description = 'Mylar has downloaded and postprocessed: %s (%s) #%s' % (ComicName, Year, Issue)

    payload = {
        "apikey": self.apikey,
        "application": "Mylar",
        "event": event,
        "description": description,
        "priority": self.priority,
    }

    logger.info("Sending notification request to NotifyMyAndroid")
    if not self._send(payload):
        logger.warn("Error sending notification request to NotifyMyAndroid")
def test_login(self):
    '''
    This is the method to call if you JUST want to login using
    self.un & self.pw.

    Note that this will generate a new session on 32pag.es every time
    you login successfully! This is why the "keeplogged" option is only
    for when you persist cookies to disk.

    Note that after a successful login it tests the session key, which
    has the side effect of getting the authkey, passkey & uid.

    Returns: True (login success) / False (login failure)
    Side Effects:
        On success: sets the authkey, uid, passkey and saves the cookies
        to disk. On failure: clears the cookies and saves that to disk.
    '''
    attempt_ok = False
    if self.valid_login_attempt(self.un, self.pw):
        attempt_ok = self.cookie_exists('session')

    if attempt_ok:
        self.ses.cookies.save(ignore_discard=True)
        if self.test_skey_valid():
            return True
        logger.error("Bad error: The attempt to get your attributes after successful login failed!")
        self.error = {'status': 'Bad error', 'message': 'Attempt to get attributes after successful login failed.'}
        return False

    logger.warn("Missing session cookie after successful login: %s", self.ses.cookies)
    self.ses.cookies.clear()
    self.ses.cookies.save()
    return False
def notify(self, event, message=None, snatched_nzb=None, prov=None, sent_to=None, module=None, imageFile=None):
    """Send a Pushover notification (optionally with an attached image).

    Returns True when delivery succeeded, False otherwise.
    """
    if self.apikey is None:
        return False
    if module is None:
        module = ''
    module += '[NOTIFIER]'

    if snatched_nzb:
        # strip a trailing period from the release name.
        # fixed: compared against the two-character string '\.' (a regex
        # escape leftover) which can never equal a single character.
        if snatched_nzb[-1] == '.':
            snatched_nzb = snatched_nzb[:-1]
        message = "Mylar has snatched: " + snatched_nzb + " from " + prov + " and " + sent_to

    data = {'token': mylar.CONFIG.PUSHOVER_APIKEY,
            'user': mylar.CONFIG.PUSHOVER_USERKEY,
            'message': message.encode("utf-8"),
            'title': event,
            'priority': mylar.CONFIG.PUSHOVER_PRIORITY}

    files = None
    if imageFile:
        # Add image.
        files = {'attachment': ('image.jpeg', base64.b64decode(imageFile), 'image/jpeg')}

    if all([self.device is not None, self.device != 'None']):
        data.update({'device': self.device})

    r = self._session.post(self.PUSHOVER_URL, data=data, files=files, verify=True)
    if r.status_code == 200:
        try:
            response = r.json()
            if 'devices' in response and self.test is True:
                # validate endpoint answered - re-send for real to the
                # discovered devices
                logger.fdebug('%s Available devices: %s' % (module, response))
                if any([self.device is None, self.device == 'None']):
                    self.device = 'all available devices'
                r = self._session.post('https://api.pushover.net/1/messages.json', data=data, verify=True)
                if r.status_code == 200:
                    logger.info('%s PushOver notifications sent to %s.' % (module, self.device))
                elif r.status_code >= 400 and r.status_code < 500:
                    logger.error('%s PushOver request failed to %s: %s' % (module, self.device, r.content))
                    return False
                else:
                    logger.error('%s PushOver notification failed serverside.' % module)
                    return False
            else:
                logger.info('%s PushOver notifications sent.' % module)
        except Exception as e:
            logger.warn('%s[ERROR] - %s' % (module, e))
            return False
        else:
            return True
    elif r.status_code >= 400 and r.status_code < 500:
        logger.error('%s PushOver request failed: %s' % (module, r.content))
        return False
    else:
        logger.error('%s PushOver notification failed serverside.' % module)
        return False
def newpull():
    """Fetch the PreviewsWorld new-releases page.

    Returns None on a request failure (visible portion of the function).
    """
    pagelinks = "http://www.previewsworld.com/Home/1/1/71/952"
    try:
        r = requests.get(pagelinks, verify=False)
    except Exception as e:
        # fixed: Python 2-only 'except X, e' syntax
        logger.warn("Error fetching data: %s" % e)
def newpull():
    """Fetch the PreviewsWorld new-releases page.

    Returns None on a request failure (visible portion of the function).
    """
    pagelinks = "http://www.previewsworld.com/Home/1/1/71/952"
    try:
        r = requests.get(pagelinks, verify=False)
    except Exception as e:
        # fixed: Python 2-only 'except X, e' syntax
        logger.warn('Error fetching data: %s' % e)
def wwt_data(self, data):
    # Parse the worldwidetorrents search-results table out of a
    # BeautifulSoup document; returns a list of dicts with
    # title/link/pubdate/size/site per torrent row.
    resultw = data.find("table", {"class": "w3-table w3-striped w3-bordered w3-card-4"})
    resultp = resultw.findAll("tr")
    #final = []
    results = []
    for res in resultp:
        # skip the header row
        if res.findNext(text=True) == 'Torrents Name':
            continue
        title = res.find('a')
        torrent = title['title']
        try:
            # the download link carries the torrent id as a query parameter
            # NOTE(review): 'urlparse' here is the Python 2 module (py3 is
            # urllib.parse) - confirm which interpreter runs this path.
            for link in res.find_all('a', href=True):
                if link['href'].startswith('download.php'):
                    linkurl = urlparse.parse_qs(urlparse.urlparse(link['href']).query)['id']
                    #results = {'torrent': torrent,
                    #           'link': link['href']}
                    break
            # classify each cell by its markup: green font = seeders,
            # red font = leechers, MB/GB text = size, anything else = age
            for td in res.findAll('td'):
                try:
                    seed = td.find("font", {"color": "green"})
                    leech = td.find("font", {"color": "#ff0000"})
                    value = td.findNext(text=True)
                    if any(['MB' in value, 'GB' in value]):
                        if 'MB' in value:
                            szform = 'MB'
                            sz = 'M'
                        else:
                            szform = 'GB'
                            sz = 'G'
                        size = helpers.human2bytes(str(re.sub(szform, '', value)).strip() + sz)
                    elif seed is not None:
                        seeders = value
                        #results['seeders'] = seeders
                    elif leech is not None:
                        leechers = value
                        #results['leechers'] = leechers
                    else:
                        age = value
                        #results['age'] = age
                except Exception as e:
                    logger.warn('exception: %s' % e)
            logger.info('age: %s' % age)
            # NOTE(review): age/size/linkurl are only bound when the row
            # contained matching cells - a malformed row raises NameError,
            # which is swallowed by the outer except below and the row is
            # skipped. Confirm that is the intended behaviour.
            results.append({'title': torrent,
                            'link': ''.join(linkurl),
                            'pubdate': self.string_to_delta(age),
                            'size': size,
                            'site': 'WWT'})
            logger.info('results: %s' % results)
        except Exception as e:
            logger.warn('Error: %s' % e)
            continue
        #else:
        #    final.append(results)
    return results
def valid_skey_attempt(self, skey):
    '''
    Not generally the proper method to call - call test_key_valid() instead -
    which calls this method.

    Attempts to fetch data via an ajax method that will fail if not authorized.
    The parameter skey should be set to the string value of the cookie named session.

    Returns: True on success, False on failure.
    Side Effects: Sets self.uid, self.authkey and self.passkey
    (and self.error on failure, self.inkdrops on success when available).
    '''
    u = '''https://32pag.es/ajax.php'''
    params = {'action': 'index'}
    testcookie = dict(session=skey)
    try:
        # hit an authenticated-only endpoint with ONLY the supplied session
        # cookie; redirects disabled so a login bounce is detectable below
        r = self.ses.get(u, params=params, timeout=60, allow_redirects=False, cookies=testcookie)
    except Exception as e:
        logger.error("Got an exception trying to GET from to:" + u)
        self.error = {'status':'error', 'message':'exception trying to retrieve site'}
        return False
    if r.status_code != 200:
        if r.status_code == 302:
            # a redirect means the session key was rejected (bounced to login)
            newloc = r.headers.get('location', '')
            logger.warn("Got redirect from the POST-ajax action=login GET:" + newloc)
            self.error = {'status':'redirect-error', 'message':'got redirect from POST-ajax login action : ' + newloc}
        else:
            logger.error("Got bad status code in the POST-ajax action=login GET:" + str(r.status_code))
            self.error = {'status':'bad status code', 'message':'bad status code received in the POST-ajax login action :' + str(r.status_code)}
        return False
    try:
        j = r.json()
    except:
        logger.warn("Error - response from session-based skey check was not JSON: %s",r.text)
        return False
    #logger.info(j)
    self.uid = j['response']['id']
    self.authkey = j['response']['authkey']
    self.passkey = pk = j['response']['passkey']
    try:
        # best-effort: also fetch the account's inkdrop total
        d = self.ses.get('https://32pag.es/ajax.php', params={'action': 'user_inkdrops'}, verify=True, allow_redirects=True)
    except Exception as e:
        logger.error('Unable to retreive Inkdrop total : %s' % e)
    else:
        try:
            self.inkdrops = d.json()
        except:
            logger.error('Inkdrop result did not return valid JSON, unable to verify response')
        else:
            logger.info('inkdrops: %s' % self.inkdrops)
    return True
def environment(self):
    """
    Write a diagnostic snapshot (host info, python/pip details, environment
    variables and git status) to self.filename for the care-package.

    Fixed: the original opened self.filename without a context manager, so
    the handle leaked if any subprocess call raised.
    """
    with open(self.filename, "w+") as f:
        f.write("Mylar host information:\n")
        match = re.search('Windows', platform.system(), re.IGNORECASE)
        if match:
            objline = ['systeminfo']
        else:
            objline = ['uname', '-a']
        hi = subprocess.run(objline, capture_output=True, text=True)
        for hiline in hi.stdout.split('\n'):
            if platform.system() == 'Windows':
                # keep only the identifying lines of the (very verbose) systeminfo output
                if all(['Host Name' not in hiline, 'OS Name' not in hiline,
                        'OS Version' not in hiline, 'OS Configuration' not in hiline,
                        'OS Build Type' not in hiline, 'Locale' not in hiline,
                        'Time Zone' not in hiline]):
                    continue
            f.write("%s\n" % hiline)
        f.write("\n\nMylar python information:\n")
        pyloc = sys.executable
        pi = subprocess.run([pyloc, '-V'], capture_output=True, text=True)
        f.write("%s" % pi.stdout)
        f.write("%s\n" % pyloc)
        try:
            pf = subprocess.run(['pip3', 'freeze'], capture_output=True, text=True)
            f.write("\nPIP (freeze) list:\n")
            for pfout in pf.stdout.split('\n'):
                f.write("%s\n" % pfout)
        except Exception as e:
            logger.warn('Unable to retrieve current pip listing. Usually this is due to pip being referenced as something other than pip3')
        f.write("\n\nMylar running environment:\n")
        for param in list(os.environ.keys()):
            # skip SSH_* and LS_COLORS noise
            if all(['SSH' not in param, 'LS_COLORS' not in param]):
                f.write("%20s = %s\n" % (param, os.environ[param]))
        f.write("\n\nMylar git status:\n")
        try:
            cmd = [['git', '--version'], ['git', 'status']]
            for c in cmd:
                gs = subprocess.run(c, capture_output=True, text=True)
                for line in gs.stdout.split('\n'):
                    f.write("%s\n" % line)
        except Exception as e:
            f.write("\n\nUnable to retrieve Git information")
def archivefiles(comicid, comlocation, imported): myDB = db.DBConnection() # if move files isn't enabled, let's set all found comics to Archive status :) try: imported = ast.literal_eval(imported) except Exception as e: logger.warn('[%s] Error encountered converting import data' % e) ComicName = imported['ComicName'] impres = imported['filelisting'] if impres is not None: scandir = [] for impr in impres: srcimp = impr['comiclocation'] orig_filename = impr['comicfilename'] if not any([ os.path.abspath(os.path.join(srcimp, os.pardir)) == x for x in scandir ]): scandir.append(os.path.abspath(os.path.join(srcimp, os.pardir))) for sdir in scandir: logger.info( 'Updating issue information and setting status to Archived for location: ' + sdir) updater.forceRescan( comicid, archive=sdir) #send to rescanner with archive mode turned on logger.info('Now scanning in files.') updater.forceRescan(comicid) for result in impres: try: res = result['import_id'] except: #if it's an 'older' import that wasn't imported, just make it a basic match so things can move and update properly. controlValue = { "ComicFilename": result['comicfilename'], "SRID": imported['srid'] } newValue = {"Status": "Imported", "ComicID": comicid} else: controlValue = { "impID": result['import_id'], "ComicFilename": result['comicfilename'] } newValue = { "Status": "Imported", "SRID": imported['srid'], "ComicID": comicid } myDB.upsert("importresults", newValue, controlValue) return
def searchit(cm):
    """
    Search the nzbx.co API (category 7030 / comics) for the given term.

    cm: pre-formatted search string.
    Appends matches within usenet retention to 'entries'; returns the
    string "no results" on failure or empty result set.
    NOTE(review): Python-2-era code (urllib2, 'except X, e' syntax) left intact.
    """
    entries = []
    mres = {}
    if mylar.NZBX:
        provider = "nzbx"
        #stringsearch = str(cm) + "%20" + str(issue) + "%20" + str(year)
        searchURL = 'https://nzbx.co/api/search?cat=7030&q=' + str(cm)
        logger.fdebug(u'Parsing results from <a href="%s">nzbx.co</a>' % searchURL)
        request = urllib2.Request(searchURL)
        request.add_header('User-Agent', str(mylar.USER_AGENT))
        opener = urllib2.build_opener()
        try:
            data = opener.open(request).read()
        except Exception, e:
            logger.warn('Error fetching data from nzbx.co : %s' % str(e))
            data = False
            return "no results"
        if data:
            d = json.loads(data)
            if not len(d):
                logger.info(u"No results found from nzbx.co")
                return "no results"
            else:
                for item in d:
                    try:
                        url = item['nzb']
                        title = item['name']
                        size = item['size']
                        # age = days between the post date and now
                        nzbdate = datetime.datetime.fromtimestamp(item['postdate'])
                        nzbage = abs((datetime.datetime.now() - nzbdate).days)
                        if nzbage <= int(mylar.USENET_RETENTION):
                            entries.append({'title': str(title),
                                            'link': str(url)})
                            #logger.fdebug('Found %s. Size: %s' % (title, helpers.bytes_to_mb(size)))
                        else:
                            logger.fdebug('%s outside usenet retention: %s days.' % (title, nzbage))
                        #resultlist.append((title, size, url, provider))
                        #logger.fdebug('Found %s. Size: %s' % (title, helpers.bytes_to_mb(size)))
                    except Exception, e:
                        logger.error(u"An unknown error occurred trying to parse the feed: %s" % e)
def valid_skey_attempt(self, skey):
    '''
    Not generally the proper method to call - call test_key_valid() instead -
    which calls this method.

    Attempts to fetch data via an ajax method that will fail if not authorized.
    The parameter skey should be set to the string value of the cookie named session.

    Returns: True on success, False on failure.
    Side Effects: Sets self.uid, self.authkey and self.passkey
    (and self.error on failure, self.inkdrops on success when available).
    '''
    u = '''https://32pag.es/ajax.php'''
    params = {'action': 'index'}
    testcookie = dict(session=skey)
    try:
        # hit an authenticated-only endpoint with ONLY the supplied session
        # cookie; redirects disabled so a login bounce is detectable below
        r = self.ses.get(u, params=params, timeout=60, allow_redirects=False, cookies=testcookie)
    except Exception as e:
        logger.error('Got an exception [%s] trying to GET to: %s' % (e,u))
        self.error = {'status':'error', 'message':'exception trying to retrieve site'}
        return False
    if r.status_code != 200:
        if r.status_code == 302:
            # a redirect means the session key was rejected (bounced to login)
            newloc = r.headers.get('Location', '')
            logger.warn('Got redirect from the POST-ajax action=login GET: %s' % newloc)
            self.error = {'status':'redirect-error', 'message':'got redirect from POST-ajax login action : ' + newloc}
        else:
            logger.error('Got bad status code in the POST-ajax action=login GET: %s' % r.status_code)
            self.error = {'status':'bad status code', 'message':'bad status code received in the POST-ajax login action :' + str(r.status_code)}
        return False
    try:
        j = r.json()
    except:
        logger.warn('Error - response from session-based skey check was not JSON: %s' % r.text)
        return False
    self.uid = j['response']['id']
    self.authkey = j['response']['authkey']
    self.passkey = pk = j['response']['passkey']
    try:
        # best-effort: also fetch the account's inkdrop total
        d = self.ses.get('https://32pag.es/ajax.php', params={'action': 'user_inkdrops'}, verify=True, allow_redirects=True)
    except Exception as e:
        logger.error('Unable to retreive Inkdrop total : %s' % e)
    else:
        try:
            self.inkdrops = d.json()
        except:
            logger.error('Inkdrop result did not return valid JSON, unable to verify response')
        else:
            logger.fdebug('inkdrops: %s' % self.inkdrops)
    return True
def __init__(self, test_phonefrom=None, test_phoneto=None):
    """
    Set up the Signal notifier.

    test_phonefrom / test_phoneto override the configured numbers when
    supplied (used by the connection-test path). When the optional
    'signald' package is not installed, a warning is logged and no
    attributes are configured.
    """
    # Bail out early when the optional signald dependency is absent;
    # attributes are only set when the module can actually be imported.
    if find_spec('signald') is None:
        logger.warn('Could not find Signald module')
        return
    from signald import Signal
    self.phone_from = mylar.CONFIG.SIGNAL_PHONE_NUMBER_FROM if test_phonefrom is None else test_phonefrom
    self.phone_to = mylar.CONFIG.SIGNAL_PHONE_NUMBER_TO if test_phoneto is None else test_phoneto
    self.signal = Signal(self.phone_from)
def torsend2client(seriesname, issue, seriesyear, linkit, site):
    """
    Download the .torrent at 'linkit' from the given site and stage it for
    the configured watch directory. Returns "fail" on any error.
    NOTE(review): Python-2-era code (urllib2, StringIO, 'except X, e') left intact.
    """
    logger.info('matched on ' + str(seriesname))
    # sanitize the series name into a filesystem-safe filename
    filename = re.sub('[\'\!\@\#\$\%\:\;\/\\=\?\.]', '', seriesname)
    filename = re.sub(' ', '_', filename)
    filename += "_" + str(issue) + "_" + str(seriesyear)
    if site == 'CBT':
        logger.info(linkit)
        linkit = str(linkit) + '&passkey=' + str(mylar.CBT_PASSKEY)
    if linkit[-7:] != "torrent":  # and site != "KAT":
        filename += ".torrent"
    if mylar.TORRENT_LOCAL and mylar.LOCAL_WATCHDIR is not None:
        filepath = os.path.join(mylar.LOCAL_WATCHDIR, filename)
        logger.fdebug('filename for torrent set to : ' + filepath)
    elif mylar.TORRENT_SEEDBOX and mylar.SEEDBOX_WATCHDIR is not None:
        # seedbox mode stages the file in the cache dir for later transfer
        filepath = os.path.join(mylar.CACHE_DIR, filename)
        logger.fdebug('filename for torrent set to : ' + filepath)
    else:
        logger.error('No Local Watch Directory or Seedbox Watch Directory specified. Set it and try again.')
        return "fail"
    try:
        request = urllib2.Request(linkit)
        #request.add_header('User-Agent', str(mylar.USER_AGENT))
        request.add_header('Accept-encoding', 'gzip')
        if site == 'KAT':
            # KAT requires a Referer header (the link minus its query string)
            stfind = linkit.find('?')
            kat_referrer = linkit[:stfind]
            request.add_header('Referer', kat_referrer)
            logger.fdebug('KAT Referer set to :' + kat_referrer)
#        response = helpers.urlretrieve(urllib2.urlopen(request), filepath)
        response = urllib2.urlopen(request)
        logger.fdebug('retrieved response.')
        if site == 'KAT':
            # NOTE(review): structure reconstructed — assumes non-KAT sites fall
            # to the plain read below, and KAT non-gzip leaves 'torrent' unset; confirm.
            if response.info()['content-encoding'] == 'gzip':  #.get('Content-Encoding') == 'gzip':
                logger.fdebug('gzip detected')
                buf = StringIO(response.read())
                logger.fdebug('gzip buffered')
                f = gzip.GzipFile(fileobj=buf)
                logger.fdebug('gzip filed.')
                torrent = f.read()
                logger.fdebug('gzip read.')
        else:
            torrent = response.read()
    except Exception, e:
        logger.warn('Error fetching data from %s: %s' % (site, e))
        return "fail"
def sendnotify(message, filename, module):
    """
    Broadcast a metatagging-error notification through every enabled agent.

    message: short error description.
    filename: file the error occurred on.
    module: log-prefix tag forwarded to each notifier.
    Any notifier failure is caught and logged; the function never raises.
    """
    subject = "Mylar metatagging error: "
    body = subject + message + ' File: ' + filename
    try:
        cfg = mylar.CONFIG
        if cfg.PROWL_ENABLED:
            notifiers.PROWL().notify(filename, subject, module=module)
        if cfg.PUSHOVER_ENABLED:
            notifiers.PUSHOVER().notify(filename, body, module=module)
        if cfg.BOXCAR_ENABLED:
            notifiers.BOXCAR().notify(prline=filename, prline2=body, module=module)
        if cfg.PUSHBULLET_ENABLED:
            notifiers.PUSHBULLET().notify(prline=filename, prline2=body, module=module)
        if cfg.TELEGRAM_ENABLED:
            notifiers.TELEGRAM().notify(body)
        if cfg.SLACK_ENABLED:
            notifiers.SLACK().notify(subject, body, module=module)
        if cfg.MATTERMOST_ENABLED:
            notifiers.MATTERMOST().notify(subject, body, module=module)
        if cfg.DISCORD_ENABLED:
            notifiers.DISCORD().notify(filename, message, module=module)
        if cfg.EMAIL_ENABLED and cfg.EMAIL_ONPOST:
            logger.info("Sending email notification")
            notifiers.EMAIL().notify(body, subject, module=module)
        if cfg.GOTIFY_ENABLED:
            notifiers.GOTIFY().notify(subject, body, module=module)
    except Exception as e:
        logger.warn('[NOTIFICATION] Unable to send notification: %s' % e)
    return
def encrypt_it(self):
    """
    Obfuscate self.password for storage: append an 8-byte random salt,
    base64-encode, and prefix with the '^~$z$' marker that decrypt_it()
    recognizes (the original emitted '******', which decrypt_it rejects).
    Returns {'status': True, 'password': <encoded>} or {'status': False}.
    """
    try:
        salt = os.urandom(8)
        password = self.password
        # py3: b64encode requires bytes
        if isinstance(password, str):
            password = password.encode('utf-8')
        salted_pass = base64.b64encode(password + salt)
    except Exception as e:
        logger.warn('Error when encrypting: %s' % e)
        return {'status': False}
    else:
        # prefix must match decrypt_it(), which strips 5 chars ('^~$z$')
        return {'status': True, 'password': '^~$z$' + salted_pass.decode('utf-8')}
def encrypt_it(self):
    """
    Obfuscate self.password for storage: append an 8-byte random salt,
    base64-encode, and prefix with the '^~$z$' marker that decrypt_it()
    recognizes (the original emitted '******', which decrypt_it rejects).
    Returns {'status': True, 'password': <encoded>} or {'status': False}.
    """
    try:
        salt = os.urandom(8)
        password = self.password
        # py3: b64encode requires bytes
        if isinstance(password, str):
            password = password.encode('utf-8')
        salted_pass = base64.b64encode(password + salt)
    except Exception as e:
        logger.warn('Error when encrypting: %s' % e)
        return {'status': False}
    else:
        # prefix must match decrypt_it(), which strips 5 chars ('^~$z$')
        return {'status': True, 'password': '^~$z$' + salted_pass.decode('utf-8')}
def _parse_feed(site, url, verify):
    """
    Fetch the RSS feed for the given site.

    site: provider name (log label only).
    url: feed URL.
    verify: TLS certificate verification flag passed to requests.
    Logs a warning and returns None on any request failure.
    """
    logger.fdebug('[RSS] Fetching items from ' + site)
    payload = None
    headers = {'User-Agent': str(mylar.USER_AGENT)}
    try:
        r = requests.get(url, params=payload, verify=verify, headers=headers)
    except Exception as e:  # fixed: py2 'except Exception, e' is a SyntaxError on py3
        logger.warn('Error fetching RSS Feed Data from %s: %s' % (site, e))
        return
def importIT(self):
    """
    Migrate every series found in a previous database into the current one,
    recording progress in the 'maintenance' table as it goes.

    Fixes: the INSERT is now parameterized (was string-interpolated SQL),
    and the failure-summary log now reports len(maintenance_fail) instead
    of len(maintenance_success).
    """
    #set startup...
    if len(self.comiclist) > 0:
        # reset the maintenance table so the UI can track this run
        self.sql_attach()
        self.db.execute("DELETE FROM maintenance")
        self.db.execute("INSERT INTO maintenance (id, mode, total, status) VALUES (?,?,?,?)",
                        ('1', self.mode, len(self.comiclist), 'running'))
        self.sql_close()
        logger.info('[MAINTENANCE-MODE][%s] Found %s series in previous db. Preparing to migrate into existing db.' % (self.mode.upper(), len(self.comiclist)))
        count = 1
        for x in self.comiclist:
            # strip the legacy '4050-' prefix to get the bare ComicID
            comicid = re.sub('4050-', '', x['ComicID']).strip()
            logger.info('[MAINTENANCE-MODE][%s] [%s/%s] now attempting to add %s to watchlist...' % (self.mode.upper(), count, len(self.comiclist), x['ComicID']))
            try:
                self.sql_attach()
                self.db.execute("UPDATE maintenance SET progress=?, total=?, current=? WHERE id='1'",
                                (count, len(self.comiclist), comicid))
                self.sql_close()
            except Exception as e:
                logger.warn('[ERROR] %s' % e)
            maintenance_info = importer.addComictoDB(comicid, calledfrom='maintenance')
            try:
                logger.info('MAINTENANCE: %s' % maintenance_info)
                if maintenance_info['status'] == 'complete':
                    logger.fdebug('[MAINTENANCE-MODE][%s] Successfully added %s [%s] to watchlist.' % (self.mode.upper(), maintenance_info['comicname'], maintenance_info['year']))
                else:
                    logger.fdebug('[MAINTENANCE-MODE][%s] Unable to add %s [%s] to watchlist.' % (self.mode.upper(), maintenance_info['comicname'], maintenance_info['year']))
                    raise IOError
                self.maintenance_success.append(x)
                try:
                    self.sql_attach()
                    self.db.execute("UPDATE maintenance SET progress=?, last_comicid=?, last_series=?, last_seriesyear=? WHERE id='1'",
                                    (count, comicid, maintenance_info['comicname'], maintenance_info['year']))
                    self.sql_close()
                except Exception as e:
                    logger.warn('[ERROR] %s' % e)
            except IOError as e:
                logger.warn('[MAINTENANCE-MODE][%s] Unable to add series to watchlist: %s' % (self.mode.upper(), e))
                self.maintenance_fail.append(x)
            count += 1
    else:
        logger.warn('[MAINTENANCE-MODE][%s] Unable to locate any series in db. This is probably a FATAL error and an unrecoverable db.' % self.mode.upper())
        return
    logger.info('[MAINTENANCE-MODE][%s] Successfully imported %s series into existing db.' % (self.mode.upper(), len(self.maintenance_success)))
    if len(self.maintenance_fail) > 0:
        # fixed: originally logged len(self.maintenance_success) here
        logger.info('[MAINTENANCE-MODE][%s] Failed to import %s series into existing db: %s' % (self.mode.upper(), len(self.maintenance_fail), self.maintenance_fail))
    try:
        self.sql_attach()
        self.db.execute("UPDATE maintenance SET status=? WHERE id='1'", ["completed"])
        self.sql_close()
    except Exception as e:
        logger.warn('[ERROR] %s' % e)
def torsend2client(seriesname, issue, seriesyear, linkit, site):
    """
    Download the .torrent at 'linkit' from the given site and stage it for
    the configured watch directory. Returns "fail" on any error.
    NOTE(review): Python-2-era code (urllib2, StringIO, 'except X, e') left intact.
    """
    logger.info('matched on ' + seriesname)
    # sanitize the series name into a filesystem-safe filename
    filename = helpers.filesafe(seriesname)
    #filename = re.sub('[\'\!\@\#\$\%\:\;\/\\=\?\.]', '',seriesname)
    filename = re.sub(' ', '_', filename)
    filename += "_" + str(issue) + "_" + str(seriesyear)
    if site == 'CBT':
        logger.info(linkit)
        linkit = str(linkit) + '&passkey=' + str(mylar.CBT_PASSKEY)
    if linkit[-7:] != "torrent":  # and site != "KAT":
        filename += ".torrent"
    if mylar.TORRENT_LOCAL and mylar.LOCAL_WATCHDIR is not None:
        filepath = os.path.join(mylar.LOCAL_WATCHDIR, filename)
        logger.fdebug('filename for torrent set to : ' + filepath)
    elif mylar.TORRENT_SEEDBOX and mylar.SEEDBOX_WATCHDIR is not None:
        # seedbox mode stages the file in the cache dir for later transfer
        filepath = os.path.join(mylar.CACHE_DIR, filename)
        logger.fdebug('filename for torrent set to : ' + filepath)
    else:
        logger.error('No Local Watch Directory or Seedbox Watch Directory specified. Set it and try again.')
        return "fail"
    try:
        request = urllib2.Request(linkit)
        #request.add_header('User-Agent', str(mylar.USER_AGENT))
        request.add_header('Accept-encoding', 'gzip')
        if site == 'KAT':
            # KAT requires a Referer header (the link minus its query string)
            stfind = linkit.find('?')
            kat_referrer = linkit[:stfind]
            request.add_header('Referer', kat_referrer)
            logger.fdebug('KAT Referer set to :' + kat_referrer)
#        response = helpers.urlretrieve(urllib2.urlopen(request), filepath)
        response = urllib2.urlopen(request)
        logger.fdebug('retrieved response.')
        if site == 'KAT':
            # NOTE(review): structure reconstructed — assumes non-KAT sites fall
            # to the plain read below, and KAT non-gzip leaves 'torrent' unset; confirm.
            if response.info()['content-encoding'] == 'gzip':  #.get('Content-Encoding') == 'gzip':
                logger.fdebug('gzip detected')
                buf = StringIO(response.read())
                logger.fdebug('gzip buffered')
                f = gzip.GzipFile(fileobj=buf)
                logger.fdebug('gzip filed.')
                torrent = f.read()
                logger.fdebug('gzip read.')
        else:
            torrent = response.read()
    except Exception, e:
        logger.warn('Error fetching data from %s: %s' % (site, e))
        return "fail"
def _send(self, data, module):
    """
    POST the given payload to the NotifyMyAndroid public API.

    data: dict of NMA parameters (apikey, event, description, ...).
    module: log-prefix tag.
    Logs a warning and returns None when the request fails.
    """
    url_data = urllib.urlencode(data)
    url = 'https://www.notifymyandroid.com/publicapi/notify'
    req = urllib2.Request(url, url_data)
    try:
        handle = urllib2.urlopen(req)
    except Exception as e:  # fixed: py2 'except Exception, e' syntax
        # fixed: the original used ('... url: ' % e) — a %-format with no
        # placeholder, which raised TypeError instead of logging the error
        logger.warn(module + ' Error opening NotifyMyAndroid url: %s' % e)
        return
def decrypt_it(self):
    """
    Reverse encrypt_it(): verify the '^~$z$' marker, base64-decode the
    remainder and drop the trailing 8-byte salt.
    Returns {'status': True, 'password': <bytes>} or {'status': False}.
    (Removed an unused 'saltedhash' local and dead commented-out code.)
    """
    try:
        if not self.password.startswith('^~$z$'):
            logger.warn('Error not an encryption that I recognize.')
            return {'status': False}
        passd = base64.b64decode(self.password[5:])
    except Exception as e:
        logger.warn('Error when decrypting password: %s' % e)
        return {'status': False}
    else:
        # last 8 bytes are the random salt added at encryption time
        return {'status': True, 'password': passd[:-8]}
def notify(self, event, message=None, snatched_nzb=None, prov=None, sent_to=None, module=None):
    """
    Send a Pushover notification for the given event.

    event: notification title.
    message: body text; rebuilt automatically when snatched_nzb is given.
    snatched_nzb / prov / sent_to: snatch details used to compose the message.
    module: log-prefix tag; '[NOTIFIER]' is appended.
    Returns True on success, False on any failure.
    """
    if module is None:
        module = ''
    module += '[NOTIFIER]'
    if snatched_nzb:
        # Drop a trailing period so the composed message reads cleanly.
        # (The original compared against the two-character literal '\.'
        # which can never equal a single character, so the strip never ran.)
        if snatched_nzb[-1] == '.':
            snatched_nzb = snatched_nzb[:-1]
        message = "Mylar has snatched: " + snatched_nzb + " from " + prov + " and " + sent_to
    data = {'token': mylar.CONFIG.PUSHOVER_APIKEY,
            'user': mylar.CONFIG.PUSHOVER_USERKEY,
            'message': message.encode("utf-8"),
            'title': event,
            'priority': mylar.CONFIG.PUSHOVER_PRIORITY}
    if all([self.device is not None, self.device != 'None']):
        data.update({'device': self.device})
    r = self._session.post(self.PUSHOVER_URL, data=data, verify=True)
    if r.status_code == 200:
        try:
            response = r.json()
            if 'devices' in response and self.test is True:
                logger.fdebug('%s Available devices: %s' % (module, response))
                if any([self.device is None, self.device == 'None']):
                    self.device = 'all available devices'
                r = self._session.post('https://api.pushover.net/1/messages.json', data=data, verify=True)
                if r.status_code == 200:
                    logger.info('%s PushOver notifications sent to %s.' % (module, self.device))
                elif r.status_code >= 400 and r.status_code < 500:
                    logger.error('%s PushOver request failed to %s: %s' % (module, self.device, r.content))
                    return False
                else:
                    logger.error('%s PushOver notification failed serverside.' % module)
                    return False
            else:
                logger.info('%s PushOver notifications sent.' % module)
        except Exception as e:
            logger.warn('%s[ERROR] - %s' % (module, e))
            return False
        else:
            return True
    elif r.status_code >= 400 and r.status_code < 500:
        logger.error('%s PushOver request failed: %s' % (module, r.content))
        return False
    else:
        logger.error('%s PushOver notification failed serverside.' % module)
        return False
def load_torrent(self, filepath):
    """
    Add the .torrent at filepath to the connected Deluge daemon, applying
    the configured label if any. Returns True on success, False on failure.
    """
    logger.info('filepath to torrent file set to : ' + filepath)
    torrent_id = False
    if self.client.connected is True:
        logger.info('Checking if Torrent Exists!')
        # NOTE(review): file handle is never explicitly closed; 'hash' shadows the builtin
        torrentcontent = open(filepath, 'rb').read()
        hash = str.lower(self.get_the_hash(filepath))  # Deluge expects a lower case hash
        logger.debug('Torrent Hash (load_torrent): "' + hash + '"')
        logger.debug('FileName (load_torrent): ' + str(os.path.basename(filepath)))
        #Check if torrent already added
        if self.find_torrent(str.lower(hash)):
            logger.info('load_torrent: Torrent already exists!')
        else:
            logger.info('Torrent not added yet, trying to add it now!')
            try:
                # NOTE(review): base64.encodestring was removed in py3.9 —
                # presumably targets an older runtime; confirm.
                torrent_id = self.client.call('core.add_torrent_file', str(os.path.basename(filepath)), base64.encodestring(torrentcontent), '')
            except Exception as e:
                logger.debug('Torrent not added')
                return False
            else:
                logger.debug('TorrentID: ' + torrent_id)
        # If label enabled put label on torrent in Deluge
        if torrent_id and mylar.DELUGE_LABEL:
            logger.info ('Setting label to ' + mylar.DELUGE_LABEL)
            try:
                self.client.call('label.set_torrent', torrent_id, mylar.DELUGE_LABEL)
            except:
                #if label isn't set, let's try and create one.
                try:
                    self.client.call('label.add', mylar.DELUGE_LABEL)
                    self.client.call('label.set_torrent', torrent_id, mylar.DELUGE_LABEL)
                except:
                    logger.warn('Unable to set label - Either try to create it manually within Deluge, and/or ensure there are no spaces, capitalization or special characters in label')
                    return False
                logger.info('Succesfully set label to ' + mylar.DELUGE_LABEL)
        try:
            # sanity check that Deluge actually registered the torrent
            self.find_torrent(torrent_id)
            logger.info('Double checking torrent was added.')
        except Exception as e:
            logger.warn('Torrent was not added! Please check logs')
            return False
        else:
            logger.info('Torrent successfully added!')
            return True
def shutdown(restart=False, update=False):
    """
    Stop the cherrypy engine and scheduler, persist the configuration and
    optionally self-update before restarting.

    restart: True when the process will be relaunched afterwards.
    update: True to run versioncheck.update() before going down.
    """
    cherrypy.engine.exit()
    SCHED.shutdown(wait=False)
    config_write()
    if not restart and not update:
        logger.info('Mylar is shutting down...')
    if update:
        logger.info('Mylar is updating...')
        try:
            versioncheck.update()
        except Exception as e:  # fixed: py2 'except Exception, e' is a SyntaxError on py3
            logger.warn('Mylar failed to update: %s. Restarting.' % e)
def valid_login_attempt(self, un, pw):
    '''
    Does the actual POST to the login.php method (using the ajax parameter,
    which is far more reliable than HTML parsing.

    Input: un: The username (usually would be self.un, but that's not a requirement
           pw: The password (usually self.pw but not a requirement)

    Note: The underlying self.ses object will handle setting the session cookie
    from a valid login, but you'll need to call the save method if your
    cookies are being persisted.

    Returns: True (success) False (failure)
    Side Effects: sets self.error on every failure path.
    '''
    postdata = {'username': un, 'password': pw, 'keeplogged': 1}
    u = 'https://32pag.es/login.php?ajax=1'
    try:
        r = self.ses.post(u, data=postdata, timeout=60, allow_redirects=True)
        logger.debug(self.module + ' Status Code: ' + str(r.status_code))
    except Exception as e:
        logger.error(self.module + " Got an exception when trying to login to %s POST", u)
        self.error = {'status':'exception', 'message':'Exception when trying to login'}
        return False
    if r.status_code != 200:
        logger.warn(self.module + " Got bad status code from login POST: %d\n%s\n%s", r.status_code, r.text, r.headers)
        logger.debug(self.module + " Request URL: %s \n Content: %s \n History: %s", r.url ,r.text, r.history)
        self.error = {'status':'Bad Status code', 'message':(r.status_code, r.text, r.headers)}
        return False
    try:
        # the ajax login endpoint replies with a JSON status document
        logger.debug(self.module + ' Trying to analyze login JSON reply from 32P: %s', r.text)
        d = r.json()
    except:
        logger.debug(self.module + " Request URL: %s \n Content: %s \n History: %s", r.url ,r.text, r.history)
        logger.error(self.module + " The data returned by the login page was not JSON: %s", r.text)
        self.error = {'status':'JSON not returned', 'message':r.text}
        return False
    if d['status'] == 'success':
        return True
    # any other status (bad credentials, captcha, ...) is surfaced via self.error
    logger.error(self.module + " Got unexpected status result: %s", d)
    logger.debug(self.module + " Request URL: %s \n Content: %s \n History: %s \n Json: %s", r.url ,r.text, r.history, d)
    self.error = d
    return False