Example #1
    def connect(self, host, username, password, test=False):
        if self.conn is not None:
            return self.conn

        if not host:
            return {'status': False, 'error': 'No host specified'}

        if not username:
            return {'status': False, 'error': 'No username specified'}

        if not password:
            return {'status': False, 'error': 'No password specified'}

        # Get port from the config
        host,portnr = host.split(':')

        # NOTE: the original logging call (which printed the credentials) was redacted in this
        # listing; the client creation below is reconstructed from the error handling that follows
        try:
            self.client = DelugeRPCClient(host, int(portnr), username, password)
        except Exception as e:
            logger.error('Could not create DelugeRPCClient Object %s' % e)
            return {'status': False, 'error': e}
        else:
            try:
                self.client.connect()
            except Exception as e:
                logger.error('Could not connect to Deluge: %s' % host)
                return {'status': False, 'error': e}
            else:
                if test is True:
                    daemon_version = self.client.call('daemon.info')
                    libtorrent_version = self.client.call('core.get_libtorrent_version')
                    return {'status': True, 'daemon_version': daemon_version, 'libtorrent_version': libtorrent_version}
                else:
                    return self.client
Example #2
    def fetch(self, query, args=None):

        with db_lock:

            if query is None:
                return

            sqlResult = None
            attempt = 0

            while attempt < 5:
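                # sqlite allows only one writer at a time; retry a few times while the db file is locked or unavailable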
                try:
                    if args is None:
                        #logger.fdebug("[FETCH] : " + query)
                        cursor = self.connection.cursor()
                        sqlResult = cursor.execute(query)
                    else:
                        #logger.fdebug("[FETCH] : " + query + " with args " + str(args))
                        cursor = self.connection.cursor()
                        sqlResult = cursor.execute(query, args)
                    # get out of the connection attempt loop since we were successful
                    break
                except sqlite3.OperationalError as e:
                    if "unable to open database file" in e.args[0] or "database is locked" in e.args[0]:
                        logger.warn('Database Error: %s' % e)
                        attempt += 1
                        time.sleep(1)
                    else:
                        logger.warn('DB error: %s' % e)
                        raise
                except sqlite3.DatabaseError as e:
                    logger.error('Fatal error executing query: %s' % e)
                    raise

            return sqlResult
Example #3
        def test_login(self):
            '''
                This is the method to call if you JUST want to log in using self.un & self.pw.

                Note that this will generate a new session on 32pag.es every time you log in successfully!
                This is why the "keeplogged" option is only for when you persist cookies to disk.

                Note that after a successful login, it will test the session key, which has the side effect of
                getting the authkey, passkey & uid.

                Returns: True (login success), False (login failure).
                Side effects: On success: sets the authkey, uid, passkey and saves the cookies to disk.
                              On failure: clears the cookies and saves that to disk.
            '''
            if (self.valid_login_attempt(self.un, self.pw)):
                if self.cookie_exists('session'):
                    self.ses.cookies.save(ignore_discard=True)
                    if (not self.test_skey_valid()):
                        logger.error('Bad error: The attempt to get your attributes after successful login failed!')
                        self.error = {'status': 'Bad error', 'message': 'Attempt to get attributes after successful login failed.'}
                        return False
                    return True

                logger.warn('Missing session cookie after successful login: %s' % self.ses.cookies)
            self.ses.cookies.clear()
            self.ses.cookies.save()
            return False
Example #5
    def connect(self, host, username, password):
        if self.conn is not None:
        return self.conn

        if not host:
            return False

        # Get port from the config
        host,portnr = host.split(':')


        #if username and password:
        # NOTE: the original logging call (with credentials) was redacted in this listing;
        # the client creation below is reconstructed from the error handling that follows
        try:
            self.client = DelugeRPCClient(host, int(portnr), username, password)
        except Exception as e:
            logger.error('Could not create DelugeRPCClient Object: %s' % e)
            return False
        else:
            try:
                self.client.connect()
            except Exception as e:
                logger.error('Could not connect to Deluge ' + host)
                return False
            else:
                return self.client
Example #6
    def notify(self, message, event, module=None):
        if not mylar.PUSHOVER_ENABLED:
            return
        if module is None:
            module = ''
        module += '[NOTIFIER]'

        data = {
            'token': mylar.PUSHOVER_APIKEY,
            'user': mylar.PUSHOVER_USERKEY,
            'message': message.encode("utf-8"),
            'title': event,
            'priority': mylar.PUSHOVER_PRIORITY
        }

        r = self._session.post(self.PUSHOVER_URL, data=data, verify=True)

        if r.status_code == 200:
            logger.info(module + ' PushOver notifications sent.')
            return True
        elif r.status_code >= 400 and r.status_code < 500:
            logger.error(module + ' PushOver request failed: %s' % r.content)
            return False
        else:
            logger.error(module + ' PushOver notification failed serverside.')
            return False
Example #7
File: db.py Project: wraslor/mylar
    def action(self, query, args=None):

        with db_lock:

            if query is None:
                return

            sqlResult = None
            attempt = 0

            while attempt < 5:
                try:
                    if args is None:
                        #logger.debug(self.filename+": "+query)
                        sqlResult = self.connection.execute(query)
                    else:
                        #logger.debug(self.filename+": "+query+" with args "+str(args))
                        sqlResult = self.connection.execute(query, args)
                    self.connection.commit()
                    break
                except sqlite3.OperationalError as e:
                    if "unable to open database file" in e.args[0] or "database is locked" in e.args[0]:
                        logger.warn('Database Error: %s' % e)
                        logger.warn('query: %s' % query)
                        attempt += 1
                        time.sleep(1)
                    else:
                        logger.error('Database error executing %s :: %s' %
                                     (query, e))
                        raise
                except sqlite3.DatabaseError as e:
                    logger.error('Fatal Error executing %s :: %s' % (query, e))
                    raise

            return sqlResult
Example #8
    def _changeStatus(self, **kwargs):
        #change status_from of every issue in series to specified status_to
        #if no comicid specified will mark ALL issues in EVERY series from status_from to specific status_to
        #required fields: status_to, status_from. Optional: id  (which is the ComicID if applicable)
        # both fields are required; any() also catches the case where only one of them is missing
        if any(['status_to' not in kwargs, 'status_from' not in kwargs]):
            self.data = self._failureResponse('Missing Status')
            return
        else:
            self.status_to = kwargs['status_to']
            self.status_from = kwargs['status_from']

        if 'id' not in kwargs:
            self.data = self._failureResponse('Missing ID')
            return
        else:
            self.id = kwargs['id']
            if self.id == 'All' or isinstance(self.id, list):
                bulk = True
            else:
                bulk = False

        logger.info('[BULK:%s] [%s --> %s] ComicIDs to Change Status: %s' % (bulk, self.status_from, self.status_to, self.id))

        try:
            self.data = helpers.statusChange(self.status_from, self.status_to, self.id, bulk=bulk, api=True)
        except Exception as e:
            logger.error('[ERROR] %s' % e)
            self.data = e

        return
Example #9
File: mb.py Project: ruinit/mylar
def pullsearch(comicapi, comicquery, offset, explicit, type):
    u_comicquery = urllib.quote(comicquery.encode('utf-8').strip())
    u_comicquery = u_comicquery.replace(" ", "%20")

    if explicit == 'all' or explicit == 'loose':
        PULLURL = mylar.CVURL + 'search?api_key=' + str(comicapi) + '&resources=' + str(type) + '&query=' + u_comicquery + '&field_list=id,name,start_year,first_issue,site_detail_url,count_of_issues,image,publisher,deck,description&format=xml&page=' + str(offset)

    else:
        # 02/22/2014 use the volume filter label to get the right results.
        # add the 's' to the end of type to pluralize the caption (it's needed)
        if type == 'story_arc':
            u_comicquery = re.sub("%20AND%20", "%20", u_comicquery)
        PULLURL = mylar.CVURL + str(type) + 's?api_key=' + str(comicapi) + '&filter=name:' + u_comicquery + '&field_list=id,name,start_year,site_detail_url,count_of_issues,image,publisher,deck,description&format=xml&offset=' + str(offset) # 2012/22/02 - CVAPI flipped back to offset instead of page
    #all these imports are standard on most modern python implementations
    #CV API Check here.
    #logger.info('PULLURL:' + PULLURL)
    if mylar.CVAPI_COUNT == 0 or mylar.CVAPI_COUNT >= mylar.CVAPI_MAX:
        chkit = cvapi_check()
        if chkit is False:
            return 'apireached'
    #download the file:
    try:
        file = urllib2.urlopen(PULLURL)
    except urllib2.HTTPError as err:
        logger.error('err : ' + str(err))
        logger.error("There was a major problem retrieving data from ComicVine - on their end. You'll have to try again later most likely.")
        return
Example #10
File: mb.py Project: ruinit/mylar
def storyarcinfo(xmlid):

    comicLibrary = listLibrary()

    arcinfo = {}

    if mylar.COMICVINE_API == 'None' or mylar.COMICVINE_API is None or mylar.COMICVINE_API == mylar.DEFAULT_CVAPI:
        logger.warn('You have not specified your own ComicVine API key - a lot of things will be limited. Get your own @ http://api.comicvine.com.')
        comicapi = mylar.DEFAULT_CVAPI
    else:
        comicapi = mylar.COMICVINE_API

    #respawn to the exact id for the story arc and count the # of issues present.
    ARCPULL_URL = mylar.CVURL + 'story_arc/4045-' + str(xmlid) + '/?api_key=' + str(comicapi) + '&field_list=issues,name,first_appeared_in_issue,deck,image&format=xml&offset=0'
    logger.fdebug('arcpull_url:' + str(ARCPULL_URL))
    if mylar.CVAPI_COUNT == 0 or mylar.CVAPI_COUNT >= mylar.CVAPI_MAX:
        chkit = cvapi_check()
        if chkit is False:
            return 'apireached'
    try:
        file = urllib2.urlopen(ARCPULL_URL)
    except urllib2.HTTPError as err:
        logger.error('err : ' + str(err))
        logger.error('There was a major problem retrieving data from ComicVine - on their end.')
        return
Example #11
        def test_login(self):
            '''
                This is the method to call if you JUST want to log in using self.un & self.pw.

                Note that this will generate a new session on 32pag.es every time you log in successfully!
                This is why the "keeplogged" option is only for when you persist cookies to disk.

                Note that after a successful login, it will test the session key, which has the side effect of
                getting the authkey, passkey & uid.

                Returns: True (login success), False (login failure).
                Side effects: On success: sets the authkey, uid, passkey and saves the cookies to disk.
                              On failure: clears the cookies and saves that to disk.
            '''
            if (self.valid_login_attempt(self.un, self.pw)):
                if self.cookie_exists('session'):
                    self.ses.cookies.save(ignore_discard=True)
                    if (not self.test_skey_valid()):
                        logger.error("Bad error: The attempt to get your attributes after successful login failed!")
                        self.error = {'status': 'Bad error', 'message': 'Attempt to get attributes after successful login failed.'}
                        return False
                    return True

                logger.warn("Missing session cookie after successful login: %s", self.ses.cookies)
            self.ses.cookies.clear()
            self.ses.cookies.save()
            return False
Example #13
    def downloadfile(self, payload, filepath):
        url = 'https://32pag.es/torrents.php'
        try:
            r = self.session.get(url, params=payload, verify=True, stream=True, allow_redirects=True)
        except Exception as e:
            logger.error('%s [%s] Could not open URL %s' % ('[32P-DOWNLOADER]', e, url))
            return False

        if str(r.status_code) != '200':
            logger.warn('Unable to download torrent from 32P [Status Code returned: %s]' % r.status_code)
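            # NOTE: 'site' and 'linkit' are not defined in this method; they appear to rely on
            # names from the enclosing scope in the original source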
            if str(r.status_code) == '404' and site == '32P':
                logger.warn('[32P-CACHED_ENTRY] Entry found in 32P cache - incorrect. Torrent has probably been merged into a pack, or another series id. Removing from cache.')
                helpers.delete_cache_entry(linkit)
            else:
                logger.info('content: %s' % r.content)
            return False


        with open(filepath, 'wb') as f:
            for chunk in r.iter_content(chunk_size=1024):
                if chunk: # filter out keep-alive new chunks
                    f.write(chunk)
                    f.flush()

        return True
Example #14
    def downloadfile(self, payload, filepath):
        url = 'https://32pag.es/torrents.php'
        try:
            r = self.session.get(url,
                                 params=payload,
                                 verify=True,
                                 stream=True,
                                 allow_redirects=True)
        except Exception as e:
            logger.error('%s [%s] Could not open URL %s' %
                         ('[32P-DOWNLOADER]', e, url))
            return False

        if str(r.status_code) != '200':
            logger.warn(
                'Unable to download torrent from 32P [Status Code returned: %s]'
                % r.status_code)
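            # NOTE: as in Example #13, 'site' and 'linkit' are not defined in this scope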
            if str(r.status_code) == '404' and site == '32P':
                logger.warn(
                    '[32P-CACHED_ENTRY] Entry found in 32P cache - incorrect. Torrent has probably been merged into a pack, or another series id. Removing from cache.'
                )
                helpers.delete_cache_entry(linkit)
            else:
                logger.info('content: %s' % r.content)
            return False

        with open(filepath, 'wb') as f:
            for chunk in r.iter_content(chunk_size=1024):
                if chunk:  # filter out keep-alive new chunks
                    f.write(chunk)
                    f.flush()

        return True
Example #15
        def __init__(self, un, pw, session_path=None):
            '''
                Params:
                    un: account username (required)
                    pw: account password (required)
                    session_path: the path to the actual file you want to persist your cookies in
                                If blank, saves to $HOME/.32p_cookies.dat

            '''
            self.module = '[32P-AUTHENTICATION]'
            try:
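                # cfscrape solves Cloudflare's anti-bot browser check before regular requests can
                # proceed; delay=15 gives the challenge page time to complete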
                self.ses = cfscrape.create_scraper(delay=15)
            except Exception as e:
                logger.error('%s Can\'t create session with cfscrape' %
                             self.module)

            self.session_path = session_path if session_path is not None else os.path.join(
                mylar.CONFIG.SECURE_DIR, ".32p_cookies.dat")
            self.ses.cookies = LWPCookieJar(self.session_path)
            if not os.path.exists(self.session_path):
                logger.fdebug(
                    '%s Session cookie does not exist. Signing in and Creating.'
                    % self.module)
                self.ses.cookies.save()
            else:
                logger.fdebug(
                    '%s Session cookie found. Attempting to load...' %
                    self.module)
                self.ses.cookies.load(ignore_discard=True)
            self.un = un
            self.pw = pw
            self.authkey = None
            self.passkey = None
            self.uid = None
            self.inkdrops = None
Example #16
    def comic_config(self, com_location, ComicID, alt_search=None, fuzzy_year=None):
        myDB = db.DBConnection()
#--- this is for multiple search terms............
#--- works, just need to redo search.py to accommodate multiple search terms
#        ffs_alt = []
#        if '+' in alt_search:
            #find first +
#            ffs = alt_search.find('+')
#            ffs_alt.append(alt_search[:ffs])
#            ffs_alt_st = str(ffs_alt[0])
#            print("ffs_alt: " + str(ffs_alt[0]))

            # split the entire string by the delimiter +
#            ffs_test = alt_search.split('+')
#            if len(ffs_test) > 0:
#                print("ffs_test names: " + str(len(ffs_test)))
#                ffs_count = len(ffs_test)
#                n=1
#                while (n < ffs_count):
#                    ffs_alt.append(ffs_test[n])
#                    print("adding : " + str(ffs_test[n]))
                    #print("ffs_alt : " + str(ffs_alt))
#                    ffs_alt_st = str(ffs_alt_st) + "..." + str(ffs_test[n])
#                    n+=1
#            asearch = ffs_alt
#        else:
#            asearch = alt_search
        asearch = str(alt_search)

        controlValueDict = {'ComicID': ComicID}
        newValues = {"ComicLocation":        com_location }
                     #"QUALalt_vers":         qual_altvers,
                     #"QUALScanner":          qual_scanner,
                     #"QUALtype":             qual_type,
                     #"QUALquality":          qual_quality
                     #}
        # asearch is always a string here (str() of alt_search), so this check always passes;
        # a None alt_search arrives as the literal string 'None'
        if asearch is not None:
            if asearch == '':
                newValues['AlternateSearch'] = "None"
            else:
                newValues['AlternateSearch'] = str(asearch)

        if fuzzy_year is None:
            newValues['UseFuzzy'] = "0"
        else:
            newValues['UseFuzzy'] = str(fuzzy_year)

        #force the check/creation of directory com_location here
        if os.path.isdir(str(com_location)):
            logger.info(u"Validating Directory (" + str(com_location) + "). Already exists! Continuing...")
        else:
            logger.fdebug("Updated Directory doesn't exist! - attempting to create now.")
            try:
                os.makedirs(str(com_location))
                logger.info(u"Directory successfully created at: " + str(com_location))
            except OSError:
                logger.error(u"Could not create comicdir : " + str(com_location))

        myDB.upsert("comics", newValues, controlValueDict)
        raise cherrypy.HTTPRedirect("artistPage?ComicID=%s" % ComicID)
Example #17
File: db.py Project: ChaniD/mylar
    def action(self, query, args=None):
    
        with db_lock:

            if query is None:
                return
                
            sqlResult = None
            attempt = 0
            
            while attempt < 5:
                try:
                    if args is None:
                        #logger.debug(self.filename+": "+query)
                        sqlResult = self.connection.execute(query)
                    else:
                        #logger.debug(self.filename+": "+query+" with args "+str(args))
                        sqlResult = self.connection.execute(query, args)
                    self.connection.commit()
                    break
                except sqlite3.OperationalError as e:
                    if "unable to open database file" in e.args[0] or "database is locked" in e.args[0]:
                        logger.warn('Database Error: %s' % e)
                        logger.warn('query: %s' % query)
                        attempt += 1
                        time.sleep(1)
                    else:
                        logger.error('Database error executing %s :: %s' % (query, e))
                        raise
                except sqlite3.DatabaseError as e:
                    logger.error('Fatal Error executing %s :: %s' % (query, e))
                    raise

            return sqlResult
Example #18
def torsend2client(seriesname, linkit, site):
    logger.info('matched on ' + str(seriesname))
    filename = re.sub(r"['!@#$%:;/=?.]", '', seriesname)
    if site == 'ComicBT':
        logger.info(linkit)
        linkit = str(linkit) + '&passkey=' + str(mylar.CBT_PASSKEY)

    if linkit[-7:] != "torrent":
        filename += ".torrent"

    request = urllib2.Request(linkit)
    request.add_header('User-Agent', str(mylar.USER_AGENT))
    if mylar.TORRENT_LOCAL and mylar.LOCAL_WATCHDIR is not None:
        filepath = os.path.join(mylar.LOCAL_WATCHDIR, filename)
        logger.fdebug('filename for torrent set to : ' + filepath)
    elif mylar.TORRENT_SEEDBOX and mylar.SEEDBOX_WATCHDIR is not None:
        filepath = os.path.join(mylar.CACHE_DIR, filename)
        logger.fdebug('filename for torrent set to : ' + filepath)
    else:
        logger.error('No Local Watch Directory or Seedbox Watch Directory specified. Set it and try again.')
        return "fail"

    try:
        opener = helpers.urlretrieve(urllib2.urlopen(request), filepath)
    except Exception as e:
        logger.warn('Error fetching data from %s: %s' % (site, e))
        return "fail"
Example #19
    def __init__(self, reauthenticate=False, searchterm=None, test=False):

        self.module = '[32P-AUTHENTICATION]'
        self.url = 'https://32pag.es/user.php?action=notify'
        self.headers = {'Content-type': 'application/x-www-form-urlencoded',
                        'Accept-Charset': 'utf-8',
                        'User-Agent': 'Mozilla/5.0'}

        self.error = None
        self.method = None
        # set these up front so self.test is defined before the login check below
        self.reauthenticate = reauthenticate
        self.searchterm = searchterm
        self.test = test
        self.publisher_list = {'Entertainment', 'Press', 'Comics', 'Publishing', 'Comix', 'Studios!'}

        lses = self.LoginSession(mylar.USERNAME_32P, mylar.PASSWORD_32P)

        if not lses.login():
            if not self.test:
                logger.error(self.module + ' [LOGIN FAILED] Disabling 32P provider until login error(s) can be fixed in order to avoid temporary bans.')
                # __init__ cannot return a value; record the failure on the instance instead
                self.method = 'disable'
                return
            else:
                # self.error / self.method already carry the failure details for the caller
                return
        else:
            logger.info(self.module + ' [LOGIN SUCCESS] Now preparing for the use of 32P keyed authentication...')
            self.authkey = lses.authkey
            self.passkey = lses.passkey
            self.uid = lses.uid
Example #20
def pullsearch(comicapi, comicquery, offset, explicit):
    u_comicquery = urllib.quote(comicquery.encode('utf-8').strip())
    u_comicquery = u_comicquery.replace(" ", "%20")

    if explicit == 'all' or explicit == 'loose':
        PULLURL = mylar.CVURL + 'search?api_key=' + str(comicapi) + '&resources=volume&query=' + u_comicquery + '&field_list=id,name,start_year,site_detail_url,count_of_issues,image,publisher,description&format=xml&page=' + str(offset)

    else:
        # 02/22/2014 use the volume filter label to get the right results.
        PULLURL = mylar.CVURL + 'volumes?api_key=' + str(comicapi) + '&filter=name:' + u_comicquery + '&field_list=id,name,start_year,site_detail_url,count_of_issues,image,publisher,description&format=xml&offset=' + str(offset)  # 2012/22/02 - CVAPI flipped back to offset instead of page

    #all these imports are standard on most modern python implementations
    #download the file:
    try:
        file = urllib2.urlopen(PULLURL)
    except urllib2.HTTPError as err:
        logger.error('err : ' + str(err))
        logger.error(
            "There was a major problem retrieving data from ComicVine - on their end. You'll have to try again later most likely."
        )
        return
Example #21
    def __init__(self):
        self.client = TorClient.TorrentClient()
        if not self.client.connect(mylar.RTORRENT_HOST,
                                   mylar.RTORRENT_USERNAME,
                                   mylar.RTORRENT_PASSWORD):
            logger.error('could not connect to %s, exiting', mylar.RTORRENT_HOST)
            sys.exit(-1)
Example #22
        def __init__(self, un, pw, session_path=None):
            '''
                Params:
                    un: account username (required)
                    pw: account password (required)
                    session_path: the path to the actual file you want to persist your cookies in
                                If blank, saves to $HOME/.32p_cookies.dat

            '''
            self.module = '[32P-AUTHENTICATION]'
            try:
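                # cfscrape solves Cloudflare's anti-bot browser check before regular requests can proceed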
                self.ses = cfscrape.create_scraper()
            except Exception as e:
                logger.error(self.module + " Can't create session with cfscrape")

            self.session_path = session_path if session_path is not None else os.path.join(mylar.CACHE_DIR, ".32p_cookies.dat")
            self.ses.cookies = LWPCookieJar(self.session_path)
            if not os.path.exists(self.session_path):
                logger.fdebug(self.module + ' Session cookie does not exist. Signing in and Creating.')
                self.ses.cookies.save()
            else:
                logger.fdebug(self.module + ' Session cookie found. Attempting to load...')
                self.ses.cookies.load(ignore_discard=True)
            self.un = un
            self.pw = pw
            self.authkey = None
            self.passkey = None
            self.uid = None
            self.inkdrops = None
Example #24
    def notify(self, event, message=None, snatched_nzb=None, prov=None, sent_to=None, module=None):
        if not mylar.PUSHOVER_ENABLED:
            return
        if module is None:
            module = ''
        module += '[NOTIFIER]'

        if snatched_nzb:
            # strip a trailing period from the release name
            if snatched_nzb[-1] == '.':
                snatched_nzb = snatched_nzb[:-1]
            message = "Mylar has snatched: " + snatched_nzb + " from " + prov + " and has sent it to " + sent_to

        data = {'token': mylar.PUSHOVER_APIKEY,
                'user': mylar.PUSHOVER_USERKEY,
                'message': message.encode("utf-8"),
                'title': event,
                'priority': mylar.PUSHOVER_PRIORITY}

        r = self._session.post(self.PUSHOVER_URL, data=data, verify=True)

        if r.status_code == 200:
            logger.info(module + ' PushOver notifications sent.')
            return True
        elif r.status_code >= 400 and r.status_code < 500:
            logger.error(module + ' PushOver request failed: %s' % r.content)
            return False
        else:
            logger.error(module + ' PushOver notification failed serverside.')
            return False
Example #26
def movefiles(comicid, comlocation, imported):
    #comlocation is destination
    #comicid is used for rename
    files_moved = []
    try:
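        # 'imported' may arrive as a string-encoded dict; literal_eval restores the original structure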
        imported = ast.literal_eval(imported)
    except ValueError:
        pass

    myDB = db.DBConnection()

    logger.fdebug('comlocation is : ' + comlocation)
    logger.fdebug('original comicname is : ' + imported['ComicName'])

    impres = imported['filelisting']

    if impres is not None:
        for impr in impres:
            srcimp = impr['comiclocation']
            orig_filename = impr['comicfilename']
            #before moving check to see if Rename to Mylar structure is enabled.
            if mylar.IMP_RENAME and mylar.FILE_FORMAT != '':
                logger.fdebug("Renaming files according to configuration details : " + str(mylar.FILE_FORMAT))
                renameit = helpers.rename_param(comicid, imported['ComicName'], impr['issuenumber'], orig_filename)
                nfilename = renameit['nfilename']
                dstimp = os.path.join(comlocation, nfilename)
            else:
                logger.fdebug("Renaming files not enabled, keeping original filename(s)")
                dstimp = os.path.join(comlocation, orig_filename)

            logger.info("moving " + srcimp + " ... to " + dstimp)
            try:
                shutil.move(srcimp, dstimp)
                files_moved.append({'srid':     imported['srid'],
                                    'filename': impr['comicfilename']})
            except (OSError, IOError):
                logger.error("Failed to move files - check directories and manually re-run.")

        logger.fdebug("all files moved.")
        #now that it's moved / renamed ... we remove it from importResults or mark as completed.

    if len(files_moved) > 0:
        logger.info('files_moved: ' + str(files_moved))
        for result in files_moved:
            try:
                res = result['import_id']
            except KeyError:
                #if it's an 'older' import that wasn't imported, just make it a basic match so things can move and update properly.
                controlValue = {"ComicFilename": result['filename'],
                                "SRID":          result['srid']}
                newValue = {"Status":            "Imported",
                            "ComicID":           comicid}
            else:
                controlValue = {"impID":         result['import_id'],
                                "ComicFilename": result['filename']}
                newValue = {"Status":            "Imported",
                            "SRID":              result['srid'],
                            "ComicID":           comicid}
            myDB.upsert("importresults", newValue, controlValue)
    return
Example #27
def csv_load():
    # for redundant module calls, include this.
    conn = sqlite3.connect(DB_FILE)
    c = conn.cursor()

    c.execute("DROP TABLE IF EXISTS exceptions")

    c.execute("CREATE TABLE IF NOT EXISTS exceptions (variloop TEXT, ComicID TEXT, NewComicID TEXT, GComicID TEXT)")

    # for Mylar-based Exception Updates....
    i = 0
    EXCEPTIONS = []
    EXCEPTIONS.append("exceptions.csv")
    EXCEPTIONS.append("custom_exceptions.csv")

    while i <= 1:
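        # pass 0 loads the bundled exceptions.csv; pass 1 loads the user's custom_exceptions.csv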
        # EXCEPTIONS_FILE = os.path.join(DATA_DIR, 'exceptions.csv')
        EXCEPTIONS_FILE = os.path.join(DATA_DIR, EXCEPTIONS[i])

        if not os.path.exists(EXCEPTIONS_FILE):
            try:
                csvfile = open(str(EXCEPTIONS_FILE), "rb")
            except (OSError, IOError):
                if i == 1:
                    logger.info(
                        "No Custom Exceptions found - Using base exceptions only. Creating blank custom_exceptions for your personal use."
                    )
                    try:
                        shutil.copy(os.path.join(DATA_DIR, "custom_exceptions_sample.csv"), EXCEPTIONS_FILE)
                    except (OSError, IOError):
                        logger.error(
                            "Cannot create custom_exceptions.csv in "
                            + str(DATA_DIR)
                            + ". Make sure _sample.csv is present and/or check permissions."
                        )
                        return
                else:
                    logger.error(
                        "Could not locate " + str(EXCEPTIONS[i]) + " file. Make sure it's in datadir: " + DATA_DIR
                    )
                break
        else:
            csvfile = open(str(EXCEPTIONS_FILE), "rb")
        if i == 0:
            logger.info(u"Populating Base Exception listings into Mylar....")
        elif i == 1:
            logger.info(u"Populating Custom Exception listings into Mylar....")

        creader = csv.reader(csvfile, delimiter=",")

        for row in creader:
            try:
                c.execute("INSERT INTO exceptions VALUES (?,?,?,?);", row)
            except Exception:
                # print("Error - invalid arguments... skipping")
                pass
        csvfile.close()
        i += 1
Example #28
    def __init__(self, reauthenticate=False, searchterm=None, test=False):

        self.module = '[32P-AUTHENTICATION]'
        self.url = 'https://32pag.es/user.php?action=notify'
        self.headers = {
            'Content-type': 'application/x-www-form-urlencoded',
            'Accept-Charset': 'utf-8',
            'User-Agent': 'Mozilla/5.0'
        }

        if test:
            self.username_32p = test['username']
            self.password_32p = test['password']
            self.test = True
        else:
            self.username_32p = mylar.CONFIG.USERNAME_32P
            self.password_32p = mylar.CONFIG.PASSWORD_32P
            self.test = False

        self.error = None
        self.method = None

        if any([mylar.CONFIG.MODE_32P is True, self.test is True]):
            # use the resolved credentials so a test login honours the supplied username/password
            lses = self.LoginSession(self.username_32p, self.password_32p)
            if not lses.login():
                if not self.test:
                    logger.error(
                        '%s [LOGIN FAILED] Disabling 32P provider until login error(s) can be fixed in order to avoid temporary bans.'
                        % self.module)
                    # __init__ cannot return a value; record the failure on the instance instead
                    self.method = 'disable'
                    return
                else:
                    # self.error / self.method already carry the failure details
                    return
            else:
                logger.fdebug(
                    '%s [LOGIN SUCCESS] Now preparing for the use of 32P keyed authentication...'
                    % self.module)
                self.authkey = lses.authkey
                self.passkey = lses.passkey
                self.session = lses.ses
                self.uid = lses.uid
                try:
                    mylar.INKDROPS_32P = int(
                        math.floor(
                            float(lses.inkdrops['results'][0]['inkdrops'])))
                except Exception:
                    mylar.INKDROPS_32P = lses.inkdrops['results'][0][
                        'inkdrops']
        else:
            self.session = requests.Session()
        self.reauthenticate = reauthenticate
        self.searchterm = searchterm
        self.publisher_list = {
            'Entertainment', 'Press', 'Comics', 'Publishing', 'Comix',
            'Studios!'
        }
Example #29
    def notify(self,
               snline=None,
               prline=None,
               prline2=None,
               snatched=None,
               sent_to=None,
               prov=None,
               module=None):
        if not mylar.PUSHBULLET_ENABLED:
            return
        if module is None:
            module = ''
        module += '[NOTIFIER]'

        if snatched:
            if snatched[-1] == '.': snatched = snatched[:-1]
            event = snline
            message = "Mylar has snatched: " + snatched + " from " + prov + " and has sent it to " + sent_to
        else:
            event = prline + ' complete!'
            message = prline2

        http_handler = HTTPSConnection("api.pushbullet.com")
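        # Pushbullet's API authenticates with HTTP Basic auth: the API key is the username and the password is left blank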

        data = {
            'device_iden': mylar.PUSHBULLET_DEVICEID,
            'type': "note",
            'title': event,  #"mylar",
            'body': message.encode("utf-8")
        }

        http_handler.request(
            "POST",
            "/api/pushes",
            headers={
                'Content-type':
                "application/x-www-form-urlencoded",
                'Authorization':
                'Basic %s' % base64.b64encode(mylar.PUSHBULLET_APIKEY + ":")
            },
            body=urlencode(data))
        response = http_handler.getresponse()
        request_status = response.status
        #logger.debug(u"PushBullet response status: %r" % request_status)
        #logger.debug(u"PushBullet response headers: %r" % response.getheaders())
        #logger.debug(u"PushBullet response body: %r" % response.read())

        if request_status == 200:
            logger.fdebug(module + ' PushBullet notifications sent.')
            return True
        elif request_status >= 400 and request_status < 500:
            logger.error(module +
                         ' PushBullet request failed: %s' % response.reason)
            return False
        else:
            logger.error(module +
                         ' PushBullet notification failed serverside.')
            return False
Example #30
    def notify(self,
               snline=None,
               prline=None,
               prline2=None,
               snatched=None,
               sent_to=None,
               prov=None,
               module=None,
               method=None):
        if not mylar.PUSHBULLET_ENABLED:
            return
        if module is None:
            module = ''
        module += '[NOTIFIER]'

        #        http_handler = HTTPSConnection("api.pushbullet.com")

        #        if method == 'GET':
        #            uri = '/v2/devices'
        #        else:
        #            method = 'POST'
        #            uri = '/v2/pushes'

        #        authString = base64.b64encode(self.apikey + ":")

        if method == 'GET':
            # a device listing sends no payload; defining data avoids a NameError at the post() below
            data = None
#           http_handler.request(method, uri, None, headers={'Authorization': 'Basic %s:' % authString})
        else:
            if snatched:
                if snatched[-1] == '.': snatched = snatched[:-1]
                event = snline
                message = "Mylar has snatched: " + snatched + " from " + prov + " and has sent it to " + sent_to
            else:
                event = prline + ' complete!'
                message = prline2

            data = {
                'type': "note",  #'device_iden': self.deviceid,
                'title': event.encode('utf-8'),  #"mylar",
                'body': message.encode('utf-8')
            }

        r = self._session.post(self.PUSH_URL, data=json.dumps(data))

        if r.status_code == 200:
            if method == 'GET':
                return r.json()
            else:
                logger.info(module + ' PushBullet notifications sent.')
                return True
        elif r.status_code >= 400 and r.status_code < 500:
            logger.error(module + ' PushBullet request failed: %s' % r.content)
            return False
        else:
            logger.error(module +
                         ' PushBullet notification failed serverside.')
            return False
Example #31
def searchit(cm):
    entries = []
    mres = {}

    if mylar.NZBX:
        provider = "nzbx"
        #stringsearch = str(cm) + "%20" + str(issue) + "%20" + str(year)
        searchURL = 'https://nzbx.co/api/search?cat=7030&q=' + str(cm)

        logger.fdebug(u'Parsing results from <a href="%s">nzbx.co</a>' %
                      searchURL)
        request = urllib2.Request(searchURL)
        request.add_header('User-Agent', str(mylar.USER_AGENT))
        opener = urllib2.build_opener()

        try:
            data = opener.open(request).read()
        except Exception as e:
            logger.warn('Error fetching data from nzbx.co : %s' % str(e))
            data = False
            return "no results"

        if data:

            d = json.loads(data)

            if not len(d):
                logger.info(u"No results found from nzbx.co")
                return "no results"

            else:
                for item in d:
                    try:
                        url = item['nzb']
                        title = item['name']
                        size = item['size']
                        nzbdate = datetime.datetime.fromtimestamp(
                            item['postdate'])
                        nzbage = abs((datetime.datetime.now() - nzbdate).days)
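                        # discard anything older than the configured usenet retention window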
                        if nzbage <= int(mylar.USENET_RETENTION):
                            entries.append({
                                'title': str(title),
                                'link': str(url)
                            })
                            #logger.fdebug('Found %s. Size: %s' % (title, helpers.bytes_to_mb(size)))
                        else:
                            logger.fdebug(
                                '%s outside usenet retention: %s days.' %
                                (title, nzbage))

                        #resultlist.append((title, size, url, provider))
                        #logger.fdebug('Found %s. Size: %s' % (title, helpers.bytes_to_mb(size)))

                    except Exception as e:
                        logger.error(
                            u"An unknown error occurred trying to parse the feed: %s"
                            % e)
Example #32
    def notify(self, snline=None, prline=None, prline2=None, snatched=None, sent_to=None, prov=None, module=None, method=None):
        if not mylar.PUSHBULLET_ENABLED:
            return
        if module is None:
            module = ''
        module += '[NOTIFIER]'
        
        http_handler = HTTPSConnection("api.pushbullet.com")

        if method == 'GET':
            uri = '/v2/devices'
        else:
            method = 'POST'
            uri = '/v2/pushes'

        authString = base64.b64encode(self.apikey + ":")

        if method == 'GET':
            http_handler.request(method, uri, None, headers={'Authorization': 'Basic %s:' % authString})
        else:
            if snatched:
                if snatched[-1] == '.': snatched = snatched[:-1]
                event = snline
                message = "Mylar has snatched: " + snatched + " from " + prov + " and has sent it to " + sent_to
            else:
                event = prline + ' complete!'
                message = prline2

            data = {'type': "note", #'device_iden': self.deviceid,
                    'title': event.encode('utf-8'), #"mylar",
                    'body': message.encode('utf-8')}

            # keep the push POST inside the else-branch: on a GET, 'data' is never defined
            http_handler.request("POST",
                                 "/v2/pushes",
                                 headers={'Content-type': "application/json",
                                          'Authorization': 'Basic %s' % base64.b64encode(mylar.PUSHBULLET_APIKEY + ":")},
                                 body=json.dumps(data))

        response = http_handler.getresponse()
        request_body = response.read()
        request_status = response.status
        #logger.fdebug(u"PushBullet response status: %r" % request_status)
        #logger.fdebug(u"PushBullet response headers: %r" % response.getheaders())
        #logger.fdebug(u"PushBullet response body: %r" % response.read())

        if request_status == 200:
            if method == 'GET':
                return request_body
            else:
                logger.info(module + ' PushBullet notifications sent.')
                return True
        elif request_status >= 400 and request_status < 500:
            logger.error(module + ' PushBullet request failed: %s' % response.reason)
            return False
        else:
            logger.error(module + ' PushBullet notification failed serverside.')
            return False
Example #33
def launch_browser(host, port, root):

    if host == "0.0.0.0":
        host = "localhost"

    try:
        webbrowser.open("http://%s:%i%s" % (host, port, root))
    except Exception as e:
        logger.error("Could not launch browser: %s" % e)
Example #34
def launch_browser(host, port, root):

    if host == '0.0.0.0':
        host = 'localhost'
    
    try:
        webbrowser.open('http://%s:%i%s' % (host, port, root))
    except Exception as e:
        logger.error('Could not launch browser: %s' % e)
Example #36
def runGit(args):

    git_locations = []
    if mylar.CONFIG.GIT_PATH is not None:
        git_locations.append(mylar.CONFIG.GIT_PATH)

    git_locations.append('git')

    if platform.system().lower() == 'darwin':
        git_locations.append('/usr/local/git/bin/git')

    output = err = None

    for cur_git in git_locations:
        gitworked = False

        cmd = '%s %s' % (cur_git, args)

        try:
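            # note: subprocess.run() with text=/capture_output= requires Python 3.7+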
            logger.debug('Trying to execute: %s with shell in %s' %
                         (cmd, mylar.PROG_DIR))
            output = subprocess.run(cmd,
                                    text=True,
                                    capture_output=True,
                                    shell=True,
                                    cwd=mylar.PROG_DIR)
            logger.debug('Git output: %s' % output)
            gitworked = True
        except Exception as e:
            logger.error('Command %s didn\'t work [%s]' % (cmd, e))
            gitworked = False
            continue
        else:
            if all([
                    output.stderr is not None, output.stderr != '',
                    output.returncode > 0
            ]):
                logger.error('Encountered error: %s' % output.stderr)
                gitworked = False

        if "not found" in output.stdout or "not recognized as an internal or external command" in output.stdout:
            logger.error('[%s] Unable to find git with command: %s' %
                         (output.stdout, cmd))
            output = None
            gitworked = False
        elif ('fatal:' in output.stdout) or ('fatal:' in output.stderr):
            logger.error('Error: %s' % output.stderr)
            logger.error(
                'Git returned bad info. Are you sure this is a git installation? [%s]'
                % output.stdout)
            output = None
            gitworked = False
        elif gitworked:
            break

    if output is None:
        return (None, None)

    return (output.stdout, output.stderr)
Example #37
def scanLibrary(scan=None, queue=None):
    valreturn = []
    if scan:
        try:
            soma, noids = libraryScan()
        except Exception as e:
            logger.error('Unable to complete the scan: %s' % e)
            return
        if soma == "Completed":
            logger.info('Successfully completed import.')
        else:
            logger.info('Starting mass importing...' + str(noids) +
                        ' records.')
            #this is what it should do...
            #store soma (the list of comic_details from importing) into sql table so import can be whenever
            #display webpage showing results
            #allow user to select comic to add (one at a time)
            #call addComic off of the webpage to initiate the add.
            #return to result page to finish or continue adding.
            #....
            #threading.Thread(target=self.searchit).start()
            #threadthis = threadit.ThreadUrl()
            #result = threadthis.main(soma)
            myDB = db.DBConnection()
            sl = 0
            logger.fdebug("number of records: " + str(noids))
            while (sl < int(noids)):
                soma_sl = soma['comic_info'][sl]
                logger.fdebug("soma_sl: " + str(soma_sl))
                logger.fdebug("comicname: " +
                              soma_sl['comicname'].encode('utf-8'))
                logger.fdebug("filename: " +
                              soma_sl['comfilename'].encode('utf-8'))
                controlValue = {"impID": soma_sl['impid']}
                newValue = {
                    "ComicYear": soma_sl['comicyear'],
                    "Status": "Not Imported",
                    "ComicName": soma_sl['comicname'].encode('utf-8'),
                    "DisplayName": soma_sl['displayname'].encode('utf-8'),
                    "ComicFilename": soma_sl['comfilename'].encode('utf-8'),
                    "ComicLocation": soma_sl['comlocation'].encode('utf-8'),
                    "ImportDate": helpers.today(),
                    "WatchMatch": soma_sl['watchmatch']
                }
                myDB.upsert("importresults", newValue, controlValue)
                sl += 1
            # because we could be adding volumes/series that span years, we need to account for this
            # add the year to the db under the term, valid-years
            # add the issue to the db under the term, min-issue

            #locate metadata here.
            # unzip -z filename.cbz will show the comment field of the zip which contains the metadata.

        #self.importResults()
        valreturn.append({"somevalue": 'self.ie', "result": 'success'})
        return queue.put(valreturn)
Example #38
def torsend2client(seriesname, issue, seriesyear, linkit, site):
    logger.info('matched on ' + str(seriesname))
    filename = re.sub(r"['!@#$%:;/=?.]", '', seriesname)
    filename = re.sub(' ', '_', filename)
    filename += "_" + str(issue) + "_" + str(seriesyear)
    if site == 'CBT':
        logger.info(linkit)
        linkit = str(linkit) + '&passkey=' + str(mylar.CBT_PASSKEY)

    if linkit[-7:] != "torrent":  # and site != "KAT":
        filename += ".torrent"

    if mylar.TORRENT_LOCAL and mylar.LOCAL_WATCHDIR is not None:
        filepath = os.path.join(mylar.LOCAL_WATCHDIR, filename)
        logger.fdebug('filename for torrent set to : ' + filepath)
    elif mylar.TORRENT_SEEDBOX and mylar.SEEDBOX_WATCHDIR is not None:
        filepath = os.path.join(mylar.CACHE_DIR, filename)
        logger.fdebug('filename for torrent set to : ' + filepath)
    else:
        logger.error(
            'No Local Watch Directory or Seedbox Watch Directory specified. Set it and try again.'
        )
        return "fail"

    try:
        request = urllib2.Request(linkit)
        #request.add_header('User-Agent', str(mylar.USER_AGENT))
        request.add_header('Accept-encoding', 'gzip')
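        # advertise gzip support; gzipped responses (e.g. from KAT) are decompressed below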

        if site == 'KAT':
            stfind = linkit.find('?')
            kat_referrer = linkit[:stfind]
            request.add_header('Referer', kat_referrer)
            logger.fdebug('KAT Referer set to :' + kat_referrer)


        # response = helpers.urlretrieve(urllib2.urlopen(request), filepath)
        response = urllib2.urlopen(request)
        logger.fdebug('retrieved response.')

        if site == 'KAT':
            if response.info()['content-encoding'] == 'gzip':  # .get('Content-Encoding') == 'gzip':
                logger.fdebug('gzip detected')
                buf = StringIO(response.read())
                logger.fdebug('gzip buffered')
                f = gzip.GzipFile(fileobj=buf)
                logger.fdebug('gzip filed.')
                torrent = f.read()
                logger.fdebug('gzip read.')
        else:
            torrent = response.read()

    except Exception, e:
        logger.warn('Error fetching data from %s: %s' % (site, e))
        return "fail"
Example #39
0
    def notify(self,
               snline=None,
               prline=None,
               prline2=None,
               snatched=None,
               sent_to=None,
               prov=None,
               module=None,
               method=None):
        if module is None:
            module = ''
        module += '[NOTIFIER]'

        if method == 'GET':
            data = None
            self.PUSH_URL = 'https://api.pushbullet.com/v2/devices'
        else:
            if snatched:
                if snatched[-1] == '.': snatched = snatched[:-1]
                event = snline
                message = "Mylar has snatched: " + snatched + " from " + prov + " and " + sent_to
            else:
                event = prline + ' complete!'
                message = prline2
            data = {'type': 'note', 'title': event, 'body': message}

            if self.channel_tag:
                data['channel_tag'] = self.channel_tag

        if method == 'GET':
            #the devices endpoint is queried with GET, not POST
            r = self._session.get(self.PUSH_URL)
        else:
            r = self._session.post(self.PUSH_URL, data=json.dumps(data))
        dt = r.json()
        if r.status_code == 200:
            if method == 'GET':
                return dt
            else:
                logger.info(module + ' PushBullet notifications sent.')
                return {
                    'status': True,
                    'message': 'APIKEY verified OK / notification sent'
                }
        elif r.status_code >= 400 and r.status_code < 500:
            logger.error(module + ' PushBullet request failed: %s' % r.content)
            return {
                'status': False,
                'message':
                '[' + str(r.status_code) + '] ' + dt['error']['message']
            }
        else:
            logger.error(module +
                         ' PushBullet notification failed serverside: %s' %
                         r.content)
            return {
                'status': False,
                'message':
                '[' + str(r.status_code) + '] ' + dt['error']['message']
            }
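Both PushBullet examples (this one and Example #54 below) lean on a pre-built `self._session`. A minimal sketch of that setup, assuming `apikey` holds the configured access token (PushBullet accepts the token as the basic-auth username):

import requests

session = requests.Session()
session.auth = (apikey, '')  #token as username, blank password
session.headers.update({'Content-Type': 'application/json'})  #bodies are sent as raw JSON strings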
Example #40
0
def movefiles(comicid, comlocation, imported):
    #comlocation is destination
    #comicid is used for rename
    files_moved = []

    myDB = db.DBConnection()

    logger.fdebug('comlocation is : ' + str(comlocation))
    logger.fdebug('original comicname is : ' + str(imported['ComicName']))

    impres = imported['filelisting']
    #impres = myDB.select("SELECT * from importresults WHERE ComicName=?", [ogcname])

    if impres is not None:
        for impr in impres:
            srcimp = impr['comiclocation']
            orig_filename = impr['comicfilename']
            #before moving check to see if Rename to Mylar structure is enabled.
            if mylar.IMP_RENAME and mylar.FILE_FORMAT != '':
                logger.fdebug(
                    "Renaming files according to configuration details : " +
                    str(mylar.FILE_FORMAT))
                renameit = helpers.rename_param(comicid, imported['ComicName'],
                                                impr['issuenumber'],
                                                orig_filename)
                nfilename = renameit['nfilename']
                dstimp = os.path.join(comlocation, nfilename)
            else:
                logger.fdebug(
                    "Renaming files not enabled, keeping original filename(s)")
                dstimp = os.path.join(comlocation, orig_filename)

            logger.info("moving " + srcimp + " ... to " + dstimp)
            try:
                shutil.move(srcimp, dstimp)
                files_moved.append({
                    'srid': imported['srid'],
                    'filename': impr['comicfilename']
                })
            except (OSError, IOError):
                logger.error(
                    "Failed to move files - check directories and manually re-run."
                )

        logger.fdebug("all files moved.")
        #now that it's moved / renamed ... we remove it from importResults or mark as completed.

    if len(files_moved) > 0:
        for result in files_moved:
            controlValue = {
                "ComicFilename": result['filename'],
                "SRID": result['srid']
            }
            newValue = {"Status": "Imported", "ComicID": comicid}
            myDB.upsert("importresults", newValue, controlValue)
    return
Example #41
0
    def addtoreadlist(self):
        annualize = False
        myDB = db.DBConnection()
        readlist = myDB.selectone("SELECT * from issues where IssueID=?", [self.IssueID]).fetchone()
        if readlist is None:
            logger.fdebug(self.module + ' Checking against annuals..')
            readlist = myDB.selectone("SELECT * from annuals where IssueID=?", [self.IssueID]).fetchone()
            if readlist is None:
                logger.error(self.module + ' Cannot locate IssueID - aborting..')
                return
            else:
                annualize = True
        comicinfo = myDB.selectone("SELECT * from comics where ComicID=?", [readlist['ComicID']]).fetchone()
        logger.info(self.module + ' Attempting to add issueid ' + readlist['IssueID'])
        if comicinfo is None:
            logger.info(self.module + ' Issue not located on your current watchlist. I should probably check story-arcs but I do not have that capability just yet.')
        else:
            locpath = None
            if mylar.MULTIPLE_DEST_DIRS is not None and mylar.MULTIPLE_DEST_DIRS != 'None' and os.path.join(mylar.MULTIPLE_DEST_DIRS, os.path.basename(comicinfo['ComicLocation'])) != comicinfo['ComicLocation']:
                logger.fdebug(self.module + ' Multiple_dest_dirs:' + mylar.MULTIPLE_DEST_DIRS)
                logger.fdebug(self.module + ' Dir: ' + comicinfo['ComicLocation'])
                logger.fdebug(self.module + ' Os.path.basename: ' + os.path.basename(comicinfo['ComicLocation']))
                pathdir = os.path.join(mylar.MULTIPLE_DEST_DIRS, os.path.basename(comicinfo['ComicLocation']))
                if os.path.exists(os.path.join(pathdir, readlist['Location'])):
                    locpath = os.path.join(pathdir, readlist['Location'])
                else:
                    if os.path.exists(os.path.join(comicinfo['ComicLocation'], readlist['Location'])):
                        locpath = os.path.join(comicinfo['ComicLocation'], readlist['Location'])
            else:
                if os.path.exists(os.path.join(comicinfo['ComicLocation'], readlist['Location'])):
                    locpath = os.path.join(comicinfo['ComicLocation'], readlist['Location'])

            if locpath is not None:
                comicissue = readlist['Issue_Number']
                comicname = comicinfo['ComicName']
                dspinfo = comicname + ' #' + comicissue
                if annualize:
                    if mylar.ANNUALS_ON:
                        comicissue = 'Annual ' + readlist['Issue_Number']
                        dspinfo = comicname + ' Annual #' + readlist['Issue_Number']
                    else:
                        comicname = comicinfo['ComicName'] + ' Annual'
                        dspinfo = comicname + ' #' + comicissue
                ctrlval = {"IssueID":       self.IssueID}
                newval = {"DateAdded":      helpers.today(),
                          "Status":         "Added",
                          "ComicID":        readlist['ComicID'],
                          "Issue_Number":   comicissue,
                          "IssueDate":      readlist['IssueDate'],
                          "SeriesYear":     comicinfo['ComicYear'],
                          "ComicName":      comicname,
                          "Location":       locpath}

                myDB.upsert("readlist", newval, ctrlval)
                logger.info(self.module + ' Added ' + dspinfo + ' to the Reading list.')
        return
Example #42
0
    def get_torrent(self, hash):
        logger.debug('Getting Torrent info hash: ' + hash)
        try:
            torrent_info = self.client.get_torrent(hash)
        except Exception as e:
            logger.error('Could not get torrent info for %s: %s' % (hash, e))
            return False
        else:
            logger.info('Successfully located information for torrent')
            return torrent_info
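A hypothetical call site, assuming a connected wrapper instance named `client` (the hash value is a placeholder):

info = client.get_torrent('c39fe3eefbdb62da9c27eb6398ff4a7d2e26e7ab')
if info is False:
    logger.warn('torrent lookup failed - check the hash and the client connection')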
Example #44
0
    def get_torrent(self, hash):
        logger.debug('Getting Torrent info hash: ' + hash)
        try:
            torrent_info = self.client.call('core.get_torrent_status', hash, '')
        except Exception as e:
            logger.error('Could not get torrent info for %s: %s' % (hash, e))
            return False
        else:
            logger.info('Successfully retrieved torrent status.')
            return torrent_info
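This variant goes through Deluge's RPC method `core.get_torrent_status(torrent_id, keys)`. The snippet passes an empty string where Deluge expects a list of status keys; an empty list is the documented way to ask for all fields, and a narrower request is cheaper. A sketch, assuming the same connected `self.client`:

status = self.client.call('core.get_torrent_status', hash, ['name', 'progress', 'state'])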
Example #45
0
def pullsearch(comicapi,comicquery,offset):
    PULLURL = mylar.CVURL + 'search?api_key=' + str(comicapi) + '&resources=volume&query=' + str(comicquery) + '&field_list=id,name,start_year,site_detail_url,count_of_issues,image,publisher&format=xml&page=' + str(offset)

    #all these imports are standard on most modern python implementations
    #download the file:
    try:
        file = urllib2.urlopen(PULLURL)
    except urllib2.HTTPError, err:
        logger.error("There was a major problem retrieving data from ComicVine - on their end. You'll have to try again later most likely.")
        return        
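The snippet stops at the error path. Judging from the fuller variant in Example #47, the success path parses the XML response; a sketch of that continuation, reusing this snippet's `file` handle and assuming `parseString`/`ExpatError` are imported from `xml.dom.minidom` and `xml.parsers.expat`:

    try:
        dom = parseString(file.read())
    except ExpatError:
        logger.warn('ComicVine returned a malformed response.')
        return
    return dom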
Example #46
0
    def __init__(self):
        self.client = TorClient.TorrentClient()
        if not self.client.connect(mylar.CONFIG.RTORRENT_HOST,
                                   mylar.CONFIG.RTORRENT_USERNAME,
                                   mylar.CONFIG.RTORRENT_PASSWORD,
                                   mylar.CONFIG.RTORRENT_AUTHENTICATION,
                                   mylar.CONFIG.RTORRENT_VERIFY,
                                   mylar.CONFIG.RTORRENT_RPC_URL,
                                   mylar.CONFIG.RTORRENT_CA_BUNDLE):
            logger.error('[ERROR] Could not connect to %s - exiting' % mylar.CONFIG.RTORRENT_HOST)
            sys.exit(-1)
Example #47
0
def pullsearch(comicapi, comicquery, offset, search_type):

    cnt = 1
    for x in comicquery:
        if cnt == 1:
            filterline = '%s' % x
        else:
            filterline += ',name:%s' % x
        cnt += 1

    PULLURL = mylar.CVURL + str(search_type) + 's?api_key=' + str(comicapi) + '&filter=name:' + filterline + '&field_list=id,name,start_year,site_detail_url,count_of_issues,image,publisher,deck,description,first_issue,last_issue&format=xml&sort=date_last_updated:desc&offset=' + str(offset)  # 2012/22/02 - CVAPI flipped back to offset instead of page

    #all these imports are standard on most modern python implementations
    #logger.info('MB.PULLURL:' + PULLURL)

    #new CV API restriction - rate-limit api requests (wait at least 2 seconds between hits).
    if mylar.CONFIG.CVAPI_RATE is None or mylar.CONFIG.CVAPI_RATE < 2:
        time.sleep(2)
    else:
        time.sleep(mylar.CONFIG.CVAPI_RATE)

    #download the file:
    payload = None

    try:
        r = requests.get(PULLURL,
                         params=payload,
                         verify=mylar.CONFIG.CV_VERIFY,
                         headers=mylar.CV_HEADERS)
    except Exception as e:
        logger.warn('Error fetching data from ComicVine: %s' % e)
        return

    try:
        dom = parseString(r.content)  #(data)
    except ExpatError:
        if 'Abnormal Traffic Detected' in r.content.decode('utf-8'):
            logger.error(
                'ComicVine has banned this server\'s IP address because it exceeded the API rate limit.'
            )
        else:
            logger.warn(
                '[WARNING] ComicVine is not responding correctly at the moment. This is usually due to some problems on their end. If you re-try things again in a few moments, it might work properly.'
            )
            mylar.BACKENDSTATUS_CV = 'down'
        return
    except Exception as e:
        logger.warn('[ERROR] Error returned from CV: %s' % e)
        return
    else:
        return dom
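The return value is a parsed minidom document. A hypothetical caller pulling volume names out of it - the layout (`volume` elements with a `name` child) is an assumption about ComicVine's XML, and `api_key` is a placeholder:

dom = pullsearch(api_key, ['batman'], 0, 'volume')
if dom is not None:
    for vol in dom.getElementsByTagName('volume'):
        names = vol.getElementsByTagName('name')
        if names and names[0].firstChild is not None:
            logger.info('found volume: %s' % names[0].firstChild.wholeText)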
Example #48
0
def dbcheck():

    conn=sqlite3.connect(DB_FILE)
    c=conn.cursor()

    c.execute('CREATE TABLE IF NOT EXISTS comics (ComicID TEXT UNIQUE, ComicName TEXT, ComicSortName TEXT, ComicYear TEXT, DateAdded TEXT, Status TEXT, IncludeExtras INTEGER, Have INTEGER, Total INTEGER, ComicImage TEXT, ComicPublisher TEXT, ComicLocation TEXT, ComicPublished TEXT, LatestIssue TEXT, LatestDate TEXT, Description TEXT, QUALalt_vers TEXT, QUALtype TEXT, QUALscanner TEXT, QUALquality TEXT, LastUpdated TEXT)')
    c.execute('CREATE TABLE IF NOT EXISTS issues (IssueID TEXT, ComicName TEXT, IssueName TEXT, Issue_Number TEXT, DateAdded TEXT, Status TEXT, Type TEXT, ComicID, ArtworkURL Text, ReleaseDate TEXT, Location TEXT, IssueDate TEXT, Int_IssueNumber INT)')
    c.execute('CREATE TABLE IF NOT EXISTS snatched (IssueID TEXT, ComicName TEXT, Issue_Number TEXT, Size INTEGER, DateAdded TEXT, Status TEXT, FolderName TEXT, ComicID TEXT)')
    c.execute('CREATE TABLE IF NOT EXISTS upcoming (ComicName TEXT, IssueNumber TEXT, ComicID TEXT, IssueID TEXT, IssueDate TEXT, Status TEXT)')
    c.execute('CREATE TABLE IF NOT EXISTS nzblog (IssueID TEXT, NZBName TEXT)')
#    c.execute('CREATE TABLE IF NOT EXISTS weekly (SHIPDATE, PUBLISHER text, ISSUE text, COMIC VARCHAR(150), EXTRA text, STATUS text)')
#    c.execute('CREATE TABLE IF NOT EXISTS sablog (nzo_id TEXT, ComicName TEXT, ComicYEAR TEXT, ComicIssue TEXT, name TEXT, nzo_complete TEXT)')

    #new
    c.execute('DROP TABLE IF EXISTS exceptions')

    c.execute('CREATE TABLE IF NOT EXISTS exceptions (variloop TEXT, ComicID TEXT, NewComicID TEXT, GComicID TEXT)')

    # for Mylar-based Exception Updates....
    i = 0
    EXCEPTIONS = []
    EXCEPTIONS.append('exceptions.csv')
    EXCEPTIONS.append('custom_exceptions.csv')

    while (i <= 1):
    #EXCEPTIONS_FILE = os.path.join(DATA_DIR, 'exceptions.csv')
        EXCEPTIONS_FILE = os.path.join(DATA_DIR, EXCEPTIONS[i])

        if not os.path.exists(EXCEPTIONS_FILE):
            if i == 1:
                logger.error("No Custom Exceptions found. Using base exceptions only.")
            else:
                logger.error("Could not locate " + str(EXCEPTIONS[i]) + " file. Make sure it's in datadir: " + DATA_DIR)
            break
        csvfile = open(str(EXCEPTIONS_FILE), "rb")
        if i == 0:
            logger.info(u"Populating Base Exception listings into Mylar....")
        elif i == 1:
            logger.info(u"Populating Custom Exception listings into Mylar....")

        creader = csv.reader(csvfile, delimiter=',')

        for row in creader:
            try:
                c.execute("INSERT INTO exceptions VALUES (?,?,?,?);", row)
            except Exception, e:
                #print ("Error - invald arguments...-skipping")
                pass
        csvfile.close()
        i+=1
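The bare `except ... pass` in the insert loop silently drops malformed rows. A small defensive variant with the same outcome, sketched here rather than taken from the project:

        for row in creader:
            if len(row) != 4:
                logger.warn('skipping malformed exceptions row: %r' % (row,))
                continue
            c.execute("INSERT INTO exceptions VALUES (?,?,?,?);", row)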
Example #49
0
def torsend2client(seriesname, issue, seriesyear, linkit, site):
    logger.info('matched on ' + seriesname)
    filename = helpers.filesafe(seriesname)
    #filename = re.sub('[\'\!\@\#\$\%\:\;\/\\=\?\.]', '',seriesname)
    filename = re.sub(' ', '_', filename)
    filename += "_" + str(issue) + "_" + str(seriesyear)
    if site == 'CBT':
        logger.info(linkit)
        linkit = str(linkit) + '&passkey=' + str(mylar.CBT_PASSKEY)

    if linkit[-7:] != "torrent": # and site != "KAT":
        filename += ".torrent"

    if mylar.TORRENT_LOCAL and mylar.LOCAL_WATCHDIR is not None:
        filepath = os.path.join(mylar.LOCAL_WATCHDIR, filename)
        logger.fdebug('filename for torrent set to : ' + filepath)
    elif mylar.TORRENT_SEEDBOX and mylar.SEEDBOX_WATCHDIR is not None:
        filepath = os.path.join(mylar.CACHE_DIR, filename)
        logger.fdebug('filename for torrent set to : ' + filepath)
    else:
        logger.error('No Local Watch Directory or Seedbox Watch Directory specified. Set it and try again.')
        return "fail"

    try:
        request = urllib2.Request(linkit)
        #request.add_header('User-Agent', str(mylar.USER_AGENT))
        request.add_header('Accept-encoding', 'gzip')

        if site == 'KAT':
            stfind = linkit.find('?')
            kat_referrer = linkit[:stfind]
            request.add_header('Referer', kat_referrer)
            logger.fdebug('KAT Referer set to :' + kat_referrer)


        #response = helpers.urlretrieve(urllib2.urlopen(request), filepath)
        response = urllib2.urlopen(request)
        logger.fdebug('retrieved response.')

        if site == 'KAT' and response.info().get('Content-Encoding') == 'gzip':
            logger.fdebug('gzip detected - decompressing response.')
            buf = StringIO(response.read())
            torrent = gzip.GzipFile(fileobj=buf).read()
        else:
            #plain (non-gzipped) response; this branch also covers KAT
            #responses that arrive uncompressed, so torrent is always defined.
            torrent = response.read()

    except Exception, e:
        logger.warn('Error fetching data from %s: %s' % (site, e))
        return "fail"
Example #50
0
File: test.py Project: 2mny/mylar
    def __init__(self):
        self.client = TorClient.TorrentClient()
        if not self.client.connect(mylar.CONFIG.RTORRENT_HOST,
                                   mylar.CONFIG.RTORRENT_USERNAME,
                                   mylar.CONFIG.RTORRENT_PASSWORD,
                                   mylar.CONFIG.RTORRENT_AUTHENTICATION,
                                   mylar.CONFIG.RTORRENT_VERIFY,
                                   mylar.CONFIG.RTORRENT_SSL,
                                   mylar.CONFIG.RTORRENT_RPC_URL,
                                   mylar.CONFIG.RTORRENT_CA_BUNDLE):
            logger.error('could not connect to %s, exiting', mylar.CONFIG.RTORRENT_HOST)
            sys.exit(-1)
Example #51
0
def scanLibrary(scan=None, queue=None):
    valreturn = []
    if scan:
        try:
            soma, noids = libraryScan()
        except Exception, e:
            logger.error('Unable to complete the scan: %s' % e)
            return
        if soma == "Completed":
            logger.info('Successfully completed import.')
        else:
            logger.info('Starting mass importing...' + str(noids) + ' records.')
            #this is what it should do...
            #store soma (the list of comic_details from importing) into sql table so import can be whenever
            #display webpage showing results
            #allow user to select comic to add (one at a time)
            #call addComic off of the webpage to initiate the add.
            #return to result page to finish or continue adding.
            #....
            #threading.Thread(target=self.searchit).start()
            #threadthis = threadit.ThreadUrl()
            #result = threadthis.main(soma)
            myDB = db.DBConnection()
            sl = 0
            logger.fdebug("number of records: " + str(noids))
            while (sl < int(noids)):
                soma_sl = soma['comic_info'][sl]
                logger.fdebug("soma_sl: " + str(soma_sl))
                logger.fdebug("comicname: " + soma_sl['comicname'].encode('utf-8'))
                logger.fdebug("filename: " + soma_sl['comfilename'].encode('utf-8'))
                controlValue = {"impID":    soma_sl['impid']}
                newValue = {"ComicYear":        soma_sl['comicyear'],
                            "Status":           "Not Imported",
                            "ComicName":        soma_sl['comicname'].encode('utf-8'),
                            "DisplayName":      soma_sl['displayname'].encode('utf-8'),
                            "ComicFilename":    soma_sl['comfilename'].encode('utf-8'),
                            "ComicLocation":    soma_sl['comlocation'].encode('utf-8'),
                            "ImportDate":       helpers.today(),
                            "WatchMatch":       soma_sl['watchmatch']}
                myDB.upsert("importresults", newValue, controlValue)
                sl+=1
            # because we could be adding volumes/series that span years, we need to account for this
            # add the year to the db under the term, valid-years
            # add the issue to the db under the term, min-issue

            #locate metadata here.
            # unzip -z filename.cbz will show the comment field of the zip which contains the metadata.

        #self.importResults()
        valreturn.append({"somevalue":  'self.ie',
                          "result":     'success'})
        return queue.put(valreturn)
Example #52
0
    def start_torrent(self, hash):
        try:
            self.find_torrent(hash)
        except Exception as e:
            #torrent could not be located - nothing to start
            return False
        else:
            try:
                self.client.call('core.resume_torrent', hash)
            except Exception as e:
                logger.error('Torrent failed to start: %s' % e)
            else:
                logger.info('Torrent ' + hash + ' was started')
                return True
Example #53
0
File: deluge.py Project: 2mny/mylar
    def start_torrent(self, hash):
        try:
            self.find_torrent(hash)
        except Exception as e:
            #torrent could not be located - nothing to start
            return False
        else:
            try:
                self.client.call('core.resume_torrent', hash)
            except Exception as e:
                logger.error('Torrent failed to start: %s' % e)
            else:
                logger.info('Torrent ' + hash + ' was started')
                return True
Example #54
0
    def notify(self, snline=None, prline=None, prline2=None, snatched=None, sent_to=None, prov=None, module=None, method=None):
        if module is None:
            module = ''
        module += '[NOTIFIER]'
        
#        http_handler = HTTPSConnection("api.pushbullet.com")

#        if method == 'GET':
#            uri = '/v2/devices'
#        else:
#            method = 'POST'
#            uri = '/v2/pushes'

#        authString = base64.b64encode(self.apikey + ":")

        if method == 'GET':
            data = None  #without this, json.dumps(data) below raises NameError
            self.PUSH_URL = 'https://api.pushbullet.com/v2/devices'  #matches the newer variant in Example #39 and the commented-out uri above
#           http_handler.request(method, uri, None, headers={'Authorization': 'Basic %s:' % authString})
        else:
            if snatched:
                if snatched[-1] == '.': snatched = snatched[:-1]
                event = snline
                message = "Mylar has snatched: " + snatched + " from " + prov + " and has sent it to " + sent_to
            else:
                event = prline + ' complete!'
                message = prline2

            data = {'type': "note", #'device_iden': self.deviceid,
                    'title': event.encode('utf-8'), #"mylar",
                    'body': message.encode('utf-8')}

            if self.channel_tag:
                data['channel_tag'] = self.channel_tag

        if method == 'GET':
            #the devices endpoint is queried with GET, not POST
            r = self._session.get(self.PUSH_URL)
        else:
            r = self._session.post(self.PUSH_URL, data=json.dumps(data))
        dt = r.json()
        if r.status_code == 200:
            if method == 'GET':
                return dt
            else:
                logger.info(module + ' PushBullet notifications sent.')
                return {'status':  True,
                        'message': 'APIKEY verified OK / notification sent'}
        elif r.status_code >= 400 and r.status_code < 500:
            logger.error(module + ' PushBullet request failed: %s' % r.content)
            return {'status':  False,
                    'message': '[' + str(r.status_code) + '] ' + dt['error']['message']}
        else:
            logger.error(module + ' PushBullet notification failed serverside: %s' % r.content)
            return {'status':  False,
                    'message': '[' + str(r.status_code) + '] ' + dt['error']['message']}
Example #55
0
    def _send(self, data, module):

        r = self._session.post(self.NMA_URL, data=data)

        logger.fdebug('[NMA] Status code returned: ' + str(r.status_code))
        if r.status_code == 200:
            logger.info(module + ' NotifyMyAndroid notifications sent.')
            return True
        elif r.status_code >= 400 and r.status_code < 500:
            logger.error(module + ' NotifyMyAndroid request failed: %s' % r.content)
            return False
        else:
            logger.error(module + ' NotifyMyAndroid notification failed serverside.')
            return False
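`_send()` only handles transport; the payload comes from the caller. A hypothetical payload using the public NotifyMyAndroid field names (none of these keys appear in the snippet itself; `notifier` stands in for an instance of the class and `nma_apikey` for the configured key):

data = {'apikey': nma_apikey,
        'application': 'Mylar',
        'event': 'Download complete',
        'description': 'Snatched issue has been post-processed.',
        'priority': 0}
notifier._send(data, '[NOTIFIER]')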