Example #1
def getProgress(download_id):
    # get the progress/status of a download from its download_id
    # return (-1, "") if the download is not found
    hosturl = _hostURL()
    if hosturl:
        auth_cgi, task_cgi, sid = _login(hosturl)
        if sid:
            result = _getInfo(task_cgi, sid, download_id)  # type: dict
            _logout(auth_cgi, sid)
            if result:
                if 'status' in result:
                    status = result['status']
                else:
                    status = ''
                # can't see how to get a % from synology, so have to work it out ourselves...
                if 'additional' in result:
                    try:
                        files = result['additional']['file']
                    except KeyError:
                        files = []
                else:
                    files = []
                tot_size = 0
                got_size = 0
                for item in files:
                    tot_size += check_int(item['size'], 0)
                    got_size += check_int(item['size_downloaded'], 0)

                if tot_size:
                    pc = int((got_size * 100) / tot_size)
                else:
                    pc = 0
                return pc, status
    return -1, ""
Example #2
def authorUpdate():
    threadname = threading.currentThread().name
    if "Thread-" in threadname:
        threading.currentThread().name = "AUTHORUPDATE"
    # noinspection PyBroadException
    try:
        myDB = database.DBConnection()
        cmd = 'SELECT AuthorID, AuthorName, DateAdded from authors WHERE Status="Active" or Status="Loading"'
        cmd += ' or Status="Wanted" and DateAdded is not null order by DateAdded ASC'
        author = myDB.match(cmd)
        if author and check_int(lazylibrarian.CONFIG['CACHE_AGE'], 0):
            dtnow = datetime.datetime.now()
            diff = datecompare(dtnow.strftime("%Y-%m-%d"), author['DateAdded'])
            msg = 'Oldest author info (%s) is %s day%s old' % (author['AuthorName'], diff, plural(diff))
            if diff > check_int(lazylibrarian.CONFIG['CACHE_AGE'], 0):
                logger.info('Starting update for %s' % author['AuthorName'])
                authorid = author['AuthorID']
                logger.debug(msg)
                lazylibrarian.importer.addAuthorToDB(refresh=True, authorid=authorid)
            else:
                # don't nag. Show info message no more than every 12 hrs, debug message otherwise
                timenow = int(time.time())
                if check_int(lazylibrarian.AUTHORUPDATE_MSG, 0) + 43200 < timenow:
                    logger.info(msg)
                    lazylibrarian.AUTHORUPDATE_MSG = timenow
                else:
                    logger.debug(msg)

    except Exception:
        logger.error('Unhandled exception in AuthorUpdate: %s' % traceback.format_exc())
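
A hedged sketch of the cache-age decision above, assuming datecompare returns the whole-day difference between two YYYY-MM-DD strings; the helper and config value below are illustrative stand-ins, not the project's implementation:

import datetime

def days_between(newer, older):
    # illustrative stand-in for the project's datecompare helper
    a = datetime.datetime.strptime(newer, "%Y-%m-%d")
    b = datetime.datetime.strptime(older, "%Y-%m-%d")
    return (a - b).days

cache_age = 30  # stands in for check_int(CONFIG['CACHE_AGE'], 0)
date_added = "2024-01-01"
diff = days_between(datetime.datetime.now().strftime("%Y-%m-%d"), date_added)
if diff > cache_age:
    print("oldest author info is %s days old, refresh it" % diff)
else:
    print("oldest author info is %s days old, still fresh enough" % diff)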
Example #4
def showJobs():
    result = [
        "Cache %i hit%s, %i miss, " % (
            check_int(lazylibrarian.CACHE_HIT, 0),
            plural(check_int(lazylibrarian.CACHE_HIT, 0)),
            check_int(lazylibrarian.CACHE_MISS, 0)),
        "Sleep %.3f goodreads, %.3f librarything" % (
            lazylibrarian.GR_SLEEP, lazylibrarian.LT_SLEEP)
    ]
    myDB = database.DBConnection()
    snatched = myDB.match(
        "SELECT count(*) as counter from wanted WHERE Status = 'Snatched'")
    wanted = myDB.match(
        "SELECT count(*) as counter FROM books WHERE Status = 'Wanted'")
    result.append("%i item%s marked as Snatched" %
                  (snatched['counter'], plural(snatched['counter'])))
    result.append("%i item%s marked as Wanted" %
                  (wanted['counter'], plural(wanted['counter'])))

    for job in lazylibrarian.SCHED.get_jobs():
        job = str(job)
        if "search_magazines" in job:
            jobname = "Magazine search"
        elif "checkForUpdates" in job:
            jobname = "Check LazyLibrarian version"
        elif "search_book" in job:
            jobname = "Book search"
        elif "search_rss_book" in job:
            jobname = "RSS book search"
        elif "processDir" in job:
            jobname = "Process downloads"
        elif "authorUpdate" in job:
            jobname = "Update authors"
        elif "sync_to_gr" in job:
            jobname = "Goodreads Sync"
        else:
            jobname = job.split(' ')[0].split('.')[2]

        # jobinterval = job.split('[')[1].split(']')[0]
        jobtime = job.split('at: ')[1].split('.')[0]
        jobtime = next_run(jobtime)
        timeparts = jobtime.split(' ')
        if timeparts[0] == '1' and timeparts[1].endswith('s'):
            timeparts[1] = timeparts[1][:-1]
        jobinfo = "%s: Next run in %s %s" % (jobname, timeparts[0],
                                             timeparts[1])
        result.append(jobinfo)

    cmd = 'SELECT AuthorID, AuthorName, DateAdded from authors WHERE Status="Active" or Status="Loading"'
    cmd += ' or Status="Wanted" order by DateAdded ASC'
    author = myDB.match(cmd)
    if author:
        dtnow = datetime.datetime.now()
        diff = datecompare(dtnow.strftime("%Y-%m-%d"), author['DateAdded'])
        result.append('Oldest author info (%s) is %s day%s old' %
                      (author['AuthorName'], diff, plural(diff)))

    return result
Example #5
def getLatestVersion_FromGit():
    # Don't call directly, use getLatestVersion as wrapper.
    # Also removed reference to global variable setting.
    latest_version = 'Unknown'

    # Can only work for non Windows driven installs, so check install type
    if lazylibrarian.CONFIG['INSTALL_TYPE'] == 'win':
        logmsg('debug', '(getLatestVersion_FromGit) Error - should not be called under a windows install')
        latest_version = 'WINDOWS INSTALL'
    else:
        # check current branch value of the local git repo as folks may pull from a branch not master
        branch = lazylibrarian.CONFIG['GIT_BRANCH']

        if branch == 'InvalidBranch':
            logmsg('debug', '(getLatestVersion_FromGit) - Failed to get a valid branch name from local repo')
        else:
            if branch == 'Package':  # check packages against master
                branch = 'master'
            # Get the latest commit available from github
            url = 'https://api.github.com/repos/%s/%s/commits/%s' % (
                lazylibrarian.CONFIG['GIT_USER'], lazylibrarian.CONFIG['GIT_REPO'], branch)
            logmsg('debug',
                   '(getLatestVersion_FromGit) Retrieving latest version information from github command=[%s]' % url)

            timestamp = check_int(lazylibrarian.CONFIG['GIT_UPDATED'], 0)
            age = ''
            if timestamp:
                # timestring for 'If-Modified-Since' needs to be english short day/month names and in gmt
                # we already have english month names stored in MONTHNAMES[] but need capitalising
                # so use hard coded versions here instead
                DAYNAMES = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
                MONNAMES = ['', 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
                tm = time.gmtime(timestamp)
                age = "%s, %02d %s %04d %02d:%02d:%02d GMT" %(DAYNAMES[tm.tm_wday], tm.tm_mday,
                    MONNAMES[tm.tm_mon], tm.tm_year, tm.tm_hour, tm.tm_min, tm.tm_sec)
            try:
                headers = {'User-Agent': USER_AGENT}
                if age:
                    logmsg('debug', '(getLatestVersion_FromGit) Checking if modified since %s' % age)
                    headers.update({'If-Modified-Since': age})
                proxies = proxyList()
                timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30)
                r = requests.get(url, timeout=timeout, headers=headers, proxies=proxies)

                if str(r.status_code).startswith('2'):
                    git = r.json()
                    latest_version = git['sha']
                    logmsg('debug', '(getLatestVersion_FromGit) Branch [%s] Latest Version has been set to [%s]' % (
                        branch, latest_version))
                elif str(r.status_code) == '304':
                    latest_version = lazylibrarian.CONFIG['CURRENT_VERSION']
                    logmsg('debug', '(getLatestVersion_FromGit) Not modified, currently on Latest Version')
            except Exception as e:
                logmsg('warn', '(getLatestVersion_FromGit) Could not get the latest commit from github')
                logmsg('debug', 'git %s for %s: %s' % (type(e).__name__, url, str(e)))
                latest_version = 'Not_Available_From_GitHUB'

    return latest_version
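
The hand-built GMT timestamp above can be cross-checked against the standard library, which emits the same 'If-Modified-Since' date format; a sketch with an example epoch value, not part of the original code:

import time
from email.utils import formatdate

timestamp = 1600000000  # example epoch value, standing in for CONFIG['GIT_UPDATED']
DAYNAMES = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
MONNAMES = ['', 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
            'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
tm = time.gmtime(timestamp)
manual = "%s, %02d %s %04d %02d:%02d:%02d GMT" % (
    DAYNAMES[tm.tm_wday], tm.tm_mday, MONNAMES[tm.tm_mon],
    tm.tm_year, tm.tm_hour, tm.tm_min, tm.tm_sec)
library = formatdate(timestamp, usegmt=True)
print(manual)   # Sun, 13 Sep 2020 12:26:40 GMT
print(library)  # Sun, 13 Sep 2020 12:26:40 GMT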
Example #6
    def _Series(self, **kwargs):
        index = 0
        if 'index' in kwargs:
            index = check_int(kwargs['index'], 0)
        myDB = database.DBConnection()
        feed = {'title': 'LazyLibrarian OPDS - Series', 'id': 'Series', 'updated': now()}
        links = []
        entries = []
        links.append(getLink(href=self.opdsroot, ftype='application/atom+xml; profile=opds-catalog; kind=navigation',
                             rel='start', title='Home'))
        links.append(getLink(href='%s?cmd=Series' % self.opdsroot,
                             ftype='application/atom+xml; profile=opds-catalog; kind=navigation', rel='self'))
        links.append(getLink(href='%s/opensearchseries.xml' % self.searchroot,
                             ftype='application/opensearchdescription+xml', rel='search', title='Search Series'))
        cmd = "SELECT SeriesName,SeriesID,Have,Total from Series WHERE CAST(Have AS INTEGER) > 0 "
        if 'query' in kwargs:
            cmd += "AND SeriesName LIKE '%" + kwargs['query'] + "%' "
        cmd += "order by SeriesName"
        results = myDB.select(cmd)
        page = results[index:(index + self.PAGE_SIZE)]
        for series in page:
            cmd = "SELECT books.BookID,SeriesNum from books,member where SeriesID=? "
            cmd += "and books.bookid = member.bookid order by CAST(SeriesNum AS INTEGER)"
            firstbook = myDB.match(cmd, (series['SeriesID'],))
            if firstbook:
                cmd = 'SELECT AuthorName from authors,books WHERE authors.authorid = books.authorid AND books.bookid=?'
                res = myDB.match(cmd, (firstbook['BookID'],))
                author = res['AuthorName']
            else:
                author = 'Unknown'
            totalbooks = check_int(series['Total'], 0)
            havebooks = check_int(series['Have'], 0)
            sername = makeUnicode(series['SeriesName'])
            entries.append(
                {
                    'title': escape('%s (%s/%s) %s' % (sername, havebooks, totalbooks, author)),
                    'id': escape('series:%s' % series['SeriesID']),
                    'updated': now(),
                    'content': escape('%s (%s)' % (sername, havebooks)),
                    'href': '%s?cmd=Members&amp;seriesid=%s' % (self.opdsroot, series['SeriesID']),
                    'kind': 'navigation',
                    'rel': 'subsection',
                }
            )
        if len(results) > (index + self.PAGE_SIZE):
            links.append(
                getLink(href='%s?cmd=Series&amp;index=%s' % (self.opdsroot, index + self.PAGE_SIZE),
                        ftype='application/atom+xml; profile=opds-catalog; kind=navigation', rel='next'))
        if index >= self.PAGE_SIZE:
            links.append(
                getLink(href='%s?cmd=Series&amp;index=%s' % (self.opdsroot, index - self.PAGE_SIZE),
                        ftype='application/atom+xml; profile=opds-catalog; kind=navigation', rel='previous'))

        feed['links'] = links
        feed['entries'] = entries
        logger.debug("Returning %s series" % len(entries))
        self.data = feed
        return
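
The index/PAGE_SIZE paging and the next/previous link decisions above reduce to plain list slicing; a self-contained sketch where the PAGE_SIZE value is an assumption (the real one comes from the class attribute):

PAGE_SIZE = 30                     # assumed page size
results = list(range(75))          # stands in for the database rows
index = 30                         # the ?index= parameter from the request

page = results[index:index + PAGE_SIZE]
has_next = len(results) > (index + PAGE_SIZE)   # emit a rel='next' link
has_previous = index >= PAGE_SIZE               # emit a rel='previous' link
print(len(page), has_next, has_previous)        # 30 True True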
Example #8
def fetchURL(URL, headers=None, retry=True, raw=None):
    """ Return the result of fetching a URL and True if success
        Otherwise return error message and False
        Return data as raw/bytes in python2 or if raw == True
        On python3 default to unicode, need to set raw=True for images/data
        Allow one retry on timeout by default"""

    if raw is None:
        if PY2:
            raw = True
        else:
            raw = False

    if headers is None:
        # some sites insist on having a user-agent, default is to add one
        # if you don't want any headers, send headers=[]
        headers = {'User-Agent': USER_AGENT}
    proxies = proxyList()
    try:
        # jackett query all indexers needs a longer timeout
        # /torznab/all/api?q=  or v2.0/indexers/all/results/torznab/api?q=
        if '/torznab/' in URL and ('/all/' in URL or '/aggregate/' in URL):
            timeout = check_int(lazylibrarian.CONFIG['HTTP_EXT_TIMEOUT'], 90)
        else:
            timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30)

        r = requests.get(URL, headers=headers, timeout=timeout, proxies=proxies)

        if str(r.status_code).startswith('2'):  # (200 OK etc)
            if raw:
                return r.content, True
            try:
                result = r.content.decode('utf-8')
            except UnicodeDecodeError:
                result = r.content.decode('latin-1')
            return result, True

        # noinspection PyBroadException
        try:
            # noinspection PyProtectedMember
            msg = requests.status_codes._codes[r.status_code][0]
        except Exception:
            msg = str(r.content)
        return "Response status %s: %s" % (r.status_code, msg), False
    except requests.exceptions.Timeout as e:
        if not retry:
            logger.error("fetchURL: Timeout getting response from %s" % URL)
            return "Timeout %s" % str(e), False
        logger.debug("fetchURL: retrying - got timeout on %s" % URL)
        result, success = fetchURL(URL, headers=headers, retry=False, raw=raw)
        return result, success
    except Exception as e:
        if hasattr(e, 'reason'):
            return "Exception %s: Reason: %s" % (type(e).__name__, str(e.reason)), False
        return "Exception %s: %s" % (type(e).__name__, str(e)), False
Example #9
    def _Authors(self, **kwargs):
        index = 0
        if 'index' in kwargs:
            index = check_int(kwargs['index'], 0)
        myDB = database.DBConnection()
        feed = {'title': 'LazyLibrarian OPDS - Authors', 'id': 'Authors', 'updated': now()}
        links = []
        entries = []
        links.append(getLink(href=self.opdsroot, ftype='application/atom+xml; profile=opds-catalog; kind=navigation',
                             rel='start', title='Home'))
        links.append(getLink(href='%s?cmd=Authors' % self.opdsroot,
                             ftype='application/atom+xml; profile=opds-catalog; kind=navigation', rel='self'))
        links.append(getLink(href='%s/opensearchauthors.xml' % self.searchroot,
                             ftype='application/opensearchdescription+xml', rel='search', title='Search Authors'))
        cmd = "SELECT AuthorName,AuthorID,HaveBooks,TotalBooks,DateAdded from Authors WHERE "
        if 'query' in kwargs:
            cmd += "AuthorName LIKE '%" + kwargs['query'] + "%' AND "
        cmd += "CAST(HaveBooks AS INTEGER) > 0 order by AuthorName"
        results = myDB.select(cmd)
        page = results[index:(index + self.PAGE_SIZE)]
        for author in page:
            totalbooks = check_int(author['TotalBooks'], 0)
            havebooks = check_int(author['HaveBooks'], 0)
            lastupdated = author['DateAdded']
            name = makeUnicode(author['AuthorName'])
            entry = {
                    'title': escape('%s (%s/%s)' % (name, havebooks, totalbooks)),
                    'id': escape('author:%s' % author['AuthorID']),
                    'updated': opdstime(lastupdated),
                    'content': escape('%s (%s)' % (name, havebooks)),
                    'href': '%s?cmd=Author&amp;authorid=%s' % (self.opdsroot, author['AuthorID']),
                    'author': escape('%s' % name),
                    'kind': 'navigation',
                    'rel': 'subsection',
                }
            # removed authorimg as it stops navigation ??
            # if lazylibrarian.CONFIG['OPDS_METAINFO']:
            #    entry['image'] = self.searchroot + '/' + author['AuthorImg']
            entries.append(entry)

        if len(results) > (index + self.PAGE_SIZE):
            links.append(
                getLink(href='%s?cmd=Authors&amp;index=%s' % (self.opdsroot, index + self.PAGE_SIZE),
                        ftype='application/atom+xml; profile=opds-catalog; kind=navigation', rel='next'))
        if index >= self.PAGE_SIZE:
            links.append(
                getLink(href='%s?cmd=Authors&amp;index=%s' % (self.opdsroot, index - self.PAGE_SIZE),
                        ftype='application/atom+xml; profile=opds-catalog; kind=navigation', rel='previous'))

        feed['links'] = links
        feed['entries'] = entries
        logger.debug("Returning %s author%s" % (len(entries), plural(len(entries))))
        self.data = feed
        return
Example #11
def setTorrentPause(result):
    logger.debug('Deluge: Pausing torrent')
    if not any(delugeweb_auth):
        _get_auth()

    timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30)
    try:
        post_json = {
            "method": "core.pause_torrent",
            "params": [[result['hash']]],
            "id": 9
        }

        response = requests.post(delugeweb_url,
                                 json=post_json,
                                 cookies=delugeweb_auth,
                                 verify=deluge_verify_cert,
                                 headers=headers,
                                 timeout=timeout)
        if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
            logger.debug('Status code: %s' % response.status_code)
            logger.debug(response.text)

        return not response.json()['error']
    except Exception as err:
        logger.error('Deluge %s: setTorrentPause failed: %s' %
                     (type(err).__name__, str(err)))
        return False
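
The Deluge helpers in these examples all share one request shape: POST a JSON-RPC body of method/params/id to the WebUI and read result/error from the reply. A hedged sketch of that shape; the endpoint URL, cookie jar and timeout below are placeholders, not values from the original module:

import requests

delugeweb_url = 'http://localhost:8112/json'  # placeholder WebUI endpoint
session_cookie = {}                           # would normally come from auth.login
timeout = 30

def deluge_call(method, params, req_id=1):
    # generic JSON-RPC call in the style used by the helpers above
    body = {"method": method, "params": params, "id": req_id}
    response = requests.post(delugeweb_url, json=body,
                             cookies=session_cookie, timeout=timeout)
    reply = response.json()
    if reply.get('error'):
        raise RuntimeError(reply['error'])
    return reply['result']

# e.g. deluge_call("core.pause_torrent", [["<torrent hash>"]], req_id=9)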
Example #12
def _add_torrent_file(result):
    if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
        logger.debug('Deluge: Adding file')
    if not any(delugeweb_auth):
        _get_auth()

    timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30)
    try:
        # content is the torrent file body; base64-encode it, then decode the result
        # to a plain string so the payload stays JSON-serialisable on python3 as well as python2
        post_json = {"method": "core.add_torrent_file",
                     "params": [result['name'] + '.torrent', b64encode(result['content']).decode('ascii'), {}],
                     "id": 2}

        response = requests.post(delugeweb_url, json=post_json, cookies=delugeweb_auth,
                                 verify=deluge_verify_cert, headers=headers, timeout=timeout)

        if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
            logger.debug('Status code: %s' % response.status_code)
            logger.debug(response.text)

        result['hash'] = response.json()['result']
        msg = 'Deluge: Response was %s' % result['hash']
        if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
            logger.debug(msg)
        if 'was None' in msg:
            logger.error('Deluge: Adding torrent file failed: Is the WebUI running?')
        return response.json()['result']
    except Exception as err:
        logger.error('Deluge %s: Adding torrent file failed: %s' % (type(err).__name__, str(err)))
        if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
            formatted_lines = traceback.format_exc().splitlines()
            logger.debug('; '.join(formatted_lines))
        return False
Example #13
def getCommitDifferenceFromGit():
    # See how many commits behind we are
    # Takes current latest version value and tries to diff it with the latest version in the current branch.
    commit_list = ''
    commits = -1
    if lazylibrarian.CONFIG['LATEST_VERSION'] == 'Not_Available_From_GitHUB':
        commits = 0  # don't report a commit diff as we don't know anything
        commit_list = 'Unable to get latest version from GitHub'
        logmsg('info', commit_list)
    elif lazylibrarian.CONFIG['CURRENT_VERSION'] and commits != 0:
        url = 'https://api.github.com/repos/%s/LazyLibrarian/compare/%s...%s' % (
            lazylibrarian.CONFIG['GIT_USER'],
            lazylibrarian.CONFIG['CURRENT_VERSION'],
            lazylibrarian.CONFIG['LATEST_VERSION'])
        logmsg('debug',
               'Check for differences between local & repo by [%s]' % url)

        try:
            headers = {'User-Agent': USER_AGENT}
            proxies = proxyList()
            timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30)
            r = requests.get(url,
                             timeout=timeout,
                             headers=headers,
                             proxies=proxies)
            git = r.json()
            if 'total_commits' in git:
                commits = int(git['total_commits'])
                msg = 'Github: Status [%s] - Ahead [%s] - Behind [%s] - Total Commits [%s]' % (
                    git['status'], git['ahead_by'], git['behind_by'],
                    git['total_commits'])
                logmsg('debug', msg)
            else:
                logmsg(
                    'warn', 'Could not get difference status from GitHub: %s' %
                    str(git))

            if commits > 0:
                for item in git['commits']:
                    commit_list = "%s\n%s" % (item['commit']['message'],
                                              commit_list)
        except Exception as e:
            logmsg(
                'warn', 'Could not get difference status from GitHub: %s' %
                type(e).__name__)

    if commits > 1:
        logmsg('info',
               'New version is available. You are %s commits behind' % commits)
    elif commits == 1:
        logmsg('info', 'New version is available. You are one commit behind')
    elif commits == 0:
        logmsg('info', 'Lazylibrarian is up to date')
    else:
        logmsg(
            'info',
            'Unknown version of lazylibrarian. Run the updater to identify your version'
        )

    return commits, commit_list
Example #14
def setSeedRatio(result):
    if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
        logger.debug('Deluge: Setting seed ratio')
    if not any(delugeweb_auth):
        _get_auth()

    timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30)
    try:
        ratio = None
        if result['ratio']:
            ratio = result['ratio']

        if not ratio:
            return True

        post_json = {"method": "core.set_torrent_stop_at_ratio", "params": [result['hash'], True], "id": 5}

        response = requests.post(delugeweb_url, json=post_json, cookies=delugeweb_auth,
                                 verify=deluge_verify_cert, headers=headers, timeout=timeout)
        if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
            logger.debug('Status code: %s' % response.status_code)
            logger.debug(response.text)

        post_json = {"method": "core.set_torrent_stop_ratio", "params": [result['hash'], float(ratio)], "id": 6}

        response = requests.post(delugeweb_url, json=post_json, cookies=delugeweb_auth,
                                 verify=deluge_verify_cert, headers=headers, timeout=timeout)
        if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
            logger.debug('Status code: %s' % response.status_code)
            logger.debug(response.text)

        return not response.json()['error']
    except Exception as err:
        logger.error('Deluge %s: Setting seedratio failed: %s' % (type(err).__name__, str(err)))
        return False
Example #15
def removeTorrent(torrentid, remove_data=False):
    if not any(delugeweb_auth):
        _get_auth()

    timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30)

    try:
        logger.debug('Deluge: Removing torrent %s' % str(torrentid))
        post_json = {
            "method": "core.remove_torrent",
            "params": [torrentid, remove_data],
            "id": 25
        }

        response = requests.post(delugeweb_url,
                                 json=post_json,
                                 cookies=delugeweb_auth,
                                 verify=deluge_verify_cert,
                                 headers=headers,
                                 timeout=timeout)
        if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
            logger.debug('Status code: %s' % response.status_code)
            logger.debug(response.text)

        result = response.json()['result']
        return result
    except Exception as err:
        logger.debug('Deluge: Could not delete torrent %s: %s' %
                     (type(err).__name__, str(err)))
        return False
Example #16
def _add_torrent_url(result):
    logger.debug('Deluge: Adding URL')
    if not any(delugeweb_auth):
        _get_auth()

    timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30)
    try:
        post_json = {
            "method": "core.add_torrent_url",
            "params": [result['url'], {}],
            "id": 32
        }

        response = requests.post(delugeweb_url,
                                 json=post_json,
                                 cookies=delugeweb_auth,
                                 verify=deluge_verify_cert,
                                 headers=headers,
                                 timeout=timeout)

        if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
            logger.debug('Status code: %s' % response.status_code)
            logger.debug(response.text)

        result['hash'] = response.json()['result']
        msg = 'Deluge: Response was %s' % result['hash']
        logger.debug(msg)
        if 'was None' in msg:
            logger.error(
                'Deluge: Adding torrent URL failed: Is the WebUI running?')
        return response.json()['result']
    except Exception as err:
        logger.error('Deluge %s: Adding torrent URL failed: %s' %
                     (type(err).__name__, str(err)))
        return False
Example #17
    def __init__(
        self,
        base_url='',  # lazylibrarian.CONFIG['UTORRENT_HOST']
        username='',  # lazylibrarian.CONFIG['UTORRENT_USER']
        password='',  # lazylibrarian.CONFIG['UTORRENT_PASS']
    ):

        host = lazylibrarian.CONFIG['UTORRENT_HOST']
        port = check_int(lazylibrarian.CONFIG['UTORRENT_PORT'], 0)
        if not host or not port:
            logger.error('Invalid Utorrent host or port, check your config')

        if not host.startswith('http'):
            host = 'http://' + host

        if host.endswith('/'):
            host = host[:-1]

        if host.endswith('/gui'):
            host = host[:-4]

        host = "%s:%s" % (host, port)
        self.base_url = host
        self.username = lazylibrarian.CONFIG['UTORRENT_USER']
        self.password = lazylibrarian.CONFIG['UTORRENT_PASS']
        self.opener = self._make_opener('uTorrent', self.base_url,
                                        self.username, self.password)
        self.token = self._get_token()
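
A short, self-contained sketch of the host normalisation performed in __init__ above, using example values only:

def normalise_host(host, port):
    # mirror the steps above: ensure a scheme, strip a trailing '/' and '/gui', append the port
    if not host.startswith('http'):
        host = 'http://' + host
    if host.endswith('/'):
        host = host[:-1]
    if host.endswith('/gui'):
        host = host[:-4]
    return "%s:%s" % (host, port)

print(normalise_host('myserver/gui/', 8080))  # http://myserver:8080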
Example #18
def fetchURL(URL, headers=None, retry=True):
    """ Return the result of fetching a URL and True if success
        Otherwise return error message and False
        Allow one retry on timeout by default"""

    if headers is None:
        # some sites insist on having a user-agent, default is to add one
        # if you don't want any headers, send headers=[]
        headers = {'User-Agent': USER_AGENT}
    proxies = proxyList()
    try:
        timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30)
        r = requests.get(URL, headers=headers, timeout=timeout, proxies=proxies)

        if str(r.status_code).startswith('2'):  # (200 OK etc)
            return r.content, True
        # noinspection PyBroadException
        try:
            # noinspection PyProtectedMember
            msg = requests.status_codes._codes[r.status_code][0]
        except Exception:
            msg = str(r.content)
        return "Response status %s: %s" % (r.status_code, msg), False
    except requests.exceptions.Timeout as e:
        if not retry:
            logger.error(u"fetchURL: Timeout getting response from %s" % URL)
            return "Timeout %s" % str(e), False
        logger.debug(u"fetchURL: retrying - got timeout on %s" % URL)
        result, success = fetchURL(URL, headers=headers, retry=False)
        return result, success
    except Exception as e:
        if hasattr(e, 'reason'):
            return "Exception %s: Reason: %s" % (type(e).__name__, str(e.reason)), False
        return "Exception %s: %s" % (type(e).__name__, str(e)), False
Example #19
    def _grFollowAll(self):
        myDB = database.DBConnection()
        cmd = 'SELECT AuthorName,AuthorID,GRfollow FROM authors where '
        cmd += 'Status="Active" or Status="Wanted" or Status="Loading"'
        authors = myDB.select(cmd)
        count = 0
        for author in authors:
            followid = check_int(author['GRfollow'], 0)
            if followid > 0:
                logger.debug('%s is already followed' % author['AuthorName'])
            elif author['GRfollow'] == "0":
                logger.debug('%s is manually unfollowed' % author['AuthorName'])
            else:
                res = grfollow(author['AuthorID'], True)
                if res.startswith('Unable'):
                    logger.warn(res)
                try:
                    followid = res.split("followid=")[1]
                    logger.debug('%s marked followed' % author['AuthorName'])
                    count += 1
                except IndexError:
                    followid = ''
                myDB.action('UPDATE authors SET GRfollow=? WHERE AuthorID=?',
                            (followid, author['AuthorID']))
        self.data = "Added follow to %s author%s" % (count, plural(count))
Example #20
def _add_torrent_url(result):
    if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
        logger.debug('Deluge: Adding URL')
    if not any(delugeweb_auth):
        _get_auth()

    timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30)
    try:
        post_json = {"method": "core.add_torrent_url", "params": [result['url'], {}], "id": 32}

        response = requests.post(delugeweb_url, json=post_json, cookies=delugeweb_auth,
                                 verify=deluge_verify_cert, headers=headers, timeout=timeout)

        if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
            logger.debug('Status code: %s' % response.status_code)
            logger.debug(response.text)

        result['hash'] = response.json()['result']
        msg = 'Deluge: Response was %s' % result['hash']
        if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
            logger.debug(msg)
        if 'was None' in msg:
            logger.error('Deluge: Adding torrent URL failed: Is the WebUI running?')
        return response.json()['result']
    except Exception as err:
        logger.error('Deluge %s: Adding torrent URL failed: %s' % (type(err).__name__, str(err)))
        return False
Example #23
    def _notify(message, event, force=False):

        # suppress notifications if the notifier is disabled but the notify options are checked
        if not lazylibrarian.CONFIG['USE_EMAIL'] and not force:
            return False

        subject = event
        text = message
        message = MIMEText(message, 'plain', "utf-8")
        message['Subject'] = subject
        message['From'] = email.utils.formataddr(
            ('LazyLibrarian', lazylibrarian.CONFIG['EMAIL_FROM']))
        message['To'] = lazylibrarian.CONFIG['EMAIL_TO']

        logger.debug('Email notification: %s' % message['Subject'])
        logger.debug('Email from: %s' % message['From'])
        logger.debug('Email to: %s' % message['To'])
        logger.debug('Email text: %s' % text)

        try:
            if lazylibrarian.CONFIG['EMAIL_SSL']:
                mailserver = smtplib.SMTP_SSL(
                    lazylibrarian.CONFIG['EMAIL_SMTP_SERVER'],
                    check_int(lazylibrarian.CONFIG['EMAIL_SMTP_PORT'], 465))
            else:
                mailserver = smtplib.SMTP(
                    lazylibrarian.CONFIG['EMAIL_SMTP_SERVER'],
                    check_int(lazylibrarian.CONFIG['EMAIL_SMTP_PORT'], 25))

            if lazylibrarian.CONFIG['EMAIL_TLS'] == 'True':
                mailserver.starttls()
            else:
                mailserver.ehlo()

            if lazylibrarian.CONFIG['EMAIL_SMTP_USER']:
                mailserver.login(lazylibrarian.CONFIG['EMAIL_SMTP_USER'],
                                 lazylibrarian.CONFIG['EMAIL_SMTP_PASSWORD'])

            mailserver.sendmail(lazylibrarian.CONFIG['EMAIL_FROM'],
                                lazylibrarian.CONFIG['EMAIL_TO'],
                                message.as_string())
            mailserver.quit()
            return True

        except Exception as e:
            logger.warn('Error sending Email: %s' % e)
            return False
Example #24
    def _sendAndroidPN(self, title, msg, url, username, broadcast):

        # build up the URL and parameters
        msg = msg.strip()
        if PY2:
            msg = msg.encode(lazylibrarian.SYS_ENCODING)

        data = {
            'action': "send",
            'broadcast': broadcast,
            'uri': "",
            'title': title,
            'username': username,
            'message': msg,
        }
        proxies = proxyList()
        # send the request
        try:
            timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30)
            r = requests.get(url, params=data, timeout=timeout, proxies=proxies)
            status = str(r.status_code)
            if status.startswith('2'):
                logger.debug("ANDROIDPN: Notification successful.")
                return True

            # HTTP status 404 if the provided email address isn't an AndroidPN user.
            if status == '404':
                logger.warn("ANDROIDPN: Username is wrong/not an AndroidPN email. AndroidPN will send an email to it")
            # For HTTP status code 401's, it is because you are passing in either an
            # invalid token, or the user has not added your service.
            elif status == '401':
                subscribeNote = self._sendAndroidPN(title, msg, url, username, broadcast)
                if subscribeNote:
                    logger.debug("ANDROIDPN: Subscription sent")
                    return True
                else:
                    logger.error("ANDROIDPN: Subscription could not be sent")

            # If you receive an HTTP status code of 400, it is because you failed to send the proper parameters
            elif status == '400':
                logger.error("ANDROIDPN: Wrong data sent to AndroidPN")
            else:
                logger.error("ANDROIDPN: Got error code %s" % status)
            return False

        except Exception as e:
            # URLError only returns a reason, not a code. HTTPError gives a code
            # FIXME: Python 2.5 hack, it wrongly reports 201 as an error
            if hasattr(e, 'code') and e.code == 201:
                logger.debug("ANDROIDPN: Notification successful.")
                return True

            # if we get an error back that doesn't have an error code then who knows what's really happening
            if not hasattr(e, 'code'):
                logger.error("ANDROIDPN: Notification failed.")
            else:
                # noinspection PyUnresolvedReferences
                logger.error("ANDROIDPN: Notification failed. Error code: " + str(e.code))
            return False
Example #25
def getTorrentStatus(torrentid, data):
    if not any(delugeweb_auth):
        _get_auth()

    timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30)
    try:
        post_json = {"method": "web.get_torrent_status",
                     "params": [torrentid, ["total_done", "message", "state"]],
                     "id": 22}
        response = requests.post(delugeweb_url, json=post_json, cookies=delugeweb_auth,
                                 verify=deluge_verify_cert, headers=headers, timeout=timeout)
        if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
            logger.debug('Status code: %s' % response.status_code)
            logger.debug(str(response.text))

        try:
            total_done = response.json()['result']['total_done']
            if data[0] == 'progress' and total_done == 0:
                return response.json()
        except KeyError:
            total_done = 0

        tries = 0
        while total_done == 0 and tries < 10:
            tries += 1
            response = requests.post(delugeweb_url, json=post_json, timeout=timeout,
                                     verify=deluge_verify_cert, cookies=delugeweb_auth, headers=headers)
            try:
                total_done = response.json()['result']['total_done']
            except KeyError:
                pass

        post_json = {"method": "web.get_torrent_status",
                     "params": [torrentid,
                                [
                                    data
                                    # "progress",
                                    # "save_path",
                                    # "total_size",
                                    # "num_files",
                                    # "files",
                                    # "message",
                                    # "tracker",
                                    # "comment"
                                ]
                                ],
                     "id": 23}

        response = requests.post(delugeweb_url, json=post_json, cookies=delugeweb_auth,
                                 verify=deluge_verify_cert, headers=headers, timeout=timeout)
        if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
            logger.debug('Status code: %s' % response.status_code)
            logger.debug(response.text)
        return response.json()

    except Exception as err:
        logger.debug('Deluge %s: Could not get torrent info %s: %s' % (data, type(err).__name__, str(err)))
        return False
Example #28
def setTorrentPath(result):
    if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
        logger.debug('Deluge: Setting download path')
    if not any(delugeweb_auth):
        _get_auth()

    dl_dir = lazylibrarian.CONFIG['DELUGE_DIR']

    if not dl_dir:
        return True

    timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30)
    try:
        post_json = {
            "method": "core.set_torrent_move_completed",
            "params": [result['hash'], True],
            "id": 7
        }

        response = requests.post(delugeweb_url,
                                 json=post_json,
                                 cookies=delugeweb_auth,
                                 verify=deluge_verify_cert,
                                 headers=headers,
                                 timeout=timeout)
        if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
            logger.debug('Status code: %s' % response.status_code)
            logger.debug(response.text)

        if not os.path.isdir(dl_dir):
            if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
                logger.debug(
                    'Deluge: %s directory doesn\'t exist, let\'s create it' %
                    dl_dir)
            _ = mymakedirs(dl_dir)

        post_json = {
            "method": "core.set_torrent_move_completed_path",
            "params": [result['hash'], dl_dir],
            "id": 8
        }

        response = requests.post(delugeweb_url,
                                 json=post_json,
                                 cookies=delugeweb_auth,
                                 verify=deluge_verify_cert,
                                 headers=headers,
                                 timeout=timeout)
        if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
            logger.debug('Status code: %s' % response.status_code)
            logger.debug(response.text)

        return not response.json()['error']
    except Exception as err:
        logger.error('Deluge %s: setTorrentPath failed: %s' %
                     (type(err).__name__, str(err)))
        return False
Example #29
    def _Magazine(self, **kwargs):
        index = 0
        if 'index' in kwargs:
            index = check_int(kwargs['index'], 0)
        myDB = database.DBConnection()
        if 'magid' not in kwargs:
            self.data = self._error_with_message('No Magazine Provided')
            return
        links = []
        entries = []
        title = ''
        cmd = "SELECT Title,IssueID,IssueDate,IssueAcquired,IssueFile from issues "
        cmd += "WHERE Title='%s' order by IssueDate DESC"
        results = myDB.select(cmd % kwargs['magid'])
        page = results[index:(index + self.PAGE_SIZE)]
        for issue in page:
            title = makeUnicode(issue['Title'])
            entry = {'title': escape('%s (%s)' % (title, issue['IssueDate'])),
                     'id': escape('issue:%s' % issue['IssueID']),
                     'updated': opdstime(issue['IssueAcquired']),
                     'content': escape('%s - %s' % (title, issue['IssueDate'])),
                     'href': '%s?cmd=Serve&amp;issueid=%s' % (self.opdsroot, quote_plus(issue['IssueID'])),
                     'kind': 'acquisition',
                     'rel': 'file',
                     'type': mimeType(issue['IssueFile'])}
            if lazylibrarian.CONFIG['OPDS_METAINFO']:
                fname = os.path.splitext(issue['IssueFile'])[0]
                res = cache_img('magazine', issue['IssueID'], fname + '.jpg')
                entry['image'] = self.searchroot + '/' + res[0]
            entries.append(entry)

        feed = {}
        title = '%s (%s)' % (escape(title), len(entries))
        feed['title'] = 'LazyLibrarian OPDS - %s' % title
        feed['id'] = 'magazine:%s' % escape(kwargs['magid'])
        feed['updated'] = now()
        links.append(getLink(href=self.opdsroot, ftype='application/atom+xml; profile=opds-catalog; kind=navigation',
                             rel='start', title='Home'))
        links.append(getLink(href='%s?cmd=Magazine&amp;magid=%s' % (self.opdsroot, quote_plus(kwargs['magid'])),
                             ftype='application/atom+xml; profile=opds-catalog; kind=navigation', rel='self'))
        if len(results) > (index + self.PAGE_SIZE):
            links.append(
                getLink(href='%s?cmd=Magazine&amp;magid=%s&amp;index=%s' % (self.opdsroot, quote_plus(kwargs['magid']),
                                                                            index + self.PAGE_SIZE),
                        ftype='application/atom+xml; profile=opds-catalog; kind=navigation', rel='next'))
        if index >= self.PAGE_SIZE:
            links.append(
                getLink(href='%s?cmd=Magazine&amp;magid=%s&amp;index=%s' % (self.opdsroot, quote_plus(kwargs['magid']),
                                                                            index - self.PAGE_SIZE),
                        ftype='application/atom+xml; profile=opds-catalog; kind=navigation', rel='previous'))

        feed['links'] = links
        feed['entries'] = entries
        logger.debug("Returning %s issue%s" % (len(entries), plural(len(entries))))
        self.data = feed
        return
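The feed above pages its results with a plain slice plus index arithmetic for the next/previous links, a pattern the other OPDS handlers below repeat. A small standalone sketch of just that logic (the PAGE_SIZE value is illustrative):

PAGE_SIZE = 30  # illustrative; the real value is an attribute of the OPDS class

def page_of(results, index, page_size=PAGE_SIZE):
    # Current slice plus the indexes to use in 'next'/'previous' links,
    # mirroring the link-building above.
    page = results[index:index + page_size]
    next_index = index + page_size if len(results) > index + page_size else None
    prev_index = index - page_size if index >= page_size else None
    return page, next_index, prev_index

# page_of(list(range(100)), 30) -> (items 30..59, next at 60, previous at 0)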
Example #31
    def _RecentAudio(self, **kwargs):
        index = 0
        if 'index' in kwargs:
            index = check_int(kwargs['index'], 0)
        myDB = database.DBConnection()
        feed = {'title': 'LazyLibrarian OPDS - Recent AudioBooks', 'id': 'Recent AudioBooks', 'updated': now()}
        links = []
        entries = []
        links.append(getLink(href=self.opdsroot, ftype='application/atom+xml; profile=opds-catalog; kind=navigation',
                             rel='start', title='Home'))
        links.append(getLink(href='%s?cmd=RecentAudio' % self.opdsroot,
                             ftype='application/atom+xml; profile=opds-catalog; kind=navigation', rel='self'))
        links.append(getLink(href='%s/opensearchbooks.xml' % self.searchroot,
                             ftype='application/opensearchdescription+xml', rel='search', title='Search Books'))

        cmd = "select BookName,BookID,AudioLibrary,BookDate,BookImg,BookDesc,BookAdded,AuthorID from books WHERE "
        if 'query' in kwargs:
            cmd += "BookName LIKE '%" + kwargs['query'] + "%' AND "
        cmd += "AudioStatus='Open' order by AudioLibrary DESC"
        results = myDB.select(cmd)
        page = results[index:(index + self.PAGE_SIZE)]
        for book in page:
            title = makeUnicode(book['BookName'])
            entry = {'title': escape(title),
                     'id': escape('audio:%s' % book['BookID']),
                     'updated': opdstime(book['AudioLibrary']),
                     'href': '%s?cmd=Serve&amp;audioid=%s' % (self.opdsroot, quote_plus(book['BookID'])),
                     'kind': 'acquisition',
                     'rel': 'file',
                     'type': mimeType("we_send.zip")}
            if lazylibrarian.CONFIG['OPDS_METAINFO']:
                author = myDB.match("SELECT AuthorName from authors WHERE AuthorID='%s'" % book['AuthorID'])
                author = makeUnicode(author['AuthorName'])
                entry['image'] = self.searchroot + '/' + book['BookImg']
                entry['content'] = escape('%s - %s' % (title, book['BookDesc']))
                entry['author'] = escape('%s' % author)
            else:
                entry['content'] = escape('%s (%s)' % (title, book['BookAdded']))
            entries.append(entry)

        if len(results) > (index + self.PAGE_SIZE):
            links.append(
                getLink(href='%s?cmd=RecentAudio&amp;index=%s' % (self.opdsroot, index + self.PAGE_SIZE),
                        ftype='application/atom+xml; profile=opds-catalog; kind=navigation', rel='next'))
        if index >= self.PAGE_SIZE:
            links.append(
                getLink(href='%s?cmd=RecentAudio&amp;index=%s' % (self.opdsroot, index - self.PAGE_SIZE),
                        ftype='application/atom+xml; profile=opds-catalog; kind=navigation', rel='previous'))

        feed['links'] = links
        feed['entries'] = entries
        logger.debug("Returning %s result%s" % (len(entries), plural(len(entries))))
        self.data = feed
        return
Example #33
def _hostURL():
    # Build webapi_url from config settings
    host = lazylibrarian.CONFIG['SYNOLOGY_HOST']
    port = check_int(lazylibrarian.CONFIG['SYNOLOGY_PORT'], 0)
    if not host or not port:
        logger.debug("Invalid Synology host or port, check your config")
        return False
    if not host.startswith("http://") and not host.startswith("https://"):
        host = 'http://' + host
    if host.endswith('/'):
        host = host[:-1]
    return "%s:%s/webapi/" % (host, port)
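As a standalone illustration of the normalisation above (host and port values are made up):

def normalise(host, port):
    # Same steps as _hostURL, without reading the LazyLibrarian config
    if not host or not port:
        return False
    if not host.startswith("http://") and not host.startswith("https://"):
        host = 'http://' + host
    if host.endswith('/'):
        host = host[:-1]
    return "%s:%s/webapi/" % (host, port)

assert normalise('192.168.1.5', 5000) == 'http://192.168.1.5:5000/webapi/'
assert normalise('https://nas.local/', 5001) == 'https://nas.local:5001/webapi/'
assert normalise('', 5000) is False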
Example #34
    def _Magazines(self, **kwargs):
        index = 0
        if 'index' in kwargs:
            index = check_int(kwargs['index'], 0)
        myDB = database.DBConnection()
        feed = {'title': 'LazyLibrarian OPDS - Magazines', 'id': 'Magazines', 'updated': now()}
        links = []
        entries = []
        links.append(getLink(href=self.opdsroot, ftype='application/atom+xml; profile=opds-catalog; kind=navigation',
                             rel='start', title='Home'))
        links.append(getLink(href='%s?cmd=Magazines' % self.opdsroot,
                             ftype='application/atom+xml; profile=opds-catalog; kind=navigation', rel='self'))
        links.append(getLink(href='%s/opensearchmagazines.xml' % self.searchroot,
                             ftype='application/opensearchdescription+xml', rel='search', title='Search Magazines'))
        cmd = 'select magazines.*,(select count(*) as counter from issues where magazines.title = issues.title)'
        cmd += ' as Iss_Cnt from magazines '
        if 'query' in kwargs:
            cmd += "WHERE magazines.title LIKE '%" + kwargs['query'] + "%' "
        cmd += 'order by magazines.title'
        results = myDB.select(cmd)
        page = results[index:(index + self.PAGE_SIZE)]
        for mag in page:
            if mag['Iss_Cnt'] > 0:
                title = makeUnicode(mag['Title'])
                entry = {
                    'title': escape('%s (%s)' % (title, mag['Iss_Cnt'])),
                    'id': escape('magazine:%s' % title),
                    'updated': opdstime(mag['LastAcquired']),
                    'content': escape('%s' % title),
                    'href': '%s?cmd=Magazine&amp;magid=%s' % (self.opdsroot, quote_plus(title)),
                    'kind': 'navigation',
                    'rel': 'subsection',
                }
                # disabled cover image as it stops navigation?
                # if lazylibrarian.CONFIG['OPDS_METAINFO']:
                #     res = cache_img('magazine', md5_utf8(mag['LatestCover']), mag['LatestCover'], refresh=True)
                #     entry['image'] = self.searchroot + '/' + res[0]
                entries.append(entry)

        if len(results) > (index + self.PAGE_SIZE):
            links.append(
                getLink(href='%s?cmd=Magazines&amp;index=%s' % (self.opdsroot, index + self.PAGE_SIZE),
                        ftype='application/atom+xml; profile=opds-catalog; kind=navigation', rel='next'))
        if index >= self.PAGE_SIZE:
            links.append(
                getLink(href='%s?cmd=Magazines&amp;index=%s' % (self.opdsroot, index - self.PAGE_SIZE),
                        ftype='application/atom+xml; profile=opds-catalog; kind=navigation', rel='previous'))

        feed['links'] = links
        feed['entries'] = entries
        logger.debug("Returning %s magazine%s" % (len(entries), plural(len(entries))))
        self.data = feed
        return
Example #36
    def _RecentMags(self, **kwargs):
        index = 0
        if 'index' in kwargs:
            index = check_int(kwargs['index'], 0)
        myDB = database.DBConnection()
        feed = {'title': 'LazyLibrarian OPDS - Recent Magazines', 'id': 'Recent Magazines', 'updated': now()}
        links = []
        entries = []
        links.append(getLink(href=self.opdsroot, ftype='application/atom+xml; profile=opds-catalog; kind=navigation',
                             rel='start', title='Home'))
        links.append(getLink(href='%s?cmd=RecentMags' % self.opdsroot,
                             ftype='application/atom+xml; profile=opds-catalog; kind=navigation', rel='self'))
        links.append(getLink(href='%s/opensearchmagazines.xml' % self.searchroot,
                             ftype='application/opensearchdescription+xml', rel='search', title='Search Magazines'))
        cmd = "select Title,IssueID,IssueAcquired,IssueDate,IssueFile from issues "
        cmd += "where IssueFile != '' "
        if 'query' in kwargs:
            cmd += "AND Title LIKE '%" + kwargs['query'] + "%' "
        cmd += "order by IssueAcquired DESC"
        results = myDB.select(cmd)
        page = results[index:(index + self.PAGE_SIZE)]
        for mag in page:
            title = makeUnicode(mag['Title'])
            entry = {'title': escape('%s' % mag['IssueDate']),
                     'id': escape('issue:%s' % mag['IssueID']),
                     'updated': opdstime(mag['IssueAcquired']),
                     'content': escape('%s - %s' % (title, mag['IssueDate'])),
                     'href': '%s?cmd=Serve&amp;issueid=%s' % (self.opdsroot, quote_plus(mag['IssueID'])),
                     'kind': 'acquisition',
                     'rel': 'file',
                     'author': escape(title),
                     'type': mimeType(mag['IssueFile'])}
            if lazylibrarian.CONFIG['OPDS_METAINFO']:
                fname = os.path.splitext(mag['IssueFile'])[0]
                res = cache_img('magazine', mag['IssueID'], fname + '.jpg')
                entry['image'] = self.searchroot + '/' + res[0]
            entries.append(entry)

        if len(results) > (index + self.PAGE_SIZE):
            links.append(
                getLink(href='%s?cmd=RecentMags&amp;index=%s' % (self.opdsroot, index + self.PAGE_SIZE),
                        ftype='application/atom+xml; profile=opds-catalog; kind=navigation', rel='next'))
        if index >= self.PAGE_SIZE:
            links.append(
                getLink(href='%s?cmd=RecentMags&amp;index=%s' % (self.opdsroot, index - self.PAGE_SIZE),
                        ftype='application/atom+xml; profile=opds-catalog; kind=navigation', rel='previous'))

        feed['links'] = links
        feed['entries'] = entries
        logger.debug("Returning %s issue%s" % (len(entries), plural(len(entries))))
        self.data = feed
        return
Example #38
    def _Magazines(self, **kwargs):
        index = 0
        if 'index' in kwargs:
            index = check_int(kwargs['index'], 0)
        myDB = database.DBConnection()
        feed = {'title': 'LazyLibrarian OPDS - Magazines', 'id': 'Magazines', 'updated': now()}
        links = []
        entries = []
        links.append(getLink(href=self.opdsroot, ftype='application/atom+xml; profile=opds-catalog; kind=navigation',
                             rel='start', title='Home'))
        links.append(getLink(href='%s?cmd=Magazines' % self.opdsroot,
                             ftype='application/atom+xml; profile=opds-catalog; kind=navigation', rel='self'))
        links.append(getLink(href='%s/opensearchmagazines.xml' % self.searchroot,
                             ftype='application/opensearchdescription+xml', rel='search', title='Search Magazines'))
        cmd = 'select magazines.*,(select count(*) as counter from issues where magazines.title = issues.title)'
        cmd += ' as Iss_Cnt from magazines '
        if 'query' in kwargs:
            cmd += "WHERE magazines.title LIKE '%" + kwargs['query'] + "%' "
        cmd += 'order by magazines.title'
        results = myDB.select(cmd)
        page = results[index:(index + self.PAGE_SIZE)]
        for mag in page:
            if mag['Iss_Cnt'] > 0:
                title = makeUnicode(mag['Title'])
                entry = {
                    'title': escape('%s (%s)' % (title, mag['Iss_Cnt'])),
                    'id': escape('magazine:%s' % title),
                    'updated': opdstime(mag['LastAcquired']),
                    'content': escape('%s' % title),
                    'href': '%s?cmd=Magazine&amp;magid=%s' % (self.opdsroot, quote_plus(title)),
                    'kind': 'navigation',
                    'rel': 'subsection',
                }
                if lazylibrarian.CONFIG['OPDS_METAINFO']:
                    res = cache_img('magazine', md5_utf8(mag['LatestCover']), mag['LatestCover'], refresh=True)
                    entry['image'] = self.searchroot + '/' + res[0]
                entries.append(entry)

        if len(results) > (index + self.PAGE_SIZE):
            links.append(
                getLink(href='%s?cmd=Magazines&amp;index=%s' % (self.opdsroot, index + self.PAGE_SIZE),
                        ftype='application/atom+xml; profile=opds-catalog; kind=navigation', rel='next'))
        if index >= self.PAGE_SIZE:
            links.append(
                getLink(href='%s?cmd=Magazines&amp;index=%s' % (self.opdsroot, index - self.PAGE_SIZE),
                        ftype='application/atom+xml; profile=opds-catalog; kind=navigation', rel='previous'))

        feed['links'] = links
        feed['entries'] = entries
        logger.debug("Returning %s magazine%s" % (len(entries), plural(len(entries))))
        self.data = feed
        return
Example #39
def setSeries(serieslist=None, bookid=None):
    """ set series details in the series/member tables from the supplied list
        and a displayable summary in the book table
        serieslist is a list of (SeriesID, SeriesNum, SeriesName) tuples """
    myDB = database.DBConnection()
    if bookid:
        # delete any old series-member entries
        myDB.action('DELETE from member WHERE BookID=?', (bookid, ))
        for item in serieslist:
            match = myDB.match(
                'SELECT SeriesID from series where SeriesName=? COLLATE NOCASE',
                (item[2], ))
            if match:
                seriesid = match['SeriesID']
            else:
                # new series, need to set status and get SeriesID
                if item[0]:
                    seriesid = item[0]
                else:
                    # no seriesid so generate it (row count + 1)
                    cnt = myDB.match("select count(*) as counter from series")
                    res = check_int(cnt['counter'], 0)
                    seriesid = str(res + 1)
                myDB.action('INSERT into series VALUES (?, ?, ?, ?, ?)',
                            (seriesid, item[2], "Active", 0, 0),
                            suppress='UNIQUE')
                # don't ask what other books are in the series - leave for user to query if series wanted
                # _ = getSeriesMembers(match['SeriesID'])
            book = myDB.match(
                'SELECT AuthorID,WorkID from books where BookID=?', (bookid, ))
            if seriesid and book:
                controlValueDict = {"BookID": bookid, "SeriesID": seriesid}
                newValueDict = {"SeriesNum": item[1], "WorkID": book['WorkID']}
                myDB.upsert("member", newValueDict, controlValueDict)
                myDB.action(
                    'INSERT INTO seriesauthors ("SeriesID", "AuthorID") VALUES (?, ?)',
                    (seriesid, book['AuthorID']),
                    suppress='UNIQUE')
            else:
                logger.debug('Unable to set series for book %s, %s' %
                             (bookid, item))

        series = ''
        for item in serieslist:
            newseries = "%s %s" % (item[2], item[1])
            newseries = newseries.strip()
            if series and newseries:
                series += '<br>'
            series += newseries
        myDB.action('UPDATE books SET SeriesDisplay=? WHERE BookID=?',
                    (series, bookid))
Example #40
def getLatestVersion_FromGit():
    # Don't call directly, use getLatestVersion as wrapper.
    # Also removed reference to global variable setting.
    latest_version = 'Unknown'

    # Can only work for non Windows driven installs, so check install type
    if lazylibrarian.CONFIG['INSTALL_TYPE'] == 'win':
        logmsg('debug', '(getLatestVersion_FromGit) Error - should not be called under a windows install')
        latest_version = 'WINDOWS INSTALL'
    else:
        # check current branch value of the local git repo as folks may pull from a branch not master
        branch = lazylibrarian.CONFIG['GIT_BRANCH']

        if branch == 'InvalidBranch':
            logmsg('debug', '(getLatestVersion_FromGit) - Failed to get a valid branch name from local repo')
        else:
            if branch == 'Package':  # check packages against master
                branch = 'master'
            # Get the latest commit available from github
            url = 'https://api.github.com/repos/%s/%s/commits/%s' % (
                lazylibrarian.CONFIG['GIT_USER'], lazylibrarian.CONFIG['GIT_REPO'], branch)
            logmsg('debug',
                   '(getLatestVersion_FromGit) Retrieving latest version information from github command=[%s]' % url)

            age = lazylibrarian.CONFIG['GIT_UPDATED']
            try:
                headers = {'User-Agent': USER_AGENT}
                if age:
                    logmsg('debug', '(getLatestVersion_FromGit) Checking if modified since %s' % age)
                    headers.update({'If-Modified-Since': age})
                proxies = proxyList()
                timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30)
                r = requests.get(url, timeout=timeout, headers=headers, proxies=proxies)

                if str(r.status_code).startswith('2'):
                    git = r.json()
                    latest_version = git['sha']
                    logmsg('debug', '(getLatestVersion_FromGit) Branch [%s] Latest Version has been set to [%s]' % (
                        branch, latest_version))
                elif str(r.status_code) == '304':
                    latest_version = lazylibrarian.CONFIG['CURRENT_VERSION']
                    logmsg('debug', '(getLatestVersion_FromGit) Not modified, currently on Latest Version')
            except Exception as e:
                logmsg('warn', '(getLatestVersion_FromGit) Could not get the latest commit from github')
                logmsg('debug', 'git %s for %s: %s' % (type(e).__name__, url, str(e)))
                latest_version = 'Not_Available_From_GitHUB'

    return latest_version
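The check above leans on an If-Modified-Since conditional request so GitHub can answer 304 Not Modified when nothing has changed since the stored timestamp. A stripped-down sketch of that exchange (the URL and User-Agent values are placeholders for the ones built above):

import requests

def latest_sha(url, last_checked=None, timeout=30):
    # Fetch the newest commit sha, or return None on 304 so the caller
    # keeps the version it already has.
    headers = {'User-Agent': 'LazyLibrarian'}
    if last_checked:
        headers['If-Modified-Since'] = last_checked
    r = requests.get(url, headers=headers, timeout=timeout)
    if r.status_code == 304:
        return None
    r.raise_for_status()
    return r.json()['sha']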
Example #41
def BlockProvider(who, why):
    delay = check_int(lazylibrarian.CONFIG['BLOCKLIST_TIMER'], 3600)
    if delay == 0:
        logger.debug('Not blocking %s,%s as timer is zero' % (who, why))
    else:
        mins = int(delay / 60) + (delay % 60 > 0)
        logger.info("Blocking provider %s for %s minutes because %s" %
                    (who, mins, why))
        timenow = int(time.time())
        for entry in lazylibrarian.PROVIDER_BLOCKLIST:
            if entry["name"] == who:
                lazylibrarian.PROVIDER_BLOCKLIST.remove(entry)
        newentry = {"name": who, "resume": timenow + delay, "reason": why}
        lazylibrarian.PROVIDER_BLOCKLIST.append(newentry)
    logger.debug("Provider Blocklist contains %s entries" %
                 len(lazylibrarian.PROVIDER_BLOCKLIST))
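Each blocklist entry stores the timestamp at which the provider may resume. The consuming code is not shown here, but a hedged sketch of how such an entry could be tested:

import time

def is_blocked(blocklist, who):
    # Assumed consumer of the entries appended above: a provider stays
    # blocked while the current time is before its stored 'resume' value.
    for entry in blocklist:
        if entry["name"] == who and entry["resume"] > int(time.time()):
            return True
    return False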
Example #42
def BlockProvider(who, why):
    delay = check_int(lazylibrarian.CONFIG['BLOCKLIST_TIMER'], 3600)
    if len(why) > 40:
        why = why[:40] + '...'
    if delay == 0:
        logger.debug('Not blocking %s,%s as timer is zero' % (who, why))
    else:
        mins = int(delay / 60) + (delay % 60 > 0)
        logger.info("Blocking provider %s for %s minutes because %s" % (who, mins, why))
        timenow = int(time.time())
        for entry in lazylibrarian.PROVIDER_BLOCKLIST:
            if entry["name"] == who:
                lazylibrarian.PROVIDER_BLOCKLIST.remove(entry)
        newentry = {"name": who, "resume": timenow + delay, "reason": why}
        lazylibrarian.PROVIDER_BLOCKLIST.append(newentry)
    logger.debug("Provider Blocklist contains %s entries" % len(lazylibrarian.PROVIDER_BLOCKLIST))
Example #43
def getCommitDifferenceFromGit():
    # See how many commits behind we are
    # Takes current latest version value and tries to diff it with the latest version in the current branch.
    commit_list = ''
    commits = -1
    if lazylibrarian.CONFIG['LATEST_VERSION'] == 'Not_Available_From_GitHUB':
        commits = 0  # don't report a commit diff as we don't know anything
        commit_list = 'Unable to get latest version from GitHub'
        logmsg('info', commit_list)
    elif lazylibrarian.CONFIG['CURRENT_VERSION'] and commits != 0:
        url = 'https://api.github.com/repos/%s/LazyLibrarian/compare/%s...%s' % (
            lazylibrarian.CONFIG['GIT_USER'], lazylibrarian.CONFIG['CURRENT_VERSION'],
            lazylibrarian.CONFIG['LATEST_VERSION'])
        logmsg('debug', 'Check for differences between local & repo by [%s]' % url)

        try:
            headers = {'User-Agent': USER_AGENT}
            proxies = proxyList()
            timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30)
            r = requests.get(url, timeout=timeout, headers=headers, proxies=proxies)
            git = r.json()
            if 'total_commits' in git:
                commits = int(git['total_commits'])
                msg = 'Github: Status [%s] - Ahead [%s] - Behind [%s] - Total Commits [%s]' % (
                    git['status'], git['ahead_by'], git['behind_by'], git['total_commits'])
                logmsg('debug', msg)
            else:
                logmsg('warn', 'Could not get difference status from GitHub: %s' % str(git))

            if commits > 0:
                for item in git['commits']:
                    commit_list = "%s\n%s" % (item['commit']['message'], commit_list)
        except Exception as e:
            logmsg('warn', 'Could not get difference status from GitHub: %s' % type(e).__name__)

    if commits > 1:
        logmsg('info', 'New version is available. You are %s commits behind' % commits)
    elif commits == 1:
        logmsg('info', 'New version is available. You are one commit behind')
    elif commits == 0:
        logmsg('info', 'Lazylibrarian is up to date')
    else:
        logmsg('info', 'Unknown version of lazylibrarian. Run the updater to identify your version')

    return commits, commit_list
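The compare endpoint answers with status, ahead_by, behind_by and total_commits plus a commits list, which is all the function above reads. A small sketch of pulling the same fields out of an already-fetched response (the sample values are made up):

sample = {
    "status": "behind", "ahead_by": 0, "behind_by": 2, "total_commits": 2,
    "commits": [
        {"commit": {"message": "fix magazine cover caching"}},
        {"commit": {"message": "bump version"}},
    ],
}

commits_behind = int(sample.get("total_commits", 0))
messages = [c["commit"]["message"] for c in sample.get("commits", [])]
# -> 2 commits behind, with one message per missing commit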
Example #44
    def log(message, level):

        logger = logging.getLogger('lazylibrarian')

        threadname = threading.currentThread().getName()

        # Get the frame data of the method that made the original logger call
        if len(inspect.stack()) > 2:
            frame = inspect.getframeinfo(inspect.stack()[2][0])
            program = os.path.basename(frame.filename)
            method = frame.function
            lineno = frame.lineno
        else:
            program = ""
            method = ""
            lineno = ""

        if 'windows' in platform.system().lower():  # windows cp1252 can't handle some accents
            message = formatter.unaccented(message)
        elif PY2:
            message = formatter.safe_unicode(message)
            message = message.encode(lazylibrarian.SYS_ENCODING)

        if level != 'DEBUG' or lazylibrarian.LOGLEVEL >= 2:
            # Limit the size of the "in-memory" log, as it gets slow if too long
            lazylibrarian.LOGLIST.insert(0,
                                         (formatter.now(), level, threadname,
                                          program, method, lineno, message))
            if len(lazylibrarian.LOGLIST) > formatter.check_int(
                    lazylibrarian.CONFIG['LOGLIMIT'], 500):
                del lazylibrarian.LOGLIST[-1]

        message = "%s : %s:%s:%s : %s" % (threadname, program, method, lineno,
                                          message)

        if level == 'DEBUG':
            logger.debug(message)
        elif level == 'INFO':
            logger.info(message)
        elif level == 'WARNING':
            logger.warning(message)
        else:
            logger.error(message)
Example #45
def thingLang(isbn):
    # try searching librarything for a language code using the isbn
    # if no language found, librarything return value is "invalid" or "unknown"
    # librarything returns plain text, not xml
    BOOK_URL = 'http://www.librarything.com/api/thingLang.php?isbn=' + isbn
    proxies = proxyList()
    booklang = ''
    try:
        librarything_wait()
        timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30)
        r = requests.get(BOOK_URL, timeout=timeout, proxies=proxies)
        resp = r.text
        logger.debug("LibraryThing reports language [%s] for %s" % (resp, isbn))
        if 'invalid' not in resp and 'unknown' not in resp and '<' not in resp:
            booklang = resp
    except Exception as e:
        logger.error("%s finding language: %s" % (type(e).__name__, str(e)))
    finally:
        return booklang
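A hypothetical call (the ISBN is made up); LibraryThing answers with a short plain-text language code, or 'invalid'/'unknown' when it has nothing:

lang = thingLang('9780747532743')
if lang:
    print('language code: %s' % lang)   # e.g. 'eng'
else:
    print('no language found')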
Example #46
def setSeries(serieslist=None, bookid=None):
    """ set series details in the series/member tables from the supplied list
        and a displayable summary in the book table
        serieslist is a list of (SeriesID, SeriesNum, SeriesName) tuples """
    myDB = database.DBConnection()
    if bookid:
        # delete any old series-member entries
        myDB.action('DELETE from member WHERE BookID=?', (bookid,))
        for item in serieslist:
            match = myDB.match('SELECT SeriesID from series where SeriesName=? COLLATE NOCASE', (item[2],))
            if match:
                seriesid = match['SeriesID']
            else:
                # new series, need to set status and get SeriesID
                if item[0]:
                    seriesid = item[0]
                else:
                    # no seriesid so generate it (row count + 1)
                    cnt = myDB.match("select count(*) as counter from series")
                    res = check_int(cnt['counter'], 0)
                    seriesid = str(res + 1)
                myDB.action('INSERT into series VALUES (?, ?, ?, ?, ?)',
                            (seriesid, item[2], "Active", 0, 0), suppress='UNIQUE')
                # don't ask what other books are in the series - leave for user to query if series wanted
                # _ = getSeriesMembers(match['SeriesID'])
            book = myDB.match('SELECT AuthorID,WorkID from books where BookID=?', (bookid,))
            if seriesid and book:
                controlValueDict = {"BookID": bookid, "SeriesID": seriesid}
                newValueDict = {"SeriesNum": item[1], "WorkID": book['WorkID']}
                myDB.upsert("member", newValueDict, controlValueDict)
                myDB.action('INSERT INTO seriesauthors ("SeriesID", "AuthorID") VALUES (?, ?)',
                            (seriesid, book['AuthorID']), suppress='UNIQUE')
            else:
                logger.debug('Unable to set series for book %s, %s' % (bookid, item))

        series = ''
        for item in serieslist:
            newseries = "%s %s" % (item[2], item[1])
            newseries = newseries.strip()
            if series and newseries:
                series += '<br>'
            series += newseries
        myDB.action('UPDATE books SET SeriesDisplay=? WHERE BookID=?', (series, bookid))
Example #47
def thingLang(isbn):
    # try searching librarything for a language code using the isbn
    # if no language found, librarything return value is "invalid" or "unknown"
    # librarything returns plain text, not xml
    BOOK_URL = 'http://www.librarything.com/api/thingLang.php?isbn=' + isbn
    proxies = proxyList()
    booklang = ''
    try:
        librarything_wait()
        timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30)
        r = requests.get(BOOK_URL, timeout=timeout, proxies=proxies)
        resp = r.text
        logger.debug("LibraryThing reports language [%s] for %s" % (resp, isbn))
        if resp != 'invalid' and resp != 'unknown':
            booklang = resp
    except Exception as e:
        logger.error("%s finding language: %s" % (type(e).__name__, str(e)))
    finally:
        return booklang
Example #48
def setTorrentPause(result):
    logger.debug('Deluge: Pausing torrent')
    if not any(delugeweb_auth):
        _get_auth()

    timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30)
    try:
        post_json = {"method": "core.pause_torrent", "params": [[result['hash']]], "id": 9}

        response = requests.post(delugeweb_url, json=post_json, cookies=delugeweb_auth,
                                 verify=deluge_verify_cert, headers=headers, timeout=timeout)
        if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
            logger.debug('Status code: %s' % response.status_code)
            logger.debug(response.text)

        return not response.json()['error']
    except Exception as err:
        logger.error('Deluge %s: setTorrentPause failed: %s' % (type(err).__name__, str(err)))
        return False
Example #49
def removeTorrent(torrentid, remove_data=False):
    if not any(delugeweb_auth):
        _get_auth()

    timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30)

    try:
        logger.debug('Deluge: Removing torrent %s' % str(torrentid))
        post_json = {"method": "core.remove_torrent", "params": [torrentid, remove_data], "id": 25}

        response = requests.post(delugeweb_url, json=post_json, cookies=delugeweb_auth,
                                 verify=deluge_verify_cert, headers=headers, timeout=timeout)
        if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
            logger.debug('Status code: %s' % response.status_code)
            logger.debug(response.text)

        result = response.json()['result']
        return result
    except Exception as err:
        logger.debug('Deluge: Could not delete torrent %s: %s' % (type(err).__name__, str(err)))
        return False
Example #50
    def log(message, level):

        logger = logging.getLogger('lazylibrarian')

        threadname = threading.currentThread().getName()

        # Get the frame data of the method that made the original logger call
        if len(inspect.stack()) > 2:
            frame = inspect.getframeinfo(inspect.stack()[2][0])
            program = os.path.basename(frame.filename)
            method = frame.function
            lineno = frame.lineno
        else:
            program = ""
            method = ""
            lineno = ""

        if 'windows' in platform.system().lower():  # windows cp1252 can't handle some accents
            message = formatter.unaccented(message)
        elif PY2:
            message = formatter.safe_unicode(message)
            message = message.encode(lazylibrarian.SYS_ENCODING)

        if level != 'DEBUG' or lazylibrarian.LOGLEVEL >= 2:
            # Limit the size of the "in-memory" log, as it gets slow if too long
            lazylibrarian.LOGLIST.insert(0, (formatter.now(), level, threadname, program, method, lineno, message))
            if len(lazylibrarian.LOGLIST) > formatter.check_int(lazylibrarian.CONFIG['LOGLIMIT'], 500):
                del lazylibrarian.LOGLIST[-1]

        message = "%s : %s:%s:%s : %s" % (threadname, program, method, lineno, message)

        if level == 'DEBUG':
            logger.debug(message)
        elif level == 'INFO':
            logger.info(message)
        elif level == 'WARNING':
            logger.warning(message)
        else:
            logger.error(message)
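The logger walks two frames up the call stack to report where the message came from. A self-contained sketch of that frame lookup:

import inspect
import os

def caller_info(depth=2):
    # File, function and line of the caller 'depth' frames up the stack,
    # with blanks as a fallback when the stack is shorter than expected.
    stack = inspect.stack()
    if len(stack) > depth:
        frame = inspect.getframeinfo(stack[depth][0])
        return os.path.basename(frame.filename), frame.function, frame.lineno
    return "", "", ""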
Example #51
def setTorrentPath(result):
    if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
        logger.debug('Deluge: Setting download path')
    if not any(delugeweb_auth):
        _get_auth()

    dl_dir = lazylibrarian.CONFIG['DELUGE_DIR']

    if not dl_dir:
        return True

    timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30)
    try:
        post_json = {"method": "core.set_torrent_move_completed", "params": [result['hash'], True], "id": 7}

        response = requests.post(delugeweb_url, json=post_json, cookies=delugeweb_auth,
                                 verify=deluge_verify_cert, headers=headers, timeout=timeout)
        if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
            logger.debug('Status code: %s' % response.status_code)
            logger.debug(response.text)

        if not os.path.isdir(dl_dir):
            if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
                logger.debug('Deluge: %s directory doesn\'t exist, let\'s create it' % dl_dir)
            _ = mymakedirs(dl_dir)

        post_json = {"method": "core.set_torrent_move_completed_path", "params": [result['hash'], dl_dir], "id": 8}

        response = requests.post(delugeweb_url, json=post_json, cookies=delugeweb_auth,
                                 verify=deluge_verify_cert, headers=headers, timeout=timeout)
        if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
            logger.debug('Status code: %s' % response.status_code)
            logger.debug(response.text)

        return not response.json()['error']
    except Exception as err:
        logger.error('Deluge %s: setTorrentPath failed: %s' % (type(err).__name__, str(err)))
        return False
Example #52
    def __init__(self, base_url='',  # lazylibrarian.CONFIG['UTORRENT_HOST'],
                 username='',  # lazylibrarian.CONFIG['UTORRENT_USER'],
                 password='',):  # lazylibrarian.CONFIG['UTORRENT_PASS']):

        host = lazylibrarian.CONFIG['UTORRENT_HOST']
        port = check_int(lazylibrarian.CONFIG['UTORRENT_PORT'], 0)
        if not host or not port:
            logger.error('Invalid Utorrent host or port, check your config')

        if not host.startswith("http://") and not host.startswith("https://"):
            host = 'http://' + host

        if host.endswith('/'):
            host = host[:-1]

        if host.endswith('/gui'):
            host = host[:-4]

        host = "%s:%s" % (host, port)
        self.base_url = host
        self.username = lazylibrarian.CONFIG['UTORRENT_USER']
        self.password = lazylibrarian.CONFIG['UTORRENT_PASS']
        self.opener = self._make_opener('uTorrent', self.base_url, self.username, self.password)
        self.token = self._get_token()
Example #53
    def __init__(self):

        host = lazylibrarian.CONFIG['QBITTORRENT_HOST']
        port = check_int(lazylibrarian.CONFIG['QBITTORRENT_PORT'], 0)
        if not host or not port:
            logger.error('Invalid Qbittorrent host or port, check your config')

        if not host.startswith("http://") and not host.startswith("https://"):
            host = 'http://' + host

        if host.endswith('/'):
            host = host[:-1]

        if host.endswith('/gui'):
            host = host[:-4]

        host = "%s:%s" % (host, port)
        self.base_url = host
        self.username = lazylibrarian.CONFIG['QBITTORRENT_USER']
        self.password = lazylibrarian.CONFIG['QBITTORRENT_PASS']
        self.cookiejar = http_cookiejar.CookieJar()
        self.opener = self._make_opener()
        self._get_sid(self.base_url, self.username, self.password)
        self.api = self._api_version()
Example #54
def SABnzbd(title=None, nzburl=None, remove_data=False):

    if nzburl in ['delete', 'delhistory'] and title == 'unknown':
        logger.debug('%s function unavailable in this version of sabnzbd, no nzo_ids' % nzburl)
        return False

    hostname = lazylibrarian.CONFIG['SAB_HOST']
    port = check_int(lazylibrarian.CONFIG['SAB_PORT'], 0)
    if not hostname or not port:
        logger.error('Invalid sabnzbd host or port, check your config')
        return False

    if hostname.endswith('/'):
        hostname = hostname[:-1]
    if not hostname.startswith("http://") and not hostname.startswith("https://"):
        hostname = 'http://' + hostname

    HOST = "%s:%s" % (hostname, port)

    if lazylibrarian.CONFIG['SAB_SUBDIR']:
        HOST = HOST + "/" + lazylibrarian.CONFIG['SAB_SUBDIR']

    params = {}
    if nzburl == 'auth' or nzburl == 'get_cats':
        # connection test, check auth mode or get_cats
        params['mode'] = nzburl
        params['output'] = 'json'
        if lazylibrarian.CONFIG['SAB_API']:
            params['apikey'] = lazylibrarian.CONFIG['SAB_API']
        title = 'LL.(%s)' % nzburl
    elif nzburl == 'queue':
        params['mode'] = 'queue'
        params['output'] = 'json'
        if lazylibrarian.CONFIG['SAB_USER']:
            params['ma_username'] = lazylibrarian.CONFIG['SAB_USER']
        if lazylibrarian.CONFIG['SAB_PASS']:
            params['ma_password'] = lazylibrarian.CONFIG['SAB_PASS']
        if lazylibrarian.CONFIG['SAB_API']:
            params['apikey'] = lazylibrarian.CONFIG['SAB_API']
        title = 'LL.(Queue)'
    elif nzburl == 'history':
        params['mode'] = 'history'
        params['output'] = 'json'
        if lazylibrarian.CONFIG['SAB_USER']:
            params['ma_username'] = lazylibrarian.CONFIG['SAB_USER']
        if lazylibrarian.CONFIG['SAB_PASS']:
            params['ma_password'] = lazylibrarian.CONFIG['SAB_PASS']
        if lazylibrarian.CONFIG['SAB_API']:
            params['apikey'] = lazylibrarian.CONFIG['SAB_API']
        title = 'LL.(History)'
    elif nzburl == 'delete':
        # only deletes tasks if still in the queue, ie NOT completed tasks
        params['mode'] = 'queue'
        params['output'] = 'json'
        params['name'] = nzburl
        params['value'] = title
        if lazylibrarian.CONFIG['SAB_USER']:
            params['ma_username'] = lazylibrarian.CONFIG['SAB_USER']
        if lazylibrarian.CONFIG['SAB_PASS']:
            params['ma_password'] = lazylibrarian.CONFIG['SAB_PASS']
        if lazylibrarian.CONFIG['SAB_API']:
            params['apikey'] = lazylibrarian.CONFIG['SAB_API']
        if remove_data:
            params['del_files'] = 1
        title = 'LL.(Delete) ' + title
    elif nzburl == 'delhistory':
        params['mode'] = 'history'
        params['output'] = 'json'
        params['name'] = 'delete'
        params['value'] = title
        if lazylibrarian.CONFIG['SAB_USER']:
            params['ma_username'] = lazylibrarian.CONFIG['SAB_USER']
        if lazylibrarian.CONFIG['SAB_PASS']:
            params['ma_password'] = lazylibrarian.CONFIG['SAB_PASS']
        if lazylibrarian.CONFIG['SAB_API']:
            params['apikey'] = lazylibrarian.CONFIG['SAB_API']
        if remove_data:
            params['del_files'] = 1
        title = 'LL.(DelHistory) ' + title
    else:
        params['mode'] = 'addurl'
        params['output'] = 'json'
        if nzburl:
            params['name'] = nzburl
        if title:
            params['nzbname'] = title
        if lazylibrarian.CONFIG['SAB_USER']:
            params['ma_username'] = lazylibrarian.CONFIG['SAB_USER']
        if lazylibrarian.CONFIG['SAB_PASS']:
            params['ma_password'] = lazylibrarian.CONFIG['SAB_PASS']
        if lazylibrarian.CONFIG['SAB_API']:
            params['apikey'] = lazylibrarian.CONFIG['SAB_API']
        if lazylibrarian.CONFIG['SAB_CAT']:
            params['cat'] = lazylibrarian.CONFIG['SAB_CAT']
        if lazylibrarian.CONFIG['USENET_RETENTION']:
            params["maxage"] = lazylibrarian.CONFIG['USENET_RETENTION']

# FUTURE-CODE
#    if lazylibrarian.SAB_PRIO:
#        params["priority"] = lazylibrarian.SAB_PRIO
#    if lazylibrarian.SAB_PP:
#        params["script"] = lazylibrarian.SAB_SCRIPT

    URL = HOST + "/api?" + urlencode(params)

    # log the full request URL when download-comms debugging is enabled
    if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
        logger.debug('Request url for <a href="%s">SABnzbd</a>' % URL)
    proxies = proxyList()
    try:
        timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30)
        r = requests.get(URL, timeout=timeout, proxies=proxies)
        result = r.json()
    except requests.exceptions.Timeout:
        logger.error("Timeout connecting to SAB with URL: %s" % URL)
        return False
    except Exception as e:
        if hasattr(e, 'reason'):
            errmsg = e.reason
        elif hasattr(e, 'strerror'):
            errmsg = e.strerror
        else:
            errmsg = str(e)

        logger.error("Unable to connect to SAB with URL: %s, %s" % (URL, errmsg))
        return False
    if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
        logger.debug("Result text from SAB: " + str(result))

    if title:
        title = unaccented_str(title)
        if title.startswith('LL.('):
            return result
    if result['status'] is True:
        logger.info("%s sent to SAB successfully." % title)
        # sab versions earlier than 0.8.0 don't return nzo_ids
        if 'nzo_ids' in result:
            if result['nzo_ids']:  # check its not empty
                return result['nzo_ids'][0]
        return 'unknown'
    elif result['status'] is False:
        logger.error("SAB returned Error: %s" % result['error'])
        return False
    else:
        logger.error("Unknown error: " + str(result))
        return False
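Every branch above ends by urlencoding the params dict onto the /api endpoint; a reduced sketch of the 'addurl' case (host, api key, nzb URL and title are placeholders):

from urllib.parse import urlencode

params = {
    'mode': 'addurl',
    'output': 'json',
    'name': 'https://example.org/some.nzb',   # placeholder nzb url
    'nzbname': 'Author - Title LL.(bookid)',  # placeholder display name
    'apikey': 'YOUR_SAB_APIKEY',              # placeholder
}
url = 'http://localhost:8080/api?' + urlencode(params)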
Example #55
def processResultList(resultlist, book, searchtype):
    myDB = database.DBConnection()
    dictrepl = {
        "...": "",
        ".": " ",
        " & ": " ",
        " = ": " ",
        "?": "",
        "$": "s",
        " + ": " ",
        '"': "",
        ",": " ",
        "*": "",
        "(": "",
        ")": "",
        "[": "",
        "]": "",
        "#": "",
        "0": "",
        "1": "",
        "2": "",
        "3": "",
        "4": "",
        "5": "",
        "6": "",
        "7": "",
        "8": "",
        "9": "",
        "'": "",
        ":": "",
        "!": "",
        "-": " ",
        "\s\s": " ",
    }
    # ' the ': ' ', ' a ': ' ', ' and ': ' ',
    # ' to ': ' ', ' of ': ' ', ' for ': ' ', ' my ': ' ', ' in ': ' ', ' at ': ' ', ' with ': ' '}

    dic = {
        "...": "",
        ".": " ",
        " & ": " ",
        " = ": " ",
        "?": "",
        "$": "s",
        " + ": " ",
        '"': "",
        ",": "",
        "*": "",
        ":": "",
        ";": "",
        "'": "",
    }

    match_ratio = int(lazylibrarian.MATCH_RATIO)
    reject_list = formatter.getList(lazylibrarian.REJECT_WORDS)
    author = formatter.latinToAscii(formatter.replace_all(book["authorName"], dic))
    title = formatter.latinToAscii(formatter.replace_all(book["bookName"], dic))

    matches = []
    for nzb in resultlist:
        nzb_Title = formatter.latinToAscii(formatter.replace_all(nzb["nzbtitle"], dictrepl)).strip()
        nzb_Title = re.sub(r"\s\s+", " ", nzb_Title)  # remove extra whitespace

        nzbAuthor_match = fuzz.token_set_ratio(author, nzb_Title)
        nzbBook_match = fuzz.token_set_ratio(title, nzb_Title)
        logger.debug(u"NZB author/book Match: %s/%s for %s" % (nzbAuthor_match, nzbBook_match, nzb_Title))

        rejected = False
        for word in reject_list:
            if word in nzb_Title.lower() and word not in author.lower() and word not in title.lower():
                rejected = True
                logger.debug("Rejecting %s, contains %s" % (nzb_Title, word))
                break

        nzbsize_temp = nzb["nzbsize"]  # Need to cater for when this is NONE (Issue 35)
        if nzbsize_temp is None:
            nzbsize_temp = 1000
        nzbsize = round(float(nzbsize_temp) / 1048576, 2)

        maxsize = formatter.check_int(lazylibrarian.REJECT_MAXSIZE, 0)
        if maxsize and nzbsize > maxsize:
            rejected = True
            logger.debug("Rejecting %s, too large" % nzb_Title)

        if nzbAuthor_match >= match_ratio and nzbBook_match >= match_ratio and not rejected:
            # logger.debug(u'Found NZB: %s using %s search' % (nzb['nzbtitle'], searchtype))
            bookid = book["bookid"]
            nzbTitle = (author + " - " + title + " LL.(" + book["bookid"] + ")").strip()
            nzburl = nzb["nzburl"]
            nzbprov = nzb["nzbprov"]
            nzbdate_temp = nzb["nzbdate"]
            nzbdate = formatter.nzbdate2format(nzbdate_temp)
            nzbmode = nzb["nzbmode"]
            controlValueDict = {"NZBurl": nzburl}
            newValueDict = {
                "NZBprov": nzbprov,
                "BookID": bookid,
                "NZBdate": formatter.now(),  # when we asked for it
                "NZBsize": nzbsize,
                "NZBtitle": nzbTitle,
                "NZBmode": nzbmode,
                "Status": "Skipped",
            }

            score = (nzbBook_match + nzbAuthor_match) / 2  # as a percentage
            # lose a point for each extra word in the title so we get the closest match
            words = len(formatter.getList(nzb_Title))
            words -= len(formatter.getList(author))
            words -= len(formatter.getList(title))
            score -= abs(words)
            matches.append([score, nzb_Title, newValueDict, controlValueDict])

    if matches:
        highest = max(matches, key=lambda x: x[0])
        score = highest[0]
        nzb_Title = highest[1]
        newValueDict = highest[2]
        controlValueDict = highest[3]
        logger.info(u"Best match NZB (%s%%): %s using %s search" % (score, nzb_Title, searchtype))

        myDB.upsert("wanted", newValueDict, controlValueDict)

        snatchedbooks = myDB.action(
            'SELECT * from books WHERE BookID="%s" and Status="Snatched"' % newValueDict["BookID"]
        ).fetchone()
        if not snatchedbooks:
            if nzbmode == "torznab":
                snatch = TORDownloadMethod(
                    newValueDict["BookID"],
                    newValueDict["NZBprov"],
                    newValueDict["NZBtitle"],
                    controlValueDict["NZBurl"],
                )
            else:
                snatch = NZBDownloadMethod(
                    newValueDict["BookID"],
                    newValueDict["NZBprov"],
                    newValueDict["NZBtitle"],
                    controlValueDict["NZBurl"],
                )
            if snatch:
                notifiers.notify_snatch(newValueDict["NZBtitle"] + " at " + formatter.now())
                common.schedule_job(action="Start", target="processDir")
                return True

    logger.debug(
        "No nzb's found for "
        + (book["authorName"] + " " + book["bookName"]).strip()
        + " using searchtype "
        + searchtype
    )
    return False
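
The scoring above averages the fuzzy author/title ratios and then subtracts a point per extra word in the release title, so a clean "Author - Title" release wins over a padded one. A minimal standalone sketch of that heuristic, assuming fuzzywuzzy is available and approximating getList() with a plain whitespace split:

from fuzzywuzzy import fuzz

def score_result(author, title, release_title):
    # average the author and title fuzzy ratios (0-100)
    author_match = fuzz.token_set_ratio(author, release_title)
    book_match = fuzz.token_set_ratio(title, release_title)
    score = (author_match + book_match) / 2.0
    # lose a point per extra word so the closest title wins ties
    extra_words = len(release_title.split()) - len(author.split()) - len(title.split())
    return score - abs(extra_words)

# the clean release scores higher than the one padded with extra words
print(score_result("Iain Banks", "The Wasp Factory", "Iain Banks - The Wasp Factory"))
print(score_result("Iain Banks", "The Wasp Factory", "Iain Banks - The Wasp Factory EPUB MOBI retail repack"))
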
Example #56
def search_wishlist():
    try:
        threadname = threading.currentThread().name
        if "Thread-" in threadname:
            threading.currentThread().name = "SEARCHWISHLIST"

        myDB = database.DBConnection()

        resultlist, wishproviders = IterateOverWishLists()
        new_books = 0
        if not wishproviders:
            logger.debug('No wishlists are set')
            scheduleJob(action='Stop', target='search_wishlist')
            return  # No point in continuing

        # for each item in resultlist, add to database if necessary, and mark as wanted
        logger.debug('Processing %s item%s in wishlists' % (len(resultlist), plural(len(resultlist))))
        for book in resultlist:
            # we get rss_author, rss_title, maybe rss_isbn, rss_bookid (goodreads bookid)
            # we can just use bookid if goodreads, or try isbn and name matching on author/title if not
            # eg NYTimes wishlist
            if 'E' in book['types']:
                ebook_status = "Wanted"
            else:
                ebook_status = "Skipped"
            if 'A' in book['types']:
                audio_status = "Wanted"
            else:
                audio_status = "Skipped"
            if lazylibrarian.CONFIG['BOOK_API'] == "GoodReads" and book['rss_bookid']:
                cmd = 'select books.Status as Status,AudioStatus,authors.Status as AuthorStatus,'
                cmd += 'AuthorName,BookName,Requester,AudioRequester from books,authors '
                cmd += 'where books.AuthorID = authors.AuthorID and bookid=?'
                bookmatch = myDB.match(cmd, (book['rss_bookid'],))
                if bookmatch:
                    cmd = 'SELECT SeriesName,Status from series,member '
                    cmd += 'where series.SeriesID=member.SeriesID and member.BookID=?'
                    series = myDB.select(cmd, (book['rss_bookid'],))
                    reject_series = None
                    for ser in series:
                        if ser['Status'] in ['Paused', 'Ignored']:
                            reject_series = {"Name": ser['SeriesName'], "Status": ser['Status']}
                            break
                    bookname = bookmatch['BookName']
                    if bookmatch['Status'] in ['Open', 'Wanted', 'Have']:
                        logger.info('Found book %s, already marked %s' % (bookname, bookmatch['Status']))
                        if bookmatch["Requester"]:  # Already on a wishlist
                            if book["dispname"] not in bookmatch["Requester"]:
                                newValueDict = {"Requester": bookmatch["Requester"] + book["dispname"] + ' '}
                                controlValueDict = {"BookID": book['rss_bookid']}
                                myDB.upsert("books", newValueDict, controlValueDict)
                        else:
                            newValueDict = {"Requester": book["dispname"] + ' '}
                            controlValueDict = {"BookID": book['rss_bookid']}
                            myDB.upsert("books", newValueDict, controlValueDict)
                    elif bookmatch['AuthorStatus'] in ['Paused', 'Ignored']:
                        logger.info('Found book %s, but author is %s' % (bookname, bookmatch['AuthorStatus']))
                    elif reject_series:
                        logger.info('Found book %s, but series "%s" is %s' %
                                    (bookname, reject_series['Name'], reject_series['Status']))
                    elif ebook_status == "Wanted":  # skipped/ignored
                        logger.info('Found book %s, marking as "Wanted"' % bookname)
                        controlValueDict = {"BookID": book['rss_bookid']}
                        newValueDict = {"Status": "Wanted"}
                        myDB.upsert("books", newValueDict, controlValueDict)
                        new_books += 1
                        if bookmatch["Requester"]:  # Already on a wishlist
                            if book["dispname"] not in bookmatch["Requester"]:
                                newValueDict = {"Requester": bookmatch["Requester"] + book["dispname"] + ' '}
                                controlValueDict = {"BookID": book['rss_bookid']}
                                myDB.upsert("books", newValueDict, controlValueDict)
                        else:
                            newValueDict = {"Requester": book["dispname"] + ' '}
                            controlValueDict = {"BookID": book['rss_bookid']}
                            myDB.upsert("books", newValueDict, controlValueDict)
                    if bookmatch['AudioStatus'] in ['Open', 'Wanted', 'Have']:
                        logger.info('Found audiobook %s, already marked %s' % (bookname, bookmatch['AudioStatus']))
                        if bookmatch["AudioRequester"]:  # Already on a wishlist
                            if book["dispname"] not in bookmatch["AudioRequester"]:
                                newValueDict = {"AudioRequester": bookmatch["AudioRequester"] + book["dispname"] + ' '}
                                controlValueDict = {"BookID": book['rss_bookid']}
                                myDB.upsert("books", newValueDict, controlValueDict)
                        else:
                            newValueDict = {"AudioRequester": book["dispname"] + ' '}
                            controlValueDict = {"BookID": book['rss_bookid']}
                            myDB.upsert("books", newValueDict, controlValueDict)
                    elif bookmatch['AuthorStatus'] in ['Paused', 'Ignored']:
                        logger.info('Found book %s, but author is %s' % (bookname, bookmatch['AuthorStatus']))
                    elif reject_series:
                        logger.info('Found book %s, but series "%s" is %s' %
                                    (bookname, reject_series['Name'], reject_series['Status']))
                    elif audio_status == "Wanted":  # skipped/ignored
                        logger.info('Found audiobook %s, marking as "Wanted"' % bookname)
                        controlValueDict = {"BookID": book['rss_bookid']}
                        newValueDict = {"AudioStatus": "Wanted"}
                        myDB.upsert("books", newValueDict, controlValueDict)
                        new_books += 1
                        if bookmatch["AudioRequester"]:  # Already on a wishlist
                            if book["dispname"] not in bookmatch["AudioRequester"]:
                                newValueDict = {"AudioRequester": bookmatch["AudioRequester"] + book["dispname"] + ' '}
                                controlValueDict = {"BookID": book['rss_bookid']}
                                myDB.upsert("books", newValueDict, controlValueDict)
                        else:
                            newValueDict = {"AudioRequester": book["dispname"] + ' '}
                            controlValueDict = {"BookID": book['rss_bookid']}
                            myDB.upsert("books", newValueDict, controlValueDict)
                else:
                    import_book(book['rss_bookid'], ebook_status, audio_status)
                    new_books += 1
                    newValueDict = {"Requester": book["dispname"] + ' '}
                    controlValueDict = {"BookID": book['rss_bookid']}
                    myDB.upsert("books", newValueDict, controlValueDict)
                    newValueDict = {"AudioRequester": book["dispname"] + ' '}
                    controlValueDict = {"BookID": book['rss_bookid']}
                    myDB.upsert("books", newValueDict, controlValueDict)
            else:
                item = {}
                results = None
                item['Title'] = book['rss_title']
                if book['rss_bookid']:
                    item['BookID'] = book['rss_bookid']
                if book['rss_isbn']:
                    item['ISBN'] = book['rss_isbn']
                bookmatch = finditem(item, book['rss_author'])
                if bookmatch:  # it's already in the database
                    authorname = bookmatch['AuthorName']
                    bookname = bookmatch['BookName']
                    bookid = bookmatch['BookID']
                    auth_res = myDB.match('SELECT Status from authors WHERE authorname=?', (authorname,))
                    if auth_res:
                        auth_status = auth_res['Status']
                    else:
                        auth_status = 'Unknown'
                    cmd = 'SELECT SeriesName,Status from series,member '
                    cmd += 'where series.SeriesID=member.SeriesID and member.BookID=?'
                    series = myDB.select(cmd, (bookid,))
                    reject_series = None
                    for ser in series:
                        if ser['Status'] in ['Paused', 'Ignored']:
                            reject_series = {"Name": ser['SeriesName'], "Status": ser['Status']}
                            break
                    if bookmatch['Status'] in ['Open', 'Wanted', 'Have']:
                        logger.info(
                            'Found book %s by %s, already marked as "%s"' % (bookname, authorname, bookmatch['Status']))
                        if bookmatch["Requester"]:  # Already on a wishlist
                            if book["dispname"] not in bookmatch["Requester"]:
                                newValueDict = {"Requester": bookmatch["Requester"] + book["dispname"] + ' '}
                                controlValueDict = {"BookID": bookid}
                                myDB.upsert("books", newValueDict, controlValueDict)
                        else:
                            newValueDict = {"Requester": book["dispname"] + ' '}
                            controlValueDict = {"BookID": bookid}
                            myDB.upsert("books", newValueDict, controlValueDict)
                    elif auth_status in ['Paused', 'Ignored']:
                        logger.info('Found book %s, but author is "%s"' % (bookname, auth_status))
                    elif reject_series:
                        logger.info('Found book %s, but series "%s" is %s' %
                                    (bookname, reject_series['Name'], reject_series['Status']))
                    elif ebook_status == 'Wanted':  # skipped/ignored
                        logger.info('Found book %s by %s, marking as "Wanted"' % (bookname, authorname))
                        controlValueDict = {"BookID": bookid}
                        newValueDict = {"Status": "Wanted"}
                        myDB.upsert("books", newValueDict, controlValueDict)
                        new_books += 1
                        if bookmatch["Requester"]:  # Already on a wishlist
                            if book["dispname"] not in bookmatch["Requester"]:
                                newValueDict = {"Requester": bookmatch["Requester"] + book["dispname"] + ' '}
                                controlValueDict = {"BookID": bookid}
                                myDB.upsert("books", newValueDict, controlValueDict)
                        else:
                            newValueDict = {"Requester": book["dispname"] + ' '}
                            controlValueDict = {"BookID": bookid}
                            myDB.upsert("books", newValueDict, controlValueDict)
                    if bookmatch['AudioStatus'] in ['Open', 'Wanted', 'Have']:
                        logger.info(
                            'Found audiobook %s by %s, already marked as "%s"' %
                            (bookname, authorname, bookmatch['AudioStatus']))
                        if bookmatch["AudioRequester"]:  # Already on a wishlist
                            if book["dispname"] not in bookmatch["AudioRequester"]:
                                newValueDict = {"AudioRequester": bookmatch["AudioRequester"] + book["dispname"] + ' '}
                                controlValueDict = {"BookID": bookid}
                                myDB.upsert("books", newValueDict, controlValueDict)
                        else:
                            newValueDict = {"AudioRequester": book["dispname"] + ' '}
                            controlValueDict = {"BookID": bookid}
                            myDB.upsert("books", newValueDict, controlValueDict)
                    elif auth_status in ['Paused', 'Ignored']:
                        logger.info('Found book %s, but author is "%s"' % (bookname, auth_status))
                    elif reject_series:
                        logger.info('Found book %s, but series "%s" is %s' %
                                    (bookname, reject_series['Name'], reject_series['Status']))
                    elif audio_status == 'Wanted':  # skipped/ignored
                        logger.info('Found audiobook %s by %s, marking as "Wanted"' % (bookname, authorname))
                        controlValueDict = {"BookID": bookid}
                        newValueDict = {"AudioStatus": "Wanted"}
                        myDB.upsert("books", newValueDict, controlValueDict)
                        new_books += 1
                        if bookmatch["AudioRequester"]:  # Already on a wishlist
                            if book["dispname"] not in bookmatch["AudioRequester"]:
                                newValueDict = {"AudioRequester": bookmatch["AudioRequester"] + book["dispname"] + ' '}
                                controlValueDict = {"BookID": bookid}
                                myDB.upsert("books", newValueDict, controlValueDict)
                        else:
                            newValueDict = {"AudioRequester": book["dispname"] + ' '}
                            controlValueDict = {"BookID": bookid}
                            myDB.upsert("books", newValueDict, controlValueDict)
                else:  # not in database yet
                    if book['rss_isbn']:
                        results = search_for(book['rss_isbn'])
                    if results:
                        result = results[0]  # type: dict
                        if result['isbn_fuzz'] > check_int(lazylibrarian.CONFIG['MATCH_RATIO'], 90):
                            logger.info("Found (%s%%) %s: %s" %
                                        (result['isbn_fuzz'], result['authorname'], result['bookname']))
                            import_book(result['bookid'], ebook_status, audio_status)
                            new_books += 1
                            newValueDict = {"Requester": book["dispname"] + ' '}
                            controlValueDict = {"BookID": result['bookid']}
                            myDB.upsert("books", newValueDict, controlValueDict)
                            newValueDict = {"AudioRequester": book["dispname"] + ' '}
                            myDB.upsert("books", newValueDict, controlValueDict)
                            bookmatch = True
                    if not results:
                        searchterm = "%s <ll> %s" % (item['Title'], formatAuthorName(book['rss_author']))
                        results = search_for(unaccented(searchterm))
                    if results:
                        result = results[0]  # type: dict
                        if result['author_fuzz'] > check_int(lazylibrarian.CONFIG['MATCH_RATIO'], 90) \
                                and result['book_fuzz'] > check_int(lazylibrarian.CONFIG['MATCH_RATIO'], 90):
                            logger.info("Found (%s%% %s%%) %s: %s" % (result['author_fuzz'], result['book_fuzz'],
                                                                      result['authorname'], result['bookname']))
                            import_book(result['bookid'], ebook_status, audio_status)
                            new_books += 1
                            newValueDict = {"Requester": book["dispname"] + ' '}
                            controlValueDict = {"BookID": result['bookid']}
                            myDB.upsert("books", newValueDict, controlValueDict)
                            newValueDict = {"AudioRequester": book["dispname"] + ' '}
                            myDB.upsert("books", newValueDict, controlValueDict)
                            bookmatch = True

                    if not bookmatch:
                        msg = "Skipping book %s by %s" % (item['Title'], book['rss_author'])
                        if not results:
                            msg += ', No results returned'
                            logger.warn(msg)
                        else:
                            msg += ', No match found'
                            logger.warn(msg)
                            result = results[0]  # type: dict
                            msg = "Closest match (%s%% %s%%) %s: %s" % (result['author_fuzz'], result['book_fuzz'],
                                                                        result['authorname'], result['bookname'])
                            logger.warn(msg)
        if new_books:
            logger.info("Wishlist marked %s book%s as Wanted" % (new_books, plural(new_books)))

    except Exception:
        logger.error('Unhandled exception in search_wishlist: %s' % traceback.format_exc())
    finally:
        threading.currentThread().name = "WEBSERVER"
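
search_wishlist() repeats the same append-if-missing update for Requester and AudioRequester in every branch: add the wishlist's display name to the column unless it is already recorded there. A hedged sketch of that pattern as a small helper (column and table names are taken from the code above; the myDB.upsert signature is assumed to be as used there):

def add_requester(myDB, bookid, dispname, current, column="Requester"):
    # append the wishlist display name, but only if this wishlist
    # has not already been recorded against the book
    if current:
        if dispname in current:
            return
        new_value = current + dispname + ' '
    else:
        new_value = dispname + ' '
    myDB.upsert("books", {column: new_value}, {"BookID": bookid})

# e.g. add_requester(myDB, bookid, book['dispname'], bookmatch['Requester'])
#      add_requester(myDB, bookid, book['dispname'], bookmatch['AudioRequester'], column="AudioRequester")
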
Example #57
def _get_auth():
    global delugeweb_auth, delugeweb_url, headers, deluge_verify_cert
    if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
        logger.debug('Deluge: Authenticating...')
    delugeweb_auth = {}
    timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30)

    delugeweb_cert = lazylibrarian.CONFIG['DELUGE_CERT']
    delugeweb_host = lazylibrarian.CONFIG['DELUGE_HOST']
    delugeweb_port = check_int(lazylibrarian.CONFIG['DELUGE_PORT'], 0)
    if not delugeweb_host or not delugeweb_port:
        logger.error('Invalid delugeweb host or port, check your config')
        return None

    delugeweb_password = lazylibrarian.CONFIG['DELUGE_PASS']

    if not delugeweb_host.startswith("http"):
        delugeweb_host = 'http://%s' % delugeweb_host

    if delugeweb_cert is None or delugeweb_cert.strip() == '':
        deluge_verify_cert = False
        if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
            logger.debug('Deluge: FYI no SSL certificate configured')
    else:
        deluge_verify_cert = delugeweb_cert
        delugeweb_host = delugeweb_host.replace('http:', 'https:')
        if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
            logger.debug('Deluge: Using certificate %s, host is now %s' % (deluge_verify_cert, delugeweb_host))

    if delugeweb_host.endswith('/'):
        delugeweb_host = delugeweb_host[:-1]

    delugeweb_host = "%s:%s" % (delugeweb_host, delugeweb_port)
    delugeweb_url = delugeweb_host + '/json'

    post_json = {"method": "auth.login", "params": [delugeweb_password], "id": 1}

    try:
        response = requests.post(delugeweb_url, json=post_json, cookies=delugeweb_auth, timeout=timeout,
                                 verify=deluge_verify_cert, headers=headers)
    except requests.ConnectionError:
        try:
            if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
                logger.debug('Deluge: Connection failed, let\'s try HTTPS just in case')
            response = requests.post(delugeweb_url.replace('http:', 'https:'), json=post_json, timeout=timeout,
                                     cookies=delugeweb_auth, verify=deluge_verify_cert, headers=headers)
            # If the previous line didn't fail, change delugeweb_url for the rest of this session
            logger.error('Deluge: Switching to HTTPS; the certificate will not be verified because none was configured')
            delugeweb_url = delugeweb_url.replace('http:', 'https:')
        except Exception as e:
            logger.error('Deluge: Authentication failed: %s' % str(e))
            if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
                formatted_lines = traceback.format_exc().splitlines()
                logger.debug('; '.join(formatted_lines))
            return None
    except Exception as err:
        logger.error('Deluge %s: auth.login returned %s' % (type(err).__name__, str(err)))
        if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
            formatted_lines = traceback.format_exc().splitlines()
            logger.debug('; '.join(formatted_lines))
        return None

    auth = response.json()["result"]
    auth_error = response.json()["error"]
    if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
        logger.debug('Deluge: Authentication result: %s, Error: %s' % (auth, auth_error))
    delugeweb_auth = response.cookies
    if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
        logger.debug('Deluge: Authentication cookies: %s' % str(delugeweb_auth.get_dict()))
    post_json = {"method": "web.connected", "params": [], "id": 10}

    try:
        response = requests.post(delugeweb_url, json=post_json, cookies=delugeweb_auth,
                                 verify=deluge_verify_cert, headers=headers)
        if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
            logger.debug('Status code: %s' % response.status_code)
            logger.debug(response.text)

    except Exception as err:
        logger.debug('Deluge %s: web.connected returned %s' % (type(err).__name__, str(err)))
        delugeweb_auth = {}
        return None

    connected = response.json()['result']
    connected_error = response.json()['error']
    if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
        logger.debug('Deluge: Connection result: %s, Error: %s' % (connected, connected_error))

    if not connected:
        post_json = {"method": "web.get_hosts", "params": [], "id": 11}

        try:
            response = requests.post(delugeweb_url, json=post_json, verify=deluge_verify_cert,
                                     cookies=delugeweb_auth, headers=headers)
            if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
                logger.debug('Status code: %s' % response.status_code)
                logger.debug(response.text)

        except Exception as err:
            logger.debug('Deluge %s: web.get_hosts returned %s' % (type(err).__name__, str(err)))
            delugeweb_auth = {}
            return None

        delugeweb_hosts = response.json()['result']

        # delugeweb_hosts is None or empty if the WebUI has no daemons configured
        if not delugeweb_hosts:
            logger.error('Deluge: WebUI does not contain daemons')
            delugeweb_auth = {}
            return None

        post_json = {"method": "web.connect", "params": [delugeweb_hosts[0][0]], "id": 11}

        try:
            response = requests.post(delugeweb_url, json=post_json, cookies=delugeweb_auth,
                                     verify=deluge_verify_cert, headers=headers)
            if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
                logger.debug('Status code: %s' % response.status_code)
                logger.debug(response.text)

        except Exception as err:
            logger.debug('Deluge %s: web.connect returned %s' % (type(err).__name__, str(err)))
            delugeweb_auth = {}
            return None

        post_json = {"method": "web.connected", "params": [], "id": 10}

        try:
            response = requests.post(delugeweb_url, json=post_json, verify=deluge_verify_cert,
                                     cookies=delugeweb_auth, headers=headers)
            if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
                logger.debug('Status code: %s' % response.status_code)
                logger.debug(response.text)

        except Exception as err:
            logger.debug('Deluge %s: web.connected returned %s' % (type(err).__name__, str(err)))
            delugeweb_auth = {}
            return None

        connected = response.json()['result']

        if not connected:
            logger.error('Deluge: WebUI could not connect to daemon')
            delugeweb_auth = {}
            return None

    return auth
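
For reference, the Deluge WebUI exposes a single JSON-RPC endpoint at /json: _get_auth() first calls auth.login with the password, then checks web.connected and, if the WebUI is not attached to a daemon, connects to the first entry returned by web.get_hosts. A minimal sketch of that handshake with plain requests (the host, port and password below are placeholder assumptions):

import requests

def deluge_login(host="http://localhost", port=8112, password="deluge", timeout=30):
    url = "%s:%s/json" % (host.rstrip('/'), port)
    session = requests.Session()
    # 1. auth.login sets the session cookie on success
    r = session.post(url, json={"method": "auth.login", "params": [password], "id": 1}, timeout=timeout)
    if not r.json()["result"]:
        return None
    # 2. web.connected reports whether the WebUI is attached to a daemon
    r = session.post(url, json={"method": "web.connected", "params": [], "id": 10}, timeout=timeout)
    if not r.json()["result"]:
        # 3. if not, connect to the first daemon the WebUI knows about
        hosts = session.post(url, json={"method": "web.get_hosts", "params": [], "id": 11},
                             timeout=timeout).json()["result"]
        if not hosts:
            return None
        session.post(url, json={"method": "web.connect", "params": [hosts[0][0]], "id": 12},
                     timeout=timeout)
    return session  # carries the auth cookie for subsequent calls
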
Example #58
def setTorrentLabel(result):
    if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
        logger.debug('Deluge: Setting label')
    label = lazylibrarian.CONFIG['DELUGE_LABEL']
    if not delugeweb_auth:
        _get_auth()

    if not label:
        if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
            logger.debug('Deluge: No Label set')
        return True

    if ' ' in label:
        logger.error('Deluge: Invalid label. Label can\'t contain spaces - replacing with underscores')
        label = label.replace(' ', '_')

    timeout = check_int(lazylibrarian.CONFIG['HTTP_TIMEOUT'], 30)
    try:
        # check if label already exists and create it if not
        post_json = {"method": 'label.get_labels', "params": [], "id": 3}

        response = requests.post(delugeweb_url, json=post_json, cookies=delugeweb_auth,
                                 verify=deluge_verify_cert, headers=headers, timeout=timeout)
        if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
            logger.debug('Status code: %s' % response.status_code)
            logger.debug(response.text)

        labels = response.json()['result']

        if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
            logger.debug("Valid labels: %s" % str(labels))

        if response.json()['error'] is None:
            label = label.lower()  # deluge lowercases labels
            if label not in labels:
                try:
                    if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
                        logger.debug('Deluge: %s label doesn\'t exist in Deluge, let\'s add it' % label)
                    post_json = {"method": 'label.add', "params": [label], "id": 4}
                    response = requests.post(delugeweb_url, json=post_json, cookies=delugeweb_auth,
                                             verify=deluge_verify_cert, headers=headers, timeout=timeout)
                    if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
                        logger.debug('Status code: %s' % response.status_code)
                        logger.debug(response.text)
                    logger.debug('Deluge: %s label added to Deluge' % label)

                except Exception as err:
                    logger.error('Deluge %s: Setting label failed: %s' % (type(err).__name__, str(err)))
                    if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
                        formatted_lines = traceback.format_exc().splitlines()
                        logger.debug('; '.join(formatted_lines))
                    if not result:
                        return False
            else:
                if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
                    logger.debug("Label [%s] is valid" % label)

            if not result:
                return True

            # add label to torrent
            post_json = {"method": 'label.set_torrent', "params": [result['hash'], label], "id": 5}

            response = requests.post(delugeweb_url, json=post_json, cookies=delugeweb_auth,
                                     verify=deluge_verify_cert, headers=headers, timeout=timeout)
            if lazylibrarian.LOGLEVEL & lazylibrarian.log_dlcomms:
                logger.debug('Status code: %s' % response.status_code)
                logger.debug(response.text)
            logger.debug('Deluge: %s label added to torrent' % label)
            return not response.json()['error']
        else:
            logger.debug('Deluge: Label plugin not detected')
            return False
    except Exception as err:
        logger.error('Deluge %s: Adding label failed: %s' % (type(err).__name__, str(err)))
        return False
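
With an authenticated session like the one sketched above, the label workflow in setTorrentLabel() reduces to three Label-plugin calls: list the existing labels, create the label if it is missing, then attach it to the torrent hash. A sketch under those assumptions:

def ensure_label(session, url, label, torrent_hash, timeout=30):
    label = label.lower().replace(' ', '_')  # Deluge lowercases labels and rejects spaces
    labels = session.post(url, json={"method": "label.get_labels", "params": [], "id": 3},
                          timeout=timeout).json()["result"]
    if label not in labels:
        session.post(url, json={"method": "label.add", "params": [label], "id": 4}, timeout=timeout)
    r = session.post(url, json={"method": "label.set_torrent", "params": [torrent_hash, label], "id": 5},
                     timeout=timeout)
    return not r.json()["error"]
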
Example #59
def processResultList(resultlist, book, searchtype):
    myDB = database.DBConnection()
    dictrepl = {'...': '', '.': ' ', ' & ': ' ', ' = ': ' ', '?': '', '$': 's', ' + ': ' ', '"': '',
                ',': ' ', '*': '', '(': '', ')': '', '[': '', ']': '', '#': '', '0': '', '1': '', '2': '',
                '3': '', '4': '', '5': '', '6': '', '7': '', '8': '', '9': '', '\'': '', ':': '', '!': '',
                '-': ' ', '\s\s': ' '}

    dic = {'...': '', '.': ' ', ' & ': ' ', ' = ': ' ', '?': '', '$': 's', ' + ': ' ', '"': '',
           ',': '', '*': '', ':': '', ';': ''}

    match_ratio = int(lazylibrarian.MATCH_RATIO)
    reject_list = getList(lazylibrarian.REJECT_WORDS)
    author = unaccented_str(replace_all(book['authorName'], dic))
    title = unaccented_str(replace_all(book['bookName'], dic))

    matches = []
    for tor in resultlist:
        torTitle = unaccented_str(tor['tor_title'])
        torTitle = replace_all(torTitle, dictrepl).strip()
        torTitle = re.sub(r"\s\s+", " ", torTitle)  # remove extra whitespace

        torAuthor_match = fuzz.token_set_ratio(author, torTitle)
        torBook_match = fuzz.token_set_ratio(title, torTitle)
        logger.debug(u"TOR author/book Match: %s/%s for %s" % (torAuthor_match, torBook_match, torTitle))
        tor_url = tor['tor_url']

        rejected = False

        already_failed = myDB.match('SELECT * from wanted WHERE NZBurl="%s" and Status="Failed"' % tor_url)
        if already_failed:
            logger.debug("Rejecting %s, blacklisted at %s" % (torTitle, already_failed['NZBprov']))
            rejected = True

        if not rejected:
            for word in reject_list:
                if word in torTitle.lower() and word not in author.lower() and word not in title.lower():
                    rejected = True
                    logger.debug("Rejecting %s, contains %s" % (torTitle, word))
                    break

        tor_size_temp = tor['tor_size']  # Need to cater for when this is NONE (Issue 35)
        tor_size_temp = check_int(tor_size_temp, 1000)
        tor_size = round(float(tor_size_temp) / 1048576, 2)

        maxsize = check_int(lazylibrarian.REJECT_MAXSIZE, 0)
        if not rejected:
            if maxsize and tor_size > maxsize:
                rejected = True
                logger.debug("Rejecting %s, too large" % torTitle)

        if not rejected:
            bookid = book['bookid']
            tor_Title = (author + ' - ' + title + ' LL.(' + book['bookid'] + ')').strip()

            controlValueDict = {"NZBurl": tor_url}
            newValueDict = {
                "NZBprov": tor['tor_prov'],
                "BookID": bookid,
                "NZBdate": now(),  # when we asked for it
                "NZBsize": tor_size,
                "NZBtitle": tor_Title,
                "NZBmode": "torrent",
                "Status": "Skipped"
            }

            score = (torBook_match + torAuthor_match) / 2  # as a percentage
            # lose a point for each extra word in the title so we get the closest match
            words = len(getList(torTitle))
            words -= len(getList(author))
            words -= len(getList(title))
            score -= abs(words)
            matches.append([score, torTitle, newValueDict, controlValueDict])

    if matches:
        highest = max(matches, key=lambda x: x[0])
        score = highest[0]
        nzb_Title = highest[1]
        newValueDict = highest[2]
        controlValueDict = highest[3]

        if score < match_ratio:
            logger.info(u'Nearest TOR match (%s%%): %s using %s search for %s %s' %
                        (score, nzb_Title, searchtype, author, title))
            return False

        logger.info(u'Best TOR match (%s%%): %s using %s search' %
                    (score, nzb_Title, searchtype))

        snatchedbooks = myDB.match('SELECT * from books WHERE BookID="%s" and Status="Snatched"' %
                                   newValueDict["BookID"])
        if snatchedbooks:
            logger.debug('%s already marked snatched' % nzb_Title)
            return True  # someone else found it, not us
        else:
            myDB.upsert("wanted", newValueDict, controlValueDict)
            if newValueDict["NZBprov"] == 'libgen':
                # for libgen we use direct download links
                snatch = DirectDownloadMethod(newValueDict["BookID"], newValueDict["NZBprov"],
                                              newValueDict["NZBtitle"], controlValueDict["NZBurl"], nzb_Title)
            else:
                snatch = TORDownloadMethod(newValueDict["BookID"], newValueDict["NZBprov"],
                                           newValueDict["NZBtitle"], controlValueDict["NZBurl"])
            if snatch:
                logger.info('Downloading %s from %s' % (newValueDict["NZBtitle"], newValueDict["NZBprov"]))
                notify_snatch("%s from %s at %s" %
                              (newValueDict["NZBtitle"], newValueDict["NZBprov"], now()))
                scheduleJob(action='Start', target='processDir')
                return True  # we found it
    else:
        logger.debug("No torrent's found for [%s] using searchtype %s" % (book["searchterm"], searchtype))
    return False
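
Both processResultList() variants apply the same size cut-off: the reported size (bytes, possibly missing) is converted to mebibytes and compared against the REJECT_MAXSIZE limit in MB. A tiny sketch of that check, defaulting a missing size to 1000 bytes as the code above does:

def too_large(reported_size, max_mb):
    size_bytes = reported_size if reported_size else 1000  # cater for a missing size
    size_mb = round(float(size_bytes) / 1048576, 2)  # bytes -> MiB
    return bool(max_mb) and size_mb > max_mb

# e.g. too_large(2 * 1024 ** 3, 1500) is True: a 2 GiB release exceeds a 1500 MB cap
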