Example 1
def announce_scrape(user_key):
    # Scrape request: report seeder/leecher/completed counts for each
    # requested info_hash, or for every known torrent if none were given.
    infohashes = request.args.getlist('info_hash')

    torrents = {
        'files': {}
    }

    if infohashes:
        for infohash in infohashes:
            # .get() returns None for an unknown hash, hence the TypeError fallbacks below
            torrent = r.table('torrents').get(infohash).run()

            try:
                complete = len(torrent['seeders'])
            except TypeError:
                complete = 0

            try:
                incomplete = len(torrent['leechers'])
            except TypeError:
                incomplete = 0

            try:
                downloaded = torrent['completed']
            except TypeError:
                downloaded = 0

            t = {
                'complete': complete,
                'downloaded': downloaded,
                'incomplete': incomplete,
            }
            torrents['files'][infohash.encode('utf-8')] = t
    else:
        allTorrents = r.table('torrents').run()
        for t in allTorrents:
            try:
                complete = len(t['seeders'])
            except TypeError:
                complete = 0

            try:
                incomplete = len(t['leechers'])
            except TypeError:
                incomplete = 0

            try:
                downloaded = t['completed']
            except TypeError:
                downloaded = 0

            torrents['files'][t['id'].encode('utf-8')] = {
                'complete': complete,
                'downloaded': downloaded,
                'incomplete': incomplete
            }

    return Response(bencode.encode(torrents))
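
For reference, the scrape response built above is just a bencoded dictionary mapping each info_hash to its swarm counts. The `bencode` module used in the example is project-specific, so the sketch below uses a small hand-rolled encoder (an assumption, not the example's API) to show what the wire format looks like:

# Minimal bencoder sketch (BEP 3); not the `bencode` module used above.
def bencode_obj(value):
    if isinstance(value, int):
        return b'i%de' % value
    if isinstance(value, str):
        value = value.encode('utf-8')
    if isinstance(value, bytes):
        return b'%d:%s' % (len(value), value)
    if isinstance(value, list):
        return b'l' + b''.join(bencode_obj(v) for v in value) + b'e'
    if isinstance(value, dict):
        # keys must be byte strings, sorted, for a canonical encoding
        items = sorted((k.encode('utf-8') if isinstance(k, str) else k, v)
                       for k, v in value.items())
        return b'd' + b''.join(bencode_obj(k) + bencode_obj(v) for k, v in items) + b'e'
    raise TypeError('cannot bencode %r' % type(value))

# A scrape response maps 'files' to {info_hash: {complete, downloaded, incomplete}}
demo = {'files': {b'\xaa' * 20: {'complete': 3, 'downloaded': 11, 'incomplete': 2}}}
print(bencode_obj(demo))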
Example 2
def get_hash(url, mode='torrent'):
    if url.startswith('magnet'):
        # magnet link: take the info-hash from the first (xt) query parameter
        return url.split('&')[0].split(':')[-1]
    else:
        try:
            # .torrent file: the info-hash is the SHA-1 of the bencoded 'info' dict
            torrent = Url.open(url, stream=True).content
            metadata = bencode.decode(torrent)
            hashcontents = bencode.encode(metadata['info'])
            return hashlib.sha1(hashcontents).hexdigest()
        except Exception:  # noqa
            return None
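
One caveat with the magnet branch above: it assumes the xt parameter comes first in the query string. A hedged alternative using only the standard library (magnet_btih is a hypothetical helper name, not part of the example's codebase):

from urllib.parse import parse_qs, urlparse

def magnet_btih(magnet):
    # Find the urn:btih xt parameter wherever it sits in the query string.
    query = parse_qs(urlparse(magnet).query)
    for xt in query.get('xt', []):
        if xt.startswith('urn:btih:'):
            return xt.split(':')[-1]
    return None

print(magnet_btih('magnet:?dn=example&xt=urn:btih:0123456789abcdef0123456789abcdef01234567'))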
Example 3
def CalcTorrentHash(torrent):
    if torrent and torrent.startswith('magnet'):
        hashid = re.findall(r'urn:btih:([\w]{32,40})', torrent)[0]
        if len(hashid) == 32:
            # 32-character hashes are base32; convert to the 40-character hex form
            hashid = b16encode(b32decode(hashid)).lower()
    else:
        # noinspection PyTypeChecker
        info = dict(
            decode(torrent))["info"]  # python3 decode returns OrderedDict
        hashid = sha1(encode(info)).hexdigest()
    logger.debug('Torrent Hash: ' + hashid)
    return hashid
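
The b32decode/b16encode step converts the 32-character base32 spelling of an info-hash (sometimes used in magnet links) into the usual 40-character hex form. A small standalone sketch of that conversion, using made-up demo values:

from base64 import b16encode, b32decode

# Made-up demo value: a 32-character base32 info-hash as it may appear in a magnet link.
base32_hash = 'EA3SVTHJGGRRIR4AIYRGRRZZVB2LYHQ5'
hex_hash = b16encode(b32decode(base32_hash)).lower().decode('ascii')
print(hex_hash, len(hex_hash))  # the 40-character hex form

# Note: b32decode() rejects lowercase input unless casefold=True, so hashes
# scraped from magnet links may need .upper() (or casefold=True) first.
assert b16encode(b32decode(base32_hash.lower(), casefold=True)).lower().decode('ascii') == hex_hash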
Example 4
def CalcTorrentHash(torrent):
    # torrent could be a unicode magnet link or a bytes object torrent file contents
    if makeUnicode(torrent[:6]) == 'magnet':
        # torrent = makeUnicode(torrent)
        hashid = re.findall(r'urn:btih:([\w]{32,40})', torrent)[0]
        if len(hashid) == 32:
            # 32-character hashes are base32; convert to the 40-character hex form
            hashid = b16encode(b32decode(hashid)).lower()
    else:
        # noinspection PyTypeChecker
        info = dict(decode(torrent))["info"]  # python3 decode returns OrderedDict
        hashid = sha1(encode(info)).hexdigest()
    logger.debug('Torrent Hash: ' + hashid)
    return hashid
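
The non-magnet branch works because the SHA-1 is taken over the re-encoded 'info' dictionary, which only matches the original file if the decoder preserves key order (hence the OrderedDict note). A sketch of the same computation, assuming the third-party bencodepy package rather than this project's own decode/encode helpers:

from hashlib import sha1
import bencodepy  # assumption: the bencode.py (bencodepy) package is installed

def info_hash(torrent_bytes):
    # The v1 info-hash is the SHA-1 of the bencoded 'info' dict exactly as it
    # appears in the .torrent file, so the decode/encode round trip must keep
    # key order - which is why bencode decoders return an ordered mapping.
    metadata = bencodepy.decode(torrent_bytes)
    return sha1(bencodepy.encode(metadata[b'info'])).hexdigest()

# Usage sketch with an illustrative file name:
# with open('example.torrent', 'rb') as handle:
#     print(info_hash(handle.read()))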
Example 5
def DirectDownloadMethod(bookid=None,
                         dl_title=None,
                         dl_url=None,
                         library='eBook'):
    myDB = database.DBConnection()
    downloadID = False
    Source = "DIRECT"

    logger.debug("Starting Direct Download for [%s]" % dl_title)
    proxies = proxyList()
    headers = {'Accept-encoding': 'gzip', 'User-Agent': USER_AGENT}
    try:
        r = requests.get(dl_url, headers=headers, timeout=90, proxies=proxies)
    except requests.exceptions.Timeout:
        logger.warn('Timeout fetching file from url: %s' % dl_url)
        return False
    except Exception as e:
        if hasattr(e, 'reason'):
            logger.warn('%s fetching file from url: %s, %s' %
                        (type(e).__name__, dl_url, e.reason))
        else:
            logger.warn('%s fetching file from url: %s, %s' %
                        (type(e).__name__, dl_url, str(e)))
        return False

    if not str(r.status_code).startswith('2'):
        logger.debug("Got a %s response for %s" % (r.status_code, dl_url))
    elif len(r.content) < 1000:
        logger.debug("Only got %s bytes for %s, rejecting" %
                     (len(r.content), dl_title))
    else:
        extn = ''
        basename = ''
        if ' ' in dl_title:
            basename, extn = dl_title.rsplit(
                ' ', 1)  # last word is often the extension - but not always...
        if extn and extn in getList(lazylibrarian.CONFIG['EBOOK_TYPE']):
            dl_title = '.'.join(dl_title.rsplit(' ', 1))
            extn = '.' + extn  # destfile is built from basename + extn, so include the dot
        elif magic:
            mtype = magic.from_buffer(r.content)
            if 'EPUB' in mtype:
                extn = '.epub'
            elif 'Mobipocket' in mtype:  # also true for azw and azw3, does it matter?
                extn = '.mobi'
            elif 'PDF' in mtype:
                extn = '.pdf'
            else:
                logger.debug("magic reports %s" % mtype)
            basename = dl_title
        else:
            logger.warn("Don't know the filetype for %s" % dl_title)
            basename = dl_title

        logger.debug("File download got %s bytes for %s" %
                     (len(r.content), dl_title))
        destdir = os.path.join(lazylibrarian.DIRECTORY('Download'), basename)
        # destdir = os.path.join(lazylibrarian.DIRECTORY('Download'), '%s LL.(%s)' % (basename, bookid))
        if not os.path.isdir(destdir):
            _ = mymakedirs(destdir)

        try:
            hashid = dl_url.split("md5=")[1].split("&")[0]
        except IndexError:
            hashid = sha1(encode(dl_url)).hexdigest()

        destfile = os.path.join(destdir, basename + extn)
        try:
            with open(destfile, 'wb') as bookfile:
                bookfile.write(r.content)
            setperm(destfile)
            downloadID = hashid
        except Exception as e:
            logger.error("%s writing book to %s, %s" %
                         (type(e).__name__, destfile, e))

    if downloadID:
        logger.debug('File %s has been downloaded from %s' %
                     (dl_title, dl_url))
        if library == 'eBook':
            myDB.action('UPDATE books SET status="Snatched" WHERE BookID=?',
                        (bookid, ))
        elif library == 'AudioBook':
            myDB.action(
                'UPDATE books SET audiostatus="Snatched" WHERE BookID=?',
                (bookid, ))
        myDB.action(
            'UPDATE wanted SET status="Snatched", Source=?, DownloadID=? WHERE NZBurl=?',
            (Source, downloadID, dl_url))
        return True
    else:
        logger.error('Failed to download file @ <a href="%s">%s</a>' %
                     (dl_url, dl_url))
        myDB.action('UPDATE wanted SET status="Failed" WHERE NZBurl=?',
                    (dl_url, ))
        return False
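
The extension guessing above leans on python-magic's content sniffing when the title gives no usable extension. A standalone sketch of that idea, assuming the python-magic package; the mapping and the sample file name are illustrative only:

import magic  # assumption: the python-magic package (libmagic bindings)

def guess_ebook_extension(data):
    # magic.from_buffer() returns a textual description such as 'EPUB document'
    description = magic.from_buffer(data)
    if 'EPUB' in description:
        return '.epub'
    if 'Mobipocket' in description:  # also matches azw/azw3 containers
        return '.mobi'
    if 'PDF' in description:
        return '.pdf'
    return ''  # unknown; the caller decides how to handle it

# Usage sketch with an illustrative file name:
# with open('example.epub', 'rb') as handle:
#     print(guess_ebook_extension(handle.read(2048)))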
Example 6
def announce_request(user_key):
    """TODO:

    * Check if torrent exists
        - check via <info_hash> (this will always be unique)
        - if exists we update said torrent
        - if not exists create new torrent entry
        - add peer to completed list if completed
        - remove if disconnected
        - add peer to leech list if none of the above

    * Update peer in user table
        - get peer using <user_key>
        - add torrent to peers torrent list
        - update peers ip
        - update peers peer_id
        - update peers port
        - update peers total download and upload
            -- each peer has a list of torrents currently active on
            -- <info_hash>: {upload: <>, download: <>}
            -- compare the stored upload/download for the torrent to the given <uploaded> and <downloaded>
            -- use the difference to update the peers total upload/download

    """

    data = {
            'info_hash': request.args.get('info_hash', None),
            'peer_id': request.args.get('peer_id', None),
            'port': request.args.get('port', None),
            'uploaded': request.args.get('uploaded', None),
            'downloaded': request.args.get('downloaded', None),
            'left': request.args.get('left', None),
            'compact': request.args.get('compact', None), # 1 - return compact result; 0 - no compact
            'no_peer_id': request.args.get('no_peer_id', None),
            'event': request.args.get('event', None),
            'ip': request.args.get('ip', request.remote_addr), #optional
            'numwant': request.args.get('numwant', 30), #optional
            'key': request.args.get('key', None), #optional for public, needed for private
            'trackerid': request.args.get('trackerid', 'TRACKER'), #optional, no idea what this is for
    }

    # reject the request if any required parameter is missing
    for k, v in data.items():
        if k not in ('trackerid', 'event', 'no_peer_id') and not v:
            return bencode.encode({'failure reason': 'missing ' + k})

    if data.get('compact', None):
        if data['compact'] == '0':
            return bencode.encode({'failure reason': 'This tracker only supports compact responses'})

    #data['info_hash'] = hashlib.sha1(data['info_hash'].encode('utf-8')).hexdigest()

    #check if torrent exists
    if not r.table('torrents').get(data['info_hash']).run():
        #torrent doesnt exist; create it
        #NOTE: do we want to have the frontend create the torrent entries for security purposes? --elgruntox
        r.table('torrents').insert({
            'id': data['info_hash'],
            'peer_list': {},
            'seeders': [],
            'leechers': [],
            'completed': 0
        }).run()

    #check if user exists
    if not r.table('users').get(user_key).run():
        #user doesnt exist; create it
        r.table('users').insert({
            'id': user_key,
            'last_ip': data['ip'],
            'last_port': data['port'],
            'seeding': [],
            'leeching': [],
            'torrents': {},
            'total_upload': 0,
            'total_downloaded': 0,
        }).run()

    # so I guess torrent clients are mostly shit and dont send a 'completed' event half the time
    # check if nothing left to download; if nothing left i guess theyre seeding? -- elgruntox
    if data['left'] == '0':
        user = r.table('users').get(user_key).run()
        if data['info_hash'] in user['leeching']:
            user['leeching'].remove(data['info_hash'])
        user['seeding'].append(data['info_hash'])
        user['seeding'] = list(set(user['seeding']))
        r.table('users').get(user_key).replace(user).run()

        torrent = r.table('torrents').get(data['info_hash']).run()
        if data['peer_id'] in torrent['leechers']:
            torrent['leechers'].remove(data['peer_id'])
        torrent['seeders'].append(data['peer_id'])
        torrent['completed'] = torrent['completed'] + 1
        torrent['seeders'] = list(set(torrent['seeders']))

        r.table('torrents').get(data['info_hash']).replace(torrent).run()

    #check if torrent has data specified; if no event just a regular check and we do nothing
    if data['event']:
        if data['event'] == 'started':

            # add torrent to users torrent/leeching list; add peer to torrents peer_list
            user = r.table('users').get(user_key).run()

            # remove the torrent from the user's seeding list if it is there
            # (user['seeding'] holds info_hashes, not peer_ids)
            if data['info_hash'] in user['seeding']:
                user['seeding'].remove(data['info_hash'])

            # update users leeching list
            user['leeching'].append(data['info_hash'])
            user['leeching'] = list(set(user['leeching']))

            # update torrent dict
            user['torrents'][data['info_hash']] = {
                'uploaded': 0,
                'downloaded': 0
            }

            # update and save user to db
            r.table('users').get(user_key).replace(user).run()

            # update torrents leeching list
            torrent = r.table('torrents').get(data['info_hash']).run()
            if data['peer_id'] not in torrent['leechers']:
                torrent['leechers'].append(data['peer_id'])

            # update torrents peer_list
            torrent['peer_list'][data['peer_id']] = {}
            torrent['peer_list'][data['peer_id']]['ip'] = data['ip']
            torrent['peer_list'][data['peer_id']]['port'] = data['port']

            # save torrent data to db
            r.table('torrents').get(data['info_hash']).replace(torrent).run()            


        elif data['event'] == 'stopped':
            # remove torrent from users seeding or leeching list
            user = r.table('users').get(user_key).run()

            # check if in leeching or seeding list; skip if not found
            if data['info_hash'] in user['seeding']:
                user['seeding'].remove(data['info_hash'])
            elif data['info_hash'] in user['leeching']:
                user['leeching'].remove(data['info_hash'])

            # update and save user
            r.table('users').get(user_key).replace(user).run()

            # update torrents leeching/seeding/peer list
            torrent = r.table('torrents').get(data['info_hash']).run()

            if torrent['peer_list'].get(data['peer_id'], None):
                del torrent['peer_list'][data['peer_id']]
            if data['peer_id'] in torrent['seeders']:
                torrent['seeders'].remove(data['peer_id'])
            if data['peer_id'] in torrent['leechers']:
                torrent['leechers'].remove(data['peer_id'])

            # update and save data to db
            r.table('torrents').get(data['info_hash']).replace(torrent).run()

        elif data['event'] == 'completed':
            user = r.table('users').get(user_key).run()
            if data['info_hash'] in user['leeching']:
                user['leeching'].remove(data['info_hash'])
            user['seeding'].append(data['info_hash'])
            user['seeding'] = list(set(user['seeding']))

            r.table('users').get(user_key).replace(user).run()

            torrent = r.table('torrents').get(data['info_hash']).run()
            if data['peer_id'] in torrent['leechers']:
                torrent['leechers'].remove(data['peer_id'])
            torrent['seeders'].append(data['peer_id'])
            torrent['completed'] = torrent['completed'] + 1
            torrent['seeders'] = list(set(torrent['seeders']))

            r.table('torrents').get(data['info_hash']).replace(torrent).run()
        else:
            #malformed request; error out
            return bencode.encode({'failure reason': 'invalid event specified'})

    # update users upload and download stats
    user = r.table('users').get(user_key).run()
    if user['torrents'].get(data['info_hash'], None):
        uploaded = int(data['uploaded']) - user['torrents'][data['info_hash']].get('uploaded', 0)
        downloaded = int(data['downloaded']) - user['torrents'][data['info_hash']].get('downloaded', 0)
        user['torrents'][data['info_hash']]['uploaded'] = user['torrents'][data['info_hash']]['uploaded'] + uploaded
        user['torrents'][data['info_hash']]['downloaded'] = user['torrents'][data['info_hash']]['downloaded'] + downloaded
        user['total_upload'] = user['total_upload'] + uploaded
        user['total_downloaded'] = user['total_downloaded'] + downloaded
    else:
        user['torrents'][data['info_hash']] = {}
        user['torrents'][data['info_hash']]['uploaded'] = int(data['uploaded'])
        user['torrents'][data['info_hash']]['downloaded'] = int(data['downloaded'])
        user['total_upload'] = user['total_upload'] + int(data['uploaded'])
        user['total_downloaded'] = user['total_downloaded'] + int(data['downloaded'])

    r.table('users').get(user_key).update(user).run()

    # generate response
    torrent = r.table('torrents').get(data['info_hash']).run()

    peer_list = []
    for peerid, values in torrent['peer_list'].items():
        peer_list.append((peerid, values['ip'], values['port']))

    ares = {}
    ares['interval'] = 30
    ares['min interval'] = 30
    ares['trackerid'] = data['trackerid']
    ares['complete'] = len(torrent['seeders'])
    ares['incomplete'] = len(torrent['leechers'])
    ares['peers'] = bencode.make_compact_peer_list(peer_list)

    return Response(bencode.encode(ares))
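
bencode.make_compact_peer_list is a helper from the example's own module; its output presumably follows BEP 23, where each peer is packed as a 4-byte IPv4 address plus a 2-byte big-endian port. A sketch of such a helper under that assumption:

import socket
import struct

def make_compact_peer_list(peers):
    # One 6-byte entry per peer: 4-byte IPv4 address followed by a 2-byte
    # big-endian port, all concatenated into a single byte string (BEP 23).
    packed = b''
    for _peer_id, ip, port in peers:
        packed += socket.inet_aton(ip) + struct.pack('>H', int(port))
    return packed

# Matches the (peer_id, ip, port) tuples built in the loop above (demo values):
print(make_compact_peer_list([('peer-1', '10.0.0.1', 6881), ('peer-2', '10.0.0.2', '51413')]))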