Example #1
def hash(filenames):
    # `torrent_folder` and `list2` are module-level names defined elsewhere
    # in the original script.
    for file_name in filenames:
        torrent_file = f'{torrent_folder}{file_name}'
        with open(torrent_file, 'rb') as obj_torrent_file:
            decoded_dict = bencoding.bdecode(obj_torrent_file.read())
        info_hash = hashlib.sha1(bencoding.bencode(
            decoded_dict[b"info"])).hexdigest()
        list2.append(f'{info_hash}==>{torrent_file}')
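The snippet depends on module-level state that the listing does not include. A minimal sketch of how it might be driven, assuming `torrent_folder` holds the directory path and `list2` collects the results (both definitions and the sample file names below are placeholders, not part of the original):

import hashlib
import bencoding

torrent_folder = './torrents/'  # assumed location of the .torrent files
list2 = []                      # collects 'hash==>path' strings

hash(['ubuntu.torrent', 'debian.torrent'])  # placeholder file names
print('\n'.join(list2))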
Example #2
    def add_torrent(self, torrent):
        with open(torrent, "rb") as file:
            data = bdecode(file.read())
        info = data[b"info"]
        hash = sha1(bencode(info)).hexdigest()
        if hash in (self.torrent_hashes or []):
            raise TorrentAlreadyPresentException(hash)

        with open(torrent, "rb") as file:
            # encodestring is base64.encodestring (removed in Python 3.9);
            # base64.encodebytes is the modern equivalent.
            filedump = encodestring(file.read())
        filename = basename(torrent)
        result = self.client.call("core.add_torrent_file", filename, filedump, {})
        logger.debug("Returning {}", result.decode("utf-8"))
        if result.decode("utf-8") != hash:
            raise Exception(result.decode("utf-8"))
        self._torrent_hashes = None
        return True
Example #3
    def add_torrent(self, torrent):
        with open(torrent, "rb") as file:
            data = bdecode(file.read())
        info = data[b"info"]
        hash = sha1(bencode(info)).hexdigest()
        if hash in self.torrent_hashes:
            raise TorrentAlreadyPresentInCollectionException(hash)

        host = min(self.hosts, key=lambda host: host.torrent_count)
        if host.torrent_count >= self.max_torrents:
            raise AllDelugeHostsInCollectionFullException()

        if host.add_torrent(torrent):
            logger.success("Added {} to {}", torrent, host.display)
            self._torrent_hashes = None
            return True
        return False
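Both methods reset `self._torrent_hashes` to `None` after a successful add, which suggests `torrent_hashes` is a lazily cached property. A hedged sketch of that pattern; the class name and `_fetch_hashes` are hypothetical stand-ins for the real RPC call, which is not shown in the listing:

class TorrentCache:
    def __init__(self):
        self._torrent_hashes = None  # None means "cache is stale"

    @property
    def torrent_hashes(self):
        if self._torrent_hashes is None:
            self._torrent_hashes = self._fetch_hashes()  # hypothetical RPC call
        return self._torrent_hashes

    def _fetch_hashes(self):
        raise NotImplementedError  # would query the Deluge daemon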
Example #4
def add_from_file(update: Update, context: CallbackContext):
    logger.info('application/x-bittorrent document from %s',
                update.effective_user.first_name)

    document = update.message.document
    is_torrent_mime = document.mime_type == "application/x-bittorrent"
    has_torrent_ext = document.file_name.lower().endswith(".torrent")
    if not is_torrent_mime and not has_torrent_ext:
        logger.info('invalid document from %s (mime type: %s; file name: %s)',
                    update.effective_user.full_name, document.mime_type,
                    document.file_name)

        update.message.reply_markdown(
            'Please send me a valid torrent file (`.torrent` extension or `application/x-bittorrent` mime type)',
            quote=True)
        return

    file_id = document.file_id
    torrent_file = context.bot.get_file(file_id)

    file_path = './downloads/{}'.format(document.file_name)
    torrent_file.download(file_path)

    kwargs = get_qbt_request_kwargs()

    with open(file_path, 'rb') as f:
        # https://stackoverflow.com/a/46270711
        decoded_dict = bencoding.bdecode(f.read())
        torrent_hash = hashlib.sha1(bencoding.bencode(
            decoded_dict[b"info"])).hexdigest()

        f.seek(0)

        # this method always returns an empty json:
        # https://python-qbittorrent.readthedocs.io/en/latest/modules/api.html#qbittorrent.client.Client.download_from_file
        qb.download_from_file(f, **kwargs)

    update.message.reply_text('Torrent added',
                              quote=True,
                              reply_markup=kb.short_markup(torrent_hash))

    os.remove(file_path)

    notify_addition(update.effective_chat.id, context.bot,
                    update.effective_user, document.file_name
                    or "[unknown file name]")
Example #5
    def __init__(self, torrent_file):
        with open(torrent_file, 'rb') as f:
            self.torrent_data = bencoding.bdecode(f.read())
        # bdecode returns bytes keys and values, hence the b'...' lookups.
        self.announce = self.torrent_data[b'announce']
        self.info = self.torrent_data[b'info']
        self.info_hash = hashlib.sha1(bencoding.bencode(self.torrent_data[b'info'])).digest()
        self.peer_id = b'liutorrent1234567890'  # kept as bytes for the handshake
        self.uploaded = 0
        self.downloaded = 0
        self.port = 6881  # first port of the conventional BitTorrent range (6881-6889)
        self.filename = os.path.join(os.getcwd(), self.info[b'name'].decode('utf-8'))  # for now, single file only
        # handshake: <pstrlen><pstr><reserved><info_hash><peer_id>
        self.handshake = b''.join([bytes([19]), b'BitTorrent protocol', b'\x00' * 8, self.info_hash, self.peer_id])

        self.length = self.info[b'length'] if b'length' in self.info \
            else sum(f[b'length'] for f in self.info[b'files'])
        self.piece_len = self.info[b'piece length']
        self.block_len = BLOCK_LEN

        self.last_piece_len = self.length % self.piece_len
        self.num_pieces = self.length // self.piece_len + (1 if self.last_piece_len else 0)
        self.last_piece = self.num_pieces - 1  # index of the final piece
        self.last_block_len = self.piece_len % self.block_len
        self.blocks_per_piece = self.piece_len // self.block_len + (1 if self.last_block_len else 0)
        # need_pieces marks pieces not yet requested; need_blocks is a list of
        # BitArrays, each tracking the blocks of one piece not yet requested.
        self.need_pieces = BitArray(bin='1' * self.num_pieces)
        self.need_blocks = [BitArray(bin='1' * self.blocks_per_piece) for i in range(self.num_pieces)]
        self.have_pieces = BitArray(bin='0' * self.num_pieces)
        self.have_blocks = [BitArray(bin='0' * self.blocks_per_piece) for i in range(self.num_pieces)]
        self.pieces = defaultdict(self.blocklist)  # index : array of blocks
        self.piece_hashes = self.get_piece_hashes()
        self.initialize_file(self.filename)

        self.info_from_tracker = self.update_info_from_tracker()
        self.peers = self.get_peers()
        self.active_peers = []

        self.num_connected = 0
        self.max_connections = MAX_CONNECTIONS
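The piece bookkeeping above is just a ceiling division: floor-divide, then add one piece if there is a remainder. A quick worked check with illustrative numbers:

import math

length, piece_len = 1_048_577, 262_144  # one byte past 4 full 256 KiB pieces
last_piece_len = length % piece_len     # 1
num_pieces = length // piece_len + (1 if last_piece_len else 0)
assert num_pieces == 5 == math.ceil(length / piece_len)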
Example #6
    def get_info_hash(self):
        response = self.session.get(
            f"{self.DOWNLOAD_URL}{self.topic['id']}")
        if not response.ok:
            raise TVShowsSkipTopicError(
                "Couldn't get torrent file", self.topic['title'])

        torrent_bytes = response.content
        _decoded = bdecode(torrent_bytes)[b'info']
        info_hash = hashlib.sha1(bencode(_decoded)).hexdigest().upper()

        if not info_hash:
            raise TVShowsSkipTopicError(
                "Couldn't calculate torrent hash", self.topic['title'])

        if info_hash == self.topic['info_hash']:
            raise TVShowsSkipTopicError(
                'Hashes are equal', self.topic['title'])

        manager.update_file(self.topic, torrent_bytes)
        return info_hash
Example #7
def database_builder():
    list_database = []
    list_origin_database = []
    list_torrents_dir = [
        f for f in os.listdir('torrents')
        if os.path.isfile(os.path.join('torrents', f))
    ]
    origin_database_loaded = False
    database_loaded = False
    full_loaded = False
    if not os.path.isfile('anime_database.json'):
        color.color_print(Fore.CYAN, '[INFO]',
                          'MISSING OFFLINE DATABASE. DOWNLOADING...')
        color.color_print(
            Fore.CYAN, '[INFO]',
            'THANKS TO THE MANAMI PROJECT FOR PROVIDING THE OFFLINE DATABASE')
        urllib.request.urlretrieve(
            'https://raw.githubusercontent.com/manami-project/anime-offline-database/master/anime-offline-database.json',
            'anime_database.json')
        color.color_print(Fore.CYAN, '[INFO]', 'DOWNLOAD COMPLETED\n')
    # Load the database whether it already existed or was just downloaded
    # (the original only loaded it in the first case, then crashed later).
    with open('anime_database.json', encoding='utf-8') as anime_database_file:
        anime_database_obj = json.load(anime_database_file)
    if os.path.isfile('output\\database_original.json'):
        try:
            with open('output\\database_original.json',
                      encoding='utf-8') as origin_database_json_file:
                origin_database_obj = json.load(origin_database_json_file)
            origin_database_loaded = True
        except (OSError, ValueError):
            origin_database_loaded = False
        try:
            with open('output\\database.json',
                      encoding='utf-8') as database_json_file:
                database_obj = json.load(database_json_file)
            database_loaded = True
        except (OSError, ValueError):
            database_loaded = False
    if origin_database_loaded and database_loaded:
        full_loaded = True
        list_database = database_obj
        list_origin_database = origin_database_obj

    added_new_state = False
    color.color_print(Fore.LIGHTMAGENTA_EX, '[COPYRIGHT]',
                      'MANAMI PROJECT: ANIME OFFLINE DATABASE')
    color.color_print(Fore.LIGHTMAGENTA_EX, '[COPYRIGHT]',
                      'IGORCMOURA: ANITOPY')
    color.color_print(Fore.LIGHTMAGENTA_EX, '[COPYRIGHT]', 'JCUL: BENCODE\n')
    color.color_print(Fore.YELLOW, '[PROCESSING]', 'PARSE TORRENTS\n')
    for i in list_torrents_dir:
        torrent_filename = i
        torrent_full_path = 'torrents\\' + i
        with open(torrent_full_path, 'rb') as fh:
            torrent_data = fh.read()

        if not search_database(list_database, i) or not full_loaded:
            torrent = bencode.decode(torrent_data)
            torrent_announces = []
            torrent_files = []
            torrent_creation_date = ''
            torrent_hash = ''
            torrent_magnet = ''
            torrent_total_length = 0
            for im in torrent:
                torrent_creation_date = (
                    datetime.utcfromtimestamp(int(im[b'creation date'])) -
                    timedelta(hours=9)).strftime('%Y-%m-%d %H:%M:%S')
                torrent_temp_announce = []
                for imfw in im[b'announce-list']:
                    torrent_temp_announce.append(imfw[0].decode("utf-8"))
                torrent_announces = torrent_temp_announce

                # Hash the bencoded info dict once per torrent,
                # not once per tracker as in the original.
                torrent_hash = hashlib.sha1(
                    bencoding.bencode(im[b'info'])).hexdigest()
                torrent_magnet = 'magnet:?xt=urn:btih:{}'.format(torrent_hash)
                if b'files' in im[b'info']:
                    for imf in im[b'info'][b'files']:
                        torrent_files.append({
                            'name': imf[b'path'][0].decode("utf-8"),
                            'size': format_size_units(imf[b'length'])
                        })
                        torrent_total_length += imf[b'length']
                else:
                    torrent_total_length = im[b'info'][b'length']
                    torrent_files.append({
                        'name': im[b'info'][b'name'].decode("utf-8"),
                        'size': format_size_units(im[b'info'][b'length'])
                    })

            torrent_size = format_size_units(torrent_total_length)

            info_id = random_string_digits(10)

            result_anitopy = anitopy.parse(torrent_filename)
            anime_db_result = search_anime(anime_database_obj,
                                           result_anitopy['anime_title'])

            json_data_for_add = {
                'id': info_id,
                'file_name': torrent_filename,
                'title': result_anitopy['anime_title'],
                'episode': result_anitopy.get('episode_number'),
                'hash': torrent_hash,
                'size': torrent_size,
                'resolution': result_anitopy.get('video_resolution'),
                'video_codec': result_anitopy.get('video_term'),
                'audio_codec': result_anitopy.get('audio_term'),
                'release_group': result_anitopy.get('release_group'),
                'created_date': torrent_creation_date,
                'magnet_url': torrent_magnet,
                'torrent_url': 'https://anime.cryental.dev/download/' + info_id + '.torrent',
                'extra': {
                    'announces': torrent_announces,
                    'files': torrent_files
                }
            }

            if not anime_db_result:
                json_data_for_add['metadata'] = None
            else:
                json_data_for_add['metadata'] = {
                    'type': anime_db_result.get('type'),
                    'episodes': anime_db_result.get('episodes'),
                    'picture': anime_db_result.get('picture'),
                    'thumbnail': anime_db_result.get('thumbnail'),
                    'status': anime_db_result.get('status')
                }
            list_database.append(json_data_for_add)
            if not search_database(list_origin_database, i) or not full_loaded:
                json_original_data_for_add = {}
                json_original_data_for_add['id'] = info_id
                json_original_data_for_add['file_name'] = torrent_filename
                json_original_data_for_add['hash'] = torrent_hash
                with open(torrent_full_path, "rb") as f:
                    # base64-encode the raw torrent so it can live in JSON
                    encoded_torrent = base64.b64encode(f.read())
                    json_original_data_for_add['raw_data'] = encoded_torrent.decode()
                json_original_data_for_add['created_date'] = torrent_creation_date
                list_origin_database.append(json_original_data_for_add)
            added_new_state = True
            color.color_print(Fore.YELLOW, '[PROCESSED] ', i)
        else:
            print(Fore.LIGHTRED_EX + '[SKIPPED] ' + Style.RESET_ALL + i)

    if added_new_state or not full_loaded:
        color.color_print(Fore.YELLOW, '[PROCESSING]', 'SORTING LIST')
        list_database.sort(key=sortSecond, reverse=True)
        color.color_print(Fore.YELLOW, '[PROCESSING]', 'SORTING ORIGINAL LIST')
        list_origin_database.sort(key=sortSecond, reverse=True)
        color.color_print(Fore.YELLOW, '[PROCESSING]', 'DISK ACCESSING')
        with open('output\\database.json', 'w') as outfile:
            color.color_print(Fore.YELLOW, '[PROCESSING]', 'WRITING LIST')
            json.dump(list_database, outfile)

        color.color_print(Fore.YELLOW, '[PROCESSING]', 'DISK ACCESSING')
        with open('output\\database_original.json', 'w') as outfile:
            color.color_print(Fore.YELLOW, '[PROCESSING]',
                              'WRITING LIST ORIGINAL')
            json.dump(list_origin_database, outfile)

        color.color_print(Fore.YELLOW, '[PROCESSING]', 'WRITING UPDATED DATE')
        today = datetime.now()
        with open('output\\updated_on.txt', 'w') as new_days:
            new_days.write(today.strftime("%Y-%m-%d %H:%M:%S"))

        color.color_print(Fore.YELLOW, '[PROCESSING]', 'WRITING HASH FILES')
        database_md5 = str(md5('output\\database.json'))
        origin_database_md5 = str(md5('output\\database_original.json'))
        updated_md5 = str(md5('output\\updated_on.txt'))
        with open('output\\database.json.md5', 'w') as outfile:
            json.dump(database_md5, outfile)
        with open('output\\database_original.json.md5', 'w') as outfile:
            json.dump(origin_database_md5, outfile)
        with open('output\\updated_on.txt.md5', 'w') as outfile:
            json.dump(updated_md5, outfile)
    color.color_print(Fore.YELLOW, '[DONE]', 'COMPLETED\n')
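The magnet URI built above carries only the `xt` (exact topic) parameter. A hedged sketch of extending it with a display name and the collected trackers; the function name is an assumption for illustration, not part of the original script:

import urllib.parse

def build_magnet(info_hash, name=None, trackers=()):
    uri = 'magnet:?xt=urn:btih:{}'.format(info_hash)
    if name:
        uri += '&dn=' + urllib.parse.quote(name)     # display name
    for tracker in trackers:
        uri += '&tr=' + urllib.parse.quote(tracker)  # tracker URL
    return uri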
Example #8
def get_torr_info_hash(data):
    import hashlib, bencoding
    bencode_dict = bencoding.bdecode(data)
    return hashlib.sha1(bencoding.bencode(
        bencode_dict[b"info"])).hexdigest()
Example #9
    def get_info_hash(self):
        info = bencoding.bencode(self.data[b'info'])
        info_hash = hashlib.sha1(info).digest()
        return info_hash
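Note that this example returns `digest()` (the raw 20 bytes used in the peer handshake and tracker announce), whereas most of the others return `hexdigest()` (the 40-character hex form used in magnet links). A quick illustration:

import hashlib

h = hashlib.sha1(b'example')
assert len(h.digest()) == 20     # raw bytes, for the wire protocol
assert len(h.hexdigest()) == 40  # hex string, for magnet links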
Example #10
    def __init__(self, file_path):
        if os.path.isfile(file_path) and file_path.split('.')[-1] == 'torrent':
            with open(file_path, 'rb') as f:
                self.metaData = bencoding.bdecode(f.read())
        else:
            raise ValueError('Invalid torrent file')

        self._announce = self.metaData[b'announce']

        if b'private' in self.metaData[b'info']:
            self._isPrivate = int(self.metaData[b'info'][b'private']) == 1
        else:
            self._isPrivate = False

        self._pieces = self.metaData[b'info'][b'pieces']

        self._piece_length = self.metaData[b'info'][b'piece length']

        if b'announce-list' not in self.metaData:
            self._trackers = [self._announce]
        else:
            self._trackers = self.metaData[b'announce-list']
            self._trackers = [
                tracker for sublist in self._trackers for tracker in sublist
                if b'ipv6' not in tracker
            ]

        if b'creation date' in self.metaData:
            self._creationDate = self.metaData[b'creation date']

        if b'comment' in self.metaData:
            self._comment = self.metaData[b'comment']

        if b'created by' in self.metaData:
            self._createdBy = self.metaData[b'created by']

        if b'encoding' in self.metaData:
            self._encoding = self.metaData[b'encoding']

        if b'files' not in self.metaData[b'info']:
            self.mode = 'single'
            self._total_length = self.metaData[b'info'][b'length']
            if b'md5sum' in self.metaData[b'info']:
                self._md5sum = self.metaData[b'info'][b'md5sum']
        else:
            self.mode = 'multiple'
            self.files = self.metaData[b'info'][b'files']
            # Custom bookkeeping from here onwards.
            # self.fractures stores the index at which each file ends.
            self.files, self._total_length, self.fractures = self.__parse_files()

        self.number_of_pieces = math.ceil(self._total_length /
                                          self._piece_length)

        print("MODE:", self.mode)
        print("TOTAL LENGTH:", self._total_length)
        print("PIECE_LEN:", self._piece_length)
        print("NO. OF PIECES:", self.number_of_pieces)
        print("LAST PIECE LEN:", self._total_length % self._piece_length)
        print("NO. OF PIECE HASHES", len(self._pieces) / 20)

        self.name = self.metaData[b'info'][b'name']  # Usage depends on mode

        self.info_hash = sha1(bencoding.bencode(
            self.metaData[b'info'])).digest()

        self.peers = []
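The last print divides `len(self._pieces)` by 20 because `pieces` is a flat concatenation of 20-byte SHA-1 hashes, one per piece. A small sketch of splitting it into a list (a standalone helper, not part of the original class):

def split_piece_hashes(pieces_blob):
    # pieces_blob is the raw b'pieces' value from the info dict.
    return [pieces_blob[i:i + 20] for i in range(0, len(pieces_blob), 20)]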
Example #11
import os, bencoding, hashlib

for f_name in os.listdir('1'):
    if f_name.endswith('.torrent'):
        with open("1/%s" % f_name, "rb") as obj_torrent_file:
            decoded_dict = bencoding.bdecode(obj_torrent_file.read())

        info_hash = hashlib.sha1(bencoding.bencode(
            decoded_dict[b"info"])).hexdigest()

        print('magnet:?xt=urn:btih:%s' % info_hash)
Example #12
def _get_hash_file(path):
    with open(path, "rb") as f:
        decoded_dict = bdecode(f.read())
    # hexdigest() already returns a str, so no extra str() wrapper is needed
    return sha1(bencode(decoded_dict[b'info'])).hexdigest()
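Hypothetical usage (the path is a placeholder):

print(_get_hash_file('example.torrent'))  # 40-character lowercase hex digest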
Example #13
arguments = sys.argv
if (len(arguments) > 1):
    if ("--help" in arguments):
        print("-t <torrent file> -mu <max_upload in MB> -md <max_down in MB> -su <min_speed in KB/s> <max_speed in KB/s> -sd <min_speed in KB/s> <max_speed in KB/s>")
        sys.exit()
    else:
        max_down = 0
        max_upload = 0
        speed_up = (800, 3000)
        speed_down = (800, 2000)

        if ("-f" in arguments):
            torrent_file = open(arguments[arguments.index("-f") + 1], "rb")
            torrent_decoded = bencoding.bdecode(torrent_file.read())
            announce_params["info_hash"] = codecs.decode(hashlib.sha1(bencoding.bencode(torrent_decoded[b"info"])).hexdigest(), "hex")
            torrent_length = (len(torrent_decoded[b"info"][b"pieces"]) / 20) * torrent_decoded[b"info"][b"piece length"]
            announce_url = torrent_decoded[b"announce"]
            print("torrent size: %iMB" % (torrent_length // 1024 // 1024))
            max_upload = random.randrange(40, 80) * torrent_length // 100
            max_down = random.randrange(1, 8) * torrent_length // 100

        if ("-mu" in arguments):
            max_upload = int(arguments[arguments.index("-mu") + 1]) * 1024 * 1024
        if ("-md" in arguments):
            max_down = int(arguments[arguments.index("-md") + 1]) * 1024 * 1024
        if ("-su" in arguments):
            speed_up = (int(arguments[arguments.index("-su") + 1]), int(arguments[arguments.index("-su") + 2]))
        if ("-sd" in arguments):
            speed_down = (int(arguments[arguments.index("-sd") + 1]), int(arguments[arguments.index("-sd") + 2]))
else:
    # The scraped example is truncated here and the else body was lost;
    # printing usage and exiting is the presumed behavior when no
    # arguments are given.
    sys.exit("no arguments given; run with --help for usage")
Example #14
def infohash(fname):
    with open(fname, "rb") as obj_torrent_file:
        decoded_dict = bencoding.bdecode(obj_torrent_file.read())
    info_hash = hashlib.sha1(bencoding.bencode(
        decoded_dict[b"info"])).hexdigest()
    return info_hash
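Example #13 needs the raw 20-byte digest for its tracker announce, while `infohash` returns the hex form. A hedged sketch of how the raw digest would be percent-encoded into an announce query string (the file name is a placeholder):

import hashlib
import urllib.parse
import bencoding

with open('example.torrent', 'rb') as f:
    decoded = bencoding.bdecode(f.read())
raw_hash = hashlib.sha1(bencoding.bencode(decoded[b'info'])).digest()
query = urllib.parse.urlencode({'info_hash': raw_hash})  # percent-encodes the bytes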