Example #1
    def construct_message(self):
        args = {
            'ping': {
                'id': self.nodeid
            },
            'find_node': {
                'id': self.nodeid,
                'target': self.target
            },
            'get_peers': {
                'id': self.nodeid,
                'info_hash': self.infohash
            },
            'announce_peer': {
                'id': self.nodeid,
                'implied_port': self.implied_port,
                'info_hash': self.infohash,
                'port': self.port,
                'token': self.token
            }
        }.get(self.query_type, None)

        if args is None:
            raise RuntimeError('Invalid DHT query type: {}'.format(
                self.query_type))

        return bencode({
            't': self.tid,
            'y': 'q',
            'q': self.query_type,
            'a': args
        })
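For reference, the query built above bencodes to one flat byte string. Below is a minimal, self-contained sketch of that wire format using a throwaway encoder and made-up placeholder IDs; it is an illustration only, not the bencode library used by these examples.

def _benc(value):
    # Toy bencoder covering only the types used above: ints, str/bytes,
    # lists and dicts (dict keys sorted, as bencoding requires).
    if isinstance(value, int):
        return b'i%de' % value
    if isinstance(value, str):
        value = value.encode('utf-8')
    if isinstance(value, bytes):
        return b'%d:%s' % (len(value), value)
    if isinstance(value, list):
        return b'l' + b''.join(_benc(v) for v in value) + b'e'
    if isinstance(value, dict):
        items = sorted((k.encode('utf-8') if isinstance(k, str) else k, v)
                       for k, v in value.items())
        return b'd' + b''.join(_benc(k) + _benc(v) for k, v in items) + b'e'
    raise TypeError('cannot bencode {!r}'.format(type(value)))

ping = _benc({'t': b'aa', 'y': 'q', 'q': 'ping', 'a': {'id': b'A' * 20}})
# b'd1:ad2:id20:AAAAAAAAAAAAAAAAAAAAe1:q4:ping1:t2:aa1:y1:qe'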
Example #2
def get_torrent_stats(url):
    response = requests.get(url)
    data = bencodepy.decode(response.content)
    files = data[b'info'][b'files']
    size = 0
    for file in files:
        size += file[b'length']
    size = size * 1e-9
    size = round(size, 2)
    info_hash = hashlib.sha1(bencodepy.bencode(data[b"info"])).hexdigest()

    trackers_list = data[b'announce-list']

    stats = {'seeds': 0, 'peers': 0}

    for tracker_url in trackers_list:
        tracker_url = tracker_url[0].decode('utf-8')
        result = scrape(tracker_url, [info_hash])
        if not result:
            continue
        if result[info_hash]['seeds'] is None or result[info_hash][
                'peers'] is None:
            continue
        stats['seeds'] = max(stats['seeds'], result[info_hash]['seeds'])
        stats['peers'] = max(stats['peers'], result[info_hash]['peers'])
    stats['size_gb'] = size
    return stats
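get_torrent_stats assumes a multi-file torrent (an info dict with a b'files' list) and an 'announce-list' key; single-file torrents store a single b'length' instead. A small sketch of a size helper covering both layouts (a hypothetical helper, not part of the code above):

def torrent_total_size(info):
    # Multi-file torrents list per-file sizes under b'files';
    # single-file torrents store one top-level b'length'.
    if b'files' in info:
        return sum(f[b'length'] for f in info[b'files'])
    return info[b'length']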
Example #3
    def test_verify_torrent_contents_keyerror2(self):
        del self.torrent_data_dict[b'info']
        self.torrent_data = bencode(self.torrent_data_dict)

        with self.assertRaises(KeyError):
            verify_torrent_contents(self.torrent_data,
                                    dirname(self.torrent_data_path))
Example #4
    def parse_extended_message(self):
        extended_message_type = self.field[0]
        message = self.field[1:]

        if extended_message_type == 0:
            try:
                message = bdecode(message)
            except DecodingError:
                self.error.set()
                return

            if b'm' not in message:
                logger.debug('"m" not in extended handshake.')
                self.error.set()
                return

            self.extended_message_types = message[b'm']

            if b'ut_metadata' not in self.extended_message_types:
                logger.debug('Peer does not support metadata protocol.')
                self.error.set()
                return

            if b'metadata_size' not in message:
                logger.debug('Peer did not send "metadata_size" in extended handshake.')
                self.error.set()
                return

            self.metadata_size = message[b'metadata_size']
            logger.info('metadata size: {}'.format(self.metadata_size))
            self.extended_handshake_complete.set()

            self.write_message(15, b'') # have none
            logger.debug('Sent HAVE NONE.')
            self.write_message(0, b'') # choke
            logger.debug('Sent CHOKE.')
            self.write_message(3, b'') # not interested
            logger.debug('Sent NOT INTERESTED.')
        elif extended_message_type == self.extended_message_types[b'ut_metadata']:
            original_message = message
            try:
                message = bdecode(message)
            except DecodingError:
                self.error.set()
                return

            if message[b'msg_type'] == 0:
                reply = {
                    'msg_type': 2,
                    'piece': message[b'piece']
                }
            elif message[b'msg_type'] == 2:
                logger.debug('Request for metadata rejected.')
                return
            elif message[b'msg_type'] == 1:
                size = len(original_message) - len(bencode(message))
                logger.debug('Got a metadata block of size: {}'.format(size))

                self.metadata_block = original_message[-size:]
                self.metadata_block_received.set()
Example #7
 def get_metadata_block(self, n):
     message = {
         'msg_type': 0,
         'piece': n
     }
     logger.info('Requesting piece {} of metadata.'.format(n))
     msg = bencode(message)
     self.write_extended_message(self.extended_message_types[b'ut_metadata'], msg)
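Assuming bencode behaves like the tests further down (dictionary keys sorted, str keys encoded as UTF-8, integers wrapped in i...e), the request for piece 0 serializes to a short fixed byte string:

# msg_type 0 = request, piece index 0; 'msg_type' sorts before 'piece':
# bencode({'msg_type': 0, 'piece': 0}) == b'd8:msg_typei0e5:piecei0ee'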
Example #9
    def parse_field(self):
        if self.state == HANDSHAKE:
            if not self.field[:20] == b'\x13BitTorrent protocol':
                logger.debug('Invalid pstr.')
                self.error.set()
                return

            if int.from_bytes(self.field[20:28],
                              byteorder='big') & 0x0000000000100000 == 0:
                logger.debug('Peer does not support extension protocol.')
                self.error.set()
                return

            if int.from_bytes(self.field[20:28],
                              byteorder='big') & 0x0000000000000004 == 0:
                logger.debug('Peer does not support fast protocol.')
                self.error.set()
                return

            self.state = MESSAGE_LEN
            self.field_len = 4
            self.handshake_complete.set()

            extended_handshake = bencode({
                'm': {
                    b'ut_metadata': 2
                },
                'v': 'S.P.E.W.'
            })
            self.write_extended_message(0, extended_handshake)
            logger.debug('Sent extended handshake.')
        elif self.state == MESSAGE_LEN:
            self.message_len = int.from_bytes(self.field, byteorder='big')
            if self.message_len == 0:
                self.state = MESSAGE_LEN
                self.field_len = 4  # keep-alive; read the next length prefix
            else:
                self.state = MESSAGE_TYPE
                self.field_len = 1
        elif self.state == MESSAGE_TYPE:
            self.message_type = int.from_bytes(self.field, byteorder='big')
            if self.message_len == 1:
                self.state = MESSAGE_LEN
                self.field_len = 4  # no payload; read the next length prefix
            else:
                self.message_len -= 1
                self.field_len = self.message_len
                self.state = MESSAGE_PAYLOAD
        elif self.state == MESSAGE_PAYLOAD:
            self.parse_message()
            self.field_len = 4
            self.state = MESSAGE_LEN
        else:
            logger.error('Invalid state.')
            self.error.set()

        self.field = b''
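The state constants compared against self.state are defined outside this excerpt. A plausible definition, assumed here only so the state machine reads on its own:

# Assumed, not shown in the excerpt: distinct values for the parser states.
HANDSHAKE, MESSAGE_LEN, MESSAGE_TYPE, MESSAGE_PAYLOAD = range(4)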
Example #10
    def _decode(self, torrent_file):
        with open(torrent_file, 'rb') as f:
            self.torrent = f.read()
        odict = bdecode(self.torrent)

        self.announce = odict[b'announce'].decode('utf-8')
        self.info = odict[b'info']
        self.length = self._length(self.info)
        self.hash = sha1(bencode(self.info)).digest()
Example #11
def peer_tracker():
    with open("./file.torrent", "rb") as f:
        raw_data = f.read()
        data = bc.decode(raw_data)
        print(data.keys())
    info_hash = hashlib.sha1(bc.bencode(data[b"info"])).hexdigest()
    print(data[b"announce-list"])

    return info_hash
Example #13
    def setUp(self):
        """ A torrent generator! """
        self.torrent_data_path = mkdtemp(prefix='test-torrent-verification-')
        self.torrent_name = basename(self.torrent_data_path)

        all_data = create_random_data(self.FILE_SIZE * 2)
        pieces = b''

        self.file1 = self._mktemp(contents=all_data[0:self.FILE_SIZE],
                                  dir=self.torrent_data_path)
        self.file2 = self._mktemp(contents=all_data[self.FILE_SIZE:],
                                  dir=self.torrent_data_path)

        for i in range(0, self.FILE_SIZE * 2, self.PIECE_LENGTH):
            s = sha1()
            s.update(all_data[i:i + self.PIECE_LENGTH])
            pieces += s.digest()

        self.torrent_data_dict = {
            b'announce': 'https://fake.com',
            b'info': {
                b'name': self.torrent_name,
                b'piece length': self.PIECE_LENGTH,
                b'pieces': pieces,
                b'files': [
                    {
                        b'length': self.FILE_SIZE,
                        b'path': [basename(self.file1)],
                    },
                    {
                        b'length': self.FILE_SIZE,
                        b'path': [basename(self.file2)],
                    },
                ],
            }
        }
        self.torrent_data = bencode(self.torrent_data_dict)

        self.torrent_file_path = self._mktemp(contents=self.torrent_data)
Example #14
    def setUp(self):
        all_data = create_random_data(self.FILE_SIZE)
        self.file1 = self._mktemp(contents=all_data)
        self.torrent_data_path = dirname(self.file1)

        pieces = b''
        for i in range(0, self.FILE_SIZE, self.PIECE_LENGTH):
            s = sha1()
            s.update(all_data[i:i + self.PIECE_LENGTH])
            pieces += s.digest()

        self.torrent_data_dict = {
            b'announce': 'https://fake.com',
            b'info': {
                b'name': self.file1,
                b'piece length': self.PIECE_LENGTH,
                b'pieces': pieces,
            }
        }
        self.torrent_data = bencode(self.torrent_data_dict)

        self.torrent_file_path = self._mktemp(contents=self.torrent_data)
Example #15
def test_dictionary_nested():
    """Test the handling of nested dictionaries."""
    encoded = bencode({'foo': 42, 'bar': {'sketch': 'parrot', 'foobar': 23}})

    assert encoded == 'd3:bard6:foobari23e6:sketch6:parrote3:fooi42ee'.encode('utf-8')
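Reading the expected value off by hand shows where each byte comes from: dictionary keys are sorted, strings are length-prefixed, and integers are wrapped in i...e.

# d                       outer dictionary
#   3:bar d               key 'bar' (sorts before 'foo') -> nested dictionary
#     6:foobar i23e         key 'foobar' -> 23
#     6:sketch 6:parrot     key 'sketch' -> 'parrot'
#   e                     end of nested dictionary
#   3:foo i42e            key 'foo' -> 42
# e                       end of outer dictionary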
Example #16
def test_encode():
    """Encode should give known result with known input."""
    for plain, encoded in ENCODE:
        assert encoded == bencode(plain)
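ENCODE is a fixture defined elsewhere in the test module; a plausible, assumed shape is a list of (plain, expected-encoding) pairs:

# Assumed shape of the fixture (not shown in the excerpt):
ENCODE = [
    (42, b'i42e'),
    ('spam', b'4:spam'),
    ([1, 2, 3], b'li1ei2ei3ee'),
]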
Example #17
def main() -> None:
    # Create and configure ArgumentParser.
    parser = argparse.ArgumentParser(
        description=
        "py3createtorrent is a comprehensive command line utility for creating torrents."
    )

    parser.add_argument(
        "-p",
        "--piece-length",
        type=int,
        action="store",
        dest="piece_length",
        default=0,
        help="piece size in KiB. 0 = automatic selection (default).")

    parser.add_argument("-P",
                        "--private",
                        action="store_true",
                        dest="private",
                        default=False,
                        help="create private torrent")

    parser.add_argument("-c",
                        "--comment",
                        type=str,
                        action="store",
                        dest="comment",
                        default=False,
                        help="include comment")

    parser.add_argument("-s",
                        "--source",
                        type=str,
                        action="store",
                        dest="source",
                        default=False,
                        help="include source")

    parser.add_argument("-f",
                        "--force",
                        action="store_true",
                        dest="force",
                        default=False,
                        help="do not ask anything, just do it")

    parser.add_argument("-v",
                        "--verbose",
                        action="store_true",
                        dest="verbose",
                        default=False,
                        help="verbose mode")

    parser.add_argument("-q",
                        "--quiet",
                        action="store_true",
                        dest="quiet",
                        default=False,
                        help="be quiet, e.g. don't print summary")

    parser.add_argument(
        "-o",
        "--output",
        type=str,
        action="store",
        dest="output",
        default=None,
        metavar="PATH",
        help=
        "custom output location (directory or complete path). default = current directory."
    )

    parser.add_argument("-e",
                        "--exclude",
                        type=str,
                        action="append",
                        dest="exclude",
                        default=[],
                        metavar="PATH",
                        help="exclude path (can be repeated)")

    parser.add_argument(
        "--exclude-pattern",
        type=str,
        action="append",
        dest="exclude_pattern",
        default=[],
        metavar="REGEXP",
        help="exclude paths matching the regular expression (can be repeated)")

    parser.add_argument(
        "--exclude-pattern-ci",
        type=str,
        action="append",
        dest="exclude_pattern_ci",
        default=[],
        metavar="REGEXP",
        help=
        "exclude paths matching the case-insensitive regular expression (can be repeated)"
    )

    parser.add_argument(
        "-d",
        "--date",
        type=int,
        action="store",
        dest="date",
        default=-1,
        metavar="TIMESTAMP",
        help=
        "set creation date (unix timestamp). -1 = now (default). -2 = disable."
    )

    parser.add_argument(
        "-n",
        "--name",
        type=str,
        action="store",
        dest="name",
        default=None,
        help="use this file (or directory) name instead of the real one")

    parser.add_argument("--md5",
                        action="store_true",
                        dest="include_md5",
                        default=False,
                        help="include MD5 hashes in torrent file")

    parser.add_argument(
        "--config",
        type=str,
        action="store",
        help=
        "use another config file instead of the default one from the home directory"
    )

    parser.add_argument("-t",
                        "--tracker",
                        metavar="TRACKER_URL",
                        action="append",
                        dest="trackers",
                        default=[],
                        help="tracker to use for the torrent")
    parser.add_argument("--node",
                        metavar="HOST,PORT",
                        action="append",
                        dest="nodes",
                        default=[],
                        help="DHT bootstrap node to use for the torrent")
    parser.add_argument("--webseed",
                        metavar="WEBSEED_URL",
                        action="append",
                        dest="webseeds",
                        default=[],
                        help="webseed URL for the torrent")

    parser.add_argument("path",
                        help="file or folder for which to create a torrent")

    args = parser.parse_args()

    global VERBOSE
    VERBOSE = args.verbose

    config = Config()
    if args.config:
        if not os.path.isfile(args.config):
            parser.error("The config file at '%s' does not exist" %
                         args.config)
        config.path = args.config

    try:
        config.load_config()
    except json.JSONDecodeError as exc:
        print("Could not parse config file at '%s'" %
              config.get_path_to_config_file(),
              file=sys.stderr)
        print(exc, file=sys.stderr)
        sys.exit(1)
    except Config.InvalidConfigError as exc:
        print(exc, file=sys.stderr)
        sys.exit(1)

    printv('Config / Tracker abbreviations:\n' +
           pprint.pformat(config.tracker_abbreviations))
    printv('Config / Advertise:         ' + str(config.advertise))
    printv('Config / Best trackers URL: ' + config.best_trackers_url)

    # Ask the user if he really wants to use uncommon piece lengths.
    # (Unless the force option has been set.)
    if not args.force and 0 < args.piece_length < 16:
        if "yes" != input(
                "It is strongly recommended to use a piece length greater or equal than 16 KiB! Do you "
                "really want to continue? yes/no: "):
            parser.error("Aborted.")

    if not args.force and args.piece_length > 16384:
        if "yes" != input(
                "It is strongly recommended to use a maximum piece length of 16384 KiB (16 MiB)! Do you really "
                "want to continue? yes/no: "):
            parser.error("Aborted.")

    if not args.force and args.piece_length % 16 != 0:
        if "yes" != input(
                "It is strongly recommended to use a piece length that is a multiple of 16 KiB! Do you really "
                "want to continue? yes/no: "):
            parser.error("Aborted.")

    # Verbose and quiet options may not be used together.
    if args.verbose and args.quiet:
        parser.error("Being verbose and quiet exclude each other.")

    # ##########################################
    # CALCULATE/SET THE FOLLOWING METAINFO DATA:
    # - info
    #   - pieces (concatenated 20 byte sha1 hashes of all the data)
    #   - files (if multiple files)
    #   - length and md5sum (if single file)
    #   - name (may be overwritten in the next section by the --name option)

    input_path = args.path  # type: str
    trackers = args.trackers  # type: List[str]

    # Validate the given path.
    if not os.path.isfile(input_path) and not os.path.isdir(input_path):
        parser.error("'%s' neither is a file nor a directory." % input_path)

    # Evaluate / apply the tracker abbreviations.
    trackers = replace_in_list(trackers, config.tracker_abbreviations)

    # Remove duplicate trackers.
    trackers = remove_duplicates(trackers)

    # Validate tracker URLs.
    invalid_trackers = False
    best_shortcut_present = False
    regexp = re.compile(r"^(http|https|udp)://", re.I)
    regexp_best = re.compile(r"best([0-9]+)", re.I)
    for t in trackers:
        m = regexp_best.match(t)
        if m:
            best_shortcut_present = True
        if not regexp.search(t) and not m:
            print("Warning: Not a valid tracker URL: %s" % t, file=sys.stderr)
            invalid_trackers = True

    if invalid_trackers and not args.force:
        if "yes" != input("Some tracker URLs are invalid. Continue? yes/no: "):
            parser.error("Aborted.")

    # Handle best[0-9] shortcut.
    if best_shortcut_present:
        new_trackers = []
        for t in trackers:
            m = regexp_best.match(t)
            if m:
                try:
                    new_trackers.extend(
                        get_best_trackers(int(m.group(1)),
                                          config.best_trackers_url))
                except urllib.error.URLError as e:
                    print(
                        "Error: Could not download best trackers from '%s'. Reason: %s"
                        % (config.best_trackers_url, e),
                        file=sys.stderr)
                    sys.exit(1)
            else:
                new_trackers.append(t)
        trackers = new_trackers

    # Disallow DHT bootstrap nodes for private torrents.
    if args.nodes and args.private:
        parser.error(
            "DHT bootstrap nodes cannot be specified for a private torrent. Private torrents do not support DHT."
        )

    # Validate DHT bootstrap nodes.
    parsed_nodes = list()
    invalid_nodes = False
    for n in args.nodes:
        splitted = n.split(",")
        if len(splitted) != 2:
            print(
                "Invalid format for DHT bootstrap node '%s'. Please use the format 'host,port'."
                % n,
                file=sys.stderr)
            invalid_nodes = True
            continue

        host, port = splitted
        if not port.isdigit():
            print(
                "Invalid port number for DHT bootstrap node '%s'. Ports must be numeric."
                % n,
                file=sys.stderr)
            invalid_nodes = True
            continue

        parsed_nodes.append([host, int(port)])

    if invalid_nodes and not args.force:
        if "yes" != input(
                "Some DHT bootstrap nodes are invalid. Continue? yes/no: "):
            parser.error("Aborted.")

    # Parse and validate excluded paths.
    excluded_paths = set(
        [os.path.normcase(os.path.abspath(path)) for path in args.exclude])

    # Parse exclude patterns.
    excluded_regexps = set(
        re.compile(regexp) for regexp in args.exclude_pattern)
    excluded_regexps |= set(
        re.compile(regexp, re.IGNORECASE)
        for regexp in args.exclude_pattern_ci)

    # Warn the user if he attempts to exclude any paths when creating a torrent for a single file (makes no sense).
    if os.path.isfile(input_path) and (len(excluded_paths) > 0
                                       or len(excluded_regexps) > 0):
        print(
            "Warning: Excluding paths is not possible when creating a torrent for a single file.",
            file=sys.stderr)

    # Warn the user if he attempts to exclude a specific path, that does not even exist.
    for path in excluded_paths:
        if not os.path.exists(path):
            print(
                "Warning: You're excluding a path that does not exist: '%s'" %
                path,
                file=sys.stderr)

    # Get the torrent's files and / or calculate its size.
    if os.path.isfile(input_path):
        torrent_size = os.path.getsize(input_path)
    else:
        torrent_files = get_files_in_directory(
            input_path,
            excluded_paths=excluded_paths,
            excluded_regexps=excluded_regexps)
        torrent_size = sum([
            os.path.getsize(os.path.join(input_path, file))
            for file in torrent_files
        ])

    # Torrents for 0 byte data can't be created.
    if torrent_size == 0:
        print("Error: Can't create torrent for 0 byte data.", file=sys.stderr)
        print("Check your files and exclusions!", file=sys.stderr)
        sys.exit(1)

    # Calculate or parse the piece size.
    if args.piece_length == 0:
        piece_length = calculate_piece_length(torrent_size)
    elif args.piece_length > 0:
        piece_length = args.piece_length * KIB
    else:
        parser.error("Invalid piece size: '%d'" % args.piece_length)

    # Do the main work now.
    # -> prepare the metainfo dictionary.
    if os.path.isfile(input_path):
        info = create_single_file_info(input_path, piece_length,
                                       args.include_md5)
    else:
        info = create_multi_file_info(input_path, torrent_files, piece_length,
                                      args.include_md5)

    assert len(info['pieces']) % 20 == 0, "len(pieces) not a multiple of 20"

    # ###########################
    # FINISH METAINFO DICTIONARY:
    # - info
    #   - piece length
    #   - name (eventually overwrite)
    #   - private
    # - announce (if at least one tracker was specified)
    # - announce-list (if multiple trackers were specified)
    # - nodes (if at least one DHT bootstrap node was specified)
    # - creation date (may be disabled as well)
    # - created by
    # - comment (may be disabled as well)

    # Finish sub-dict "info".
    info['piece length'] = piece_length

    if args.private:
        info['private'] = 1

    # Re-use the name regex for source parameter.
    if args.source:
        args.source = args.source.strip()

        regexp = re.compile(r"^[A-Z0-9_\-., ]+$", re.I)

        if not regexp.match(args.source):
            parser.error(
                "Invalid source: '%s'. Allowed chars: A_Z, a-z, 0-9, any of {.,_-} plus spaces."
                % args.source)

        info['source'] = args.source

    # Construct outer metainfo dict, which contains the torrent's whole information.
    metainfo = {'info': info}  # type: Dict[str, Any]
    if trackers:
        metainfo['announce'] = trackers[0]

    # Make "announce-list" field, if there are multiple trackers.
    if len(trackers) > 1:
        metainfo['announce-list'] = [[tracker] for tracker in trackers]

    # Set DHT bootstrap nodes.
    if parsed_nodes:
        metainfo['nodes'] = parsed_nodes

    # Set webseeds (url-list).
    if args.webseeds:
        metainfo['url-list'] = args.webseeds

    # Set "creation date".
    # The user may specify a custom creation date. He may also decide not to include the creation date field at all.
    if args.date == -1:
        # use current time
        metainfo['creation date'] = int(time.time())
    elif args.date >= 0:
        # use specified timestamp directly
        metainfo['creation date'] = args.date
    elif args.date < -2:
        parser.error(
            "Invalid date: Negative timestamp values are not possible (except for -1 to use current date "
            "automatically or -2 to disable storing a creation date altogether)."
        )

    # Add the "created by" field.
    metainfo['created by'] = 'py3createtorrent v%s' % __version__

    # Add user's comment or advertise py3createtorrent (unless this behaviour has been disabled by the user).
    # The user may also decide not to include the comment field at all by specifying an empty comment.
    if isinstance(args.comment, str):
        if len(args.comment) > 0:
            metainfo['comment'] = args.comment
    elif config.advertise:
        metainfo['comment'] = "created with " + metainfo['created by']

    # Add the name field.
    # By default this is the name of directory or file the torrent is being created for.
    if args.name:
        args.name = args.name.strip()

        regexp = re.compile(r"^[A-Z0-9_\-., ()]+$", re.I)

        if not regexp.match(args.name):
            parser.error(
                "Invalid name: '%s'. Allowed chars: A_Z, a-z, 0-9, any of {.,_-()} plus spaces."
                % args.name)

        metainfo['info']['name'] = args.name

    # ###################################################
    # BENCODE METAINFO DICTIONARY AND WRITE TORRENT FILE:
    # - take into consideration the --output option
    # - properly handle KeyboardInterrupts while writing the file

    # Respect the custom output location.
    if not args.output:
        # Use current directory.
        output_path = metainfo['info']['name'] + ".torrent"

    else:
        # Use the directory or filename specified by the user.
        args.output = os.path.abspath(args.output)

        # The user specified an output directory:
        if os.path.isdir(args.output):
            output_path = os.path.join(args.output,
                                       metainfo['info']['name'] + ".torrent")
            if os.path.isfile(output_path):
                if not args.force and os.path.exists(output_path):
                    if "yes" != input(
                            "'%s' does already exist. Overwrite? yes/no: " %
                            output_path):
                        parser.error("Aborted.")

        # The user specified a filename:
        else:
            # Is there already a file with this path? -> overwrite?!
            if os.path.isfile(args.output):
                if not args.force and os.path.exists(args.output):
                    if "yes" != input(
                            "'%s' does already exist. Overwrite? yes/no: " %
                            args.output):
                        parser.error("Aborted.")

            output_path = args.output

    # Actually write the torrent file now.
    try:
        with open(output_path, "wb") as fh:
            fh.write(bencode(metainfo))
    except IOError as exc:
        print("IOError: " + str(exc), file=sys.stderr)
        print(
            "Could not write the torrent file. Check torrent name and your privileges.",
            file=sys.stderr)
        print("Absolute output path: '%s'" % os.path.abspath(output_path),
              file=sys.stderr)
        sys.exit(1)
    except KeyboardInterrupt:
        # Properly handle KeyboardInterrupts.
        # todo: open()'s context manager may already do this on its own?
        if os.path.exists(output_path):
            os.remove(output_path)

    # #########################
    # PREPARE AND PRINT SUMMARY
    # - but check quiet option

    # If the quiet option has been set, we're already finished here, because we don't print a summary in this case.
    if args.quiet:
        sys.exit(0)

    # Print summary!
    print("Successfully created torrent:")

    # Create the list of backup trackers.
    backup_trackers = ""
    if 'announce-list' in metainfo:
        _backup_trackers = metainfo['announce-list'][1:]
        _backup_trackers.sort(key=lambda x: x[0].lower())

        for tracker in _backup_trackers:
            backup_trackers += "    " + tracker[0] + "\n"
        backup_trackers = backup_trackers.rstrip()
    else:
        backup_trackers = "    (none)"

    # Calculate piece count.
    piece_count = math.ceil(torrent_size / metainfo['info']['piece length'])

    # Make torrent size human readable.
    if torrent_size > 10 * MIB:
        size = "%.2f MiB" % (torrent_size / MIB)
    else:
        size = "%d KiB" % (torrent_size / KIB)

    # Make creation date human readable (ISO format).
    if 'creation date' in metainfo:
        creation_date = datetime.datetime.fromtimestamp(
            metainfo['creation date']).isoformat(' ')
    else:
        creation_date = "(none)"

    # Now actually print the summary table.
    print("  Name:                %s\n"
          "  Size:                %s\n"
          "  Pieces:              %d x %d KiB\n"
          "  Comment:             %s\n"
          "  Private:             %s\n"
          "  Creation date:       %s\n"
          "  DHT bootstrap nodes: %s\n"
          "  Webseeds:            %s\n"
          "  Primary tracker:     %s\n"
          "  Backup trackers:\n"
          "%s" %
          (metainfo['info']['name'], size, piece_count, piece_length / KIB,
           metainfo['comment'] if 'comment' in metainfo else "(none)",
           "yes" if args.private else "no", creation_date,
           metainfo['nodes'] if 'nodes' in metainfo else "(none)",
           metainfo['url-list'] if 'url-list' in metainfo else "(none)",
           metainfo['announce'] if 'announce' in metainfo else "(none)",
           backup_trackers))
Example #18
def test_encode_bencached():
    """Ensure Bencached objects can be encoded."""
    assert bencode([Bencached(bencode('test'))]) == b'l4:teste'
Example #19
def ih2torrent(loop, infohash, filename, bootstrap):
    global keep_running

    logger.info('Using node ID: {}'.format(hexlify(nodeid).decode()))

    # Add bootstrapping nodes.
    if bootstrap == []:
        logger.info('Using router.bittorrent.com as the bootstrapping node.')
        ip = yield from dns_resolve(loop, 'router.bittorrent.com')
        logger.info('Resolved to: {}'.format(ip))
        yield from nodes.put(inet_aton(ip) + struct.pack('!H', 6881))
    else:
        unresolved = []
        for host, port in bootstrap:
            try:
                parsed = ipaddress.ip_address(host)
                if type(parsed) != ipaddress.IPv4Address:
                    raise ValueError(
                        'Bootstrap node {} not an IPv4 address or hostname.'
                        .format(host))
                yield from nodes.put(inet_aton(host) + port.to_bytes(2, byteorder='big'))
            except ValueError:
                unresolved.append((host, port))

        if len(unresolved) > 0:
            logger.info('Resolving {} host name(s).'.format(len(unresolved)))
            tasks = [dns_resolve(loop, host) for host, port in unresolved]
            ips = yield from asyncio.gather(*tasks)
            for ip, (host, port) in zip(ips, unresolved):
                yield from nodes.put(inet_aton(ip) +
                                     port.to_bytes(2, byteorder='big'))


    # Recursively search for peers.
    keep_running = True
    while keep_running:
        if values.qsize() > 0:
            while values.qsize() > 0:
                peer = yield from values.get()
                host, port = inet_ntoa(peer[:4]), struct.unpack('!H', peer[4:])[0]
                loop.create_task(
                    get_metadata_with_retries(loop, host, port, infohash))
        elif get_peers_in_progress < 100 and get_metadatas_in_progress < 100 and nodes.qsize() > 0:
            peer = yield from nodes.get()
            host, port = inet_ntoa(peer[:4]), struct.unpack('!H', peer[4:])[0]
            loop.create_task(get_peers(loop, host, port, infohash))
        else:
            yield

            if get_peers_in_progress == 0 and get_metadatas_in_progress == 0 \
               and nodes.qsize() == 0 and values.qsize() == 0:
                logger.info('Nothing more to do. Quitting.')
                keep_running = False

    if full_metadata:
        k = 8
        n = get_closest_nodes(k, infohash)
        n = [[inet_ntoa(p[:4]), struct.unpack('!H', p[4:])[0]]
             for p in n]
        torrent = {
            'nodes': n,
            'info': full_metadata
        }

        if filename != '':
            try:
                with open(filename, 'wb') as f:
                    f.write(bencode(torrent))
            except IOError as e:
                logger.error('Error writing torrent file: {}'.format(e))
        else:
            print_torrent(torrent)
Example #20
def test_dictionary_unicode():
    """Test the handling of unicode in dictionaries."""
    encoded = bencode({u'foo': 42, 'bar': {u'sketch': u'parrot', 'foobar': 23}})

    assert encoded == 'd3:bard6:foobari23e6:sketch6:parrote3:fooi42ee'.encode('utf-8')
Example #21
def test_encode_bytes():
    """Ensure bytes can be encoded."""
    assert bencode(b'\x9c') == b'1:\x9c'
Example #22
def test_encode_roundtrip():
    """Consecutive calls to decode and encode should deliver the original data again."""
    for plain, encoded in ENCODE:
        assert encoded == bencode(bdecode(encoded))
Example #23
def test_decode_roundtrip():
    """Consecutive calls to encode and decode should deliver the original data again."""
    for plain, encoded in VALUES:
        assert plain == bdecode(bencode(plain))
Example #24
async def ih2torrent(infohash, filename, bootstrap):
    global keep_running, resolver

    logger.info('Using node ID: {}'.format(hexlify(nodeid).decode()))

    resolver = aiodns.DNSResolver()

    # Add bootstrapping nodes.
    if bootstrap == []:
        logger.info('Using router.bittorrent.com as the bootstrapping node.')
        ip = await dns_resolve('router.bittorrent.com')
        logger.info('Resolved to: {}'.format(ip))
        await nodes.put(inet_aton(ip) + struct.pack('!H', 6881))
    else:
        unresolved = []
        for host, port in bootstrap:
            try:
                parsed = ipaddress.ip_address(host)
                if type(parsed) != ipaddress.IPv4Address:
                    raise ValueError(
                        'Bootstrap node {} not an IPv4 address or hostname.'
                        .format(host))
                await nodes.put(inet_aton(host) + port.to_bytes(2, byteorder='big'))
            except ValueError:
                unresolved.append((host, port))

        if len(unresolved) > 0:
            logger.info('Resolving {} host name(s).'.format(len(unresolved)))
            tasks = [dns_resolve(host) for host, port in unresolved]
            ips = await asyncio.gather(*tasks)
            for ip, (host, port) in zip(ips, unresolved):
                await nodes.put(inet_aton(ip) +
                                port.to_bytes(2, byteorder='big'))


    # Recursively search for peers.
    keep_running = True
    while keep_running:
        if values.qsize() > 0:
            while values.qsize() > 0:
                peer = await values.get()
                host, port = inet_ntoa(peer[:4]), struct.unpack('!H', peer[4:])[0]
                asyncio.create_task(
                    get_metadata_with_retries(host, port, infohash),
                    name='get-metadata-{}-{}'.format(host, port))
        elif get_peers_in_progress < 100 and get_metadatas_in_progress < 100 and nodes.qsize() > 0:
            peer = await nodes.get()
            host, port = inet_ntoa(peer[:4]), struct.unpack('!H', peer[4:])[0]
            asyncio.create_task(get_peers(host, port, infohash),
                                name='get-peers-{}-{}'.format(host, port))
        else:
            await asyncio.sleep(0)

            if get_peers_in_progress == 0 and get_metadatas_in_progress == 0 \
               and nodes.qsize() == 0 and values.qsize() == 0:
                logger.info('Nothing more to do. Quitting.')
                keep_running = False

    if full_metadata:
        k = 8
        n = get_closest_nodes(k, infohash)
        n = [[inet_ntoa(p[:4]), struct.unpack('!H', p[4:])[0]]
             for p in n]
        torrent = {
            'nodes': n,
            'info': full_metadata
        }

        if filename != '':
            try:
                with open(filename, 'wb') as f:
                    f.write(bencode(torrent))
            except IOError as e:
                logger.error('Error writing torrent file: {}'.format(e))
        else:
            print_torrent(torrent)
Example #25
def test_dictionary_sorted():
    """Ensure the keys of a dictionary are sorted before being encoded."""
    encoded = bencode({'zoo': 42, 'bar': 'spam'})

    assert encoded.index(b'zoo') > encoded.index(b'bar')