Example #1
    def _connect_via_http(self):
        """
        https://wiki.theory.org/BitTorrentSpecification#Tracker_Request_Parameters
        make a request to tracker which is an HTTP(S) service
        that holds information about the torrent and peers.
        """

        params = {
            'info_hash': self.torrent.info_hash,
            'peer_id': self.torrent.peer_id,
            'left': self.torrent.left,
            'downloaded': 0,
            'uploaded': 0,
            'port': 6881,
            'compact': 1,
            'event': 'started'
        }

        url = self.url + '?' + urlencode(params)
        r = urlopen(url)
        response = bdecode(r.read())
        if 'failure reason' not in response:
            return self._decode_peers(response['peers'])
        else:
            # a failure response carries no 'interval' key; surface the reason
            raise ConnectionError(response['failure reason'])
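The _decode_peers helper referenced above is not shown. A minimal sketch, assuming the tracker honours 'compact': 1 and returns the compact peer list (six bytes per peer: four for the IPv4 address, two for a big-endian port):

import socket
import struct

def _decode_peers(self, peers_blob):
    # hypothetical implementation of the helper called above: split the
    # compact 'peers' value into 6-byte entries and unpack each of them
    peers = []
    for i in range(0, len(peers_blob), 6):
        ip = socket.inet_ntoa(peers_blob[i:i + 4])
        port = struct.unpack('>H', peers_blob[i + 4:i + 6])[0]
        peers.append((ip, port))
    return peers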
Example #2
def make_tracker_request(gto_dict, peer_id, info_hash, key_file, crt_file):
    # TODO(hammer): support partial downloads
    left = sum([f.get('length') for f in gto_dict.get('info').get('files')])
    key = get_random_string(8)
    payload = {
        'peer_id': peer_id,
        'port': 20893,
        'uploaded': 0,
        'downloaded': 0,
        'left': left,
        'corrupt': 0,
        'redundant': 0,
        'compact': 1,
        'numwant': 200,
        'key': key,
        'no_peer_id': 1,
        'supportcrypto': 1,
        'event': 'started',
    }
    url = 'https://dream.annailabs.com:21111/tracker.php/announce'
    url += '?info_hash=' + urllib.parse.quote(info_hash.digest(), '') + '&'
    url += urllib.parse.urlencode(payload)
    r = requests.get(url, verify=False, cert=(crt_file, key_file))
    logging.debug('Tracker response content: %s' % r.content)
    tracker_response = bencode.bdecode(r.content.strip())
    return tracker_response
Example #3
def getMessage22(s, q):

    try:
        data, address = s.recvfrom(1024 * 64)
        msg = bdecode(data)
        msg_type = msg.get("y", "e")
        #print(msg_type)
        #print(msg)
        if msg_type == "e":
            pass

        if (msg_type == "r"):
            if "nodes" in msg["r"]:
                nodes = decode_nodes(msg["r"]["nodes"])
                #print (address)
                dealFindNodesBack(nodes, s, q)
        if (msg_type == "q"):
            if msg["q"] == "ping":
                dealPing(msg, s, address, q)
            elif msg["q"] == "find_node":
                dealFideNodes(msg, s, address, q)
            elif msg["q"] == "get_peers":
                dealGetPeer(msg, s, address, q)
            elif msg["q"] == "announce_peer":
                dealAnnouncePeer(msg, s, address, q)
            else:
                pass
                #print(msg)
    except ConnectionResetError:
        print("connection reset while receiving DHT message")

    except Exception as err:
        # msg may not be defined if bdecode() itself failed
        print("error handling DHT message:", err)
Example #4
def verifyFileContent(torrentFile, actualFile):
  torrent = bdecode(torrentFile)
  plength = torrent['info']['piece length']
  thash = torrent['info']['pieces']
  flength = torrent['info']['length']
  realfile = actualFile
  realfile.seek(0,2)
  realfileLength = realfile.tell()
  realfile.seek(0,0)
  if flength != realfileLength:
    print("File sizes do not match! Invalid file!")
    return False
  piece = realfile.read(plength)
  digest = hashlib.sha1(piece).digest()
  while piece != bytes():
    piece = realfile.read(plength)
    if piece != bytes():
      phash = hashlib.sha1(piece).digest()
      digest = digest + phash
  if digest == thash:
    print("File integrity verified!")
    print("MAGNET URI:", genenateMagentURI(torrent))
    return True
  else:
    print("Piece hashes do not match; the file is corrupt or does not belong to this torrent.")
    return False
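A hedged usage sketch for verifyFileContent above; the file names are placeholders, and both arguments are open binary file objects (the torrent file is handed straight to bdecode):

# hypothetical file names, for illustration only
with open('example.torrent', 'rb') as torrentFile, open('example.bin', 'rb') as actualFile:
    if verifyFileContent(torrentFile, actualFile):
        print("download verified against the .torrent metadata")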
Example #5
    def http_scraper(self, torrent, tracker):
        params = {
            'info_hash': torrent.info_hash,
            'peer_id': torrent.peer_id,
            'uploaded': 0,
            'downloaded': 0,
            'left': torrent.total_length,
            'event': 'started',
            'port': 0,
        }

        try:
            answer_tracker = requests.get(tracker, params=params, timeout=5)
            if answer_tracker.status_code != 200:
                print(
                    "Failed tracker '%s', status=%d, body=%s" %
                    (tracker, answer_tracker.status_code, answer_tracker.text))
            else:
                list_peers = bdecode(answer_tracker.content)
                # t = UdpTrackerAnnounceOutput()
                #
                # t.from_bytes(list_peers['peers'])

                for peer in list_peers['peers']:
                    s = SockAddr(peer['ip'], peer['port'])
                    self.dict_sock_addr[s.__hash__()] = s
            print("Got %d peers from HTTP tracker" % len(self.dict_sock_addr))

        except Exception:
            logging.exception("HTTP scraping failed")
Example #6
def getPeersMessage(s, info):
    while True:

        try:
            data, address = s.recvfrom(1024 * 4)
            msg = bdecode(data)
            msg_type = msg.get("y", "e")
            #print(msg_type)
            #print(msg)
            if msg_type == "e":
                pass
            if (msg_type == "r"):
                if "token" in msg["r"]:
                    if "nodes" in msg["r"]:
                        nodes = decode_nodes(msg["r"]["nodes"])
                        sendgetpeers2(info, nodes, s)
                    if "values" in msg["r"]:
                        print(msg)
                        s.close()
                        return

        except ConnectionResetError:
            print("connection reset while receiving DHT message")

        except Exception as err:
            print("error handling get_peers response:", err)
            raise
Example #7
    def parse(self):
        with open(self.path, 'rb') as f:
            torrent_file = bdecode(f)

        urls = []
        if 'announce' in torrent_file:
            urls.append(torrent_file['announce'])
        if 'announce-list' in torrent_file:
            for tracker in torrent_file['announce-list']:
                urls.append(tracker[0])

        info = torrent_file['info']
        name = info['name']
        piece_length = info['piece length']
        pieces = info['pieces']

        if 'length' in info:
            length = info['length']  # only exists for single-file torrents
            file_list = [File(length, [name], 0)]
        else:
            files = info['files']
            file_list = []
            offset = 0
            for file in files:
                file_list.append(File(file['length'], file['path'], offset))
                offset += file['length']

        target = FileStructure(name, file_list)

        print("Torrent has %d pieces" % (len(pieces) // 20))
        print("Torrent piece length = %d" % int(piece_length))
        # print("Torrent has %d bytes" % length)

        return Torrent(urls, info, piece_length, pieces, target)
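File, FileStructure and Torrent are classes from the surrounding project that are not shown in this snippet. A hedged sketch of the two container types, based only on how they are constructed above:

from dataclasses import dataclass
from typing import List

@dataclass
class File:
    # hypothetical container matching File(length, path, offset) above
    length: int
    path: List[str]
    offset: int

@dataclass
class FileStructure:
    # hypothetical container matching FileStructure(name, file_list) above
    name: str
    files: List[File]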
Example #8
def getMessage(s):
    while True:
        data, address = s.recvfrom(65536)
        try:
            msg = bdecode(data)
            msg_type = msg.get("y", "e")
            #print(msg_type)
            #print(msg)
            if msg_type == "e":
                return

            if msg_type == "r":
                nodes = decode_nodes(msg["r"]["nodes"])
                #print (address)
                dealFindNodesBack(nodes, s)
            if msg_type == "q":
                if msg["q"] == "ping":
                    dealPing(msg, s, address)
                if msg["q"] == "find_node":
                    dealFideNodes(msg, s, address)
                if msg["q"] == "get_peers":
                    dealGetPeer(msg, s, address)
                if msg["q"] == "announce_peer":
                    dealAnnouncePeer(msg, s, address)
        except KeyError:
            print("error")
            print(msg)
            continue
Example #9
    def __init__(self, torrent, tracker, dict_sock_addr):
        # loading the parameters for the HTTP scraper
        params = {
            'info_hash': torrent.info_hash,
            'peer_id': torrent.peer_id,
            'uploaded': 0,
            'downloaded': 0,
            'port': 6881,
            'left': torrent.total_length,
            'event': 'started'
        }
        self.dict_sock_addr = dict_sock_addr

        try:
            # HTTP : GET requesting the tracker link for the list of peers
            answer_tracker = requests.get(tracker, params=params, timeout=5)

            # decoding the response content
            list_peers = bdecode(answer_tracker.content)

            # for each peer, create a socket
            for p in list_peers['peers']:
                s = socket_address.SockAddr(p['ip'], p['port'])
                self.dict_sock_addr[s.__hash__()] = s

        except Exception as e:
            logging.exception("HTTP scraping failed: %s" % e.__str__())
Example #10
    def load_content(self, path):
        with open(path, 'rb') as file:
            contents = bdecode(file)

        self.torrent_file = contents

        # extracting the metainfo of the torrent file into separate variables
        """
            See https://en.wikipedia.org/wiki/Torrent_file#File_structure
            for the structure of the metainfo contained in a torrent file.
        """

        # creating hash info for handshaking purpose
        raw_info_hash = bencode(self.torrent_file['info'])
        self.info_hash = hashlib.sha1(raw_info_hash).digest()
        self.peer_id = self.generate_peer_id()

        # creating directory structure and store file info in the dictionary
        self.announce_list = self.get_trackers()
        self.init_files()

        # creating number of pieces
        self.piece_length = self.torrent_file['info']['piece length']
        self.pieces = self.torrent_file['info']['pieces']
        self.number_of_pieces = math.ceil(self.total_length /
                                          self.piece_length)

        # sanity checks: total length and file list must both be non-empty
        assert (self.total_length > 0)
        assert (len(self.file_names) > 0)

        # return all the values
        return self
Example #11
def getInfoMessage(adrr, infohash):
    begin = time()
    the_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    the_socket.connect(adrr)

    print(type(infohash))
    send_handshake(the_socket, infohash)
    packet = the_socket.recv(4096)
    print(packet)
    print(packet[68:])
    if (packet == "".encode()):
        the_socket.close()
        return
    ut_metadata = get_ut_metadata(packet)
    send_ext_handshake(the_socket)
    if (ut_metadata == -1):
        packet = the_socket.recv(4096)
    ut_metadata, metadata_size = get_ut_metadata(packet), get_metadata_size(
        packet)
    print(metadata_size)
    print(ut_metadata, metadata_size)
    # request each piece of metadata
    metadata = "".encode()
    for piece in range(int(math.ceil(metadata_size /
                                     (16.0 * 1024)))):  # metadata is requested in 16 KiB pieces
        request_metadata(the_socket, ut_metadata, piece)
        packet = recvall(the_socket, 0.5)  #the_socket.recv(1024*17)
        print(packet.index("ee".encode()))
        metadata = metadata + packet[packet.index("ee".encode()) + 2:]
    #print(metadata)
    result = bdecode(metadata)
    print(result["name"])
    print(time() - begin)
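get_ut_metadata and get_metadata_size parse the BEP 10 extension handshake and are not included in this snippet. A rough sketch, assuming they simply scan the raw handshake bytes for the bencoded keys instead of fully decoding the message:

import re

def get_ut_metadata(packet):
    # hypothetical helper: the ut_metadata message id advertised under the
    # handshake's "m" dict appears in the raw bytes as b"ut_metadatai<id>e"
    match = re.search(rb'ut_metadata(i-?\d+e)', packet)
    return int(match.group(1)[1:-1]) if match else -1

def get_metadata_size(packet):
    # hypothetical helper: b"metadata_sizei<n>e" carries the metadata size
    match = re.search(rb'metadata_size(i-?\d+e)', packet)
    return int(match.group(1)[1:-1]) if match else -1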
Example #12
    def read_torrent(self, filename):
        torrent_file = open(filename, 'rb')

        torrent_data = bcoding.bdecode(torrent_file.read())

        if 'announce' not in torrent_data:
            warnings.warn("No 'announce' URL found in %s; ignoring" % filename)
            return

        # Get tracker hostname, see if this torrent should be ignored
        #
        tracker_url = urllib.parse.urlparse(torrent_data['announce'])
        tracker_hostname = tracker_url.hostname

        # Store tracker even if it's excluded; this list is only used for
        # 'info'
        if tracker_hostname not in self.trackers:
            self.trackers.append(tracker_hostname)

        if tracker_hostname in self.config.exclude_trackers:
            if self.store_ignored:
                self.excluded_torrent_files.append(filename)
            self.excluded += 1
            torrent_file.close()
            return

        type = 'single'
        if 'files' in torrent_data['info']:
            type = 'multi'

        name = torrent_data['info']['name']
        name_index = getattr(self, "names_" + type)

        # Check for duplicates
        is_duplicate = False
        if name in name_index:
            for existing_torrent in name_index[name]:
                if torrent_info_matches(existing_torrent.file_info,
                                        torrent_data['info']):
                    is_duplicate = True
                    self.duplicates += 1
                    break
        if is_duplicate:
            if self.store_ignored:
                self.duplicate_torrent_files.append(filename)
            torrent_file.close()
            return

        torrent = Torrent(filename, name, type, torrent_data['info'])
        self.torrents.append(torrent)

        if name in name_index:
            name_index[name].append(torrent)
        else:
            name_index[name] = [torrent]

        torrent_file.close()
Example #13
def make_magnet_from_file(file):
    '''Convert a torrent file into a magnet link for further sharing;
    not currently used, but kept for possible future development.'''
    file_torrent = open(file, "rb")
    metadata = bcoding.bdecode(file_torrent.read())
    subj = metadata['info']
    hashcontents = bcoding.bencode(subj)
    digest = hashlib.sha1(hashcontents).digest()
    b32hash = base64.b32encode(digest).decode()
    return ('magnet:?xt=urn:btih:' + b32hash +
            '&dn=' + metadata['info']['name'] +
            '&tr=' + metadata['announce'] +
            '&xl=' + str(metadata['info']['length']))
Example #15
    def __init__(self, torrent_file_path):
        self.peers = []
        self.pieces = deque([])
        self.peer_id = hashlib.sha1(str(time.time()).encode('utf-8')).digest()
        self.torrent_dict = bcoding.bdecode(open(torrent_file_path, 'rb').read())
        bencode_info = bcoding.bencode(self.torrent_dict['info'])
        self.infoHash = hashlib.sha1(bencode_info).digest()
        self.generate_peer_connections()
        self.generate_pieces_objects()
        self.num_pieces_so_far = 0
Example #16
    def load_from_path(self, path):
        with open(path, 'rb') as file:
            contents = bdecode(file)
        self.torrent_file = contents
        self.piece_length = self.torrent_file['info']['piece length']
        self.pieces = self.torrent_file['info']['pieces']
        self.info_hash = hashlib.sha1(bencode(
            self.torrent_file['info'])).digest()
        self.peer_id = self.generate_peer_id()
        self.announce_list = self.get_trakers()
        self.init_files()
        self.number_of_pieces = math.ceil(self.total_length /
                                          self.piece_length)
        return self
Example #17
    def unpack_request(self, req):
        # note from the original author: the endianness of the response may need
        # checking; the parsing below assumes network (big-endian) byte order
        decoded = bcoding.bdecode(req)
        # print('%d seeders, %d leechers' % (decoded['complete'], decoded['incomplete']))
        peers = []
        for i in range(0, len(decoded['peers']), 6):
            ip_add = str(
                ipaddress.IPv4Address(
                    int.from_bytes(decoded['peers'][i:i + 4],
                                   byteorder='big')))
            port = str(
                int.from_bytes(decoded['peers'][i + 4:i + 6], byteorder='big'))
            peers.append(':'.join([ip_add, port]))
        return peers
Example #18
    def _open_file(self, path_to_file):
        # read the torrent file as a binary object
        with open(path_to_file, 'rb') as meta_file:
            self.decoded = bdecode(meta_file.read())

        # check whether it's a multi-file torrent (so self.multi_file always exists)
        self.multi_file = 'files' in self.decoded['info']
        '''
            This stuff doesn't care whether or not it's a
            multi-file torrent
        '''
        # extract the creation date
        self.creation_date = self.decoded['creation date']
        # extract the announce url
        self.announce = urllib.parse.urlparse(self.decoded['announce'])
        print(self.decoded['announce'])
        # calculates info hash
        self.info_hash = hashlib.sha1(bencode(self.decoded['info'])).digest()

        # extract piece length
        self.piece_length = self.decoded['info']['piece length']

        # extract pieces (in bytes)
        self.pieces = self.decoded['info']['pieces']
        self.num_pieces = len(self.pieces) // 20

        # extract name / dictionary as to where to store the files
        self.name = self.decoded['info']['name']

        if not self.multi_file:
            # extract length
            self.length = self.decoded['info']['length']
            self.last_piece_len = self.length - self.piece_length * (
                self.num_pieces - 1)
        else:
            self.files = []
            self.length = 0
            for f in self.decoded['info']['files']:
                self.length += int(f['length'])
                self.files.append({
                    'length': int(f['length']),
                    'path': f['path']
                })
            self.last_piece_len = self.length - self.piece_length * (
                self.num_pieces - 1)
        self.file_info()
Example #19
    def __init__(self, metafile):
        self.logger = logging.getLogger('main_bt.torrent_wrapper.TorrentWrapper')
        self.logger.info('creating a TorrentWrapper instance')
        with open(metafile, 'rb') as f:
            self.torrent = bdecode(f)

        self.INFO_HASH = sha1_info(self.torrent)
        self.TRACKER_URL = self.announce()
        self.piece_length = self.torrent['info']['piece length']
        self.total_bytes = self.total_file_length()
        
        self.number_pieces = self._num_pieces()
        self.last_piece_length = self._length_of_last_piece()
        self.LAST_PIECE_INDEX = self.number_pieces - 1

        self.file_meta = self.file_info()
        self.file_boundaries_by_byte_indices = self.list_of_file_boundaries()
Example #20
    def getDirectDownload(self):
        from bcoding import bdecode
        torrent = self.getTorrent()
        d = bdecode(torrent)
        bases = d["direct download"].split("|")
        # Always make sure the url ends with a slash, so we don't
        # get a different result depending on whether it does or not
        for i, base in enumerate(bases):
            if not base.endswith("/"):
                bases[i] += "/"

        # cache the file list
        torrentFiles = set()
        for f in d["info"]["files"]:
            if f["type"] == "alignment":
                continue
            torrentFiles.add(TorrentFile(f))

        return bases, torrentFiles
Example #22
	def announce(self, downloadedBytes = 0, announceEvent = "started"):
		print("About to send announce no. <{}>".format(self.announceCount))
		self.announceCount += 1

		qParams = {
			"info_hash": self.infoHash,
			"peer_id": self.local_peer_id,
			"port": "6882",
			"uploaded": "0",
			"downloaded": downloadedBytes,
			"left": (self.length - downloadedBytes),
			"compact": "1"
		}
		if (self.trackerid != -1):
			qParams["trackerid"] = self.trackerid
		if (announceEvent != "None"):
			qParams["event"] = announceEvent

		response = bdecode(requests.get(self.announceData, params = qParams).content)
		# TO-DO
		# Handle other info received from response
		if ('tracker id' in response.keys()):
			self.trackerid = response['tracker id']
		if ('interval' in response.keys()):
			self.announceInterval = response['interval']
		if ('min interval' in response.keys()):
			self.announceInterval = response['min interval']
		if ('interval' not in response.keys() and 'min interval' not in response.keys()):
			self.announceInterval = 300

		self.announceInterval = 60

		# Instantiate a peer manager for this torrent file
		if (announceEvent == "started"): # Needs to be created only once
			self.changeStatus("Creating peer manager for this torrent")
			self.pManager = pm.peerManager(response['peers'], self)
		else:
			#Give new peer list to peerManager
			self.pManager.updatePeerList(response['peers'])

		# Lastly start the timer
		self.announceTimer = time.monotonic()
Example #23
def scrape_udp(info_hash, announce, peer_id):
    parsed = urlparse(announce)
    ip = socket.gethostbyname(parsed.hostname)
    if ip == '127.0.0.1':
        return False
    try:
        # TODO : Needs verification
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        sock.settimeout(8)
        conn = (ip, parsed.port)
        msg, trans_id, action = make_connection_id_request()
        response = send_msg(conn, sock, msg, trans_id, action, 16)
        conn_id = response[8:]
        msg, trans_id, action = make_announce_input(info_hash, conn_id,
                                                    peer_id)
        response = send_msg(conn, sock, msg, trans_id, action, 20)

        payload = bcoding.bdecode(response)
        return payload['peers']
    except TimeoutError:
        return ''
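make_connection_id_request, make_announce_input and send_msg are project helpers that are not shown. A sketch of the first one under BEP 15, where a connect request is the fixed 64-bit protocol magic, a 32-bit action (0 = connect) and a random 32-bit transaction id:

import os
import struct

def make_connection_id_request():
    # hypothetical helper matching the call above (BEP 15 connect request)
    trans_id = os.urandom(4)
    action = 0
    msg = struct.pack('>Q', 0x41727101980) + struct.pack('>I', action) + trans_id
    return msg, trans_id, action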
Example #24
    def http_scraper(self, torrent, tracker):
        params = {
            'info_hash': torrent.info_hash,
            'peer_id': torrent.peer_id,
            'uploaded': 0,
            'downloaded': 0,
            'port': 6881,
            'left': torrent.total_length,
            'event': 'started'
        }

        try:
            answer_tracker = requests.get(tracker, params=params, timeout=5)
            list_peers = bdecode(answer_tracker.content)

            for p in list_peers['peers']:
                s = SockAddr(p['ip'], p['port'])
                self.dict_sock_addr[s.__hash__()] = s

        except Exception as e:
            logging.exception("HTTP scraping failed: %s" % e.__str__())
Example #25
    def open_from_file(self, movie):
        with open(movie, 'rb') as file2:
            contents2 = bdecode(file2)

        self.torrent_file2 = contents2
        self.piece_length = self.torrent_file2['info']['piece length']
        self.pieces = self.torrent_file2['info']['pieces']
        raw_info_hash = bencode(self.torrent_file2['info'])
        self.info_hash = hashlib.sha1(raw_info_hash).digest()
        self.peer_id = self.generate_peer_id()
        self.announce_list = self.get_trakers()
        self.init_files()
        self.number_of_pieces = math.ceil(self.total_length /
                                          self.piece_length)
        logging.debug(self.announce_list)
        logging.debug(self.file_names)

        assert (self.total_length > 0)
        assert (len(self.file_names) > 0)

        return self
Example #26
def getInfoMessage(adrr, infohash, infohash2, mu):
    try:
        begin = time()
        the_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        the_socket.connect(adrr)

        #print(type(infohash))
        send_handshake(the_socket, infohash)
        packet = the_socket.recv(4096)
        #print(packet)
        if (packet == "".encode()):
            the_socket.close()
            return
        ut_metadata = get_ut_metadata(packet)
        send_ext_handshake(the_socket)
        if (ut_metadata == -1):
            packet = the_socket.recv(4096)
        ut_metadata, metadata_size = get_ut_metadata(
            packet), get_metadata_size(packet)
        #print("ut_metadata,and metadata_size")
        #print(ut_metadata,metadata_size)
        request_metadata(the_socket, ut_metadata, 0)
        packet = recvall(the_socket, "".encode())  #the_socket.recv(1024*17)
        metadata = packet
        #print(metadata)
        result = bdecode(metadata)

        #if mu.acquire(True):
        #print("ddddddddddddddddddddddd")
        f = open('cili5.txt', 'a', encoding='utf-8')
        print(result["name"])
        f.write(infohash2 + " " + result["name"] + "\n")
        f.close()
        #mu.release()
        print(time() - begin)
        the_socket.close()
        #raise RuntimeError('testError')
    except:
        #raise
        return
Example #27
    def _send_http_request(self, e, url):
        payload = self.gen_p(e)
        try:
            r = requests.get(url, params=payload, timeout=1)
            resp = bdecode(bytes(r.text, 'ISO-8859-1'))
            peers = resp['peers']
            peers_dict = {}

            print("HTTP tracker response received ...")
            for i in range(0, len(peers)):
                if not peers[i] in peers_dict.values():
                    #print(Peer(peers[i]))
                    #sys.exit(1)
                    peers_dict[i] = Peer2(peers[i])

            resp['peers'] = peers_dict

            print("List of %d peers received" % len(resp['peers']))

            return resp

        except (ConnectionResetError, ConnectionError) as e:
            return False
Example #28
def scrape_http(announce, info_hash, peer_id, length):
    params = {
        'info_hash': info_hash,
        'peer_id': peer_id,
        'uploaded': 0,
        'downloaded': 0,
        'port': 6881,
        'left': str(length).encode('utf-8'),
        'corrupt': 0,
        'event': 'started',
    }

    response = requests.get(announce, params=params)

    if response.status_code >= 400:
        message = ("Failed to connect to tracker.\n"
                   "Status Code: %s\n"
                   "Reason: %s") % (response.status_code, response.reason)
        raise RuntimeError(message)

    print(f'Tracker Response : {response}')
    results = bcoding.bdecode(response.content)
    return results['peers']
Example #29
    def http_scraper(self, torrent, tracker):
        params = {
            'info_hash': torrent.info_hash,
            'peer_id': torrent.peer_id,
            'uploaded': 0,
            'downloaded': 0,
            'left': torrent.total_length,
            'event': 'started'
        }

        try:
            answer_tracker = requests.get(tracker, params=params, timeout=5)
            list_peers = bdecode(answer_tracker.content)
            t = UdpTrackerAnnounceOutput()

            t.from_bytes(list_peers['peers'])

            for ip, port in list_peers['peers']:
                s = SockAddr(ip, port)
                self.dict_sock_addr[s.__hash__()] = s

        except Exception:
            logging.exception("HTTP scraping failed")
Example #30
    def downloadAgent(self, record):
        data = record.firstChild.data.strip()
        self.debug("data=%r" % (data))

        component = record.getAttribute("component")
        if component == "cdn":
            cdns = data.split("|")
            print("Available CDNs: %s" % (", ".join(cdns)))
            return

        incrementalTorrent, fullTorrent, toBuild, fromBuild, zero = data.split(
            ";")

        files = set()
        for url in (incrementalTorrent, fullTorrent):
            torrent = urlopen(url)

            d = bdecode(torrent)
            directDownload = d["direct download"].decode("utf-8")
            self.debug("directDownload=%r" % (directDownload))

            # As of S2 1.5, directDownload supports mirrors, e.g.:
            # "http://dist.blizzard.com.edgesuite.net/sc2-pod-retail/NA/22342.direct|http://llnw.blizzard.com/sc2-pod-retail/NA/22342.direct"
            directDownload = directDownload.split("|")[0]

            # Always make sure the url ends with a slash, so we don't
            # get a different result depending on whether it does or not
            if not directDownload.endswith("/"):
                directDownload += "/"

            for f in d["info"]["files"]:
                if f["type"] == "alignment":
                    continue
                path = "/".join(f["path"])
                files.add(path)

        self.outputFiles(files, directDownload)
Example #32
    def parse_file(self):
        with open(self.file, 'rb') as file:
            content = bdecode(file)

        info = content['info']
        files = []
        full_size = 0
        for file in info['files']:
            file_info = {'size': file['length'], 'name': file['path'][0]}
            files.append(file_info)
            full_size += file['length']
        tracker_list = []
        for tracker in content['announce-list']:
            tracker_list.append(tracker[0])
        info_hash = hashlib.sha1(bencode(info)).hexdigest()
        announce_url = content['announce']
        data = {
            'info_hash': info_hash,
            'files': files,
            'announce': announce_url,
            'full_size': self.make_size(full_size),
            'tracker_list': tracker_list
        }
        return data
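make_size is a helper on the same class that is not included here. A hedged sketch that just renders a byte count in human-readable units:

def make_size(self, size):
    # hypothetical helper: format a byte count as B / KB / MB / GB / TB
    for unit in ('B', 'KB', 'MB', 'GB', 'TB'):
        if size < 1024 or unit == 'TB':
            return '%.2f %s' % (size, unit)
        size /= 1024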
Example #33
	def downloadMfil(self, record):
		program = self.args.program

		data = record.firstChild.data.strip()
		self.debug("data=%r" % (data))

		base, thash, mhash, build = data.split(";")
		self.debug("base=%r" % (base))

		build = int(build)
		baseUrl = self.getBaseUrl(base, program, self.args.preferred_server)
		self.debug("baseUrl=%r" % (baseUrl))
		tfilUrl = baseUrl + "%s-%i-%s.torrent" % (program.lower(), build, thash)
		if self.args.mfil:
			mfilUrl = self.args.mfil
		else:
			mfilUrl = baseUrl + "%s-%i-%s.mfil" % (program.lower(), build, mhash)

		self.debug("tfilUrl=%r" % (tfilUrl))
		self.debug("mfilUrl=%r" % (mfilUrl))
		self.debug("build=%r" % (build))

		torrent = self.cache.get(tfilUrl)
		if torrent:
			self.debug("cache hit: torrent=%r" % (torrent))
			torrent = open(torrent, "rb")
		else:
			self.debug("Reading torrent file: %r" % (tfilUrl))
			try:
				torrent = urlopen(tfilUrl)
			except HTTPError as e:
				raise ServerError("Could not open %s: %s" % (tfilUrl, e))

			path, torrent = self.cache.set(tfilUrl, torrent.read())
			self.debug("Cache torrent path=%r" % (path))

		self.debug("Parsing torrent...")
		d = bdecode(torrent)
		directDownload = d["direct download"]
		self.debug("directDownload=%r" % (directDownload))

		# As of S2 1.5, directDownload supports mirrors, e.g.:
		# "http://dist.blizzard.com.edgesuite.net/sc2-pod-retail/NA/22342.direct|http://llnw.blizzard.com/sc2-pod-retail/NA/22342.direct"
		directDownload = directDownload.split("|")[0]

		# Always make sure the url ends with a slash, so we don't
		# get a different result depending on whether it does or not
		if not directDownload.endswith("/"):
			directDownload += "/"

		mfil = self.cache.get(mfilUrl)
		if mfil:
			self.debug("cache hit: mfil=%r" % (mfil))
		else:
			self.debug("Reading manifest file: %r" % (mfilUrl))
			try:
				mfil = urlopen(mfilUrl).read()
			except HTTPError as e:
				raise ServerError("Could not open %s: %s" % (mfilUrl, e))

			mfilPath, mfil = self.cache.set(mfilUrl, mfil)
			self.debug("Cache manifest path=%r" % (mfilPath))

		mfil = MFIL(mfil)

		files = set()
		for file, fileInfo in mfil["file"].items():

			if isinstance(fileInfo["size"], str) and int(fileInfo["size"]) == 0:
				# Directory
				continue

			files.add(file)

		if True: # add a flag to disable?
			for f in d["info"]["files"]:
				if f["type"] == "alignment":
					continue
				files.add("/".join(f["path"]))

		self.outputFiles(files, directDownload, mfil["file"])
Example #34
if not os.path.exists(basedir + 'by time'):
    os.makedirs(basedir + 'by time')
os.chdir(basedir + 'by time')
hashlabel = {}
listfiles = []
for root, dirs, files in os.walk(basedir + 'by time', topdown=False):
    for name in files:
        os.remove(os.path.join(root, name))
    for name in dirs:
        os.rmdir(os.path.join(root, name))
shottime = "%.20f" % time.time()
shutil.copy(resumefile, resumefile + '.' + shottime)
resumefile += '.' + shottime
print(datetime.now().strftime('%H:%M:%S') + ' Reading the bittorrent file and building the initial array')
bitfile = open(resumefile, 'rb')
torrent = bdecode(bitfile)
for key, value in torrent.items():
    if key != '.fileguard' and key != 'rec':
        path = value.get(u'path')
        labels = value.get(u'labels')
        timeadd = value.get(u'added_on')
        if not labels:
            # a missing label list would break the loop below, so default it
            labels = ['Empty']
            value['labels'] = labels
        for label in labels:
            if label in hashlabel:
                hashlabel[label].append([path, timeadd])
            else:
                hashlabel[label] = [[path, timeadd]]

del torrent
bitfile.close()
Example #35
def get_gto_dict(content_specifier, auth_token):
    # TODO(hammer): handle non-URIs
    payload = {'token': auth_token}
    r = requests.post(content_specifier, data=payload)
    gto_dict = bencode.bdecode(r.content)
    return gto_dict
Example #36
def read():
    r = bcoding.bdecode(sys.stdin.buffer)
    logging.info("read data: %s", r)
    return r
Example #37
def read(pod):
    """Reads data from pod's stdout."""
    return bcoding.bdecode(pod.stdout)
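The read() helpers in the last two examples only cover the decoding direction. A hedged sketch of a matching write() that bencodes a reply back onto a binary stream (the function name and signature are assumptions, not part of the original code):

import sys

import bcoding

def write(data, stream=sys.stdout.buffer):
    # hypothetical counterpart to read(): bencode a dict and flush it out
    stream.write(bcoding.bencode(data))
    stream.flush()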
Example #38
            #got list of peers
            announce_response = decode_udpannounceresponse(response)
            print(
                '\nTracker response:\nconnection id:{}\naction:{}\ntransaction id:{}\ninterval:{}\nleechers:{}\nseeders:{}\n'
                .format(connection_response[2], announce_response[0],
                        announce_response[1], announce_response[2],
                        announce_response[3], announce_response[4]))
            peers = []
            for ip, port in announce_response[5]:
                peers.append(Peer(ip, port))
            print('Found {} peers from {}'.format(len(peers), tracker[0]))
            return peers, tracker[0]


with open(torrent, 'rb') as f:
    meta_info = bdecode(f)

torrent_data = get_torrent_data(meta_info)

progress = download_info()

#got the chunk dictionary
chunks = get_chunks(meta_info, torrent_data['length'])

#create our bitfield
mybitfield = bitstring.BitArray(
    math.ceil(torrent_data['length'] / meta_info['info']['piece length']))

#make partitions in chunks(form blocks)
partitioned_chunks = make_blocks_in_chunks(chunks)
Example #39
def read():
    return dict(bdecode(sys.stdin.buffer))
Example #40
    def tracker(self):
        params_dict = self.make_params()
        r = requests.get(self.url, params=params_dict)
        self.response = bdecode(r.content)
        self.peers = self.response['peers']
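make_params is not shown in this snippet. A sketch assuming it assembles the same announce parameters seen throughout the other examples on this page (the attribute names are assumptions):

    def make_params(self):
        # hypothetical helper: the usual announce query parameters
        return {
            'info_hash': self.info_hash,
            'peer_id': self.peer_id,
            'port': 6881,
            'uploaded': 0,
            'downloaded': 0,
            'left': self.left,
            'compact': 1,
            'event': 'started',
        }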
Example #41
def try_to_connect_to_tracker(tracker_list, info_hash):
    parameters = {
        'info_hash': info_hash,
        'peer_id': PEER_ID,
        'port': 6885,
        'uploaded': 0,
        'downloaded': 0,
        'left': 0,
        'event': 'started'
    }
    #print(parameters)
    for tracker in tracker_list:

        if str.startswith(tracker[0], 'http'):
            try:
                #print(tracker[0])
                print('\nTrying to connect to (HTTP) tracker -> {}'.format(
                    tracker[0]))
                response = requests.get(tracker[0], parameters, timeout=5)
                #print(response.content,'\n')
            except:
                print('Can\'t connect to {}'.format(tracker[0]))
                # without this, 'response' would be undefined below
                continue

            if not response:
                print('No response')
                return
            response = bdecode(response.content)
            #print(response)
            peers = []
            for peer in response['peers']:
                peers.append(Peer(peer['ip'], peer['port'], peer['peer id']))
            print('Found {} peers from {}'.format(len(peers), tracker[0]))
            return peers, tracker[0]

        elif str.startswith(tracker[0], 'udp'):
            print('\nTrying to connect to (UDP) tracker -> {}'.format(
                tracker[0]))
            request = urlparse(tracker[0])
            sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            sock.settimeout(4)
            try:
                ip, port = socket.gethostbyname(request.hostname), request.port
            except:
                continue
            if ipaddress.ip_address(ip).is_private:
                continue
            connection_request = encode_udpconnection()
            response = send_udp_message((ip, port), sock, connection_request)
            if not response:
                print('No response for udp connection request')
                continue
            connection_response = decode_udpconnection(response)
            announce_request = encode_udpannouncerequest(
                connection_response[2], torrent_data['info_hash'],
                bytes(PEER_ID, 'utf-8'))
            response = send_udp_message((ip, port), sock, announce_request)

            if not response:
                print('No response for udp announce request')
                continue

            #got list of peers
            announce_response = decode_udpannounceresponse(response)
            print(
                '\nTracker response:\nconnection id:{}\naction:{}\ntransaction id:{}\ninterval:{}\nleechers:{}\nseeders:{}\n'
                .format(connection_response[2], announce_response[0],
                        announce_response[1], announce_response[2],
                        announce_response[3], announce_response[4]))
            peers = []
            for ip, port in announce_response[5]:
                peers.append(Peer(ip, port))
            print('Found {} peers from {}'.format(len(peers), tracker[0]))
            return peers, tracker[0]
Example #42
    def decodeFile(self):
        f = open(self.inputURL, "rb")
        d = bdecode(f.read())
        self.url = d['announce']
        self.info = d['info']
        self.reencoded = bencode(self.info)
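A natural follow-up to decodeFile above is hashing the re-encoded info dictionary; a short hedged sketch:

import hashlib

def info_hash_from_info(reencoded_info):
    # the SHA-1 of the bencoded 'info' dict is the 20-byte info_hash used in
    # tracker announce requests and in the peer handshake
    return hashlib.sha1(reencoded_info).digest()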