def getSubtitleFileRelativeName(channel_id, infohash, langCode):
    """Derive the on-disk subtitle file name for a (channel, torrent, language) triple.

    The name is the hex SHA1 digest of the three identifiers fed in
    sequence, followed by the standard subtitle extension.
    """
    digest = sha()
    digest.update(channel_id)
    digest.update(infohash)
    digest.update(langCode)
    return digest.hexdigest() + SUBS_EXTENSION
def make_torrent_file(input,
                      userabortflag=None,
                      userprogresscallback=lambda x: None):
    """Build a torrent metainfo dict from the 'input' specification.

    Returns (infohash, metainfo) on success, or (None, None) when the
    user aborted or hashing produced no info dict.
    Raises ValueError when neither 'nodes' nor 'announce' is set.
    """
    info, piece_length = makeinfo(input, userabortflag, userprogresscallback)
    # The abort flag may have been set while makeinfo was hashing files.
    if userabortflag is not None and userabortflag.isSet():
        return (None, None)
    if info is None:
        return (None, None)
    check_info(info)
    metainfo = {'info': info, 'encoding': input['encoding']}
    # A torrent needs either DHT bootstrap nodes or a tracker URL.
    if input['nodes'] is None and input['announce'] is None:
        raise ValueError('No tracker set')
    # Copy the optional top-level keys that were actually supplied.
    for key in [
            'announce', 'announce-list', 'nodes', 'comment', 'created by',
            'httpseeds', 'url-list', 'authorized-peers'
    ]:
        if input.has_key(key) and input[key] is not None and len(
                input[key]) > 0:
            metainfo[key] = input[key]
            if key == 'comment':
                metainfo['comment.utf-8'] = uniconvert(input['comment'],
                                                       'utf-8')

    if input['torrentsigkeypairfilename'] is not None:
        from ACEStream.Core.Overlay.permid import create_torrent_signature
        create_torrent_signature(metainfo, input['torrentsigkeypairfilename'])
    if 'url-compat' in input:
        metainfo['info']['url-compat'] = input['url-compat']
    if 'x-ts-properties' in input:
        metainfo['x-ts-properties'] = input['x-ts-properties']
    if 'ogg-headers' in input:
        metainfo['ogg-headers'] = input['ogg-headers']
    # The swarm id is the SHA1 of the bencoded info dict.
    infohash = sha(bencode(info)).digest()
    return (infohash, metainfo)
    def get_tstream_from_httpseed(self, httpseedurl):
        """Serve the .torrent whose 'url-hash-list' contains SHA1(httpseedurl).

        Returns an HTTP response tuple (code, message, headers, body):
        400 when GET is disabled, 404 when no allowed torrent references
        the seed URL, otherwise 200 with the torrent file contents.
        """
        if not self.allow_get:
            return (400, 'Not Authorized', {
                'Content-Type': 'text/plain',
                'Pragma': 'no-cache'
            }, 'get function is not available with this tracker.')
        wanturlhash = sha(httpseedurl).digest()
        found = False
        # Linear scan over all allowed torrents; 'infohash' retains the
        # matching key after the loop breaks.
        for infohash, a in self.allowed.iteritems():
            for goturlhash in a['url-hash-list']:
                if goturlhash == wanturlhash:
                    found = True
                    break

            if found:
                break

        if not found or not self.allowed.has_key(infohash):
            return (404, 'Not Found', {
                'Content-Type': 'text/plain',
                'Pragma': 'no-cache'
            }, alas)
        fname = self.allowed[infohash]['file']
        fpath = self.allowed[infohash]['path']
        print >> sys.stderr, 'tracker: get_stream: Sending', fname
        return (200, 'OK', {
            'Content-Type': 'application/x-bittorrent',
            'Content-Disposition': 'attachment; filename=' + fname
        }, open(fpath, 'rb').read())
def make_torrent_file(input, userabortflag = None, userprogresscallback = lambda x: None):
    """Build a torrent metainfo dict from the 'input' specification.

    Returns (infohash, metainfo), or (None, None) when the user aborted
    or hashing failed. Raises ValueError when neither 'nodes' nor
    'announce' is present.
    """
    info, piece_length = makeinfo(input, userabortflag, userprogresscallback)
    # The abort flag may have been set while makeinfo was hashing files.
    if userabortflag is not None and userabortflag.isSet():
        return (None, None)
    if info is None:
        return (None, None)
    check_info(info)
    metainfo = {'info': info,
     'encoding': input['encoding']}
    # A torrent needs either DHT bootstrap nodes or a tracker URL.
    if input['nodes'] is None and input['announce'] is None:
        raise ValueError('No tracker set')
    # Copy the optional top-level keys that were actually supplied.
    for key in ['announce',
     'announce-list',
     'nodes',
     'comment',
     'created by',
     'httpseeds',
     'url-list',
     'authorized-peers']:
        if input.has_key(key) and input[key] is not None and len(input[key]) > 0:
            metainfo[key] = input[key]
            if key == 'comment':
                metainfo['comment.utf-8'] = uniconvert(input['comment'], 'utf-8')

    if input['torrentsigkeypairfilename'] is not None:
        from ACEStream.Core.Overlay.permid import create_torrent_signature
        create_torrent_signature(metainfo, input['torrentsigkeypairfilename'])
    if 'url-compat' in input:
        metainfo['info']['url-compat'] = input['url-compat']
    if 'x-ts-properties' in input:
        metainfo['x-ts-properties'] = input['x-ts-properties']
    if 'ogg-headers' in input:
        metainfo['ogg-headers'] = input['ogg-headers']
    # The swarm id is the SHA1 of the bencoded info dict.
    infohash = sha(bencode(info)).digest()
    return (infohash, metainfo)
 def save_torrent(self, infohash, metadata, source='BC', extra_info={}):
     """Persist collected torrent 'metadata' to disk and register it in the DB.

     Returns the collected-torrent file name, or None when the collector
     is uninitialized or free disk space would drop below the minimum.
     NOTE(review): mutable default extra_info={} is only passed through,
     never mutated here, so the shared default is harmless in this block.
     """
     if not self.initialized:
         return None
     self.check_overflow()
     # Re-measure free space lazily: when close to the limit, or on
     # every 10th collected torrent.
     if self.min_free_space != 0 and (
             self.free_space - len(metadata) < self.min_free_space
             or self.num_collected_torrents % 10 == 0):
         self.free_space = self.get_free_space()
         if self.free_space - len(metadata) < self.min_free_space:
             self.warn_disk_full()
             return None
     file_name = get_collected_torrent_filename(infohash)
     if DEBUG:
         print >> sys.stderr, 'metadata: Storing torrent', sha(
             infohash).hexdigest(), 'in', file_name
     save_path = self.write_torrent(metadata, self.torrent_dir, file_name)
     # Only count and register the torrent when it was actually written.
     if save_path:
         self.num_collected_torrents += 1
         self.free_space -= len(metadata)
         self.addTorrentToDB(save_path,
                             infohash,
                             metadata,
                             source=source,
                             extra_info=extra_info)
     return file_name
# Beispiel #6 (score: 0) — scraped example separator, not code
    def add_metadata_piece(self, piece, data):
        """Record one received metadata block; assemble and verify when complete.

        Each entry of self._metadata_blocks is a mutable list
        [requested_count, piece_index, data]. Once every block has data,
        the concatenation is SHA1-checked against self._info_hash; on
        success the decoded metainfo plus known-good peers go to
        self._callback, on failure all block data is wiped for a retry.
        """
        if not self._closed:
            # Find the slot for this piece, store its data, and decrement
            # its outstanding-request counter (floored at zero).
            for index, block_tuple in zip(xrange(len(self._metadata_blocks)),
                                          self._metadata_blocks):
                if block_tuple[1] == piece:
                    block_tuple[0] = max(0, block_tuple[0] - 1)
                    block_tuple[2] = data
                    self._metadata_blocks.sort()
                    break

            # for/else: the else branch only runs when no block is missing.
            for requested, piece, data in self._metadata_blocks:
                if data is None:
                    break
            else:
                metadata_blocks = [(piece, data)
                                   for _, piece, data in self._metadata_blocks]
                metadata_blocks.sort()
                metadata = ''.join([data for _, data in metadata_blocks])
                info_hash = sha(metadata).digest()
                if info_hash == self._info_hash:
                    if DEBUG:
                        print >> sys.stderr, 'MiniBitTorrent.add_metadata_piece() Done!'
                    # Hand back peers ordered most-recently-seen first.
                    peers = [
                        (timestamp, address)
                        for address, timestamp in self._good_peers.iteritems()
                    ]
                    peers.sort(reverse=True)
                    peers = [address for _, address in peers]
                    self._callback(bdecode(metadata), peers)
                else:
                    if DEBUG:
                        print >> sys.stderr, 'MiniBitTorrent.add_metadata_piece() Failed hashcheck! Restarting all over again :('
                    # Hash mismatch: discard every block's data, start over.
                    self._metadata_blocks = [[
                        requested, piece, None
                    ] for requested, piece, data in self._metadata_blocks]
    def get_tstream_from_httpseed(self, httpseedurl):
        """Serve the .torrent whose 'url-hash-list' contains SHA1(httpseedurl).

        Returns an HTTP response tuple (code, message, headers, body):
        400 when GET is disabled, 404 when no allowed torrent references
        the seed URL, otherwise 200 with the torrent file contents.
        """
        if not self.allow_get:
            return (400,
             'Not Authorized',
             {'Content-Type': 'text/plain',
              'Pragma': 'no-cache'},
             'get function is not available with this tracker.')
        wanturlhash = sha(httpseedurl).digest()
        found = False
        # Linear scan over all allowed torrents; 'infohash' retains the
        # matching key after the loop breaks.
        for infohash, a in self.allowed.iteritems():
            for goturlhash in a['url-hash-list']:
                if goturlhash == wanturlhash:
                    found = True
                    break

            if found:
                break

        if not found or not self.allowed.has_key(infohash):
            return (404,
             'Not Found',
             {'Content-Type': 'text/plain',
              'Pragma': 'no-cache'},
             alas)
        fname = self.allowed[infohash]['file']
        fpath = self.allowed[infohash]['path']
        print >> sys.stderr, 'tracker: get_stream: Sending', fname
        return (200,
         'OK',
         {'Content-Type': 'application/x-bittorrent',
          'Content-Disposition': 'attachment; filename=' + fname},
         open(fpath, 'rb').read())
# Beispiel #8 (score: 0) — scraped example separator, not code
def pubkey2swarmid(livedict):
    """Map a live-torrent auth config to a 20-byte swarm identifier.

    Unauthenticated swarms get a random id; authenticated ones use the
    SHA1 digest of the public key so the id is reproducible.
    """
    if DEBUG:
        print >> sys.stderr, 'pubkey2swarmid:', livedict.keys()
    if livedict['authmethod'] != 'None':
        return sha(livedict['pubkey']).digest()
    return Rand.rand_bytes(20)
def pubkey2swarmid(livedict):
    """Map a live-torrent auth config to a 20-byte swarm identifier.

    Unauthenticated ('None') swarms get a random id; otherwise the SHA1
    digest of the public key is used so the id is reproducible.
    """
    if DEBUG:
        print >> sys.stderr, 'pubkey2swarmid:', livedict.keys()
    if livedict['authmethod'] == 'None':
        return Rand.rand_bytes(20)
    else:
        return sha(livedict['pubkey']).digest()
# Beispiel #10 (score: 0) — scraped example separator, not code
    def add_metadata_piece(self, piece, data):
        """Record one received metadata block; assemble and verify when complete.

        Each entry of self._metadata_blocks is a mutable list
        [requested_count, piece_index, data]. Once every block has data,
        the concatenation is SHA1-checked against self._info_hash; on
        success the decoded metainfo plus known-good peers go to
        self._callback, on failure all block data is wiped for a retry.
        """
        if not self._closed:
            # Find the slot for this piece, store its data, and decrement
            # its outstanding-request counter (floored at zero).
            for index, block_tuple in zip(xrange(len(self._metadata_blocks)), self._metadata_blocks):
                if block_tuple[1] == piece:
                    block_tuple[0] = max(0, block_tuple[0] - 1)
                    block_tuple[2] = data
                    self._metadata_blocks.sort()
                    break

            # for/else: the else branch only runs when no block is missing.
            for requested, piece, data in self._metadata_blocks:
                if data is None:
                    break
            else:
                metadata_blocks = [ (piece, data) for _, piece, data in self._metadata_blocks ]
                metadata_blocks.sort()
                metadata = ''.join([ data for _, data in metadata_blocks ])
                info_hash = sha(metadata).digest()
                if info_hash == self._info_hash:
                    if DEBUG:
                        print >> sys.stderr, 'MiniBitTorrent.add_metadata_piece() Done!'
                    # Hand back peers ordered most-recently-seen first.
                    peers = [ (timestamp, address) for address, timestamp in self._good_peers.iteritems() ]
                    peers.sort(reverse=True)
                    peers = [ address for _, address in peers ]
                    self._callback(bdecode(metadata), peers)
                else:
                    if DEBUG:
                        print >> sys.stderr, 'MiniBitTorrent.add_metadata_piece() Failed hashcheck! Restarting all over again :('
                    # Hash mismatch: discard every block's data, start over.
                    self._metadata_blocks = [ [requested, piece, None] for requested, piece, data in self._metadata_blocks ]
def create_torrent_signature(metainfo, keypairfilename):
    """Sign the bencoded metainfo in place with the EC key at keypairfilename.

    Adds a 'signature' entry (ASN.1 DSA signature over the SHA1 digest)
    and a 'signer' entry (DER-encoded public key) to metainfo.
    """
    keypair = EC.load_key(keypairfilename)
    digest = sha(bencode(metainfo)).digest()
    metainfo['signature'] = keypair.sign_dsa_asn1(digest)
    metainfo['signer'] = str(keypair.pub().get_der())
def create_torrent_signature(metainfo, keypairfilename):
    """Sign the bencoded metainfo in place with the EC key at keypairfilename.

    Adds a 'signature' entry (ASN.1 DSA signature over the SHA1 digest)
    and a 'signer' entry (DER-encoded public key) to metainfo.
    """
    keypair = EC.load_key(keypairfilename)
    bmetainfo = bencode(metainfo)
    digester = sha(bmetainfo[:])
    digest = digester.digest()
    sigstr = keypair.sign_dsa_asn1(digest)
    metainfo['signature'] = sigstr
    metainfo['signer'] = str(keypair.pub().get_der())
def rsa_verify_data_pubkeyobj(plaintext, extra, pubkey, sig):
    """Verify signature 'sig' over SHA1(plaintext + extra) with pubkey.

    NOTE(review): despite the rsa_ prefix this delegates to
    pubkey.verify(), whose exact scheme depends on the key object —
    confirm against callers.
    """
    digester = sha(plaintext)
    digester.update(extra)
    digest = digester.digest()
    s = sig.tostring()
    if DEBUG:
        import binascii
        print >> sys.stderr, 'rsa_verify_data_pubkeyobj: len(digest)', len(digest), 'len(s)', len(s), 'digest', digest, 'extra', extra, binascii.hexlify(extra.tostring())
    return pubkey.verify(digest, s)
def verify_torrent_signature(metainfo):
    """Check the embedded torrent signature; truthy result means valid.

    Operates on a deep copy so the caller's metainfo stays untouched.
    The 'signature'/'signer' entries are removed before re-bencoding
    because the signature covers the metainfo without them.
    """
    stripped = deepcopy(metainfo)
    signature = stripped.pop('signature')
    signer = stripped.pop('signer')
    digest = sha(bencode(stripped)).digest()
    return do_verify_torrent_signature(digest, signature, signer)
def verify_torrent_signature(metainfo):
    """Check the embedded torrent signature; truthy result means valid.

    Operates on a deep copy so the caller's metainfo stays untouched.
    The 'signature'/'signer' entries are removed before re-bencoding
    because the signature covers the metainfo without them.
    """
    r = deepcopy(metainfo)
    signature = r['signature']
    signer = r['signer']
    del r['signature']
    del r['signer']
    bmetainfo = bencode(r)
    digester = sha(bmetainfo[:])
    digest = digester.digest()
    return do_verify_torrent_signature(digest, signature, signer)
# Beispiel #16 (score: 0) — scraped example separator, not code
def rsa_verify_data_pubkeyobj(plaintext, extra, pubkey, sig):
    """Verify signature 'sig' over SHA1(plaintext + extra) with pubkey.

    NOTE(review): despite the rsa_ prefix this delegates to
    pubkey.verify(), whose exact scheme depends on the key object —
    confirm against callers.
    """
    digester = sha(plaintext)
    digester.update(extra)
    digest = digester.digest()
    s = sig.tostring()
    if DEBUG:
        import binascii
        print >> sys.stderr, 'rsa_verify_data_pubkeyobj: len(digest)', len(
            digest), 'len(s)', len(
                s), 'digest', digest, 'extra', extra, binascii.hexlify(
                    extra.tostring())
    return pubkey.verify(digest, s)
# Beispiel #17 (score: 0) — scraped example separator, not code
 def _create(metainfo, protected = False):
     """Internal factory: build a TorrentDef from a parsed metainfo dict.

     Validates the metainfo, mirrors it into the def's input fields, and
     computes the infohash: URL-compat swarms use a derived swarm id,
     normal torrents the SHA1 of the bencoded info dict.
     """
     metainfo = validTorrentFile(metainfo)
     t = TorrentDef()
     t.protected = protected
     t.metainfo = metainfo
     t.ts_metainfo_valid = True
     t.metainfo_valid = True
     maketorrent.copy_metainfo_to_input(t.metainfo, t.input)
     if t.get_url_compat():
         t.infohash = makeurl.metainfo2swarmid(t.metainfo)
     else:
         # skip_dict_sorting keeps the info dict's original key order so
         # the hash matches the torrent as published.
         t.infohash = sha(bencode(metainfo['info'], params={'skip_dict_sorting': True})).digest()
     if DEBUG:
         print >> sys.stderr, 'TorrentDef::_create: infohash:', `(t.infohash)`
     return t
def check_fork(a, b, level):
    """Combine two sibling Merkle nodes [offset, hash] into their parent node.

    The child hashes are concatenated in offset order (lower offset
    first) before hashing, so the result does not depend on argument
    order. Returns [parent_offset, parent_digest].
    """
    myoffset, siblingoffset = a[0], b[0]
    if myoffset > siblingoffset:
        if DEBUG:
            print >> sys.stderr, 'merkle: combining', siblingoffset, myoffset
        data = b[1] + a[1]
    else:
        if DEBUG:
            print >> sys.stderr, 'merkle: combining', myoffset, siblingoffset
        data = a[1] + b[1]
    digester = sha()
    digester.update(data)
    parentstartoffset, parentoffset = get_parent_offset(myoffset, level - 1)
    return [parentoffset, digester.digest()]
# Beispiel #19 (score: 0) — scraped example separator, not code
 def verify(self, piece, index):
     """Verify the ECDSA signature trailer of a live-stream piece.

     The final OUR_SIGSIZE bytes of the piece hold (extra bytes, a
     signature-length byte, the ASN.1 signature). Beyond the signature
     itself this rejects stale sequence numbers, pieces mapped to the
     wrong index, and timestamps older than the first one seen.
     Returns True/False; any exception is logged and treated as failure.
     """
     try:
         extra = piece[-self.OUR_SIGSIZE:-self.OUR_SIGSIZE +
                       self.EXTRA_SIZE]
         lensig = ord(piece[-self.OUR_SIGSIZE + self.EXTRA_SIZE])
         if lensig > self.MAX_ECDSA_ASN1_SIGSIZE:
             print >> sys.stderr, 'ECDSAAuth: @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ failed piece', index, 'lensig wrong', lensig
             return False
         # ASN.1 signatures are variable length; diff trims the padding
         # that pads the trailer to its fixed size.
         diff = lensig - self.MAX_ECDSA_ASN1_SIGSIZE
         if diff == 0:
             sig = piece[-self.OUR_SIGSIZE + self.EXTRA_SIZE +
                         self.LENGTH_SIZE:]
         else:
             sig = piece[-self.OUR_SIGSIZE + self.EXTRA_SIZE +
                         self.LENGTH_SIZE:diff]
         content = piece[:-self.OUR_SIGSIZE]
         if DEBUG:
             print >> sys.stderr, 'ECDSAAuth: verify piece', index, 'sig', ` sig `
             print >> sys.stderr, 'ECDSAAuth: verify dig', sha(
                 content).hexdigest()
         ret = ecdsa_verify_data_pubkeyobj(content, extra, self.pubkey, sig)
         if ret:
             seqnum, rtstamp = self._decode_extra(piece)
             if DEBUG:
                 print >> sys.stderr, 'ECDSAAuth: verify piece', index, 'seq', seqnum, 'ts %.5f s' % rtstamp, 'ls', lensig
             # The sequence number must map onto this piece index and be
             # recent enough (within half a window of the newest seen).
             mod = seqnum % self.get_npieces()
             thres = self.seqnum - self.get_npieces() / 2
             if seqnum <= thres:
                 print >> sys.stderr, 'ECDSAAuth: @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ failed piece', index, 'old seqnum', seqnum, '<<', self.seqnum
                 return False
             if mod != index:
                 print >> sys.stderr, 'ECDSAAuth: @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ failed piece', index, 'expected', mod
                 return False
             if self.startts is not None and rtstamp < self.startts:
                 print >> sys.stderr, 'ECDSAAuth: @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ failed piece', index, 'older than oldest known ts', rtstamp, self.startts
                 return False
             self.seqnum = max(self.seqnum, seqnum)
             # First verified piece fixes the stream start (5 min slack).
             if self.startts is None:
                 self.startts = rtstamp - 300.0
                 print >> sys.stderr, 'ECDSAAuth: @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@: startts', self.startts
         else:
             print >> sys.stderr, 'ECDSAAuth: @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ piece', index, 'failed sig'
         return ret
     except:
         print_exc()
         return False
# Beispiel #20 (score: 0) — scraped example separator, not code
 def _create(metainfo, protected=False):
     """Internal factory: build a TorrentDef from a parsed metainfo dict.

     Validates the metainfo, mirrors it into the def's input fields, and
     computes the infohash: URL-compat swarms use a derived swarm id,
     normal torrents the SHA1 of the bencoded info dict.
     """
     metainfo = validTorrentFile(metainfo)
     t = TorrentDef()
     t.protected = protected
     t.metainfo = metainfo
     t.ts_metainfo_valid = True
     t.metainfo_valid = True
     maketorrent.copy_metainfo_to_input(t.metainfo, t.input)
     if t.get_url_compat():
         t.infohash = makeurl.metainfo2swarmid(t.metainfo)
     else:
         # skip_dict_sorting keeps the info dict's original key order so
         # the hash matches the torrent as published.
         t.infohash = sha(
             bencode(metainfo['info'], params={'skip_dict_sorting':
                                               True})).digest()
     if DEBUG:
         print >> sys.stderr, 'TorrentDef::_create: infohash:', ` (
             t.infohash) `
     return t
 def save_torrent(self, infohash, metadata, source = 'BC', extra_info = {}):
     """Persist collected torrent 'metadata' to disk and register it in the DB.

     Returns the collected-torrent file name, or None when the collector
     is uninitialized or free disk space would drop below the minimum.
     NOTE(review): mutable default extra_info={} is only passed through,
     never mutated here, so the shared default is harmless in this block.
     """
     if not self.initialized:
         return None
     self.check_overflow()
     # Re-measure free space lazily: when close to the limit, or on
     # every 10th collected torrent.
     if self.min_free_space != 0 and (self.free_space - len(metadata) < self.min_free_space or self.num_collected_torrents % 10 == 0):
         self.free_space = self.get_free_space()
         if self.free_space - len(metadata) < self.min_free_space:
             self.warn_disk_full()
             return None
     file_name = get_collected_torrent_filename(infohash)
     if DEBUG:
         print >> sys.stderr, 'metadata: Storing torrent', sha(infohash).hexdigest(), 'in', file_name
     save_path = self.write_torrent(metadata, self.torrent_dir, file_name)
     # Only count and register the torrent when it was actually written.
     if save_path:
         self.num_collected_torrents += 1
         self.free_space -= len(metadata)
         self.addTorrentToDB(save_path, infohash, metadata, source=source, extra_info=extra_info)
     return file_name
def fill_tree(tree, height, npieces, hashes):
    """Populate a flat Merkle tree from the leaf piece hashes upward.

    Leaves live at offsets starting from 2**height - 1. Each pass
    combines adjacent sibling pairs into their parent until the root
    level is reached. Returns the (mutated) tree list.
    """
    bottom = int(pow(2, height) - 1)
    if DEBUG:
        print >> sys.stderr, 'merkle: bottom of tree starts at', bottom
    for i in range(npieces):
        tree[bottom + i] = hashes[i]

    for level in range(height, 0, -1):
        if DEBUG:
            print >> sys.stderr, 'merkle: calculating level', level
        first = int(pow(2, level) - 1)
        last = int(pow(2, level + 1) - 2)
        for offset in range(first, last, 2):
            parentstartoffset, parentoffset = get_parent_offset(offset, level)
            digester = sha()
            digester.update(tree[offset] + tree[offset + 1])
            tree[parentoffset] = digester.digest()

    return tree
 def verify(self, piece, index):
     """Verify the ECDSA signature trailer of a live-stream piece.

     The final OUR_SIGSIZE bytes of the piece hold (extra bytes, a
     signature-length byte, the ASN.1 signature). Beyond the signature
     itself this rejects stale sequence numbers, pieces mapped to the
     wrong index, and timestamps older than the first one seen.
     Returns True/False; any exception is logged and treated as failure.
     """
     try:
         extra = piece[-self.OUR_SIGSIZE:-self.OUR_SIGSIZE + self.EXTRA_SIZE]
         lensig = ord(piece[-self.OUR_SIGSIZE + self.EXTRA_SIZE])
         if lensig > self.MAX_ECDSA_ASN1_SIGSIZE:
             print >> sys.stderr, 'ECDSAAuth: @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ failed piece', index, 'lensig wrong', lensig
             return False
         # ASN.1 signatures are variable length; diff trims the padding
         # that pads the trailer to its fixed size.
         diff = lensig - self.MAX_ECDSA_ASN1_SIGSIZE
         if diff == 0:
             sig = piece[-self.OUR_SIGSIZE + self.EXTRA_SIZE + self.LENGTH_SIZE:]
         else:
             sig = piece[-self.OUR_SIGSIZE + self.EXTRA_SIZE + self.LENGTH_SIZE:diff]
         content = piece[:-self.OUR_SIGSIZE]
         if DEBUG:
             print >> sys.stderr, 'ECDSAAuth: verify piece', index, 'sig', `sig`
             print >> sys.stderr, 'ECDSAAuth: verify dig', sha(content).hexdigest()
         ret = ecdsa_verify_data_pubkeyobj(content, extra, self.pubkey, sig)
         if ret:
             seqnum, rtstamp = self._decode_extra(piece)
             if DEBUG:
                 print >> sys.stderr, 'ECDSAAuth: verify piece', index, 'seq', seqnum, 'ts %.5f s' % rtstamp, 'ls', lensig
             # The sequence number must map onto this piece index and be
             # recent enough (within half a window of the newest seen).
             mod = seqnum % self.get_npieces()
             thres = self.seqnum - self.get_npieces() / 2
             if seqnum <= thres:
                 print >> sys.stderr, 'ECDSAAuth: @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ failed piece', index, 'old seqnum', seqnum, '<<', self.seqnum
                 return False
             if mod != index:
                 print >> sys.stderr, 'ECDSAAuth: @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ failed piece', index, 'expected', mod
                 return False
             if self.startts is not None and rtstamp < self.startts:
                 print >> sys.stderr, 'ECDSAAuth: @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ failed piece', index, 'older than oldest known ts', rtstamp, self.startts
                 return False
             self.seqnum = max(self.seqnum, seqnum)
             # First verified piece fixes the stream start (5 min slack).
             if self.startts is None:
                 self.startts = rtstamp - 300.0
                 print >> sys.stderr, 'ECDSAAuth: @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@: startts', self.startts
         else:
             print >> sys.stderr, 'ECDSAAuth: @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ piece', index, 'failed sig'
         return ret
     except:
         print_exc()
         return False
def ecdsa_verify_data_pubkeyobj(plaintext, extra, pubkey, blob):
    """Verify an ASN.1 ECDSA signature blob over SHA1(plaintext + extra)."""
    hasher = sha(plaintext)
    hasher.update(extra)
    return pubkey.verify_dsa_asn1(hasher.digest(), blob)
# Beispiel #25 (score: 0) — scraped example separator, not code
def parsedir(
    directory, parsed, files, blocked, exts=[".torrent", TRIBLER_TORRENT_EXT], return_metainfo=False, errfunc=_errfunc
):
    """Rescan a torrent directory tree and diff it against the previous scan.

    parsed/files/blocked are the results of the previous call. Returns
    (new_parsed, new_files, new_blocked, added, removed): parsed-style
    dicts are keyed by infohash, file-style dicts by path.
    NOTE(review): the mutable default exts=[...] is only read, never
    mutated, so the shared default is harmless here.
    """
    if DEBUG:
        errfunc("checking dir")
    dirs_to_check = [directory]
    new_files = {}
    new_blocked = {}
    torrent_type = {}
    # A directory's subdirectories are only descended into when the
    # directory itself contains no torrent files.
    while dirs_to_check:
        directory = dirs_to_check.pop()
        newtorrents = False
        for f in os.listdir(directory):
            newtorrent = None
            for ext in exts:
                if f.endswith(ext):
                    newtorrent = ext[1:]
                    break

            if newtorrent:
                newtorrents = True
                p = os.path.join(directory, f)
                # Value shape: [(mtime, size), infohash-or-0]; 0 = unparsed.
                new_files[p] = [(int(os.path.getmtime(p)), os.path.getsize(p)), 0]
                torrent_type[p] = newtorrent

        if not newtorrents:
            for f in os.listdir(directory):
                p = os.path.join(directory, f)
                if os.path.isdir(p):
                    dirs_to_check.append(p)

    new_parsed = {}
    to_add = []
    added = {}
    removed = {}
    # Reuse the previous parse for files whose (mtime, size) is unchanged;
    # everything else is queued for (re-)parsing.
    for p, v in new_files.items():
        oldval = files.get(p)
        if not oldval:
            to_add.append(p)
            continue
        h = oldval[1]
        if oldval[0] == v[0]:
            if h:
                if blocked.has_key(p):
                    to_add.append(p)
                else:
                    new_parsed[h] = parsed[h]
                new_files[p] = oldval
            else:
                new_blocked[p] = 1
            continue
        if parsed.has_key(h) and not blocked.has_key(p):
            if DEBUG:
                errfunc("removing " + p + " (will re-add)")
            removed[h] = parsed[h]
        to_add.append(p)

    to_add.sort()
    for p in to_add:
        new_file = new_files[p]
        v, h = new_file
        if new_parsed.has_key(h):
            if not blocked.has_key(p) or files[p][0] != v:
                errfunc("**warning** " + p + " is a duplicate torrent for " + new_parsed[h]["path"])
            new_blocked[p] = 1
            continue
        if DEBUG:
            errfunc("adding " + p)
        try:
            tdef = TorrentDef.load(p)
            h = tdef.get_infohash()
            d = tdef.get_metainfo()
            new_file[1] = h
            if new_parsed.has_key(h):
                errfunc("**warning** " + p + " is a duplicate torrent for " + new_parsed[h]["path"])
                new_blocked[p] = 1
                continue
            # Build the summary record for this torrent.
            a = {}
            a["path"] = p
            f = os.path.basename(p)
            a["file"] = f
            a["type"] = torrent_type[p]
            if tdef.get_url_compat():
                a["url"] = tdef.get_url()
            i = d["info"]
            l = 0
            nf = 0
            # Single-file torrents have 'length'; multi-file have 'files'.
            if i.has_key("length"):
                l = i.get("length", 0)
                nf = 1
            elif i.has_key("files"):
                for li in i["files"]:
                    nf += 1
                    if li.has_key("length"):
                        l += li["length"]

            a["numfiles"] = nf
            a["length"] = l
            a["name"] = i.get("name", f)

            def setkey(k, d=d, a=a):
                # Defaults bind the current d/a; copies key k when present.
                if d.has_key(k):
                    a[k] = d[k]

            setkey("failure reason")
            setkey("warning message")
            setkey("announce-list")
            if tdef.get_urllist() is not None:
                # Hash each HTTP seed URL so seeds can be found by hash.
                httpseedhashes = []
                for url in tdef.get_urllist():
                    urlhash = sha(url).digest()
                    httpseedhashes.append(urlhash)

                a["url-hash-list"] = httpseedhashes
            if return_metainfo:
                a["metainfo"] = d
        except:
            print_exc()
            errfunc("**warning** " + p + " has errors")
            new_blocked[p] = 1
            continue

        if DEBUG:
            errfunc("... successful")
        new_parsed[h] = a
        added[h] = a

    # Previously-known files that vanished from disk count as removed.
    for p, v in files.items():
        if not new_files.has_key(p) and not blocked.has_key(p):
            if DEBUG:
                errfunc("removing " + p)
            removed[v[1]] = parsed[v[1]]

    if DEBUG:
        errfunc("done checking")
    return (new_parsed, new_files, new_blocked, added, removed)
# Beispiel #26 (score: 0) — scraped example separator, not code
def add_key(tracker):
    """Derive a 6-character key for 'tracker' and cache it in keys.

    Takes the last six bytes of SHA1(basekeydata + tracker) and maps the
    low six bits of each byte through the base64 alphabet.
    """
    digest = sha(basekeydata + tracker).digest()
    keys[tracker] = ''.join([mapbase64[ord(c) & 63] for c in digest[-6:]])
def verify_data_pubkeyobj(plaintext, pubkey, blob):
    """Verify an ASN.1 DSA signature blob against SHA1(plaintext)."""
    return pubkey.verify_dsa_asn1(sha(plaintext).digest(), blob)
def makeinfo(input, userabortflag, userprogresscallback):
    """Hash the input files and build the torrent 'info' dictionary.

    Returns (infodict, piece_length), or (None, None) when userabortflag
    is set during hashing. userprogresscallback receives a float in
    [0, 1] as hashing progresses. Live torrents skip hashing entirely.
    """
    encoding = input['encoding']
    pieces = []
    sh = sha()
    done = 0L
    fs = []
    totalsize = 0L
    totalhashed = 0L
    subs = []
    # Expand each input entry into (path-list-in-torrent, source-path).
    for file in input['files']:
        inpath = file['inpath']
        outpath = file['outpath']
        if DEBUG:
            print >> sys.stderr, 'makeinfo: inpath', inpath, 'outpath', outpath
        if os.path.isdir(inpath):
            dirsubs = subfiles(inpath)
            subs.extend(dirsubs)
        elif outpath is None:
            subs.append(([os.path.basename(inpath)], inpath))
        else:
            subs.append((filename2pathlist(outpath, skipfirst=True), inpath))

    subs.sort()
    newsubs = []
    # Attach each file's size; live torrents take it from the input spec.
    for p, f in subs:
        if 'live' in input:
            size = input['files'][0]['length']
        else:
            size = os.path.getsize(f)
        totalsize += size
        newsubs.append((p, f, size))

    subs = newsubs
    # Auto-select a piece-length exponent from the total size when the
    # caller did not fix one.
    if input['piece length'] == 0:
        if input['createmerkletorrent']:
            piece_len_exp = 18
        elif totalsize > 8589934592L:
            piece_len_exp = 21
        elif totalsize > 2147483648L:
            piece_len_exp = 20
        elif totalsize > 536870912:
            piece_len_exp = 19
        elif totalsize > 67108864:
            piece_len_exp = 18
        elif totalsize > 16777216:
            piece_len_exp = 17
        elif totalsize > 4194304:
            piece_len_exp = 16
        else:
            piece_len_exp = 15
        piece_length = 2 ** piece_len_exp
    else:
        piece_length = input['piece length']
    # Piece hashing; skipped entirely for live torrents.
    if 'live' not in input:
        for p, f, size in subs:
            pos = 0L
            h = open(f, 'rb')
            if input['makehash_md5']:
                hash_md5 = md5.new()
            if input['makehash_sha1']:
                hash_sha1 = sha()
            if input['makehash_crc32']:
                hash_crc32 = zlib.crc32('')
            while pos < size:
                # Read at most up to the end of the current piece; 'done'
                # tracks how much of the piece is filled so pieces can
                # span file boundaries.
                a = min(size - pos, piece_length - done)
                if userabortflag is not None and userabortflag.isSet():
                    return (None, None)
                readpiece = h.read(a)
                if userabortflag is not None and userabortflag.isSet():
                    return (None, None)
                sh.update(readpiece)
                if input['makehash_md5']:
                    hash_md5.update(readpiece)
                if input['makehash_crc32']:
                    hash_crc32 = zlib.crc32(readpiece, hash_crc32)
                if input['makehash_sha1']:
                    hash_sha1.update(readpiece)
                done += a
                pos += a
                totalhashed += a
                if done == piece_length:
                    pieces.append(sh.digest())
                    done = 0
                    sh = sha()
                if userprogresscallback is not None:
                    userprogresscallback(float(totalhashed) / float(totalsize))

            # Per-file entry for the multi-file 'files' list.
            newdict = odict()
            newdict['length'] = num2num(size)
            newdict['path'] = uniconvertl(p, encoding)
            newdict['path.utf-8'] = uniconvertl(p, 'utf-8')
            for file in input['files']:
                if file['inpath'] == f:
                    if file['playtime'] is not None:
                        newdict['playtime'] = file['playtime']
                    break

            if input['makehash_md5']:
                newdict['md5sum'] = hash_md5.hexdigest()
            if input['makehash_crc32']:
                newdict['crc32'] = '%08X' % hash_crc32
            if input['makehash_sha1']:
                newdict['sha1'] = hash_sha1.digest()
            fs.append(newdict)
            h.close()

        # Flush the final, possibly partial, piece.
        if done > 0:
            pieces.append(sh.digest())
    # Single-file torrents use 'length'; multi-file use a 'files' list.
    if len(subs) == 1:
        flkey = 'length'
        flval = num2num(totalsize)
        name = subs[0][0][0]
    else:
        flkey = 'files'
        flval = fs
        outpath = input['files'][0]['outpath']
        l = filename2pathlist(outpath)
        name = l[0]
    infodict = odict()
    infodict['piece length'] = num2num(piece_length)
    infodict[flkey] = flval
    infodict['name'] = uniconvert(name, encoding)
    infodict['name.utf-8'] = uniconvert(name, 'utf-8')
    if 'live' not in input:
        # Merkle torrents store only the root hash; classic torrents the
        # concatenated piece hashes.
        if input['createmerkletorrent']:
            merkletree = MerkleTree(piece_length, totalsize, None, pieces)
            root_hash = merkletree.get_root_hash()
            infodict['root hash'] = root_hash
        else:
            infodict['pieces'] = ''.join(pieces)
    else:
        infodict['live'] = input['live']
    # Optional ACEStream-specific info-dict extensions.
    if input.has_key('provider'):
        infodict['provider'] = input['provider']
    if input.has_key('content_id'):
        infodict['content_id'] = input['content_id']
    if input.has_key('premium'):
        infodict['premium'] = input['premium']
    if input.has_key('license'):
        infodict['license'] = input['license']
    if input.has_key('tns'):
        infodict['tns'] = input['tns']
    if 'cs_keys' in input:
        infodict['cs_keys'] = input['cs_keys']
    if 'private' in input:
        infodict['private'] = input['private']
    if 'sharing' in input:
        infodict['sharing'] = input['sharing']
    if 'ns-metadata' in input:
        infodict['ns-metadata'] = input['ns-metadata']
    # Single-file case: 'f' still holds the last hashed source path here.
    if len(subs) == 1:
        for file in input['files']:
            if file['inpath'] == f:
                if file['playtime'] is not None:
                    infodict['playtime'] = file['playtime']

    infodict.sort()
    return (infodict, piece_length)
def verify_data(plaintext, permid, blob):
    """Verify blob as a DSA/ASN.1 signature of SHA1(plaintext) by permid.

    permid is a DER-encoded EC public key.
    """
    pub = EC.pub_key_from_der(permid)
    return pub.verify_dsa_asn1(sha(plaintext).digest(), blob)
def sign_data(plaintext, ec_keypair):
    """Return an ASN.1 DSA signature of SHA1(plaintext) made with ec_keypair."""
    return ec_keypair.sign_dsa_asn1(sha(plaintext).digest())
def verify_response(randomA, randomB, peeridB, pubA, sigA):
    """Verify sigA over the bencoded challenge [randomA, randomB, peeridB].

    Returns True iff sigA is pubA's DSA/ASN.1 signature over the SHA-1
    digest of the bencoded list.
    """
    # Local renamed from `list`, which shadowed the builtin.
    challenge = [randomA, randomB, peeridB]
    digest = sha(bencode(challenge)).digest()
    return pubA.verify_dsa_asn1(digest, sigA)
def makeinfo(input, userabortflag, userprogresscallback):
    """Build the bencodable 'info' dictionary for a torrent.

    *input* is the torrent-creation job description: a dict with 'files'
    (each entry has 'inpath', 'outpath', 'playtime'), 'encoding',
    'piece length', the makehash_* flags, and optional keys such as
    'live', 'createmerkletorrent', 'provider', etc.

    Returns (infodict, piece_length); returns (None, None) if
    *userabortflag* gets set while hashing.  *userprogresscallback* is
    called with the fraction of bytes hashed so far.
    """
    encoding = input['encoding']
    pieces = []
    sh = sha()   # rolling hasher for the current piece; pieces span file boundaries
    done = 0L    # bytes accumulated into the current piece so far
    fs = []      # per-file dicts for the multi-file 'files' list
    totalsize = 0L
    totalhashed = 0L
    subs = []
    # Expand the job's file list into (path-components, input-path) pairs;
    # directories are expanded via subfiles().
    for file in input['files']:
        inpath = file['inpath']
        outpath = file['outpath']
        if DEBUG:
            print >> sys.stderr, 'makeinfo: inpath', inpath, 'outpath', outpath
        if os.path.isdir(inpath):
            dirsubs = subfiles(inpath)
            subs.extend(dirsubs)
        elif outpath is None:
            subs.append(([os.path.basename(inpath)], inpath))
        else:
            subs.append((filename2pathlist(outpath, skipfirst=True), inpath))

    subs.sort()
    # Attach each file's size; live torrents take the declared length since
    # there is no file on disk to stat.
    newsubs = []
    for p, f in subs:
        if 'live' in input:
            size = input['files'][0]['length']
        else:
            size = os.path.getsize(f)
        totalsize += size
        newsubs.append((p, f, size))

    subs = newsubs
    # Pick a piece length: explicit value wins, otherwise choose a power of
    # two scaled to the total size (32 KB .. 2 MB); Merkle torrents fix it
    # at 256 KB.
    if input['piece length'] == 0:
        if input['createmerkletorrent']:
            piece_len_exp = 18
        elif totalsize > 8589934592L:
            piece_len_exp = 21
        elif totalsize > 2147483648L:
            piece_len_exp = 20
        elif totalsize > 536870912:
            piece_len_exp = 19
        elif totalsize > 67108864:
            piece_len_exp = 18
        elif totalsize > 16777216:
            piece_len_exp = 17
        elif totalsize > 4194304:
            piece_len_exp = 16
        else:
            piece_len_exp = 15
        piece_length = 2**piece_len_exp
    else:
        piece_length = input['piece length']
    # Live torrents have no content yet, so all hashing is skipped.
    if 'live' not in input:
        for p, f, size in subs:
            pos = 0L
            h = open(f, 'rb')
            if input['makehash_md5']:
                hash_md5 = md5.new()
            if input['makehash_sha1']:
                hash_sha1 = sha()
            if input['makehash_crc32']:
                hash_crc32 = zlib.crc32('')
            # Read the file in chunks that never cross a piece boundary, so
            # a piece digest can be emitted exactly when `done` fills up.
            while pos < size:
                a = min(size - pos, piece_length - done)
                if userabortflag is not None and userabortflag.isSet():
                    return (None, None)
                readpiece = h.read(a)
                if userabortflag is not None and userabortflag.isSet():
                    return (None, None)
                sh.update(readpiece)
                if input['makehash_md5']:
                    hash_md5.update(readpiece)
                if input['makehash_crc32']:
                    hash_crc32 = zlib.crc32(readpiece, hash_crc32)
                if input['makehash_sha1']:
                    hash_sha1.update(readpiece)
                done += a
                pos += a
                totalhashed += a
                if done == piece_length:
                    pieces.append(sh.digest())
                    done = 0
                    sh = sha()
                if userprogresscallback is not None:
                    userprogresscallback(float(totalhashed) / float(totalsize))

            newdict = odict()
            newdict['length'] = num2num(size)
            newdict['path'] = uniconvertl(p, encoding)
            newdict['path.utf-8'] = uniconvertl(p, 'utf-8')
            # Carry the per-file playtime (if any) into the file entry.
            for file in input['files']:
                if file['inpath'] == f:
                    if file['playtime'] is not None:
                        newdict['playtime'] = file['playtime']
                    break

            if input['makehash_md5']:
                newdict['md5sum'] = hash_md5.hexdigest()
            if input['makehash_crc32']:
                newdict['crc32'] = '%08X' % hash_crc32
            if input['makehash_sha1']:
                newdict['sha1'] = hash_sha1.digest()
            fs.append(newdict)
            h.close()

        # Flush the final, partially-filled piece.
        if done > 0:
            pieces.append(sh.digest())
    # Single-file torrents use 'length'; multi-file torrents use 'files'
    # and take the torrent name from the first outpath component.
    if len(subs) == 1:
        flkey = 'length'
        flval = num2num(totalsize)
        name = subs[0][0][0]
    else:
        flkey = 'files'
        flval = fs
        outpath = input['files'][0]['outpath']
        l = filename2pathlist(outpath)
        name = l[0]
    infodict = odict()
    infodict['piece length'] = num2num(piece_length)
    infodict[flkey] = flval
    infodict['name'] = uniconvert(name, encoding)
    infodict['name.utf-8'] = uniconvert(name, 'utf-8')
    if 'live' not in input:
        # Either a Merkle root hash or the flat concatenation of piece hashes.
        if input['createmerkletorrent']:
            merkletree = MerkleTree(piece_length, totalsize, None, pieces)
            root_hash = merkletree.get_root_hash()
            infodict['root hash'] = root_hash
        else:
            infodict['pieces'] = ''.join(pieces)
    else:
        infodict['live'] = input['live']
    # Optional pass-through keys copied verbatim from the job description.
    if input.has_key('provider'):
        infodict['provider'] = input['provider']
    if input.has_key('content_id'):
        infodict['content_id'] = input['content_id']
    if input.has_key('premium'):
        infodict['premium'] = input['premium']
    if input.has_key('license'):
        infodict['license'] = input['license']
    if input.has_key('tns'):
        infodict['tns'] = input['tns']
    if 'cs_keys' in input:
        infodict['cs_keys'] = input['cs_keys']
    if 'private' in input:
        infodict['private'] = input['private']
    if 'sharing' in input:
        infodict['sharing'] = input['sharing']
    if 'ns-metadata' in input:
        infodict['ns-metadata'] = input['ns-metadata']
    # Single-file torrents also expose playtime at the top level.
    # NOTE(review): `f` here is left over from the loops above — this relies
    # on subs being non-empty; unlike the per-file loop there is no `break`.
    if len(subs) == 1:
        for file in input['files']:
            if file['inpath'] == f:
                if file['playtime'] is not None:
                    infodict['playtime'] = file['playtime']

    infodict.sort()
    return (infodict, piece_length)
def verify_response(randomA, randomB, peeridB, pubA, sigA):
    """Check pubA's DSA/ASN.1 signature sigA over the SHA-1 digest of
    bencode([randomA, randomB, peeridB])."""
    # Renamed local (was `list`) so it no longer shadows the builtin.
    payload = bencode([randomA, randomB, peeridB])
    return pubA.verify_dsa_asn1(sha(payload).digest(), sigA)
def sign_response(randomA, randomB, peeridB, keypairA):
    """Sign the bencoded challenge [randomA, randomB, peeridB] with keypairA.

    Returns the DSA/ASN.1 signature blob over the SHA-1 digest.
    """
    # Local renamed from `list`, which shadowed the builtin.
    challenge = [randomA, randomB, peeridB]
    digest = sha(bencode(challenge)).digest()
    return keypairA.sign_dsa_asn1(digest)
def ecdsa_sign_data(plaintext, extra, ec_keypair):
    """Sign the SHA-1 digest of *plaintext* followed by *extra* with
    *ec_keypair* (DSA/ASN.1 encoding)."""
    hasher = sha(plaintext)
    hasher.update(extra)
    return ec_keypair.sign_dsa_asn1(hasher.digest())
def parsedir(directory,
             parsed,
             files,
             blocked,
             exts=['.torrent', TRIBLER_TORRENT_EXT],
             return_metainfo=False,
             errfunc=_errfunc):
    """Scan *directory* for torrent files and reconcile against the previous
    scan's state.

    Parameters:
        directory -- root path to scan; subdirectories are only descended
                     into when a directory contains no torrent files itself.
        parsed    -- previous scan: infohash -> torrent record dict.
        files     -- previous scan: path -> [(mtime, size), infohash].
        blocked   -- paths previously marked bad or duplicate.
        exts      -- extensions treated as torrents.  NOTE(review): mutable
                     default argument; harmless here since it is never
                     mutated, but fragile style.
        return_metainfo -- if True, each record also carries 'metainfo'.
        errfunc   -- callback for progress/warning messages.

    Returns (new_parsed, new_files, new_blocked, added, removed), where
    added/removed map infohash -> record for torrents that appeared or
    disappeared since the previous scan.
    """
    if DEBUG:
        errfunc('checking dir')
    dirs_to_check = [directory]
    new_files = {}
    new_blocked = {}
    torrent_type = {}
    # Walk: record torrents per directory; only recurse into a directory's
    # children when it contained no torrent files.
    while dirs_to_check:
        directory = dirs_to_check.pop()
        newtorrents = False
        for f in os.listdir(directory):
            newtorrent = None
            for ext in exts:
                if f.endswith(ext):
                    newtorrent = ext[1:]
                    break

            if newtorrent:
                newtorrents = True
                p = os.path.join(directory, f)
                # Infohash slot starts at 0 (unknown) until parsed below.
                new_files[p] = [(int(os.path.getmtime(p)), os.path.getsize(p)),
                                0]
                torrent_type[p] = newtorrent

        if not newtorrents:
            for f in os.listdir(directory):
                p = os.path.join(directory, f)
                if os.path.isdir(p):
                    dirs_to_check.append(p)

    new_parsed = {}
    to_add = []
    added = {}
    removed = {}
    # Diff against the previous scan: unchanged files keep their parsed
    # record; new or modified files are queued for (re)parsing.
    for p, v in new_files.items():
        oldval = files.get(p)
        if not oldval:
            to_add.append(p)
            continue
        h = oldval[1]
        if oldval[0] == v[0]:
            # (mtime, size) unchanged since last scan.
            if h:
                if blocked.has_key(p):
                    to_add.append(p)
                else:
                    new_parsed[h] = parsed[h]
                new_files[p] = oldval
            else:
                # Unchanged but never successfully parsed: keep it blocked.
                new_blocked[p] = 1
            continue
        if parsed.has_key(h) and not blocked.has_key(p):
            if DEBUG:
                errfunc('removing ' + p + ' (will re-add)')
            removed[h] = parsed[h]
        to_add.append(p)

    to_add.sort()
    # Parse each queued file; duplicates and parse failures get blocked.
    for p in to_add:
        new_file = new_files[p]
        v, h = new_file
        if new_parsed.has_key(h):
            if not blocked.has_key(p) or files[p][0] != v:
                errfunc('**warning** ' + p + ' is a duplicate torrent for ' +
                        new_parsed[h]['path'])
            new_blocked[p] = 1
            continue
        if DEBUG:
            errfunc('adding ' + p)
        try:
            tdef = TorrentDef.load(p)
            h = tdef.get_infohash()
            d = tdef.get_metainfo()
            new_file[1] = h
            # Re-check for duplicates now that the real infohash is known.
            if new_parsed.has_key(h):
                errfunc('**warning** ' + p + ' is a duplicate torrent for ' +
                        new_parsed[h]['path'])
                new_blocked[p] = 1
                continue
            a = {}
            a['path'] = p
            f = os.path.basename(p)
            a['file'] = f
            a['type'] = torrent_type[p]
            if tdef.get_url_compat():
                a['url'] = tdef.get_url()
            i = d['info']
            l = 0
            nf = 0
            # Total length / file count: single-file torrents carry 'length',
            # multi-file torrents list per-file lengths under 'files'.
            if i.has_key('length'):
                l = i.get('length', 0)
                nf = 1
            elif i.has_key('files'):
                for li in i['files']:
                    nf += 1
                    if li.has_key('length'):
                        l += li['length']

            a['numfiles'] = nf
            a['length'] = l
            a['name'] = i.get('name', f)

            def setkey(k, d=d, a=a):
                # Copy key k from the metainfo into the record if present.
                if d.has_key(k):
                    a[k] = d[k]

            setkey('failure reason')
            setkey('warning message')
            setkey('announce-list')
            if tdef.get_urllist() is not None:
                httpseedhashes = []
                for url in tdef.get_urllist():
                    urlhash = sha(url).digest()
                    httpseedhashes.append(urlhash)

                a['url-hash-list'] = httpseedhashes
            if return_metainfo:
                a['metainfo'] = d
        except:
            print_exc()
            errfunc('**warning** ' + p + ' has errors')
            new_blocked[p] = 1
            continue

        if DEBUG:
            errfunc('... successful')
        new_parsed[h] = a
        added[h] = a

    # Files present last scan but gone from disk now are reported removed.
    for p, v in files.items():
        if not new_files.has_key(p) and not blocked.has_key(p):
            if DEBUG:
                errfunc('removing ' + p)
            removed[v[1]] = parsed[v[1]]

    if DEBUG:
        errfunc('done checking')
    return (new_parsed, new_files, new_blocked, added, removed)
def verify_data_pubkeyobj(plaintext, pubkey, blob):
    """Return True iff *blob* is *pubkey*'s DSA/ASN.1 signature over the
    SHA-1 digest of *plaintext*."""
    return pubkey.verify_dsa_asn1(sha(plaintext).digest(), blob)
    def got_metadata(self, permid, message, selversion):
        """Handle an incoming METADATA overlay message.

        Bdecodes the payload, validates the contained torrent, saves it to
        disk and notifies listeners.  Returns False on malformed/broken
        input; True otherwise (including when the torrent was never
        requested or is already known).
        """
        try:
            message = bdecode(message[1:])
        except:
            print_exc()
            return False

        if not isinstance(message, dict):
            return False
        try:
            infohash = message['torrent_hash']
            if not isValidInfohash(infohash):
                return False
            # Ignore unsolicited or already-stored torrents (still "success").
            if infohash not in self.requested_torrents:
                return True
            if self.torrent_db.hasMetaData(infohash):
                return True
            goturl = False
            # Protocol >= 11 may deliver a URL instead of raw metadata;
            # fetch the torrent and re-encode it in that case.
            if selversion >= OLPROTO_VER_ELEVENTH:
                if 'metatype' in message and message[
                        'metatype'] == URL_MIME_TYPE:
                    try:
                        tdef = TorrentDef.load_from_url(message['metadata'])
                        metainfo = tdef.get_metainfo()
                        metadata = bencode(metainfo)
                        goturl = True
                    except:
                        print_exc()
                        return False

                else:
                    metadata = message['metadata']
            else:
                metadata = message['metadata']
            if not self.valid_metadata(infohash, metadata):
                return False
            if DEBUG:
                torrent_size = len(metadata)
                if goturl:
                    mdt = 'URL'
                else:
                    mdt = 'torrent'
                print >> sys.stderr, 'metadata: Recvd', mdt, ` infohash `, sha(
                    infohash).hexdigest(), torrent_size
            extra_info = {}
            # Protocol >= 4 piggybacks swarm statistics on the message.
            if selversion >= OLPROTO_VER_FOURTH:
                try:
                    extra_info = {
                        'leecher': message.get('leecher', -1),
                        'seeder': message.get('seeder', -1),
                        'last_check_time': message.get('last_check_time', -1),
                        'status': message.get('status', 'unknown')
                    }
                except Exception as msg:
                    print_exc()
                    print >> sys.stderr, 'metadata: wrong extra info in msg - ', message
                    extra_info = {}

            filename = self.save_torrent(infohash,
                                         metadata,
                                         extra_info=extra_info)
            self.requested_torrents.remove(infohash)
            if filename is not None:
                self.notify_torrent_is_in(infohash, metadata, filename)
            # Optionally record the exchange for BarterCast accounting.
            if permid is not None and BARTERCAST_TORRENTS:
                self.overlay_bridge.add_task(
                    lambda: self.olthread_bartercast_torrentexchange(
                        permid, 'downloaded'), 0)
        except Exception as e:
            print_exc()
            print >> sys.stderr, 'metadata: Received metadata is broken', e, message.keys(
            )
            return False

        return True
def get_collected_torrent_filename(infohash):
    """Return the canonical on-disk name for a collected torrent:
    hex SHA-1 of the infohash, plus the '.torrent' extension."""
    return sha(infohash).hexdigest() + '.torrent'
def add_key(tracker):
    """Derive and cache a 6-character base64 key for *tracker*.

    The key is built from the low 6 bits of each of the last 6 bytes of
    sha(basekeydata + tracker), mapped through mapbase64, and stored in
    the module-level keys dict.
    """
    tail = sha(basekeydata + tracker).digest()[-6:]
    keys[tracker] = ''.join(mapbase64[ord(ch) & 63] for ch in tail)
def sign_data(plaintext, ec_keypair):
    # Produce a DSA/ASN.1 signature over the SHA-1 digest of plaintext.
    msg_digest = sha(plaintext).digest()
    signature = ec_keypair.sign_dsa_asn1(msg_digest)
    return signature
def verify_data(plaintext, permid, blob):
    # Verify blob against the SHA-1 digest of plaintext using the
    # DER-encoded EC public key carried in permid.
    key = EC.pub_key_from_der(permid)
    msg_hash = sha(plaintext).digest()
    return key.verify_dsa_asn1(msg_hash, blob)
# Beispiel #43 (scraped example-site marker; commented out — not Python code)
# 0
def get_collected_torrent_filename(infohash):
    # Collected torrents are stored under hex(sha1(infohash)) + ".torrent".
    hexdigest = sha(infohash).hexdigest()
    return "%s.torrent" % hexdigest
    def got_metadata(self, permid, message, selversion):
        """Handle an incoming METADATA overlay message.

        Bdecodes the payload, validates the contained torrent, saves it to
        disk and notifies listeners.  Returns False on malformed/broken
        input; True otherwise (including when the torrent was never
        requested or is already known).
        """
        try:
            message = bdecode(message[1:])
        except:
            print_exc()
            return False

        if not isinstance(message, dict):
            return False
        try:
            infohash = message['torrent_hash']
            if not isValidInfohash(infohash):
                return False
            # Ignore unsolicited or already-stored torrents (still "success").
            if infohash not in self.requested_torrents:
                return True
            if self.torrent_db.hasMetaData(infohash):
                return True
            goturl = False
            # Protocol >= 11 may deliver a URL instead of raw metadata;
            # fetch the torrent and re-encode it in that case.
            if selversion >= OLPROTO_VER_ELEVENTH:
                if 'metatype' in message and message['metatype'] == URL_MIME_TYPE:
                    try:
                        tdef = TorrentDef.load_from_url(message['metadata'])
                        metainfo = tdef.get_metainfo()
                        metadata = bencode(metainfo)
                        goturl = True
                    except:
                        print_exc()
                        return False

                else:
                    metadata = message['metadata']
            else:
                metadata = message['metadata']
            if not self.valid_metadata(infohash, metadata):
                return False
            if DEBUG:
                torrent_size = len(metadata)
                if goturl:
                    mdt = 'URL'
                else:
                    mdt = 'torrent'
                print >> sys.stderr, 'metadata: Recvd', mdt, `infohash`, sha(infohash).hexdigest(), torrent_size
            extra_info = {}
            # Protocol >= 4 piggybacks swarm statistics on the message.
            if selversion >= OLPROTO_VER_FOURTH:
                try:
                    extra_info = {'leecher': message.get('leecher', -1),
                     'seeder': message.get('seeder', -1),
                     'last_check_time': message.get('last_check_time', -1),
                     'status': message.get('status', 'unknown')}
                except Exception as msg:
                    print_exc()
                    print >> sys.stderr, 'metadata: wrong extra info in msg - ', message
                    extra_info = {}

            filename = self.save_torrent(infohash, metadata, extra_info=extra_info)
            self.requested_torrents.remove(infohash)
            if filename is not None:
                self.notify_torrent_is_in(infohash, metadata, filename)
            # Optionally record the exchange for BarterCast accounting.
            if permid is not None and BARTERCAST_TORRENTS:
                self.overlay_bridge.add_task(lambda : self.olthread_bartercast_torrentexchange(permid, 'downloaded'), 0)
        except Exception as e:
            print_exc()
            print >> sys.stderr, 'metadata: Received metadata is broken', e, message.keys()
            return False

        return True
# Beispiel #45 (scraped example-site marker; commented out — not Python code)
# 0
def rsa_sign_data(plaintext, extra, rsa_keypair):
    """RSA-sign the SHA-1 digest of *plaintext* followed by *extra*."""
    hasher = sha(plaintext)
    hasher.update(extra)
    return rsa_keypair.sign(hasher.digest())
# Beispiel #46 (scraped example-site marker; commented out — not Python code)
# 0
def ecdsa_sign_data(plaintext, extra, ec_keypair):
    # DSA/ASN.1 signature over sha(plaintext || extra).
    h = sha(plaintext)
    h.update(extra)
    combined_digest = h.digest()
    return ec_keypair.sign_dsa_asn1(combined_digest)
def rsa_sign_data(plaintext, extra, rsa_keypair):
    # Sign sha(plaintext || extra) with the RSA keypair.
    h = sha(plaintext)
    h.update(extra)
    combined = h.digest()
    return rsa_keypair.sign(combined)
def sign_response(randomA, randomB, peeridB, keypairA):
    """Sign the bencoded challenge [randomA, randomB, peeridB] with keypairA
    and return the DSA/ASN.1 signature blob."""
    # Local renamed from `list`, which shadowed the builtin.
    challenge = [randomA, randomB, peeridB]
    payload = bencode(challenge)
    return keypairA.sign_dsa_asn1(sha(payload).digest())
# Beispiel #49 (scraped example-site marker; commented out — not Python code)
# 0
def ecdsa_verify_data_pubkeyobj(plaintext, extra, pubkey, blob):
    """Return True iff *blob* is *pubkey*'s DSA/ASN.1 signature over the
    SHA-1 digest of *plaintext* followed by *extra*."""
    hasher = sha(plaintext)
    hasher.update(extra)
    return pubkey.verify_dsa_asn1(hasher.digest(), blob)