def getSomeMetadata(self, channel_id, infohash):
    """Build a MetadataDTO carrying two subtitle entries ('eng', 'rus').

    The subtitle texts are stored on self (content1/content2) so the
    caller can compare them later; each SubtitleInfo carries the SHA-1
    checksum of its text. The DTO is signed with self.nextKeypair when
    one is set, otherwise a fake signature string is attached.
    """
    self.content1 = u"Subtitle Content 1"
    self.content2 = u"Subtitle Content 2"

    subtitles = {}
    for lang, text in (("eng", self.content1), ("rus", self.content2)):
        info = SubtitleInfo(lang, None)
        digester = sha()
        digester.update(text)
        info.checksum = digester.digest()
        subtitles[lang] = info

    metadata = MetadataDTO(channel_id, infohash, time.time(), "", subtitles)

    if self.nextKeypair is None:
        metadata.signature = "fake"
    else:
        metadata.sign(self.nextKeypair)
    return metadata
def setupDB(self, nickname):
    """Set the session nickname and populate torrent/channel/vote tables.

    Must run before any DB inserts, since the nickname is read at insert
    time. Population failures are printed but not propagated (best-effort
    test-fixture setup).
    """
    # Change at runtime. Must be set before DB inserts
    self.session.set_nickname(nickname)
    self.torrent_db = self.session.open_dbhandler(NTFY_TORRENTS)
    self.channelcast_db = self.session.open_dbhandler(NTFY_CHANNELCAST)
    self.votecast_db = self.session.open_dbhandler(NTFY_VOTECAST)
    try:
        # Add some torrents belonging to own channel
        tdef1, self.bmetainfo1 = self.get_default_torrent('sumfilename1', 'Hallo S01E10')
        dbrec = self.torrent_db.addExternalTorrent(tdef1, extra_info={"filename": "sumfilename1"})
        self.infohash1 = tdef1.get_infohash()
        self.channelcast_db.addOwnTorrent(tdef1)

        tdef2, self.bmetainfo2 = self.get_default_torrent('sumfilename2', 'Hallo S02E01')
        dbrec = self.torrent_db.addExternalTorrent(tdef2, extra_info={"filename": "sumfilename2"})
        self.infohash2 = tdef2.get_infohash()
        self.torrenthash2 = sha(self.bmetainfo2).digest()
        self.channelcast_db.addOwnTorrent(tdef2)

        tdef3, self.bmetainfo3 = self.get_default_torrent('sumfilename3', 'Halo Demo')
        self.torrent_db.addExternalTorrent(tdef3, extra_info={"filename": "sumfilename3"})
        self.infohash3 = tdef3.get_infohash()
        self.torrenthash3 = sha(self.bmetainfo3).digest()
        self.channelcast_db.addOwnTorrent(tdef3)

        # Now, add some votes
        self.votecast_db.subscribe("MFIwEAYHKoZIzj0CAQYFK4EEABoDPgAEAIV8h+eS+vQ+0uqZNv3MYYTLo5s0JP+cmkvJ7U4JAHhfRv1wCqZSKIuY7Q+3ESezhRnnmmX4pbOVhKTU")
        self.votecast_db.spam("MFIwEAYHKoZIzj0CAQYFK4EEABoDPgAEAIV8h+eS+vQ+0uqZNv3MYYTLo5s0JP+cmkvJ7U4JAHhfRv1wCqZSKIuY7Q+3ESezhRnnmmX4pbOVhKTX")
        vote = {'mod_id': "MFIwEAYHKoZIzj0CAQYFK4EEABoDPgAEAIV8h+eS+vQ+0uqZNv3MYYTLo5s0JP+cmkvJ7U4JAHhfRv1wCqZSKIuY7Q+3ESezhRnnmmX4pbOVhKTU",
                'voter_id': "MFIwEAYHKoZIzj0CAQYFK4EEABoDPgAEAIV8h+eS+vQ+0uqZNv3MYYTLo5s0JP+cmkvJ7U4JAHhfRv1wCqZSKIuY7Q+3ESezhRnnmmX4pbOVhKTX",
                'vote': 1,
                'time_stamp': 132314}
        self.votecast_db.addVote(vote)
    except Exception:
        # was a bare "except:": narrowed so SystemExit/KeyboardInterrupt
        # can still escape; real errors are still only logged (best-effort)
        print_exc()
def getSomeMetadata(self, channel_id, infohash):
    """Return a MetadataDTO with 'eng' and 'rus' subtitle entries.

    Subtitle contents are kept on self so tests can inspect them; each
    SubtitleInfo gets the SHA-1 digest of its content as checksum. The
    DTO is signed with self.nextKeypair if available, else marked with a
    fake signature.
    """
    self.content1 = u"Subtitle Content 1"
    self.content2 = u"Subtitle Content 2"

    entries = {}
    for code, body in (("eng", self.content1), ("rus", self.content2)):
        subtitle = SubtitleInfo(code, None)
        checksummer = sha()
        checksummer.update(body)
        subtitle.checksum = checksummer.digest()
        entries[code] = subtitle

    metadata = MetadataDTO(channel_id, infohash, time.time(), "", entries)

    if self.nextKeypair is None:
        metadata.signature = "fake"
    else:
        metadata.sign(self.nextKeypair)
    return metadata
def setupDB(self, nickname):
    """Configure the session nickname and fill the test DB with torrents
    and votes.

    The nickname must be set before any DB insert. Errors while
    populating are logged and swallowed (fixture setup is best-effort).
    """
    # Change at runtime. Must be set before DB inserts
    self.session.set_nickname(nickname)
    self.torrent_db = self.session.open_dbhandler(NTFY_TORRENTS)
    self.channelcast_db = self.session.open_dbhandler(NTFY_CHANNELCAST)
    self.votecast_db = self.session.open_dbhandler(NTFY_VOTECAST)
    try:
        # Add some torrents belonging to own channel
        tdef1, self.bmetainfo1 = self.get_default_torrent('sumfilename1', 'Hallo S01E10')
        dbrec = self.torrent_db.addExternalTorrent(tdef1, extra_info={"filename": "sumfilename1"})
        self.infohash1 = tdef1.get_infohash()
        self.channelcast_db.addOwnTorrent(tdef1)

        tdef2, self.bmetainfo2 = self.get_default_torrent('sumfilename2', 'Hallo S02E01')
        dbrec = self.torrent_db.addExternalTorrent(tdef2, extra_info={"filename": "sumfilename2"})
        self.infohash2 = tdef2.get_infohash()
        self.torrenthash2 = sha(self.bmetainfo2).digest()
        self.channelcast_db.addOwnTorrent(tdef2)

        tdef3, self.bmetainfo3 = self.get_default_torrent('sumfilename3', 'Halo Demo')
        self.torrent_db.addExternalTorrent(tdef3, extra_info={"filename": "sumfilename3"})
        self.infohash3 = tdef3.get_infohash()
        self.torrenthash3 = sha(self.bmetainfo3).digest()
        self.channelcast_db.addOwnTorrent(tdef3)

        # Now, add some votes
        self.votecast_db.subscribe("MFIwEAYHKoZIzj0CAQYFK4EEABoDPgAEAIV8h+eS+vQ+0uqZNv3MYYTLo5s0JP+cmkvJ7U4JAHhfRv1wCqZSKIuY7Q+3ESezhRnnmmX4pbOVhKTU")
        self.votecast_db.spam("MFIwEAYHKoZIzj0CAQYFK4EEABoDPgAEAIV8h+eS+vQ+0uqZNv3MYYTLo5s0JP+cmkvJ7U4JAHhfRv1wCqZSKIuY7Q+3ESezhRnnmmX4pbOVhKTX")
        vote = {'mod_id': "MFIwEAYHKoZIzj0CAQYFK4EEABoDPgAEAIV8h+eS+vQ+0uqZNv3MYYTLo5s0JP+cmkvJ7U4JAHhfRv1wCqZSKIuY7Q+3ESezhRnnmmX4pbOVhKTU",
                'voter_id': "MFIwEAYHKoZIzj0CAQYFK4EEABoDPgAEAIV8h+eS+vQ+0uqZNv3MYYTLo5s0JP+cmkvJ7U4JAHhfRv1wCqZSKIuY7Q+3ESezhRnnmmX4pbOVhKTX",
                'vote': 1,
                'time_stamp': 132314}
        self.votecast_db.addVote(vote)
    except Exception:
        # narrowed from a bare "except:" so process-control exceptions
        # (KeyboardInterrupt/SystemExit) are no longer swallowed
        print_exc()
def setUpPostSession(self):
    """ override TestAsServer """
    TestAsServer.setUpPostSession(self)

    # DER-encoded public keys act as permids; SHA-1 of our permid follows
    self.mypermid = str(self.my_keypair.pub().get_der())
    self.hispermid = str(self.his_keypair.pub().get_der())
    self.myhash = sha(self.mypermid).digest()

    # Give Tribler some download history
    print >> sys.stderr, "test: Populating MYPREFERENCES table"
    self.myprefdb = self.session.open_dbhandler(NTFY_MYPREFERENCES)
    data = {'destination_path': '.'}
    infohashes = self.create_good_my_prefs(self, btconn.current_version)
    for i in range(0, len(infohashes)):
        commit = (i == len(infohashes) - 1)  # commit only on the last insert
        self.myprefdb.addMyPreference(infohashes[i], data, commit=commit)

    # Give Tribler some peers
    print >> sys.stderr, "test: Populating PEERS table"
    self.peerdb = self.session.open_dbhandler(NTFY_PEERS)
    past = int(time.time()) - 1000000000
    # BUGFIX: a stray "peers = []" right after this call used to discard
    # the generated peers, so the loop below never ran and the PEERS
    # table was never actually populated.
    peers = self.create_good_random_peers(btconn.current_version, num=200)
    for i in range(0, len(peers)):
        peer = peers[i]
        peer.update({'last_seen': past, 'last_connected': past})
        del peer['connect_time']
        peer['num_torrents'] = peer['nfiles']  # DB schema uses num_torrents
        del peer['nfiles']
        commit = (i == len(peers) - 1)
        self.peerdb.addPeer(peer['permid'], peer, update_dns=True, update_connected=True, commit=commit)
def check_response1(self, resp1_data, rB, myid):
    """Decode and validate a RESPONSE1 message; return the decoded dict.

    Checks that all fields are present, that rA/B/SA have the expected
    types and sizes, and that SA verifies over bencode([rA, rB, myid])
    with the key from certA.
    """
    resp1 = bdecode(resp1_data)
    self.assert_(type(resp1) == DictType)
    self.assert_(resp1.has_key('certA'))
    self.assert_(resp1.has_key('rA'))
    self.assert_(resp1.has_key('B'))
    self.assert_(resp1.has_key('SA'))
    # should throw an exception when the key is no good
    pubA = EC.pub_key_from_der(resp1['certA'])
    rA = resp1['rA']
    self.assert_(type(rA) == StringType)
    self.assert_(len(rA) == random_size)
    B = resp1['B']
    self.assert_(type(B) == StringType)
    # BUGFIX: assert_(B, myid) only tested B's truthiness (myid was the
    # failure message); actually compare the echoed identity.
    self.assert_(B == myid)
    SA = resp1['SA']
    self.assert_(type(SA) == StringType)
    # verify signature
    sig_list = [rA, rB, myid]
    sig_data = bencode(sig_list)
    sig_hash = sha(sig_data).digest()
    self.assert_(pubA.verify_dsa_asn1(sig_hash, SA))
    # Cannot resign the data with his keypair to double check. Signing
    # appears to yield different, supposedly valid sigs each time.
    return resp1
def convert__postsession_4_1__4_2(self, session, default_download_config):
    """Migrate 4.1 post-session settings (mugshot file and download
    defaults) into the 4.2 session/download-config objects."""
    # the mugshot was stored in icons/<hash>.jpg
    # however... what is the permid???
    safename = "%s.jpg" % sha(session.get_permid()).hexdigest()
    safepath = os.path.join(self.dir_root, "icons", safename)
    if os.path.exists(safepath):
        # BUGFIX: read the JPEG in binary mode ("r" corrupts bytes on
        # Windows) and close the handle before removing the file.
        f = open(safepath, "rb")
        try:
            session.set_mugshot(f.read(), "image/jpeg")
        finally:
            f.close()
        os.remove(safepath)

    # old config stores booleans as the strings "0"/"1"
    bool_ = lambda x: x == "1"
    self._convert__helper_4_1__4_2(self.config, default_download_config.set_alloc_rate, "alloc_rate", int)
    self._convert__helper_4_1__4_2(self.config, default_download_config.set_alloc_type, "alloc_type")
    self._convert__helper_4_1__4_2(self.config, default_download_config.set_dest_dir, "defaultfolder")
    self._convert__helper_4_1__4_2(self.config, default_download_config.set_double_check_writes, "double_check", bool_)
    self._convert__helper_4_1__4_2(self.config, default_download_config.set_lock_files, "lock_files", bool_)
    self._convert__helper_4_1__4_2(self.config, default_download_config.set_lock_while_reading, "lock_while_reading", bool_)
    self._convert__helper_4_1__4_2(self.config, default_download_config.set_max_conns, "max_connections", int)
    self._convert__helper_4_1__4_2(self.config, default_download_config.set_max_files_open, "max_files_open", int)
    # note: config key "trible_check" is a historical typo kept for compat
    self._convert__helper_4_1__4_2(self.config, default_download_config.set_triple_check_writes, "trible_check", bool_)
def fill_tree(tree, height, npieces, hashes):
    """Populate a flat merkle tree in place and return it.

    Copies the piece hashes into the leaf row, then hashes sibling pairs
    upward level by level until the root is filled.
    """
    # 1. Fill bottom of tree with hashes
    leafstart = int(pow(2, height) - 1)
    if DEBUG:
        print >> sys.stderr, "merkle: bottom of tree starts at", leafstart
    for idx in xrange(npieces):
        tree[leafstart + idx] = hashes[idx]
    # 2. Unused leaves are deliberately NOT filled. Any non-zero filler
    # would have to be public info so that every independent initial
    # seeder builds an identical tree; whether public filler is any safer
    # than zeros, cryptographically speaking, is unclear — so zeros stay.
    # 3. Calculate higher level hashes from leaves
    for level in range(height, 0, -1):
        if DEBUG:
            print >> sys.stderr, "merkle: calculating level", level
        first = int(pow(2, level) - 1)
        last = int(pow(2, level + 1) - 2)
        for offset in range(first, last, 2):
            [parentstartoffset, parentoffset] = get_parent_offset(offset, level)
            digester = sha()
            digester.update(tree[offset] + tree[offset + 1])
            tree[parentoffset] = digester.digest()
    return tree
def check_response1(self, resp1_data, rB, myid):
    """Validate a decoded RESPONSE1 message and return it as a dict.

    Verifies field presence, the type/size of rA, B and SA, and the
    signature SA over bencode([rA, rB, myid]) using the certA key.
    """
    resp1 = bdecode(resp1_data)
    self.assert_(type(resp1) == DictType)
    self.assert_(resp1.has_key('certA'))
    self.assert_(resp1.has_key('rA'))
    self.assert_(resp1.has_key('B'))
    self.assert_(resp1.has_key('SA'))
    # should throw an exception when the key is no good
    pubA = EC.pub_key_from_der(resp1['certA'])
    rA = resp1['rA']
    self.assert_(type(rA) == StringType)
    self.assert_(len(rA) == random_size)
    B = resp1['B']
    self.assert_(type(B) == StringType)
    # BUGFIX: previously assert_(B, myid), which only checked that B is
    # truthy (myid was used as the failure message). Compare explicitly.
    self.assert_(B == myid)
    SA = resp1['SA']
    self.assert_(type(SA) == StringType)
    # verify signature
    sig_list = [rA, rB, myid]
    sig_data = bencode(sig_list)
    sig_hash = sha(sig_data).digest()
    self.assert_(pubA.verify_dsa_asn1(sig_hash, SA))
    # Cannot resign the data with his keypair to double check. Signing
    # appears to yield different, supposedly valid sigs each time.
    return resp1
def save_torrent(self, infohash, metadata, source='BC', extra_info=None):
    """Write a collected torrent to disk and register it in the DB.

    Returns the file name used, or None when the cache is not
    initialized or the disk is (nearly) full.
    """
    # BUGFIX: extra_info={} was a shared mutable default; use None and
    # materialize a fresh dict per call (same behavior for callers).
    if extra_info is None:
        extra_info = {}
    # check if disk is full before save it to disk and database
    if not self.initialized:
        return None

    self.check_overflow()

    # re-sample free space when it looks tight or every 10th torrent
    if self.min_free_space != 0 and (self.free_space - len(metadata) < self.min_free_space or self.num_collected_torrents % 10 == 0):
        self.free_space = self.get_free_space()
        if self.free_space - len(metadata) < self.min_free_space:
            self.warn_disk_full()
            return None

    file_name = get_collected_torrent_filename(infohash)
    if DEBUG:
        print >> sys.stderr, time.asctime(), '-', "metadata: Storing torrent", sha(infohash).hexdigest(), "in", file_name

    save_path = self.write_torrent(metadata, self.torrent_dir, file_name)
    if save_path:
        self.num_collected_torrents += 1
        self.free_space -= len(metadata)
        self.addTorrentToDB(save_path, infohash, metadata, source=source, extra_info=extra_info)
        # check if space is enough and remove old torrents

    return file_name
def _create(metainfo):  # TODO: replace with constructor
    """Internal factory: wrap an already-parsed metainfo dict in a TorrentDef.

    Validates the metainfo first and computes the infohash/swarm id.
    Raises ValueError when the metainfo is not a valid torrent file.
    """
    # raises ValueErrors if not good
    validTorrentFile(metainfo)

    t = TorrentDef()
    t.metainfo = metainfo
    t.metainfo_valid = True
    # copy stuff into self.input
    maketorrent.copy_metainfo_to_input(t.metainfo, t.input)

    # For testing EXISTING LIVE, or EXISTING MERKLE: DISABLE, i.e. keep true infohash
    if t.get_url_compat():
        # P2P-URL compatible torrents: swarm id is derived from the metainfo
        t.infohash = makeurl.metainfo2swarmid(t.metainfo)
    else:
        # Two places where infohash calculated, here and in maketorrent.py
        # Elsewhere: must use TorrentDef.get_infohash() to allow P2PURLs.
        t.infohash = sha(bencode(metainfo['info'])).digest()

    # sanity: infohash must be a 20-byte binary string in both branches
    assert isinstance(
        t.infohash, str), "INFOHASH has invalid type: %s" % type(t.infohash)
    assert len(
        t.infohash
    ) == INFOHASH_LENGTH, "INFOHASH has invalid length: %d" % len(
        t.infohash)
    #print >>sys.stderr,"INFOHASH",`t.infohash`
    return t
def setUpPostSession(self):
    """ override TestAsServer """
    TestAsServer.setUpPostSession(self)
    # DER-encoded public keys serve as the peers' permanent ids
    my_der = self.my_keypair.pub().get_der()
    his_der = self.his_keypair.pub().get_der()
    self.mypermid = str(my_der)
    self.hispermid = str(his_der)
    self.myhash = sha(self.mypermid).digest()
def save_torrent(self, infohash, metadata, source='BC', extra_info=None):
    """Persist a collected torrent to the torrent dir and the database.

    Returns the chosen file name, or None when not initialized or when
    free disk space is below the configured minimum.
    """
    # BUGFIX: replace the shared mutable default extra_info={} with a
    # per-call fresh dict; callers that omitted the argument see no change.
    if extra_info is None:
        extra_info = {}
    # check if disk is full before save it to disk and database
    if not self.initialized:
        return None

    self.check_overflow()

    # periodically (or when tight) refresh the cached free-space figure
    if self.min_free_space != 0 and (
            self.free_space - len(metadata) < self.min_free_space
            or self.num_collected_torrents % 10 == 0):
        self.free_space = self.get_free_space()
        if self.free_space - len(metadata) < self.min_free_space:
            self.warn_disk_full()
            return None

    file_name = get_collected_torrent_filename(infohash)
    if DEBUG:
        print >> sys.stderr, "metadata: Storing torrent", sha(
            infohash).hexdigest(), "in", file_name

    save_path = self.write_torrent(metadata, self.torrent_dir, file_name)
    if save_path:
        self.num_collected_torrents += 1
        self.free_space -= len(metadata)
        self.addTorrentToDB(save_path, infohash, metadata, source=source, extra_info=extra_info)
        # check if space is enough and remove old torrents

    return file_name
def add_metadata_piece(self, piece, data):
    """ A metadata piece was received.

    Stores the piece data in self._metadata_blocks (a list of mutable
    [requested_count, piece_id, data] entries kept sorted by requested
    count). When every piece is present, verifies SHA1(metadata) against
    self._info_hash: on success invokes self._callback with the decoded
    metadata and a recency-sorted peer list; on failure clears all piece
    data so retrieval restarts from scratch.
    """
    if not self._closed:
        # locate the entry for this piece; decrement its outstanding
        # request count and re-sort so least-requested pieces come first
        for index, block_tuple in zip(xrange(len(self._metadata_blocks)), self._metadata_blocks):
            if block_tuple[1] == piece:
                block_tuple[0] = max(0, block_tuple[0] - 1)
                block_tuple[2] = data
                self._metadata_blocks.sort()
                break

        # def p(s):
        #     if s is None: return 0
        #     return len(s)
        # if DEBUG: print >> sys.stderr, "Progress:", [p(t[2]) for t in self._metadata_blocks]

        # see if we are done (for/else: else runs only if no piece is missing)
        for requested, piece, data in self._metadata_blocks:
            if data is None:
                break
        else:
            # _metadata_blocks is sorted by requested count. we need to sort it by piece-id
            metadata_blocks = [(piece, data) for _, piece, data in self._metadata_blocks]
            metadata_blocks.sort()
            metadata = "".join([data for _, data in metadata_blocks])
            info_hash = sha(metadata).digest()
            if info_hash == self._info_hash:
                if DEBUG:
                    print >> sys.stderr, "MiniBitTorrent.add_metadata_piece() Done!"
                # get nice list with recent BitTorrent peers, sorted
                # by most recently connected
                peers = [(timestamp, address) for address, timestamp in self._good_peers.iteritems()]
                peers.sort(reverse=True)
                peers = [address for _, address in peers]
                self._callback(bdecode(metadata), peers)
            else:
                # for piece, data in metadata_blocks:
                #     open("failed-hash-{0}.data".format(piece), "w+").write(data)
                # todo: hash failed... now what?
                # quick solution... remove everything and try again
                if DEBUG:
                    print >> sys.stderr, "MiniBitTorrent.add_metadata_piece() Failed hashcheck! Restarting all over again :("
                self._metadata_blocks = [[requested, piece, None] for requested, piece, data in self._metadata_blocks]
def create_torrent_signature(metainfo, keypairfilename):
    """Sign the bencoded metainfo in place with the EC key at keypairfilename.

    Adds 'signature' (ASN.1 DSA sig over SHA1 of the bencoding) and
    'signer' (DER public key) entries to the metainfo dict.
    """
    keypair = EC.load_key(keypairfilename)
    digest = sha(bencode(metainfo)).digest()
    metainfo['signature'] = keypair.sign_dsa_asn1(digest)
    metainfo['signer'] = str(keypair.pub().get_der())
def create_torrent_signature(metainfo, keypairfilename):
    """Attach 'signature' and 'signer' fields to metainfo, signing the
    SHA1 of its bencoding with the EC keypair loaded from disk."""
    signing_key = EC.load_key(keypairfilename)
    serialized = bencode(metainfo)
    signature = signing_key.sign_dsa_asn1(sha(serialized).digest())
    metainfo['signature'] = signature
    metainfo['signer'] = str(signing_key.pub().get_der())
def verify(self,piece,index):
    """ A piece is valid if:
    - the signature is correct,
    - the seqnum % npieces == piecenr.
    - the seqnum is no older than self.seqnum - npieces
    @param piece The piece data as received from peer
    @param index The piece number as received from peer
    @return Boolean
    """
    try:
        # Can we do this without memcpy?
        #print >>sys.stderr,"ECDSAAuth: verify",len(piece)
        # trailer layout: [extra][1-byte sig length][padded ASN.1 sig]
        extra = piece[-self.OUR_SIGSIZE:-self.OUR_SIGSIZE+self.EXTRA_SIZE]
        lensig = ord(piece[-self.OUR_SIGSIZE+self.EXTRA_SIZE])
        if lensig > self.MAX_ECDSA_ASN1_SIGSIZE:
            print >>sys.stderr,"ECDSAAuth: @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ failed piece",index,"lensig wrong",lensig
            return False
        #print >>sys.stderr,"ECDSAAuth: verify lensig",lensig
        # diff < 0 when the ASN.1 sig is shorter than the reserved max;
        # slice off the trailing padding in that case
        diff = lensig-self.MAX_ECDSA_ASN1_SIGSIZE
        if diff == 0:
            sig = piece[-self.OUR_SIGSIZE+self.EXTRA_SIZE+self.LENGTH_SIZE:]
        else:
            sig = piece[-self.OUR_SIGSIZE+self.EXTRA_SIZE+self.LENGTH_SIZE:diff]
        content = piece[:-self.OUR_SIGSIZE]
        if DEBUG:
            print >>sys.stderr,"ECDSAAuth: verify piece",index,"sig",`sig`
            print >>sys.stderr,"ECDSAAuth: verify dig",sha(content).hexdigest()
        ret = ecdsa_verify_data_pubkeyobj(content,extra,self.pubkey,sig)
        if ret:
            (seqnum, rtstamp) = self._decode_extra(piece)
            if DEBUG:
                print >>sys.stderr,"ECDSAAuth: verify piece",index,"seq",seqnum,"ts %.5f s" % rtstamp,"ls",lensig
            # seqnums wrap around the piece ring; a valid piece's
            # seqnum must map onto this index and be recent enough
            mod = seqnum % self.get_npieces()
            thres = self.seqnum - self.get_npieces()/2
            if seqnum <= thres:
                print >>sys.stderr,"ECDSAAuth: @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ failed piece",index,"old seqnum",seqnum,"<<",self.seqnum
                return False
            elif mod != index:
                print >>sys.stderr,"ECDSAAuth: @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ failed piece",index,"expected",mod
                return False
            elif self.startts is not None and rtstamp < self.startts:
                print >>sys.stderr,"ECDSAAuth: @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ failed piece",index,"older than oldest known ts",rtstamp,self.startts
                return False
            else:
                self.seqnum = max(self.seqnum,seqnum)
                if self.startts is None:
                    self.startts = rtstamp-300.0 # minus 5 min in case we read piece N+1 before piece N
                    print >>sys.stderr,"ECDSAAuth: @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@: startts",self.startts
        else:
            print >>sys.stderr,"ECDSAAuth: @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ piece",index,"failed sig"
        return ret
    except:
        # malformed pieces must never crash the download; treat as invalid
        print_exc()
        return False
def testGetSubtitlesFileRelativeName(self):
    """Filename must equal SHA1 over (channel_id, infohash, langCode) + '.srt'."""
    name = getSubtitleFileRelativeName(testChannelId, testInfohash, "rus")

    digester = sha()
    for part in (testChannelId, testInfohash, "rus"):
        digester.update(part)
    self.assertEquals(digester.hexdigest() + ".srt", name)
def create_resp2_bad_sig_input(self, rB, resp1_dict, hisid):
    """Build a RESPONSE2 whose SB signs garbage bytes instead of the
    challenge data (negative test); rB/resp1_dict are intentionally unused."""
    garbage = '\x00\x00\x00\x00\x00\x30\x00\x00'
    bad_sig = str(self.my_keypair.sign_dsa_asn1(sha(garbage).digest()))
    resp2 = {'certB': str(self.my_keypair.pub().get_der()),
             'A': hisid,
             'SB': bad_sig}
    return self.create_response2_payload(resp2)
def create_resp2_bad_sig_input(self, rB, resp1_dict, hisid):
    """Negative-test RESPONSE2: the signature covers fixed junk bytes
    rather than the handshake randoms, so verification must fail."""
    junk_hash = sha('\x00\x00\x00\x00\x00\x30\x00\x00').digest()
    resp2 = {}
    resp2['certB'] = str(self.my_keypair.pub().get_der())
    resp2['A'] = hisid
    resp2['SB'] = str(self.my_keypair.sign_dsa_asn1(junk_hash))
    return self.create_response2_payload(resp2)
def verify_torrent_signature(metainfo):
    """Check the embedded 'signature' against the SHA1 of the metainfo
    with the signature fields stripped; returns the verifier's result."""
    stripped = deepcopy(metainfo)
    signature = stripped.pop('signature')
    signer = stripped.pop('signer')
    digest = sha(bencode(stripped)).digest()
    return do_verify_torrent_signature(digest, signature, signer)
def create_resp2_wrong_randomA(self, rB, resp1_dict, hisid):
    """RESPONSE2 signed over a bogus rA value (negative test): the peer
    must reject it because its own random is not in the signed data."""
    der = str(self.my_keypair.pub().get_der())
    signed = bencode([rB, "wrong".zfill(random_size), hisid])
    sig = str(self.my_keypair.sign_dsa_asn1(sha(signed).digest()))
    resp2 = {'certB': der, 'A': hisid, 'SB': sig}
    return self.create_response2_payload(resp2)
def rsa_verify_data_pubkeyobj(plaintext, extra, pubkey, sig):
    """Verify an RSA signature over SHA1(plaintext + extra) with pubkey."""
    digester = sha(plaintext)
    digester.update(extra)
    # sig arrives as array.array here (why?); M2Crypto's RSA verify
    # complains unless it gets a string or Unicode object. Check if
    # this conversion is a memcpy.
    return pubkey.verify(digester.digest(), sig.tostring())
def setUpPostSession(self):
    """ override TestAsServer """
    TestAsServer.setUpPostSession(self)
    # permids are the DER-encoded public keys of both keypairs
    my_der = self.my_keypair.pub().get_der()
    his_der = self.his_keypair.pub().get_der()
    self.mypermid = str(my_der)
    self.hispermid = str(his_der)
    self.myhash = sha(self.mypermid).digest()
    # run a superpeer-mode BuddyCast on the overlay thread
    self.buddycast = BuddyCastFactory.getInstance(superpeer=True)
    self.buddycast.olthread_register(True)
def verify_torrent_signature(metainfo):
    """Verify metainfo['signature'] (by metainfo['signer']) against the
    SHA1 of the bencoded metainfo minus the two signature fields."""
    unsigned = deepcopy(metainfo)
    sig = unsigned.pop('signature')
    who = unsigned.pop('signer')
    return do_verify_torrent_signature(sha(bencode(unsigned)).digest(), sig, who)
def create_good_response2(self, rB, resp1_dict, hisid):
    """Build a well-formed RESPONSE2: our cert, the peer's id, and SB
    signed over bencode([rB, rA, hisid])."""
    der = str(self.my_keypair.pub().get_der())
    signed = bencode([rB, resp1_dict['rA'], hisid])
    sig = str(self.my_keypair.sign_dsa_asn1(sha(signed).digest()))
    resp2 = {'certB': der, 'A': hisid, 'SB': sig}
    return self.create_response2_payload(resp2)
def create_good_response2(self, rB, resp1_dict, hisid):
    """Assemble a valid RESPONSE2 message for the challenge/response
    handshake and return its serialized payload."""
    signed_blob = bencode([rB, resp1_dict['rA'], hisid])
    signature = str(self.my_keypair.sign_dsa_asn1(sha(signed_blob).digest()))
    resp2 = {}
    resp2['certB'] = str(self.my_keypair.pub().get_der())
    resp2['A'] = hisid
    resp2['SB'] = signature
    return self.create_response2_payload(resp2)
def create_resp2_wrong_randomA(self, rB, resp1_dict, hisid):
    """Negative-test RESPONSE2: SB is a valid signature, but over a
    fabricated rA instead of the one from RESPONSE1."""
    fake_rA = "wrong".zfill(random_size)
    blob = bencode([rB, fake_rA, hisid])
    resp2 = {}
    resp2['certB'] = str(self.my_keypair.pub().get_der())
    resp2['A'] = hisid
    resp2['SB'] = str(self.my_keypair.sign_dsa_asn1(sha(blob).digest()))
    return self.create_response2_payload(resp2)
def create_good_response1(self, rB, hisid):
    """Build a well-formed RESPONSE1; returns [rA, serialized payload]."""
    rA = "".zfill(cr_random_size)  # deterministic 'random' for the test
    der = str(self.my_keypair.pub().get_der())
    signed = bencode([rA, rB, hisid])
    sig = str(self.my_keypair.sign_dsa_asn1(sha(signed).digest()))
    resp1 = {'certA': der, 'rA': rA, 'B': hisid, 'SA': sig}
    return [rA, self.create_response1_payload(resp1)]
def create_bad_resp1_bad_cert(self, rB, hisid):
    """Negative-test RESPONSE1: certA is garbage bytes while the
    signature itself is produced with our real key."""
    rA = "".zfill(random_size)
    blob = bencode([rA, rB, hisid])
    sig = str(self.my_keypair.sign_dsa_asn1(sha(blob).digest()))
    resp1 = {'certA': '\x00\x00\x00\x00\x00\x30\x00\x00',
             'rA': rA,
             'B': hisid,
             'SA': sig}
    return [rA, self.create_response1_payload(resp1)]
def create_good_response1(self, rB, hisid):
    """Create a valid RESPONSE1 message; returns [rA, payload]."""
    rA = "".zfill(cr_random_size)
    blob = bencode([rA, rB, hisid])
    signature = str(self.my_keypair.sign_dsa_asn1(sha(blob).digest()))
    resp1 = {}
    resp1['certA'] = str(self.my_keypair.pub().get_der())
    resp1['rA'] = rA
    resp1['B'] = hisid
    resp1['SA'] = signature
    return [rA, self.create_response1_payload(resp1)]
def create_bad_resp1_too_short_randomA(self, rB, hisid):
    """Negative-test RESPONSE1: rA is shorter than random_size, so the
    length check on the receiving side must reject it."""
    rA = '\x00\x00\x00\x00\x00\x30\x00\x00'
    der = str(self.my_keypair.pub().get_der())
    signed = bencode([rA, rB, hisid])
    sig = str(self.my_keypair.sign_dsa_asn1(sha(signed).digest()))
    resp1 = {'certA': der, 'rA': rA, 'B': hisid, 'SA': sig}
    return [rA, self.create_response1_payload(resp1)]
def create_bad_resp1_bad_cert(self, rB, hisid):
    """RESPONSE1 with junk bytes in certA (negative test); SA is still a
    real signature made with our keypair."""
    rA = "".zfill(random_size)
    signature = str(self.my_keypair.sign_dsa_asn1(
        sha(bencode([rA, rB, hisid])).digest()))
    resp1 = {}
    resp1['certA'] = '\x00\x00\x00\x00\x00\x30\x00\x00'
    resp1['rA'] = rA
    resp1['B'] = hisid
    resp1['SA'] = signature
    return [rA, self.create_response1_payload(resp1)]
def create_bad_resp1_too_short_randomA(self, rB, hisid):
    """RESPONSE1 whose rA is only 8 bytes — shorter than random_size —
    to exercise the receiver's length validation (negative test)."""
    rA = '\x00\x00\x00\x00\x00\x30\x00\x00'
    signature = str(self.my_keypair.sign_dsa_asn1(
        sha(bencode([rA, rB, hisid])).digest()))
    resp1 = {}
    resp1['certA'] = str(self.my_keypair.pub().get_der())
    resp1['rA'] = rA
    resp1['B'] = hisid
    resp1['SA'] = signature
    return [rA, self.create_response1_payload(resp1)]
def pubkey2swarmid(livedict):
    """ Calculate SHA1 of pubkey (or cert). Make X.509 Subject Key Identifier compatible? """
    if DEBUG:
        print >> sys.stderr, "pubkey2swarmid:", livedict.keys()

    if livedict['authmethod'] == "None":
        # No live-source auth: fall back to a random 20-byte swarm id
        return Rand.rand_bytes(20)
    return sha(livedict['pubkey']).digest()
def pubkey2swarmid(livedict):
    """ Calculate SHA1 of pubkey (or cert). Make X.509 Subject Key Identifier compatible? """
    if DEBUG:
        print >> sys.stderr, "pubkey2swarmid:", livedict.keys()

    if livedict['authmethod'] != "None":
        return sha(livedict['pubkey']).digest()
    # no live-source auth configured: use a random 20-byte id instead
    return Rand.rand_bytes(20)
def add_metadata_piece(self, piece, data):
    """ A metadata piece was received.

    Records the piece in self._metadata_blocks (mutable
    [requested_count, piece_id, data] entries sorted by request count),
    emits a NTFY_MAGNET_PROGRESS notification with the fraction of
    pieces present, and — once all pieces are in — verifies
    SHA1(metadata) against self._info_hash. Success fires self._callback
    with the decoded metadata and recency-sorted peers; failure wipes
    all piece data so collection restarts.
    """
    if not self._closed:
        # find this piece's entry; decrement its outstanding-request
        # counter and keep the list sorted by that counter
        for index, block_tuple in zip(xrange(len(self._metadata_blocks)), self._metadata_blocks):
            if block_tuple[1] == piece:
                block_tuple[0] = max(0, block_tuple[0] - 1)
                block_tuple[2] = data
                self._metadata_blocks.sort()
                break

        # def p(s):
        #     if s is None: return 0
        #     return len(s)
        # if DEBUG: print >> sys.stderr, "Progress:", [p(t[2]) for t in self._metadata_blocks]

        # report progress as fraction of blocks with data present
        progress = sum([1 if t[2] else 0 for t in self._metadata_blocks])/float(len(self._metadata_blocks))
        self._notifier.notify(NTFY_TORRENTS, NTFY_MAGNET_PROGRESS, self._info_hash, progress)

        # see if we are done (for/else: else runs only when nothing is missing)
        for requested, piece, data in self._metadata_blocks:
            if data is None:
                break
        else:
            # _metadata_blocks is sorted by requested count. we need to sort it by piece-id
            metadata_blocks = [(piece, data) for _, piece, data in self._metadata_blocks]
            metadata_blocks.sort()
            metadata = "".join([data for _, data in metadata_blocks])
            info_hash = sha(metadata).digest()
            if info_hash == self._info_hash:
                if DEBUG:
                    print >> sys.stderr, "MiniBitTorrent.add_metadata_piece() Done!"
                # get nice list with recent BitTorrent peers, sorted
                # by most recently connected
                peers = [(timestamp, address) for address, timestamp in self._good_peers.iteritems()]
                peers.sort(reverse=True)
                peers = [address for _, address in peers]
                self._callback(bdecode(metadata), peers)
            else:
                # for piece, data in metadata_blocks:
                #     open("failed-hash-{0}.data".format(piece), "w+").write(data)
                # todo: hash failed... now what?
                # quick solution... remove everything and try again
                if DEBUG:
                    print >> sys.stderr, "MiniBitTorrent.add_metadata_piece() Failed hashcheck! Restarting all over again :("
                self._metadata_blocks = [[requested, piece, None] for requested, piece, data in self._metadata_blocks]
def getSubtitleFileRelativeName(channel_id, infohash, langCode):
    """Return the on-disk subtitle name: SHA1 over the triple
    (channel_id, infohash, langCode) plus the subtitles extension.

    channel_id and infohash are the binary representations.
    """
    assert utilities.validPermid(channel_id), "Invalid channel_id %s" % utilities.show_permid_short(channel_id)
    assert utilities.validInfohash(infohash), "Invalid infohash %s" % bin2str(infohash)
    assert LanguagesProvider.getLanguagesInstance().isLangCodeSupported(langCode), "Unsupported language code %s" % langCode

    digester = sha()
    for part in (channel_id, infohash, langCode):
        digester.update(part)
    return digester.hexdigest() + SUBS_EXTENSION
def setUpPostSession(self):
    """ override TestAsServer """
    TestAsServer.setUpPostSession(self)

    self.my_permid = str(self.my_keypair.pub().get_der())
    self.my_hash = sha(self.my_permid).digest()
    self.his_permid = str(self.his_keypair.pub().get_der())

    # Start our server side, to with Tribler will try to connect
    self.listen_port = 4123
    server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    server.bind(("", self.listen_port))
    server.listen(10)
    server.settimeout(10)
    self.listen_socket = server
def check_fork(a, b, level):
    """Hash two sibling merkle nodes a/b ([offset, hash] pairs) into
    their parent; returns [parent_offset, parent_hash]."""
    myoffset = a[0]
    siblingoffset = b[0]
    # concatenate left-sibling hash before right-sibling hash
    if myoffset > siblingoffset:
        combined = b[1] + a[1]
        if DEBUG:
            print >> sys.stderr, "merkle: combining", siblingoffset, myoffset
    else:
        combined = a[1] + b[1]
        if DEBUG:
            print >> sys.stderr, "merkle: combining", myoffset, siblingoffset
    digester = sha()
    digester.update(combined)
    [parentstartoffset, parentoffset] = get_parent_offset(myoffset, level - 1)
    return [parentoffset, digester.digest()]
def check_fork(a, b, level):
    """Combine sibling merkle nodes into the parent node.

    a and b are [offset, hash] pairs; the lower offset is the left
    sibling. Returns [parent_offset, sha1(left + right)].
    """
    myoffset, siblingoffset = a[0], b[0]
    if myoffset > siblingoffset:
        left, right = b[1], a[1]
        if DEBUG:
            print >> sys.stderr, "merkle: combining", siblingoffset, myoffset
    else:
        left, right = a[1], b[1]
        if DEBUG:
            print >> sys.stderr, "merkle: combining", myoffset, siblingoffset
    hasher = sha()
    hasher.update(left + right)
    [parentstartoffset, parentoffset] = get_parent_offset(myoffset, level - 1)
    return [parentoffset, hasher.digest()]
def check_response2(self, resp2_data, rA, rB, myid):
    """Decode and validate a RESPONSE2 message.

    Checks field presence, that A echoes our id, and that SB verifies
    over bencode([rB, rA, myid]) with the key from certB.
    """
    resp2 = bdecode(resp2_data)
    self.testcase.assert_(type(resp2) == DictType)
    self.testcase.assert_(resp2.has_key('certB'))
    self.testcase.assert_(resp2.has_key('A'))
    self.testcase.assert_(resp2.has_key('SB'))
    # should throw an exception when the key is no good
    pubB = EC.pub_key_from_der(resp2['certB'])
    A = resp2['A']
    self.testcase.assert_(type(A) == StringType)
    # BUGFIX: assert_(A, myid) only tested A's truthiness (myid was the
    # failure message); compare the echoed identity explicitly.
    self.testcase.assert_(A == myid)
    SB = resp2['SB']
    self.testcase.assert_(type(SB) == StringType)
    # verify signature
    sig_list = [rB, rA, myid]
    sig_data = bencode(sig_list)
    sig_hash = sha(sig_data).digest()
    self.testcase.assert_(pubB.verify_dsa_asn1(sig_hash, SB))
def check_response2(self, resp2_data, rA, rB, myid):
    """Validate a decoded RESPONSE2 message from the peer.

    Asserts all fields exist, that A equals our id, and that SB is a
    valid signature over bencode([rB, rA, myid]) by the certB key.
    """
    resp2 = bdecode(resp2_data)
    self.testcase.assert_(type(resp2) == DictType)
    self.testcase.assert_(resp2.has_key('certB'))
    self.testcase.assert_(resp2.has_key('A'))
    self.testcase.assert_(resp2.has_key('SB'))
    # should throw an exception when the key is no good
    pubB = EC.pub_key_from_der(resp2['certB'])
    A = resp2['A']
    self.testcase.assert_(type(A) == StringType)
    # BUGFIX: previously assert_(A, myid), which passed whenever A was
    # non-empty; actually compare the echoed identity against ours.
    self.testcase.assert_(A == myid)
    SB = resp2['SB']
    self.testcase.assert_(type(SB) == StringType)
    # verify signature
    sig_list = [rB, rA, myid]
    sig_data = bencode(sig_list)
    sig_hash = sha(sig_data).digest()
    self.testcase.assert_(pubB.verify_dsa_asn1(sig_hash, SB))
def convert_PeerDB(self, limit=0):
    """Migrate peers from the old bsddb PeerDB into the new database.

    Each peer record is merged over a dict of schema defaults; if a
    cached icon exists for the peer it is inlined as 'thumbnail' and the
    old icon file is deleted. limit > 0 stops after that many peers.
    """
    print >>sys.stderr, time.asctime(),'-', "convert_PeerDB"
    peer_db = PeerDB.getInstance(self.bsddb_dir)
    npeers = 0
    for permid,db_data in peer_db._data.iteritems():
        # defaults for every column in the new Peer schema; db_data
        # overwrites whatever the old record actually carried
        data = {'ip':None, 'port':None, 'name':None, 'last_seen':0,
                'similarity':0, 'connected_times':0,
                'oversion':0,    # overlay version
                'buddycast_times':0, 'last_buddycast_time':0,
                'thumbnail':None, 'npeers':0, 'ntorrents':0, 'nprefs':0,
                'nqueries':0, 'last_connected':0, 'friend':0,
                'superpeer':0, }
        data.update(db_data)
        # icons were stored under SHA1(permid).jpg
        iconfilename = sha(permid).hexdigest()
        if iconfilename in self.icons:
            icon_str = self.readIcon(iconfilename)
            data['thumbnail'] = icon_str
            icon_path = os.path.join(self.icon_dir, iconfilename + '.jpg')
            if os.path.isfile(icon_path):
                print >> sys.stderr, time.asctime(),'-', 'remove', icon_path
                os.remove(icon_path)
        self._addPeerToDB(permid, data)
        npeers += 1
        # remember the permid -> rowid mapping for later conversions
        self.permid_id[permid] = npeers
        if limit and npeers >= limit:
            break
    #nconnpeers = self._fetchone('select count(*) from Peer where connected_times>0;')
    #print "npeers", npeers, nconnpeers
    self._commit()
def convert__postsession_4_1__4_2(self, session, default_download_config):
    """Convert 4.1 post-session state (mugshot image, default download
    settings) to the 4.2 configuration objects."""
    # the mugshot was stored in icons/<hash>.jpg
    # however... what is the permid???
    safename = "%s.jpg" % sha(session.get_permid()).hexdigest()
    safepath = os.path.join(self.dir_root, "icons", safename)
    if os.path.exists(safepath):
        # BUGFIX: open the JPEG in binary mode (text-mode "r" mangles
        # bytes on Windows) and close the handle before deleting.
        f = open(safepath, "rb")
        try:
            session.set_mugshot(f.read(), "image/jpeg")
        finally:
            f.close()
        os.remove(safepath)

    # old config encodes booleans as "0"/"1" strings
    bool_ = lambda x: x == "1"
    self._convert__helper_4_1__4_2(self.config, default_download_config.set_alloc_rate, "alloc_rate", int)
    self._convert__helper_4_1__4_2(self.config, default_download_config.set_alloc_type, "alloc_type")
    self._convert__helper_4_1__4_2(self.config, default_download_config.set_dest_dir, "defaultfolder")
    self._convert__helper_4_1__4_2(self.config, default_download_config.set_double_check_writes, "double_check", bool_)
    self._convert__helper_4_1__4_2(self.config, default_download_config.set_lock_files, "lock_files", bool_)
    self._convert__helper_4_1__4_2(self.config, default_download_config.set_lock_while_reading, "lock_while_reading", bool_)
    self._convert__helper_4_1__4_2(self.config, default_download_config.set_max_conns, "max_connections", int)
    self._convert__helper_4_1__4_2(self.config, default_download_config.set_max_files_open, "max_files_open", int)
    # "trible_check" is a historical typo in the old config; keep it
    self._convert__helper_4_1__4_2(self.config, default_download_config.set_triple_check_writes, "trible_check", bool_)
def _create(metainfo):  # TODO: replace with constructor
    """Internal factory: wrap already-parsed metainfo in a TorrentDef.

    Raises ValueError when the metainfo does not describe a valid torrent.
    """
    validTorrentFile(metainfo)

    tdef = TorrentDef()
    tdef.metainfo = metainfo
    tdef.metainfo_valid = True
    # Mirror the metainfo into the input dict used by maketorrent.
    maketorrent.copy_metainfo_to_input(tdef.metainfo, tdef.input)

    # For testing EXISTING LIVE, or EXISTING MERKLE: DISABLE the branch
    # below, i.e. keep the true infohash.
    if tdef.get_url_compat():
        tdef.infohash = makeurl.metainfo2swarmid(tdef.metainfo)
    else:
        # The infohash is calculated both here and in maketorrent.py;
        # everywhere else TorrentDef.get_infohash() must be used so that
        # P2PURLs keep working.
        tdef.infohash = sha(bencode(metainfo["info"])).digest()
    return tdef
def setUpPostSession(self):
    """ override TestAsServer """
    TestAsServer.setUpPostSession(self)

    self.mypermid = str(self.my_keypair.pub().get_der())
    self.hispermid = str(self.his_keypair.pub().get_der())
    self.myhash = sha(self.mypermid).digest()

    # Give Tribler some download history
    print >> sys.stderr, "test: Populating MYPREFERENCES table"
    self.myprefdb = self.session.open_dbhandler(NTFY_MYPREFERENCES)
    data = {'destination_path': '.'}
    # NOTE(review): create_good_my_prefs is called with an explicit extra
    # `self` argument on a bound method -- presumably the helper takes a
    # target object; confirm against its definition.
    infohashes = self.create_good_my_prefs(self, btconn.current_version)
    for i in range(0, len(infohashes)):
        # Commit only on the last insert to avoid per-row commits.
        commit = (i == len(infohashes) - 1)
        self.myprefdb.addMyPreference(infohashes[i], data, commit=commit)

    # Give Tribler some peers
    print >> sys.stderr, "test: Populating PEERS table"
    self.peerdb = self.session.open_dbhandler(NTFY_PEERS)
    past = int(time.time()) - 1000000000
    peers = self.create_good_random_peers(btconn.current_version, num=200)
    # Bugfix: the original reset `peers = []` right after generating them,
    # so the loop below never ran and the PEERS table stayed empty.
    for i in range(0, len(peers)):
        peer = peers[i]
        # Age the peers and massage the record into the DB schema.
        peer.update({'last_seen': past, 'last_connected': past})
        del peer['connect_time']
        peer['num_torrents'] = peer['nfiles']
        del peer['nfiles']
        commit = (i == len(peers) - 1)
        self.peerdb.addPeer(peer['permid'], peer, update_dns=True, update_connected=True, commit=commit)
def fill_tree(tree, height, npieces, hashes):
    """Fill a Merkle hash tree: copy piece hashes into the leaves, then
    compute every internal node bottom-up by hashing sibling pairs.

    Returns the (mutated) tree.
    """
    # 1. Copy the piece hashes into the bottom (leaf) level of the tree.
    first_leaf = (1 << height) - 1
    if DEBUG:
        print >> sys.stderr, "merkle: bottom of tree starts at", first_leaf
    for leaf in range(first_leaf, first_leaf + npieces):
        tree[leaf] = hashes[leaf - first_leaf]
    # 2. Unused leaves are deliberately NOT filled. Hashing 0 values may be
    # a security concern, but any public filler value would have to be
    # known to every initial seeder so they all build the same tree, and it
    # is unclear that public non-zero filler is cryptographically safer, so
    # we stick with 0 for the moment.
    # 3. Compute the higher-level hashes from the leaves upward.
    for level in range(height, 0, -1):
        if DEBUG:
            print >> sys.stderr, "merkle: calculating level", level
        for offset in range((1 << level) - 1, (1 << (level + 1)) - 2, 2):
            _parent_start, parent = get_parent_offset(offset, level)
            digester = sha()
            # Parent hash is sha1 of the concatenated sibling hashes.
            digester.update(tree[offset] + tree[offset + 1])
            tree[parent] = digester.digest()
    return tree
def _create(metainfo):  # TODO: replace with constructor
    """Internal factory: build a TorrentDef from parsed metainfo.

    Raises ValueError when the metainfo is not a valid torrent file.
    """
    validTorrentFile(metainfo)

    td = TorrentDef()
    td.metainfo = metainfo
    td.metainfo_valid = True
    # Mirror the metainfo into the input dict used by maketorrent.
    maketorrent.copy_metainfo_to_input(td.metainfo, td.input)

    # For testing EXISTING LIVE, or EXISTING MERKLE: DISABLE the branch
    # below, i.e. keep the true infohash.
    if td.get_url_compat():
        td.infohash = makeurl.metainfo2swarmid(td.metainfo)
    else:
        # The infohash is calculated both here and in maketorrent.py;
        # everywhere else TorrentDef.get_infohash() must be used so that
        # P2PURLs keep working.
        td.infohash = sha(bencode(metainfo['info'])).digest()

    assert isinstance(td.infohash, str), "INFOHASH has invalid type: %s" % type(td.infohash)
    assert len(td.infohash) == INFOHASH_LENGTH, "INFOHASH has invalid length: %d" % len(td.infohash)
    return td
def verify_data_pubkeyobj(plaintext, pubkey, blob):
    """Verify that `blob` is a valid DSA/ASN.1 signature over
    sha(plaintext) made by the given public-key object."""
    return pubkey.verify_dsa_asn1(sha(plaintext).digest(), blob)
def got_metadata(self, permid, message, selversion): """ receive torrent file from others """ # Arno, 2007-06-20: Disabled the following code. What's this? Somebody sends # us something and we refuse? Also doesn't take into account download help #and remote-query extension. #if self.upload_rate <= 0: # if no upload, no download, that's the game # return True # don't close connection try: message = bdecode(message[1:]) except: print_exc() return False if not isinstance(message, dict): return False try: infohash = message['torrent_hash'] if not isValidInfohash(infohash): # 19/02/10 Boudewijn: isValidInfohash either returns # True or raises a ValueError. So this part of the # code will never be reached... return False assert isinstance( infohash, str), "INFOHASH has invalid type: %s" % type(infohash) assert len( infohash ) == INFOHASH_LENGTH, "INFOHASH has invalid length: %d" % len( infohash) #print >>sys.stderr,"metadata: got_metadata: hexinfohash: get_collected_torrent_filename(infohash) if not infohash in self.requested_torrents: # got a torrent which was not requested return True if self.torrent_db.hasMetaData(infohash): return True # P2PURL goturl = False if selversion >= OLPROTO_VER_ELEVENTH: if 'metatype' in message and message[ 'metatype'] == URL_MIME_TYPE: try: tdef = TorrentDef.load_from_url(message['metadata']) # Internal storage format is still .torrent file metainfo = tdef.get_metainfo() metadata = bencode(metainfo) goturl = True except: print_exc() return False else: metadata = message['metadata'] else: metadata = message['metadata'] if not self.valid_metadata(infohash, metadata): return False if DEBUG: torrent_size = len(metadata) if goturl: mdt = "URL" else: mdt = "torrent" print >> sys.stderr, "metadata: Recvd", mdt, ` infohash `, sha( infohash).hexdigest(), torrent_size extra_info = {} if selversion >= OLPROTO_VER_FOURTH: try: extra_info = { 'leecher': message.get('leecher', -1), 'seeder': message.get('seeder', -1), 'last_check_time': 
message.get('last_check_time', -1), 'status': message.get('status', 'unknown') } except Exception, msg: print_exc() print >> sys.stderr, "metadata: wrong extra info in msg - ", message extra_info = {} filename = self.save_torrent(infohash, metadata, extra_info=extra_info) self.requested_torrents.remove(infohash) #if DEBUG: # print >>sys.stderr,"metadata: Was I asked to relay for someone", self.proxy_message_handler if filename is not None: self.notify_torrent_is_in(infohash, metadata, filename) # BarterCast: add bytes of torrent to BarterCastDB # Save exchanged KBs in BarterCastDB if permid is not None and BARTERCAST_TORRENTS: self.overlay_bridge.add_task( lambda: self.olthread_bartercast_torrentexchange( permid, 'downloaded'), 0)
def rsa_sign_data(plaintext, extra, rsa_keypair):
    """Return the RSA signature over sha(plaintext + extra) made with the
    given keypair object."""
    hasher = sha(plaintext)
    hasher.update(extra)
    return rsa_keypair.sign(hasher.digest())
from Tribler.Core.Subtitles.MetadataDomainObjects.Languages import LanguagesProvider import logging import os import unittest import codecs from Tribler.Core.Subtitles.SubtitlesHandler import SubtitlesHandler,\ getSubtitleFileRelativeName from Tribler.Core.Overlay.SecureOverlay import OLPROTO_VER_FOURTEENTH logging.basicConfig(level=logging.DEBUG) _keypairs = (generate_keypair(), generate_keypair(), generate_keypair()) testChannelId = str(_keypairs[0].pub().get_der()) testDestPermId = str(_keypairs[1].pub().get_der()) testMyPermId = str(_keypairs[2].pub().get_der()) testInfohash = sha("yoman!").digest() RES_DIR = os.path.join('..', '..', 'subtitles_test_res') class TestSubtitlesHandler(unittest.TestCase): def setUp(self): self._session = MockSession() self.ol_bridge = MockOverlayBridge() self.rmdDBHandler = MockMetadataDBHandler() self.underTest = SubtitlesHandler() def tearDown(self): self.ol_bridge = None #cleanup the mess in collected dir