def createWSSEToken(self):
   nonce = sha.sha(str(time.time() + random.random())).digest()
   nonce_enc = base64.encodestring(nonce).strip()
   created = datetime.datetime.now().isoformat() + 'Z'
   password_digest = sha.sha(nonce + created + self.password).digest()
   password_digest_enc = base64.encodestring(password_digest).strip()
   self.wsse_token = 'UsernameToken Username="******", PasswordDigest="%s", Nonce="%s", Created="%s"' % (self.username, password_digest_enc, nonce_enc, created)
Example #2
def create_wsse_auth(username, password):
  created = datetime.now().isoformat() + "Z"
  nonce   = b64encode(sha.sha(str(time() + random())).digest())
  digest  = b64encode(sha.sha(nonce + created + password).digest())
  wsse  = 'UsernameToken Username="******", PasswordDigest="%(p)s", Nonce="%(n)s", Created="%(c)s"'
  value = dict(u = username, p = digest, n = nonce, c = created)
  return wsse % value
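These WSSE examples rely on the Python 2 `sha` module and `base64.encodestring`, both removed in Python 3. A minimal sketch of the same PasswordDigest construction (Base64(SHA-1(nonce + created + password))) using `hashlib` and `os.urandom`; the function name and the timestamp formatting are illustrative assumptions, not taken from the snippets above:

import base64
import hashlib
import os
from datetime import datetime, timezone

def create_wsse_header(username, password):
    # A random nonce is stronger than hashing time()+random() as above.
    nonce = os.urandom(16)
    created = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
    digest = hashlib.sha1(nonce + created.encode("utf-8") + password.encode("utf-8")).digest()
    return ('UsernameToken Username="%s", PasswordDigest="%s", Nonce="%s", Created="%s"'
            % (username,
               base64.b64encode(digest).decode("ascii"),
               base64.b64encode(nonce).decode("ascii"),
               created))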
Example #3
 def agreement(question, nick):
     import sha
     digest = sha.sha(question.lower()).digest() + sha.sha(nick).digest()
     digest = sha.sha(digest).hexdigest()
     seed = int(digest[-8:], 16)
     r = random.Random(seed)
     return r.uniform(0, 100)
Example #4
    def next_block(self):
        if self.level == 0:
            data = self.f.read(BY2BLK)
            if len(data) == 0:
                self.level += 1
            else:
                self.hashes.append(sha.sha(data))
                sd = sha.sha(data).digest()
                return data
        if self.level > 0:
            hashdata = ""
            hnum = 0
            while len(self.hashes) > 0 and hnum < H2BLK:
                hashdata += self.hashes[0].digest()
                self.hashes.remove(self.hashes[0])
                hnum += 1
            self.nexthash.append(sha.sha(hashdata))
            if len(self.hashes) == 0 and len(self.nexthash) == 1:
                self.rootkey = self.nexthash[0].digest()
                self.eoflag = True
            elif len(self.hashes) == 0:
                self.hashes = self.nexthash
                self.nexthash = []
                self.level += 1
            return hashdata
Example #5
    def agreement_scale(self, nicklist, msg):
        question = msg.split("question:")[-1].strip()

        phrase = "The question is \"%s\". " % question
        phrase += "People that agree completely will go to that end of the room "
        phrase += "(100%), people that disagree on the other (0%). "
        self.msg(self.factory.channel, phrase)

        phrase = ""
        agreements = {}
        for nick in nicklist:
            import sha
            digest = sha.sha(question.lower()).digest() + sha.sha(nick).digest()
            digest = sha.sha(digest).digest()
            val = (ord(digest[-1]) + ord(digest[-2])) % 100

            if val not in agreements.keys():
                agreements[val] = []
            agreements[val].append(nick)

        for percent, nicks in sorted(agreements.items()):
            # Sort them in the right order
            for nick in nicks:
                phrase += "%s %s%%, " % (nick, percent)

        self.msg(self.factory.channel, phrase)
Example #6
def computePackageVerification(g, dirname, excluded):
    # SPDX 1.2 Section 4.7
    # The SPDXTools command "GenerateVerificationCode" can be used to
    # check the verification codes created.  Note that you must manually
    # remove "license.spdx" from the unpacked dist directory before
    # computing the verification code.

    verify_node = BNode()

    hashes = []
    for dirpath, dirnames, filenames in os.walk(dirname):
        for fn in filenames:
            full_fn = os.path.join(dirpath, fn)
            f = open(full_fn, 'rb')
            d = f.read()
            f.close()

            if full_fn in excluded:
                #print('excluded in verification: ' + full_fn)
                continue
            #print('included in verification: ' + full_fn)

            file_sha1 = sha.sha(d).digest().encode('hex').lower()
            hashes.append(file_sha1)

    #print(repr(hashes))
    hashes.sort()
    #print(repr(hashes))
    verify_code = sha.sha(''.join(hashes)).digest().encode('hex').lower()

    for fn in excluded:
        g.add((verify_node, SPDX.packageVerificationCodeExcludedFile, Literal(fn)))
    g.add((verify_node, SPDX.packageVerificationCodeValue, Literal(verify_code)))

    return verify_node
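The verification code above follows SPDX: hash every non-excluded file with SHA-1, sort the hex digests, concatenate, and hash again. A standalone sketch of just that calculation; it drops the RDF graph handling and assumes Python 3, where digest().encode('hex') no longer exists and hexdigest() is used instead:

import hashlib
import os

def package_verification_code(dirname, excluded=()):
    hashes = []
    for dirpath, _dirnames, filenames in os.walk(dirname):
        for fn in filenames:
            full_fn = os.path.join(dirpath, fn)
            if full_fn in excluded:
                continue
            with open(full_fn, 'rb') as f:
                hashes.append(hashlib.sha1(f.read()).hexdigest())
    hashes.sort()  # SPDX requires the per-file digests to be sorted before the final hash
    return hashlib.sha1(''.join(hashes).encode('ascii')).hexdigest()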
Example #7
def makeX(salt, username, password):
    if len(username)>=256:
        raise ValueError("username too long")
    if len(salt)>=256:
        raise ValueError("salt too long")
    return stringToNumber(sha.sha(salt + sha.sha(username + ":" + password)\
           .digest()).digest())
Example #8
    def got_metadata(self, permid, message, selversion):    
        """ receive torrent file from others """
        
        # Arno, 2007-06-20: Disabled the following code. What's this? Somebody sends 
        # us something and we refuse? Also doesn't take into account download help 
        #and remote-query extension.
        
        #if self.upload_rate <= 0:    # if no upload, no download, that's the game
        #    return True    # don't close connection
        
        try:
            message = bdecode(message[1:])
        except:
            print_exc()
            return False
        if not isinstance(message, dict):
            return False
        try:
            infohash = message['torrent_hash']
            if not isValidInfohash(infohash):
                return False

            if not infohash in self.requested_torrents:    # got a torrent which was not requested
                return True
            if self.torrent_db.hasMetaData(infohash):
                return True
            
            metadata = message['metadata']
            if not self.valid_metadata(infohash, metadata):
                return False
            if DEBUG:
                torrent_size = len(metadata)
                print >> sys.stderr,"metadata: Recvd torrent", `infohash`, sha(infohash).hexdigest(), torrent_size
            
            extra_info = {}
            if selversion >= OLPROTO_VER_FOURTH:
                try:
                    extra_info = {'leecher': message.get('leecher', -1),
                              'seeder': message.get('seeder', -1),
                              'last_check_time': message.get('last_check_time', -1),
                              'status':message.get('status', 'unknown')}
                except Exception, msg:
                    print_exc()
                    print >> sys.stderr, "metadata: wrong extra info in msg - ", message
                    extra_info = {}
                
            filename = self.save_torrent(infohash, metadata, extra_info=extra_info)
            self.requested_torrents.remove(infohash)
            
            if DEBUG:
                print >>sys.stderr,"metadata: Was I asked to dlhelp someone",self.dlhelper

            self.notify_torrent_is_in(infohash,metadata,filename)
            
            
            # BarterCast: add bytes of torrent to BarterCastDB
            # Save exchanged KBs in BarterCastDB
            if permid != None and BARTERCAST_TORRENTS:
                self.overlay_bridge.add_task(lambda:self.olthread_bartercast_torrentexchange(permid, 'downloaded'), 0)
Example #9
def test_total_too_short():
    ds = DummyStorage(4)
    try:
        StorageWrapper(ds, 4, [sha(chr(0xff) * 4).digest(),
            sha(chr(0xFF) * 4).digest()], 4, ds.finished, None)
        raise AssertionError('fail')
    except ValueError:
        pass
Example #10
def test_total_too_big():
    ds = DummyStorage(9)
    try:
        sw = StorageWrapper(ds, 4, [sha('qqqq').digest(),
            sha(chr(0xFF) * 4).digest()], 4, ds.finished, None)
        raise AssertionError('fail')
    except ValueError:
        pass
Example #11
 def get_signature(self, data, config, signing_string):
     d = data.copy()
     d['merchant_id'] = config.merchant_id
     val1 = signing_string.format(**d)
     hash1 = sha.sha(val1).hexdigest()
     val2 = "{0}.{1}".format(hash1, config.shared_secret)
     hash2 = sha.sha(val2).hexdigest()
     return hash2
Example #12
 def rehash_voca(self, maxw = -1):
     self._dhashvoca_F.clear()
     self._dhashvoca_R.clear()
     for i, x in enumerate(self.voca):
         if len(self._dhashvoca_F) == maxw:
             break
         self._dhashvoca_F[sha.sha(x[0].encode("utf-8")).hexdigest()[:8]] = i
         self._dhashvoca_R[sha.sha(x[2].encode("utf-8")).hexdigest()[:8]] = i
Example #13
 def test_copy(self):
     import sha
     for repeat in [1, 10, 100]:
         d1 = rsha.sha("abc" * repeat)
         d2 = d1.copy()
         d1.update("def" * repeat)
         d2.update("gh" * repeat)
         assert d1.digest() == sha.sha("abc"*repeat+"def"*repeat).digest()
         assert d2.digest() == sha.sha("abc"*repeat+"gh"*repeat).digest()
Example #14
def compare_region(expected_region_file, got_region_file):
    # sha1 test for now, not great since regions can be permuted, but works for now
    f = open(expected_region_file, 'r')
    expected_sha1 = sha.sha(f.read()).hexdigest()
    f.close()
    f = open(got_region_file, 'r')
    got_sha1 = sha.sha(f.read()).hexdigest()
    f.close()
    return expected_sha1 == got_sha1
Example #15
def user_preferences(request, template_name='accounts/prefs.html'):
    redirect_to = request.REQUEST.get(REDIRECT_FIELD_NAME,
                                      reverse("dashboard"))

    profile, profile_is_new = \
        Profile.objects.get_or_create(user=request.user)
    must_configure = not profile.first_time_setup_done
    profile.save()

    siteconfig = SiteConfiguration.objects.get_current()
    auth_backend = siteconfig.get("auth_backend")
    can_change_password = auth_backend in ['builtin', 'x509']

    if request.POST:
        form = PreferencesForm(request.POST)

        if form.is_valid():
            password = form.cleaned_data['password1']

            if can_change_password and password:
                salt = sha(str(time.time())).hexdigest()[:5]
                hash = sha(salt + password)
                newpassword = '******' % (salt, hash.hexdigest())
                request.user.password = newpassword

            if auth_backend == "builtin":
                request.user.first_name = form.cleaned_data['first_name']
                request.user.last_name = form.cleaned_data['last_name']
                request.user.email = form.cleaned_data['email']

            request.user.review_groups = form.cleaned_data['groups']
            request.user.save()

            profile.first_time_setup_done = True
            profile.syntax_highlighting = \
                form.cleaned_data['syntax_highlighting']
            profile.save()

            return HttpResponseRedirect(redirect_to)
    else:
        form = PreferencesForm({
            'settings': settings,
            'redirect_to': redirect_to,
            'first_name': request.user.first_name,
            'last_name': request.user.last_name,
            'email': request.user.email,
            'syntax_highlighting': profile.syntax_highlighting,
            'groups': [g.id for g in request.user.review_groups.all()],
        })

    return render_to_response(template_name, RequestContext(request, {
        'form': form,
        'settings': settings,
        'can_change_password': can_change_password,
        'must_configure': must_configure,
    }))
Example #16
 def check_c14n_exc(self):
     """http://www.w3.org/TR/xml-exc-c14n/
     """
     s = StringIO()
     Canonicalize(self.el, s, unsuppressedPrefixes=[])
     cxml = s.getvalue()
     d1 = base64.encodestring(sha.sha(C14N_EXCL1).digest()).strip()
     d2 = base64.encodestring(sha.sha(cxml).digest()).strip()
     self.assertEqual(d1, C14N_EXCL1_DIGEST)
     self.assertEqual(d1, d2)
def IncrementalOTA_VerifyEnd(info):
  target_radio_img = FindRadio(info.target_zip)
  source_radio_img = FindRadio(info.source_zip)
  if not target_radio_img or not source_radio_img: return
  if source_radio_img != target_radio_img:
    info.script.CacheFreeSpaceCheck(len(source_radio_img))
    radio_type, radio_device = common.GetTypeAndDevice("/radio", info.info_dict)
    info.script.PatchCheck("%s:%s:%d:%s:%d:%s" % (
        radio_type, radio_device,
        len(source_radio_img), sha.sha(source_radio_img).hexdigest(),
        len(target_radio_img), sha.sha(target_radio_img).hexdigest()))
def create_wsse_token(username, password):
  created = re.sub(u"\.\d+$", "", datetime.datetime.now().isoformat()) + "Z"
  nonce   = sha.sha(str(time.time() + random.random())).digest()
  digest  = sha.sha(nonce + created + password).digest()

  nonce64  = base64.b64encode(nonce)
  digest64 = base64.b64encode(digest)

  format = 'UsernameToken Username="******", PasswordDigest="%(p)s", Nonce="%(n)s", Created="%(c)s"'
  value  = dict(u = username, p = digest64, n = nonce64, c = created)
  return format % value
Example #19
    def testOneWayBlast(self, num = 2**12):
        a = self.a
        b = self.b
        
        import sha
        
        
        for i in xrange(num):
            a.omsgq.append(sha.sha(`i`).digest())
        runTillEmpty(a, b, noisy=self.noisy)

        self.assertEqual(len(b.protocol.q), num)
Example #20
def doit(orig, mod, patchpath):
    origfiles = set([fn for fn in doscan(orig) if not os.path.isfile(os.path.join(mod, fn) + ".alwaysreplace")])
    modfiles = doscan(mod)
    commonfiles = origfiles.intersection(modfiles)
    newfiles = modfiles.difference(origfiles)
    filesnotneeded = origfiles.difference(modfiles)
    directories = set([os.path.dirname(entry) for entry in modfiles])

    try:
        os.mkdir(patchpath)
    except:
        print "Unable to create %s" % patchpath
        sys.exit(1)

    print "Checking for differences in %d common files, can take a while." % len(commonfiles)
    commonfiles = [cf for cf in commonfiles if (ourfcmp(cf, os.path.join(orig, cf), os.path.join(mod, cf), False) == False)]
    print "%d file(s) need patching." % len(commonfiles)

    f = open(os.path.join(patchpath, "modfiles.txt"), "wb")
    for fnn in modfiles:
        fnn_hash = sha.sha(open(os.path.join(mod, fnn), "rb").read()).hexdigest()
        f.write("%s %s\n" % (fnn, fnn_hash))
    f.close()

    f = open(os.path.join(patchpath, "tobepatched.txt"), "wb")
    for fnn in commonfiles:
        fnn_hash = sha.sha(open(os.path.join(mod, fnn), "rb").read()).hexdigest()
        f.write("%s %s\n" % (fnn, fnn_hash))
    f.close()

    f = open(os.path.join(patchpath, "newfiles.txt"), "wb")
    for fnn in newfiles:
        fnn_hash = sha.sha(open(os.path.join(mod, fnn), "rb").read()).hexdigest()
        f.write("%s %s\n" % (fnn, fnn_hash))
    f.close()

    f = open(os.path.join(patchpath, "directories.txt"), "wb")
    for fnn in directories:
        f.write(fnn)
        f.write("\n")
    f.close()

    print "Creating PATCHES for the other files"
    for pf in commonfiles:
        h = sha.sha(open(os.path.join(orig, pf), "rb").read()).hexdigest()
        mkpatch(os.path.join(orig, pf), os.path.join(mod, pf), os.path.join(patchpath, h + ".patch"))

    print "Copying NEW files to %s" % patchpath
    for nf in newfiles:
        nf_hash = sha.sha(open(os.path.join(mod, nf), 'rb').read()).hexdigest()
        print "%s => %s" % (nf, nf_hash)
        docopy(os.path.join(mod, nf), os.path.join(patchpath, nf_hash) + ".new")
Example #21
 def check_c14n_exc2_unsuppressed(self):
     """http://www.w3.org/TR/xml-exc-c14n/
     The method of canonicalization described in this specification receives 
     an InclusiveNamespaces PrefixList parameter, which lists namespace prefixes 
     that are handled in the manner described by the Canonical XML Recommendation 
     """
     s = StringIO()
     Canonicalize(self.el, s, unsuppressedPrefixes=['xsi', 'xsd'])
     cxml = s.getvalue()
     d1 = base64.encodestring(sha.sha(C14N_EXCL2).digest()).strip()
     d2 = base64.encodestring(sha.sha(cxml).digest()).strip()
     self.assertEqual(d1, C14N_EXCL2_DIGEST)
     self.assertEqual(d1, d2)
Example #22
 def set_skey(self, SKEY):
     if not self.block3b:
         self.block3b = self._gen_block3b(SKEY)
     crypta = ARC4.new(sha("keyA" + self.S + SKEY).digest())
     cryptb = ARC4.new(sha("keyB" + self.S + SKEY).digest())
     if self.initiator:
         self.encrypt = crypta.encrypt
         self.decrypt = cryptb.decrypt
     else:
         self.encrypt = cryptb.encrypt
         self.decrypt = crypta.decrypt
     self.encrypt("x" * 1024)  # discard first 1024 bytes
     self.decrypt("x" * 1024)
Example #23
def test_last_piece_not_pre():
    ds = DummyStorage(51, ranges = [(50, 1)])
    sw = StorageWrapper(ds, 2, [sha('aa').digest()] * 25 + [sha('b').digest()], 2, ds.finished, None)
    for i in xrange(25):
        assert sw.new_request(i) == (0, 2)
    assert sw.new_request(25) == (0, 1)
    sw.piece_came_in(25, 0, 'b')
    r = range(25)
    shuffle(r)
    for i in r:
        sw.piece_came_in(i, 0, 'aa')
    assert ds.done
    assert ds.s == 'a' * 50 + 'b'
Example #24
 def set_skey(self, SKEY):
     if not self.block3b:
         self.block3b = self._gen_block3b(SKEY)
     crypta = RC4.RC4(sha('keyA'+self.S+SKEY).digest())
     cryptb = RC4.RC4(sha('keyB'+self.S+SKEY).digest())
     if self.initiator:
         self.encrypt = crypta.update
         self.decrypt = cryptb.update
     else:
         self.encrypt = cryptb.update
         self.decrypt = crypta.update
     self.encrypt('x'*1024)  # discard first 1024 bytes
     self.decrypt('x'*1024)
def IncrementalOTA_VerifyEnd(info):
  try:
    target_radio_img = info.target_zip.read("RADIO/radio.img")
    source_radio_img = info.source_zip.read("RADIO/radio.img")
  except KeyError:
    pass
  else:
    if source_radio_img != target_radio_img:
      info.script.CacheFreeSpaceCheck(len(source_radio_img))
      radio_type, radio_device = common.GetTypeAndDevice("/radio", info.info_dict)
      info.script.PatchCheck("%s:%s:%d:%s:%d:%s" % (
          radio_type, radio_device,
          len(source_radio_img), sha.sha(source_radio_img).hexdigest(),
          len(target_radio_img), sha.sha(target_radio_img).hexdigest()))
Example #26
    def authenticateIncomingMsg(self, authKey, authParameters, wholeMsg):        
        # 7.3.2.1 & 2
        if len(authParameters) != 12:
            raise error.StatusInformation(
                errorIndication='authenticationError'
                )

        # 7.3.2.3
        l = string.find(wholeMsg, str(authParameters))
        if l == -1:
            raise error.ProtocolError('Cant locate digest in wholeMsg')
        wholeHead = wholeMsg[:l]
        wholeTail = wholeMsg[l+12:]
        authenticatedWholeMsg = '%s%s%s' % (
            wholeHead, _twelveZeros, wholeTail
            )

        # 7.3.2.4a
        extendedAuthKey = map(ord, str(authKey) + _fortyFourZeros)

        # 7.3.2.4b --> noop
        
        # 7.3.2.4c
        k1 = string.join(
            map(lambda x,y: chr(x^y), extendedAuthKey, self.__ipad), ''
            )

        # 7.3.2.4d --> noop

        # 7.3.2.4e
        k2 = string.join(
            map(lambda x,y: chr(x^y), extendedAuthKey, self.__opad), ''
            )

        # 7.3.2.5a
        d1 = sha.sha(k1+authenticatedWholeMsg).digest()

        # 7.3.2.5b
        d2 = sha.sha(k2+d1).digest()
        
        # 7.3.2.5c
        mac = d2[:12]
         
        # 7.3.2.6
        if mac != authParameters:
            raise error.StatusInformation(
                errorIndication='authenticationFailure'
                )
        
        return authenticatedWholeMsg
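The ipad/opad construction in steps 7.3.2.4-7.3.2.5 is HMAC-SHA-1 truncated to 12 octets (HMAC-SHA-96, RFC 3414). A sketch of the equivalent MAC computation with the standard hmac module, assuming authKey and the zero-substituted message are plain byte strings:

import hmac
import hashlib

def expected_mac(auth_key, authenticated_whole_msg):
    # Full HMAC-SHA-1, keeping only the first 12 octets (HMAC-SHA-96).
    return hmac.new(auth_key, authenticated_whole_msg, hashlib.sha1).digest()[:12]

# Comparing this value against authParameters replaces the manual k1/k2 digest chain above.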
Example #27
	def __handlecookies(self):
		if "action" in self.form.keys() and self.form['action'].value == "login":
			if self.form['username'].value != self.username or sha.sha(self.form['password'].value).hexdigest() != self.password:
				self.__dumpheader()
				self.__dumplogin("wrong password! (%s)<br />" % sha.sha(self.form['password'].value).hexdigest())
				self.__dumpfooter()
				sys.exit(0)
			self.dict = {}
			self.dict['username'] = self.form['username'].value
			self.dict['password'] = self.form['password'].value
			self.dict2cookie()
		elif "action" in self.form.keys() and self.form['action'].value == "logout":
			self.dict = {}
			self.dict2cookie()
Example #28
    def setUpPreSession(self):
        TestAsServer.setUpPreSession(self)

        self.destdir = os.path.join(os.getcwdu(),"dest")

        try:
            shutil.rmtree(self.destdir)
        except:
            pass
        os.mkdir(self.destdir)
        
        # Create tree with 6 chunks
        self.chunks = []
        for i in range(0,6+1):
            chunk = chr(i) * 1024
            self.chunks.append(chunk)
            
        self.hashes = {}
        self.hashes[(7,7)] = '\x00' * 20
        for i in range(0,6+1):
            hash = sha(self.chunks[i]).digest()
            self.hashes[(i,i)] = hash
        self.hashes[(0,1)] = sha(self.hashes[(0,0)]+self.hashes[(1,1)]).digest()
        self.hashes[(2,3)] = sha(self.hashes[(2,2)]+self.hashes[(3,3)]).digest()
        self.hashes[(4,5)] = sha(self.hashes[(4,4)]+self.hashes[(5,5)]).digest()
        self.hashes[(6,7)] = sha(self.hashes[(6,6)]+self.hashes[(7,7)]).digest()
        
        self.hashes[(0,3)] = sha(self.hashes[(0,1)]+self.hashes[(2,3)]).digest()
        self.hashes[(4,7)] = sha(self.hashes[(4,5)]+self.hashes[(6,7)]).digest()
        
        self.hashes[(0,7)] = sha(self.hashes[(0,3)]+self.hashes[(4,7)]).digest()
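The fixture spells out every node of an 8-leaf hash tree by hand. A generic sketch of the same bottom-up construction, assuming the leaf digests are already padded to a power of two (the role of the zero-filled (7,7) entry above):

import hashlib

def merkle_root(leaf_digests):
    level = list(leaf_digests)
    while len(level) > 1:
        # Hash each pair of siblings to form the next level up.
        level = [hashlib.sha1(level[i] + level[i + 1]).digest()
                 for i in range(0, len(level), 2)]
    return level[0]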
Example #29
    def test_verify_bad_signature(self):
        """
        Testing verify to make sure it fails when we use a bad signature
        """
        rsa = RSA.load_key(self.privkey)
        message = "This is the message string"
        digest = sha.sha(message).digest() 

        otherMessage = "Abracadabra"
        otherDigest = sha.sha(otherMessage).digest() 
        otherSignature = rsa.sign(otherDigest)

        self.assertRaises(RSA.RSAError, rsa.verify, 
                          digest, otherSignature)
Example #30
    def post(self):
        new_password = self.get_argument('password', None)
        current_password = self.get_argument('password_current', None)

        if new_password is not None and current_password is not None:
            if sha.sha(current_password).hexdigest() != self.current_user.password:
                alert_text = "<strong>FAILED:</strong> Your original password was invalid."
            else:
                self.current_user.password = sha.sha(new_password).hexdigest()
                self.current_user.save()
                alert_text = "Your password, public key, and private key have been changed!"
        else:
            alert_text = "Password change failed!"
        self.render("../templates/user_page.html", page='account', alert=alert_text)
Example #31
def test4():
    from sha import sha
    key = "foo"
    longDevId1 = LList("dev-" + sha(key).hexdigest()) + "-" + LRndSeq()
    longDevId2 = "dev-" + sha(key).hexdigest() + "-bla"
    print commonStrLen(longDevId1, longDevId2)
Example #32
def get_image_guid(path, force=False):
	'''
	'''
	im = Image.open(path)
	return sha.sha(im.tostring()).hexdigest()
Example #33
from sha import sha
from lcg import LCG
import socket

c = socket.socket()
c.connect(('localhost', 12347))
message, hashval = c.recv(1000).split('?')

if sha(message, LCG(123)) == hashval:
    print "Integrity maintained"

else:
    print "Message Tampered"
Example #34
def hash(f_pwd):
    """ Returns a hex digest SHA 
    @param f_pwd: Some string to be hashed
    """
    return sha.sha(f_pwd).hexdigest()
Example #35
def sha1(x):
    return sha.sha(x).hexdigest().upper()
Example #36
    def __init__(self, storage, config, hashes, piece_size, finished,
            statusfunc, flag, data_flunked, infohash, errorfunc, resumefile):
        self.numpieces = len(hashes)
        self.storage = storage
        self.config = config
        check_hashes = config['check_hashes']
        self.hashes = hashes
        self.piece_size = piece_size
        self.data_flunked = data_flunked
        self.errorfunc = errorfunc
        self.total_length = storage.get_total_length()
        self.amount_left = self.total_length
        self.partial_mark = "BitTorrent - this part has not been "+\
                            "downloaded yet."+infohash+\
                            tobinary(config['download_slice_size'])
        if self.total_length <= piece_size * (self.numpieces - 1):
            raise BTFailure, _("bad data in responsefile - total too small")
        if self.total_length > piece_size * self.numpieces:
            raise BTFailure, _("bad data in responsefile - total too big")
        self.finished = finished
        self.numactive = array('H', [0] * self.numpieces)
        self.inactive_requests = [1] * self.numpieces
        self.amount_inactive = self.total_length
        self.endgame = False
        self.have = Bitfield(self.numpieces)
        self.waschecked = Bitfield(self.numpieces)
        if self.numpieces < 32768:
            typecode = 'h'
        else:
            typecode = 'l'
        self.places = array(typecode, [NO_PLACE] * self.numpieces)
        if not check_hashes:
            self.rplaces = array(typecode, range(self.numpieces))
            fastresume = True
        else:
            self.rplaces = self._load_fastresume(resumefile, typecode)
            if self.rplaces is not None:
                fastresume = True
            else:
                self.rplaces = array(typecode, [UNALLOCATED] * self.numpieces)
                fastresume = False
        self.holepos = 0
        self.stat_numfound = 0
        self.stat_numflunked = 0
        self.stat_numdownloaded = 0
        self.stat_active = {}
        self.stat_new = {}
        self.stat_dirty = {}
        self.download_history = {}
        self.failed_pieces = {}

        if self.numpieces == 0:
            return
        targets = {}
        total = 0
        if not fastresume:
            for i in xrange(self.numpieces):
                if self._waspre(i):
                    self.rplaces[i] = ALLOCATED
                    total += 1
                else:
                    targets[hashes[i]] = i
        if total and check_hashes:
            statusfunc(_("checking existing file"), 0)
        def markgot(piece, pos):
            if self.have[piece]:
                if piece != pos:
                    return
                self.rplaces[self.places[pos]] = ALLOCATED
                self.places[pos] = self.rplaces[pos] = pos
                return
            self.places[piece] = pos
            self.rplaces[pos] = piece
            self.have[piece] = True
            self.amount_left -= self._piecelen(piece)
            self.amount_inactive -= self._piecelen(piece)
            self.inactive_requests[piece] = None
            if not fastresume:
                self.waschecked[piece] = True
            self.stat_numfound += 1
        lastlen = self._piecelen(self.numpieces - 1)
        partials = {}
        for i in xrange(self.numpieces):
            if not self._waspre(i):
                if self.rplaces[i] != UNALLOCATED:
                    raise BTFailure(_("--check_hashes 0 or fastresume info "
                                      "doesn't match file state (missing data)"))
                continue
            elif fastresume:
                t = self.rplaces[i]
                if t >= 0:
                    markgot(t, i)
                    continue
                if t == UNALLOCATED:
                    raise BTFailure(_("Bad fastresume info (files contain more "
                                      "data)"))
                if t == ALLOCATED:
                    continue
                if t!= FASTRESUME_PARTIAL:
                    raise BTFailure(_("Bad fastresume info (illegal value)"))
                data = self.storage.read(self.piece_size * i,
                                         self._piecelen(i))
                self._check_partial(i, partials, data)
                self.rplaces[i] = ALLOCATED
            else:
                data = self.storage.read(piece_size * i, self._piecelen(i))
                sh = sha(buffer(data, 0, lastlen))
                sp = sh.digest()
                sh.update(buffer(data, lastlen))
                s = sh.digest()
                if s == hashes[i]:
                    markgot(i, i)
                elif s in targets and self._piecelen(i) == self._piecelen(targets[s]):
                    markgot(targets[s], i)
                elif not self.have[self.numpieces - 1] and sp == hashes[-1] and (i == self.numpieces - 1 or not self._waspre(self.numpieces - 1)):
                    markgot(self.numpieces - 1, i)
                else:
                    self._check_partial(i, partials, data)
                statusfunc(fractionDone = 1 - self.amount_left /
                           self.total_length)
            if flag.isSet():
                return
        self.amount_left_with_partials = self.amount_left
        for piece in partials:
            if self.places[piece] < 0:
                pos = partials[piece][0]
                self.places[piece] = pos
                self.rplaces[pos] = piece
                self._make_partial(piece, partials[piece][1])
        for i in xrange(self.numpieces):
            if self.rplaces[i] != UNALLOCATED:
                self.storage.allocated(piece_size * i, self._piecelen(i))
            if self.have[i]:
                self.storage.downloaded(piece_size * i, self._piecelen(i))
Example #37
def makeK(N, g):
    return stringToNumber(sha.sha(numberToString(N) + PAD(N, g)).digest())
Example #38
 def _make_id(self):
     myid = 'M' + version.split()[0].replace('.', '-')
     myid = myid + ('-' * (8-len(myid)))+sha(repr(time())+ ' ' +
                                  str(getpid())).digest()[-6:].encode('hex')
     return myid
Example #39
def makeU(N, A, B):
    return stringToNumber(sha.sha(PAD(N, A) + PAD(N, B)).digest())
Example #40
                                upnp = upnp_type, randomizer = config['random_port'])
                break
            except socketerror, e:
                if upnp_type and e == UPnP_ERROR:
                    print 'WARNING: COULD NOT FORWARD VIA UPnP'
                    upnp_type = 0
                    continue
                print "error: Couldn't listen - " + str(e)
                h.failed()
                return

        response = get_response(config['responsefile'], config['url'], h.error)
        if not response:
            break

        infohash = sha(bencode(response['info'])).digest()

        dow = BT1Download(h.display, h.finished, h.error, disp_exception, doneFlag,
                        config, response, infohash, myid, rawserver, listen_port,
                        configdir)

        if not dow.saveAs(h.chooseFile, h.newpath):
            break

        if not dow.initFiles(old_style = True):
            break
        if not dow.startEngine():
            dow.shutdown()
            break
        dow.startRerequester()
        dow.autoStats()
def create_partial_patches(patches):
    """ Given the patches generates a set of partial patches"""
    shas = {}

    work_dir_root = None
    metadata = []
    try:
        work_dir_root = tempfile.mkdtemp('-fastmode', 'tmp', os.getcwd())
        print "Building patches using work dir: %s" % (work_dir_root)
 
        # Iterate through every patch set in the patch file
        patch_num = 1
        for patch in patches:
            startTime = time.time()

            from_filename,to_filename,patch_filename,forced_updates = patch.split(",")
            from_filename,to_filename,patch_filename = os.path.abspath(from_filename),os.path.abspath(to_filename),os.path.abspath(patch_filename)

            # Each patch iteration uses its own work dir
            work_dir = os.path.join(work_dir_root,str(patch_num))
            os.mkdir(work_dir)

            # Extract from mar into from dir
            work_dir_from =  os.path.join(work_dir,"from");
            os.mkdir(work_dir_from)
            extract_mar(from_filename,work_dir_from)
            from_decoded = decode_filename(from_filename)
            from_buildid = get_buildid(work_dir_from, from_decoded['platform'])
            from_shasum = sha.sha(open(from_filename).read()).hexdigest()
            from_size = str(os.path.getsize(from_filename))
            
            # Extract to mar into to dir
            work_dir_to =  os.path.join(work_dir,"to")
            os.mkdir(work_dir_to)
            extract_mar(to_filename, work_dir_to)
            to_decoded = decode_filename(to_filename)
            to_buildid = get_buildid(work_dir_to, to_decoded['platform'])
            to_shasum = sha.sha(open(to_filename).read()).hexdigest()
            to_size = str(os.path.getsize(to_filename))

            mar_extract_time = time.time()

            partial_filename = create_partial_patch(work_dir_from, work_dir_to, patch_filename, shas, PatchInfo(work_dir, ['channel-prefs.js','update.manifest','removed-files'],['/readme.txt']),forced_updates)
            partial_buildid = to_buildid
            partial_shasum = sha.sha(open(partial_filename).read()).hexdigest()
            partial_size = str(os.path.getsize(partial_filename))

            metadata.append({
             'to_filename': os.path.basename(to_filename),
             'from_filename': os.path.basename(from_filename),
             'partial_filename': os.path.basename(partial_filename),
             'to_buildid':to_buildid, 
             'from_buildid':from_buildid, 
             'to_sha1sum':to_shasum, 
             'from_sha1sum':from_shasum, 
             'partial_sha1sum':partial_shasum, 
             'to_size':to_size,
             'from_size':from_size,
             'partial_size':partial_size,
             'to_version':to_decoded['version'],
             'from_version':from_decoded['version'],
             'locale':from_decoded['locale'],
             'platform':from_decoded['platform'],
            })
            print "done with patch %s/%s time (%.2fs/%.2fs/%.2fs) (mar/patch/total)" % (str(patch_num),str(len(patches)),mar_extract_time-startTime,time.time()-mar_extract_time,time.time()-startTime)
            patch_num += 1
        return metadata
    finally:
        # If we fail or get a ctrl-c during run be sure to clean up temp dir
        if (work_dir_root and os.path.exists(work_dir_root)):
            shutil.rmtree(work_dir_root)        
Example #42
def newID():
    """returns a new pseudorandom globally unique ID string"""
    h = sha()
    h.update(entropy(20))
    return h.digest()
Example #43
def get_new_hasher(initial_data=''):
    if SECURED:
        return SHA.new(initial_data)
    else:
        return sha(initial_data)
Example #44
def validate_handle(handle, precursors, hinfo):
    encinfo = bencode({'precursors': precursors, 'handle': hinfo})
    if sha(encinfo).digest() != handle:
        raise HistoryError, 'bad identifier'
Example #45
        print "Error accessing device on remote host!"
        sys.exit(1)
    remote_size = int(line)
    if size != remote_size:
        print "Source device size (%d) doesn't match remote device size (%d)!" % (
            size, remote_size)
        sys.exit(1)

    same_blocks = diff_blocks = 0

    print "Starting sync..."
    t0 = time.time()
    t_last = t0
    size_blocks = size / blocksize
    for i, l_block in enumerate(getblocks(f, blocksize)):
        l_sum = sha(l_block).hexdigest()
        r_sum = p_out.readline().strip()

        if l_sum == r_sum:
            p_in.write(SAME)
            p_in.flush()
            same_blocks += 1
        else:
            p_in.write(DIFF)
            p_in.flush()
            p_in.write(l_block)
            p_in.flush()
            diff_blocks += 1

        t1 = time.time()
        if t1 - t_last > 1 or (same_blocks + diff_blocks) >= size_blocks:
Example #46
    def piece_came_in(self, index, begin, piece, source = None):
        if self.places[index] < 0:
            if self.rplaces[index] == ALLOCATED:
                self._initalloc(index, index)
            else:
                n = self._get_free_place()
                if self.places[n] >= 0:
                    oldpos = self.places[n]
                    self._move_piece(oldpos, n)
                    n = oldpos
                if self.rplaces[index] < 0 or index == n:
                    self._initalloc(n, index)
                else:
                    self._move_piece(index, n)
                    self._initalloc(index, index)

        if index in self.failed_pieces:
            old = self.storage.read(self.places[index] * self.piece_size +
                                    begin, len(piece))
            if old != piece:
                self.failed_pieces[index][self.download_history[index][begin]]\
                    = None
        self.download_history.setdefault(index, {})
        self.download_history[index][begin] = source

        self.storage.write(self.places[index] * self.piece_size + begin, piece)
        self.stat_dirty[index] = 1
        self.numactive[index] -= 1
        if self.numactive[index] == 0:
            del self.stat_active[index]
        if index in self.stat_new:
            del self.stat_new[index]
        if not self.inactive_requests[index] and not self.numactive[index]:
            del self.stat_dirty[index]
            if sha(self.storage.read(self.piece_size * self.places[index], self._piecelen(index))).digest() == self.hashes[index]:
                self.have[index] = True
                self.storage.downloaded(index * self.piece_size,
                                        self._piecelen(index))
                self.inactive_requests[index] = None
                self.waschecked[index] = True
                self.amount_left -= self._piecelen(index)
                self.stat_numdownloaded += 1
                for d in self.download_history[index].itervalues():
                    if d is not None:
                        d.good(index)
                del self.download_history[index]
                if index in self.failed_pieces:
                    for d in self.failed_pieces[index]:
                        if d is not None:
                            d.bad(index)
                    del self.failed_pieces[index]
                if self.amount_left == 0:
                    self.finished()
            else:
                self.data_flunked(self._piecelen(index), index)
                self.inactive_requests[index] = 1
                self.amount_inactive += self._piecelen(index)
                self.stat_numflunked += 1

                self.failed_pieces[index] = {}
                allsenders = {}
                for d in self.download_history[index].itervalues():
                    allsenders[d] = None
                if len(allsenders) == 1:
                    culprit = allsenders.keys()[0]
                    if culprit is not None:
                        culprit.bad(index, bump = True)
                    del self.failed_pieces[index] # found the culprit already
                return False
        return True
Example #47
""" Test script for the unicodedata module.
Example #48
def check(stamp, resource=None, bits=None,
                 check_expiration=None, ds_callback=None):
    """Check whether a stamp is valid

    Optionally, the stamp may be checked for a specific resource, and/or
    it may require a minimum bit value, and/or it may be checked for
    expiration, and/or it may be checked for double spending.

    If 'check_expiration' is specified, it should contain the number of
    seconds old a date field may be.  Indicating days might be easier in
    many cases, e.g.

      >>> from hashcash import DAYS
      >>> check(stamp, check_expiration=28*DAYS)

    NOTE: Every valid (version 1) stamp must meet its claimed bit value
    NOTE: Check floor of 4-bit multiples (overly permissive in acceptance)
    """
    if stamp.startswith('0:'):          # Version 0
        try:
            date, res, suffix = stamp[2:].split(':')
        except ValueError:
            ERR.write("Malformed version 0 hashcash stamp!\n")
            return False
        if resource is not None and resource != res:
            return False
        elif check_expiration is not None:
            good_until = strftime("%y%m%d%H%M%S", localtime(time()-check_expiration))
            if date < good_until:
                return False
        elif callable(ds_callback) and ds_callback(stamp):
            return False
        elif type(bits) is not int:
            return True
        else:
            hex_digits = int(floor(bits/4))
            return sha(stamp).hexdigest().startswith('0'*hex_digits)
    elif stamp.startswith('1:'):        # Version 1
        try:
            claim, date, res, ext, rand, counter = stamp[2:].split(':')
        except ValueError:
            ERR.write("Malformed version 1 hashcash stamp!\n")
            return False
        if resource is not None and resource != res:
            return False
        elif type(bits) is int and bits > int(claim):
            return False
        elif check_expiration is not None:
            good_until = strftime("%y%m%d%H%M%S", localtime(time()-check_expiration))
            if date < good_until:
                return False
        elif callable(ds_callback) and ds_callback(stamp):
            return False
        else:
            hex_digits = int(floor(int(claim)/4))
            return sha(stamp).hexdigest().startswith('0'*hex_digits)
    else:                               # Unknown ver or generalized hashcash
        ERR.write("Unknown hashcash version: Minimal authentication!\n")
        if type(bits) is not int:
            return True
        elif resource is not None and stamp.find(resource) < 0:
            return False
        else:
            hex_digits = int(floor(bits/4))
            return sha(stamp).hexdigest().startswith('0'*hex_digits)
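A small usage sketch: mint a deliberately low-difficulty version 1 stamp by brute force and hand it to check() above (which hashes the whole stamp with SHA-1). The resource string, random field, and 8-bit difficulty are arbitrary illustrative choices; real deployments claim 20 bits or more:

import hashlib
from time import strftime

def mint(resource, bits=8):
    # Try counters until the SHA-1 of the stamp starts with bits/4 hex zeros.
    date = strftime("%y%m%d")
    counter = 0
    while True:
        stamp = "1:%d:%s:%s::abcdef:%x" % (bits, date, resource, counter)
        if hashlib.sha1(stamp.encode()).hexdigest().startswith("0" * (bits // 4)):
            return stamp
        counter += 1

stamp = mint("test@example.com", bits=8)
print(check(stamp, resource="test@example.com", bits=8))  # expected: True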
Example #49
def uuid5(namespace, name):
    """Generate a UUID from the SHA-1 hash of a namespace UUID and a name."""
    import sha
    hash = sha.sha(namespace.bytes + name).digest()
    return UUID(bytes=hash[:16], version=5)
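This is essentially the implementation that now ships in the standard uuid module, so in practice it is called rather than reimplemented:

import uuid

# Deterministic: the same namespace and name always produce the same UUID.
print(uuid.uuid5(uuid.NAMESPACE_DNS, "example.org"))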
Example #50
class DSATestCase(unittest.TestCase):

    errkey = 'tests/rsa.priv.pem'
    privkey = 'tests/dsa.priv.pem'
    pubkey = 'tests/dsa.pub.pem'
    param = 'tests/dsa.param.pem'

    data = sha.sha('Can you spell subliminal channel?').digest()
    different_data = sha.sha('I can spell.').digest()

    def callback(self, *args):
        pass

    def test_loadkey_junk(self):
        self.assertRaises(DSA.DSAError, DSA.load_key, self.errkey)

    def test_loadkey(self):
        dsa = DSA.load_key(self.privkey)
        assert len(dsa) == 1024
        self.assertRaises(AttributeError, getattr, dsa, 'foobar')
        for k in ('p', 'q', 'g', 'priv', 'pub'):
            self.assertRaises(DSA.DSAError, setattr, dsa, k, 1)

    def test_loadparam(self):
        self.assertRaises(DSA.DSAError, DSA.load_key, self.param)
        dsa = DSA.load_params(self.param)
        assert not dsa.check_key()
        assert len(dsa) == 1024

    def test_sign(self):
        dsa = DSA.load_key(self.privkey)
        assert dsa.check_key()
        r, s = dsa.sign(self.data)
        assert dsa.verify(self.data, r, s)
        assert not dsa.verify(self.data, s, r)

    def test_sign_asn1(self):
        dsa = DSA.load_key(self.privkey)
        blob = dsa.sign_asn1(self.data)
        assert dsa.verify_asn1(self.data, blob)

    def test_sign_with_params_only(self):
        dsa = DSA.load_params(self.param)
        self.assertRaises(AssertionError, dsa.sign, self.data)
        self.assertRaises(AssertionError, dsa.sign_asn1, self.data)

    def test_pub_verify(self):
        dsa = DSA.load_key(self.privkey)
        r, s = dsa.sign(self.data)
        dsapub = DSA.load_pub_key(self.pubkey)
        assert dsapub.check_key()
        assert dsapub.verify(self.data, r, s)
        self.assertRaises(DSA.DSAError, dsapub.sign)

    def test_verify_fail(self):
        dsa = DSA.load_key(self.privkey)
        r, s = dsa.sign(self.data)
        assert not dsa.verify(self.different_data, r, s)

    def test_verify_fail2(self):
        dsa = DSA.load_key(self.privkey)
        r, s = dsa.sign(self.data)
        dsa2 = DSA.load_params(self.param)
        assert not dsa2.check_key()
        self.assertRaises(AssertionError, dsa2.verify, self.data, r, s)

    def test_genparam_setparam_genkey(self):
        dsa = DSA.gen_params(1024, self.callback)
        assert len(dsa) == 1024
        p = dsa.p
        q = dsa.q
        g = dsa.g
        dsa2 = DSA.set_params(p, q, g)
        assert not dsa2.check_key()
        dsa2.gen_key()
        assert dsa2.check_key()
        r, s = dsa2.sign(self.data)
        assert dsa2.verify(self.data, r, s)
Example #51
def sha1(string):
    return sha.sha(string).digest()
Example #52
 def __init__(self, name, data):
   self.name = name
   self.data = data
   self.size = len(data)
   self.sha1 = sha.sha(data).hexdigest()
Example #53
def create_token(p):
    """ Create an Nokoscope token from a password """
    import sha
    return sha.sha(p).hexdigest()
Example #54
def main():
    logging.basicConfig(level=logging.INFO)
    parser = optparse.OptionParser(
        usage="usage: %prog --version=<version> <output path>")
    parser.add_option("-v",
                      "--version",
                      help="Set version number",
                      type="string",
                      dest="version",
                      metavar="VERSION")
    parser.add_option("-b",
                      "--build-type",
                      help="Set build type",
                      type="string",
                      dest="build_type",
                      metavar="BUILD_TYPE")
    parser.add_option("-g",
                      "--git-hash",
                      help="Set git hash",
                      type="string",
                      dest="git_hash",
                      metavar="GIT_HASH")
    opts, args = parser.parse_args()

    if not opts.version:
        parser.error("no version number specified")
        sys.exit(1)

    if len(args) != 1:
        parser.error("no output path specified")
        sys.exit(1)

    output_path = args[0]

    hostname = check_output(["hostname", "-f"]).strip()
    build_time = "%s %s" % (strftime("%d %b %Y %H:%M:%S",
                                     localtime()), time.tzname[0])
    username = os.getenv("USER")

    if opts.git_hash:
        # Git hash provided on the command line.
        git_hash = opts.git_hash
        clean_repo = "true"
    else:
        try:
            # No command line git hash, find it in the local git repository.
            git_hash = check_output(["git", "rev-parse", "HEAD"]).strip()
            clean_repo = subprocess.call(
                "git diff --quiet && git diff --cached --quiet",
                shell=True) == 0
            clean_repo = str(clean_repo).lower()
        except Exception:
            # If the git commands failed, we're probably building outside of a git
            # repository.
            logging.info(
                "Build appears to be outside of a git repository... " +
                "continuing without repository information.")
            git_hash = "non-git-build"
            clean_repo = "true"

    version_string = opts.version
    build_type = opts.build_type

    # Add the Jenkins build ID
    build_id = os.getenv("BUILD_ID", "")

    # Calculate an identifying hash based on all of the variables except for the
    # timestamp. We put this hash in a comment, and use it to check whether to
    # re-generate the file. If it hasn't changed since a previous run, we don't
    # re-write the file. This avoids having to rebuild all binaries on every build.
    identifying_hash = sha.sha(
        repr(
            (git_hash, hostname, username, clean_repo, build_id))).hexdigest()

    if output_up_to_date(output_path, identifying_hash):
        return 0
    d = os.path.dirname(output_path)
    if not os.path.exists(d):
        os.makedirs(d)
    with file(output_path, "w") as f:
        print >> f, """
// THIS FILE IS AUTO-GENERATED! DO NOT EDIT!
//
// id_hash=%(identifying_hash)s
#ifndef VERSION_INFO_H_
#define VERSION_INFO_H_

#define KUDU_GIT_HASH "%(git_hash)s"
#define KUDU_BUILD_HOSTNAME "%(hostname)s"
#define KUDU_BUILD_TIMESTAMP "%(build_time)s"
#define KUDU_BUILD_USERNAME "%(username)s"
#define KUDU_BUILD_CLEAN_REPO %(clean_repo)s
#define KUDU_BUILD_ID "%(build_id)s"
#define KUDU_BUILD_TYPE "%(build_type)s"
#define KUDU_VERSION_STRING "%(version_string)s"
#endif
""" % locals()
    return 0
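output_up_to_date() is not shown in this snippet; presumably it reads the existing header and compares the embedded id_hash comment, along these lines (an assumption for illustration, not the project's actual helper):

import os
import re

def output_up_to_date(path, identifying_hash):
    # Regenerate only if the file is missing or was generated from different inputs.
    if not os.path.exists(path):
        return False
    with open(path) as f:
        m = re.search(r"id_hash=(\w+)", f.read())
    return bool(m) and m.group(1) == identifying_hash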
Example #55
        def processRequest(cls, ps, token, **kw):
            """
            Parameters:
                ps -- ParsedSoap instance
                token -- UsernameToken pyclass instance
            """
            if token.typecode is not UsernameTokenDec:
                raise TypeError, 'expecting GED (%s,%s) representation.' % (
                    UsernameTokenDec.nspname, UsernameTokenDec.pname)

            username = token.Username

            # expecting only one password
            # may have a nonce and a created
            password = nonce = created = None
            for any in token.Any or []:
                if any.typecode is PasswordDec:
                    password = any
                    continue

                if any.typecode is NonceTypeDec:
                    nonce = any
                    continue

                if any.typecode is CreatedTypeDec:
                    created = any
                    continue

                raise TypeError, 'UsernameTokenProfileHandler unexpected %s' % str(
                    any)

            if password is None:
                raise RuntimeError, 'Unauthorized, no password'

            # TODO: not yet supporting complexType simpleContent in pyclass_type
            attrs = getattr(password, password.typecode.attrs_aname, {})
            pwtype = attrs.get('Type', cls.PasswordText)

            # Clear Text Passwords
            if cls.PasswordText is not None and pwtype == cls.PasswordText:
                if password == cls.passwordCallback(username):
                    return ps

                raise RuntimeError, 'Unauthorized, clear text password failed'

            if cls.nonces is None: cls.sweep(0)
            if nonce is not None:
                if nonce in cls.nonces:
                    raise RuntimeError, 'Invalid Nonce'

                # created was 10 seconds ago or sooner
                if created is not None and created < time.gmtime(time.time() -
                                                                 10):
                    raise RuntimeError, 'UsernameToken created is expired'

                cls.nonces.append(nonce)

            # PasswordDigest, recommended that implemenations
            # require a Nonce and Created
            if cls.PasswordDigest is not None and pwtype == cls.PasswordDigest:
                digest = sha.sha()
                for i in (nonce, created, cls.passwordCallback(username)):
                    if i is None: continue
                    digest.update(i)

                if password == base64.encodestring(digest.digest()).strip():
                    return ps

                raise RuntimeError, 'Unauthorized, digest failed'

            raise RuntimeError, 'Unauthorized, contents of UsernameToken unknown'
Example #56
xmlrpclib.Marshaller.dispatch[long] = dump_number
xmlrpclib.Marshaller.dispatch[int] = dump_number
xmlrpclib.Marshaller.dispatch[type(None)] = dump_none
xmlrpclib.Marshaller.dispatch[datetime.datetime] = dump_datetime
xmlrpclib.Marshaller.dispatch[datetime.date] = dump_datetime

##############################
## Setup Garbage Collection ##
##############################
gc.enable()

############################
## Random Seed Generation ##
############################
random.seed(sha.sha(str(time.time())).hexdigest())

#############################
## Setup Twisted Threading ##
#############################
threadable.init()
if aztk_config.setup.get("site", "environment") == "sandbox":
    reactor.suggestThreadPoolSize(3)
else:
    try:
        reactor.suggestThreadPoolSize(25)
    except Exception, ex:
        print Exception
        print ex
        sys.exit(1)
Example #57
#       char secbuff[SECRET_LEN * 4];
#       char *hash = secbuff;
#       int idx;

#       for (idx=0; idx<sizeof(secret); idx++) {
#       *hash++ = hex[secret[idx] >> 4];
#       *hash++ = hex[secret[idx] & 0xF];
#       }
#       *hash = '\0';
#       /* remove comment makings in below for apache-2.4+ */
#       ap_log_error(APLOG_MARK, APLOG_NOTICE, 0, s, /*  APLOGNO(11759) */ "secret: %s", secbuff);
#   }


apachesecret = binascii.unhexlify('497d8894adafa5ec7c8c981ddf9c8457da7a90ac')
s = sha.sha(apachesecret)

v = preauth()

print(v['nonce'])
realm = v['Digest realm'][1:-1]

(t,) = struct.unpack('l', base64.b64decode(v['nonce'][1:13]))

# whee, time travel
t = t + 5540

timepac = base64.b64encode(struct.pack('l', t))

s.update(realm)
s.update(timepac)
Example #58
    err('%s: no arguments expected.\n' % sys.argv[0])
    sys.exit(1)

if os.isatty(0):
    err('%s: you must provide the data to stamp on stdin\n' % sys.argv[0])
    sys.exit(1)

# hashlib is only available in python 2.5 or higher, but the 'sha' module
# produces a DeprecationWarning in python 2.6 or higher.  We want to support
# python 2.4 and above without any stupid warnings, so let's try using hashlib
# first, and downgrade if it fails.
try:
    import hashlib
except ImportError:
    import sha
    sh = sha.sha()
else:
    sh = hashlib.sha1()

while 1:
    b = os.read(0, 4096)
    sh.update(b)
    if not b: break

csum = sh.hexdigest()

if not vars.TARGET:
    sys.exit(0)

me = os.path.join(vars.STARTDIR, os.path.join(vars.PWD, vars.TARGET))
f = state.File(name=me)
Example #59
def make_torrent_file(input,
                      userabortflag=None,
                      userprogresscallback=lambda x: None):
    """ Create a torrent file from the supplied input. 
    
    Returns a (infohash,metainfo) pair, or (None,None) on userabort. """

    (info, piece_length) = makeinfo(input, userabortflag, userprogresscallback)
    if userabortflag is not None and userabortflag.isSet():
        return (None, None)
    if info is None:
        return (None, None)

    #if DEBUG:
    #    print >>sys.stderr,"mktorrent: makeinfo returned",`info`

    check_info(info)
    metainfo = {
        'info': info,
        'encoding': input['encoding'],
        'creation date': long(time())
    }

    # http://www.bittorrent.org/DHT_protocol.html says both announce and nodes
    # are not allowed, but some torrents (Azureus?) apparently violate this.
    if input['nodes'] is None and input['announce'] is None:
        raise ValueError('No tracker set')

    for key in [
            'announce', 'announce-list', 'nodes', 'comment', 'created by',
            'httpseeds'
    ]:
        if input[key] is not None and len(input[key]) > 0:
            metainfo[key] = input[key]
            if key == 'comment':
                metainfo['comment.utf-8'] = uniconvert(input['comment'],
                                                       'utf-8')

    # Assuming 1 file, Azureus format no support multi-file torrent with diff
    # bitrates
    bitrate = None
    for file in input['files']:
        if file['playtime'] is not None:
            secs = parse_playtime_to_secs(file['playtime'])
            bitrate = file['length'] / secs
            break
        if input.get('bps') is not None:
            bitrate = input['bps']
            break

    if bitrate is not None or input['thumb'] is not None:
        mdict = {}
        mdict['Publisher'] = 'Tribler'
        if input['comment'] is None:
            descr = ''
        else:
            descr = input['comment']
        mdict['Description'] = descr

        if bitrate is not None:
            mdict['Progressive'] = 1
            mdict['Speed Bps'] = bitrate
        else:
            mdict['Progressive'] = 0

        mdict['Title'] = metainfo['info']['name']
        mdict['Creation Date'] = long(time())
        # Azureus client source code doesn't tell what this is, so just put in random value from real torrent
        mdict['Content Hash'] = 'PT3GQCPW4NPT6WRKKT25IQD4MU5HM4UY'
        mdict['Revision Date'] = long(time())
        if input['thumb'] is not None:
            mdict['Thumbnail'] = input['thumb']
        cdict = {}
        cdict['Content'] = mdict
        metainfo['azureus_properties'] = cdict

    if input['torrentsigkeypairfilename'] is not None:
        create_torrent_signature(metainfo, input['torrentsigkeypairfilename'])

    infohash = sha(bencode(info)).digest()
    return (infohash, metainfo)
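The infohash returned on the last line is just the SHA-1 of the bencoded info dictionary. A self-contained sketch of that relationship with a toy bencoder (the snippet itself uses the project's own bencode; this version assumes Python 3 and covers only the types an info dict normally contains):

import hashlib

def toy_bencode(obj):
    if isinstance(obj, int):
        return b"i%de" % obj
    if isinstance(obj, str):
        obj = obj.encode("utf-8")
    if isinstance(obj, bytes):
        return b"%d:%s" % (len(obj), obj)
    if isinstance(obj, list):
        return b"l" + b"".join(toy_bencode(x) for x in obj) + b"e"
    if isinstance(obj, dict):
        keys = sorted(obj)  # bencoded dictionary keys must be in sorted order
        return b"d" + b"".join(toy_bencode(k) + toy_bencode(obj[k]) for k in keys) + b"e"
    raise TypeError("unsupported type: %r" % type(obj))

info = {"name": "example.dat", "piece length": 262144,
        "length": 1, "pieces": b"\x00" * 20}
print(hashlib.sha1(toy_bencode(info)).hexdigest())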
Example #60
class RSATestCase(unittest.TestCase):

    errkey = 'tests/dsa.priv.pem'
    privkey = 'tests/rsa.priv.pem'
    privkey2 = 'tests/rsa.priv2.pem'
    pubkey = 'tests/rsa.pub.pem'

    data = sha.sha('The magic words are squeamish ossifrage.').digest()

    e_padding_ok = ('pkcs1_padding', 'pkcs1_oaep_padding')

    s_padding_ok = ('pkcs1_padding', )
    s_padding_nok = ('no_padding', 'sslv23_padding', 'pkcs1_oaep_padding')

    def gen_callback(self, *args):
        pass

    def gen2_callback(self):
        pass

    def pp_callback(self, *args):
        # The passphrase for rsa.priv2.pem is 'qwerty'.
        return 'qwerty'

    def pp2_callback(self, *args):
        # Misbehaving passphrase callback.
        pass

    def test_loadkey_junk(self):
        self.assertRaises(RSA.RSAError, RSA.load_key, self.errkey)

    def test_loadkey_pp(self):
        rsa = RSA.load_key(self.privkey2, self.pp_callback)
        assert len(rsa) == 1024
        assert rsa.e == '\000\000\000\003\001\000\001'  # aka 65537 aka 0x10001
        assert rsa.check_key() == 1

    def test_loadkey_pp_bad_cb(self):
        self.assertRaises(RSA.RSAError, RSA.load_key, self.privkey2,
                          self.pp2_callback)

    def test_loadkey(self):
        rsa = RSA.load_key(self.privkey)
        assert len(rsa) == 1024
        assert rsa.e == '\000\000\000\003\001\000\001'  # aka 65537 aka 0x10001
        self.assertEqual(
            rsa.n,
            "\x00\x00\x00\x81\x00\xcde!\x15\xdah\xb5`\xce[\xd6\x17d\xba8\xc1I\xb1\xf1\xber\x86K\xc7\xda\xb3\x98\xd6\xf6\x80\xae\xaa\x8f!\x9a\xefQ\xdeh\xbb\xc5\x99\x01o\xebGO\x8e\x9b\x9a\x18\xfb6\xba\x12\xfc\xf2\x17\r$\x00\xa1\x1a \xfc/\x13iUm\x04\x13\x0f\x91D~\xbf\x08\x19C\x1a\xe2\xa3\x91&\x8f\xcf\xcc\xf3\xa4HRf\xaf\xf2\x19\xbd\x05\xe36\x9a\xbbQ\xc86|(\xad\x83\xf2Eu\xb2EL\xdf\xa4@\x7f\xeel|\xfcU\x03\xdb\x89'"
        )
        self.assertRaises(AttributeError, getattr, rsa, 'nosuchprop')
        assert rsa.check_key() == 1

    def test_loadkey_bio(self):
        keybio = BIO.MemoryBuffer(open(self.privkey).read())
        rsa = RSA.load_key_bio(keybio)
        assert len(rsa) == 1024
        assert rsa.e == '\000\000\000\003\001\000\001'  # aka 65537 aka 0x10001
        assert rsa.check_key() == 1

    def test_keygen(self):
        rsa = RSA.gen_key(1024, 65537, self.gen_callback)
        assert len(rsa) == 1024
        assert rsa.e == '\000\000\000\003\001\000\001'  # aka 65537 aka 0x10001
        assert rsa.check_key() == 1

    def test_keygen_bad_cb(self):
        rsa = RSA.gen_key(1024, 65537, self.gen2_callback)
        assert len(rsa) == 1024
        assert rsa.e == '\000\000\000\003\001\000\001'  # aka 65537 aka 0x10001
        assert rsa.check_key() == 1

    def test_private_encrypt(self):
        priv = RSA.load_key(self.privkey)
        # pkcs1_padding
        for padding in self.s_padding_ok:
            p = getattr(RSA, padding)
            ctxt = priv.private_encrypt(self.data, p)
            ptxt = priv.public_decrypt(ctxt, p)
            assert ptxt == self.data
        # The other paddings.
        for padding in self.s_padding_nok:
            p = getattr(RSA, padding)
            self.assertRaises(RSA.RSAError, priv.private_encrypt, self.data, p)
        # Type-check the data to be encrypted.
        self.assertRaises(TypeError, priv.private_encrypt, self.gen_callback,
                          RSA.pkcs1_padding)

    def test_public_encrypt(self):
        priv = RSA.load_key(self.privkey)
        # pkcs1_padding, pkcs1_oaep_padding
        for padding in self.e_padding_ok:
            p = getattr(RSA, padding)
            ctxt = priv.public_encrypt(self.data, p)
            ptxt = priv.private_decrypt(ctxt, p)
            assert ptxt == self.data
        # sslv23_padding
        ctxt = priv.public_encrypt(self.data, RSA.sslv23_padding)
        self.assertRaises(RSA.RSAError, priv.private_decrypt, ctxt,
                          RSA.sslv23_padding)
        # no_padding
        self.assertRaises(RSA.RSAError, priv.public_encrypt, self.data,
                          RSA.no_padding)
        # Type-check the data to be encrypted.
        self.assertRaises(TypeError, priv.public_encrypt, self.gen_callback,
                          RSA.pkcs1_padding)

    def test_x509_public_encrypt(self):
        x509 = X509.load_cert("tests/recipient.pem")
        rsa = x509.get_pubkey().get_rsa()
        rsa.public_encrypt("data", RSA.pkcs1_padding)

    def test_loadpub(self):
        rsa = RSA.load_pub_key(self.pubkey)
        assert len(rsa) == 1024
        assert rsa.e == '\000\000\000\003\001\000\001'  # aka 65537 aka 0x10001
        self.assertRaises(RSA.RSAError, setattr, rsa, 'e',
                          '\000\000\000\003\001\000\001')
        self.assertRaises(RSA.RSAError, rsa.private_encrypt, 1)
        self.assertRaises(RSA.RSAError, rsa.private_decrypt, 1)
        assert rsa.check_key()

    def test_loadpub_bad(self):
        self.assertRaises(RSA.RSAError, RSA.load_pub_key, self.errkey)

    def test_savepub(self):
        rsa = RSA.load_pub_key(self.pubkey)
        assert rsa.as_pem()  # calls save_key_bio
        f = 'tests/rsa_test.pub'
        try:
            self.assertEqual(rsa.save_key(f), 1)
        finally:
            try:
                os.remove(f)
            except OSError:
                pass

    def test_set_bn(self):
        rsa = RSA.load_pub_key(self.pubkey)
        assert m2.rsa_set_e(rsa.rsa, '\000\000\000\003\001\000\001') is None
        self.assertRaises(RSA.RSAError, m2.rsa_set_e, rsa.rsa,
                          '\000\000\000\003\001')

    def test_newpub(self):
        old = RSA.load_pub_key(self.pubkey)
        new = RSA.new_pub_key(old.pub())
        assert new.check_key()
        assert len(new) == 1024
        assert new.e == '\000\000\000\003\001\000\001'  # aka 65537 aka 0x10001

    def test_sign_and_verify(self):
        """
        Testing signing and verifying digests
        """
        algos = {'sha1': '', 'ripemd160': '', 'md5': ''}

        if m2.OPENSSL_VERSION_NUMBER >= 0x90800F:
            algos['sha224'] = ''
            algos['sha256'] = ''
            algos['sha384'] = ''
            algos['sha512'] = ''

        message = "This is the message string"
        digest = sha.sha(message).digest()
        rsa = RSA.load_key(self.privkey)
        rsa2 = RSA.load_pub_key(self.pubkey)
        for algo in algos.keys():
            signature = rsa.sign(digest, algo)
            #assert signature == algos[algo], 'mismatched signature with algorithm %s: signature=%s' % (algo, signature)
            verify = rsa2.verify(digest, signature, algo)
            assert verify == 1, 'verification failed with algorithm %s' % algo

    if m2.OPENSSL_VERSION_NUMBER >= 0x90708F:

        def test_sign_and_verify_rsassa_pss(self):
            """
            Testing signing and verifying using rsassa_pss
    
            The maximum size of the salt has to decrease as the size of the
            digest increases, because the size of our test key limits it.
            """
            message = "This is the message string"
            if sys.version_info < (2, 5):
                algos = {
                    'sha1': (43, sha.sha(message).digest()),
                    'md5': (47, md5.md5(message).digest())
                }

            else:
                import hashlib
                algos = {'sha1': 43}
                if not fips_mode:
                    algos['md5'] = 47
                    algos['ripemd160'] = 43

                if m2.OPENSSL_VERSION_NUMBER >= 0x90800F:
                    algos['sha224'] = 35
                    algos['sha256'] = 31
                    algos['sha384'] = 15
                    algos['sha512'] = 0

                for algo, salt_max in algos.iteritems():
                    h = hashlib.new(algo)
                    h.update(message)
                    digest = h.digest()
                    algos[algo] = (salt_max, digest)

            rsa = RSA.load_key(self.privkey)
            rsa2 = RSA.load_pub_key(self.pubkey)
            for algo, (salt_max, digest) in algos.iteritems():
                for salt_length in range(0, salt_max):
                    signature = rsa.sign_rsassa_pss(digest, algo, salt_length)
                    verify = rsa2.verify_rsassa_pss(digest, signature, algo,
                                                    salt_length)
                    assert verify == 1, 'verification failed with algorithm %s salt length %d' % (
                        algo, salt_length)

    def test_sign_bad_method(self):
        """
        Testing calling sign with an unsupported message digest algorithm
        """
        rsa = RSA.load_key(self.privkey)
        message = "This is the message string"
        digest = 'a' * 16
        self.assertRaises(ValueError, rsa.sign, digest, 'bad_digest_method')

    def test_verify_bad_method(self):
        """
        Testing calling verify with an unsupported message digest algorithm
        """
        rsa = RSA.load_key(self.privkey)
        message = "This is the message string"
        digest = 'a' * 16
        signature = rsa.sign(digest, 'sha1')
        self.assertRaises(ValueError, rsa.verify, digest, signature,
                          'bad_digest_method')

    def test_verify_mismatched_algo(self):
        """
        Testing verify to make sure it fails when we use a different
        message digest algorithm
        """
        rsa = RSA.load_key(self.privkey)
        message = "This is the message string"
        digest = sha.sha(message).digest()
        signature = rsa.sign(digest, 'sha1')
        rsa2 = RSA.load_pub_key(self.pubkey)
        self.assertRaises(RSA.RSAError, rsa.verify, digest, signature, 'md5')

    def test_sign_fail(self):
        """
        Testing sign to make sure it fails when I give it
        a bogus digest. Looking at the RSA sign method
        I discovered that with the digest methods we use
        it has to be longer than a certain length.
        """
        rsa = RSA.load_key(self.privkey)
        digest = """This string should be long enough to warrant an error in
        RSA_sign""" * 2

        self.assertRaises(RSA.RSAError, rsa.sign, digest)

    def test_verify_bad_signature(self):
        """
        Testing verify to make sure it fails when we use a bad signature
        """
        rsa = RSA.load_key(self.privkey)
        message = "This is the message string"
        digest = sha.sha(message).digest()

        otherMessage = "Abracadabra"
        otherDigest = sha.sha(otherMessage).digest()
        otherSignature = rsa.sign(otherDigest)

        self.assertRaises(RSA.RSAError, rsa.verify, digest, otherSignature)
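    # Hedged aside (not in the original file): this TestCase can be run
    # standalone with the stdlib runner, e.g.
    #   unittest.TextTestRunner().run(
    #       unittest.TestLoader().loadTestsFromTestCase(RSATestCase))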