Example #1
 def RandomSeed(self):
   patron = md5.new()
   math.fi = (1+math.sqrt(5))/2
   math.random = int(math.fi ** random.randint(1, 100))
   math.random = int(math.sqrt(math.random)+(random.randint(10,99)**8))
   math.random = (random.random()*(10**random.randint(1,5)))*math.random
   math.random = math.random - math.sqrt(math.random)
   math.random = math.random * (math.random/2) - (math.random/3)
   patron.update(str(int(math.random)))
   patron_2 = patron.hexdigest()
   op = random.randint(0,4)
   integer = random.randint(5,25)
   patron_3 = ""
   if op==0:
     patron_3 = patron_2[:integer]
   elif op==1:
     patron_3 = patron_2[integer:]
   elif op==2:
     patron_3 = patron_2[:-integer]
   elif op==4:
     patron_3 = patron_2[-integer:]
   patron_4 = hashlib.sha224(patron_3 + str(random.randint(0,10000000)) ).hexdigest()
   patron_5 = base64.b64encode(patron_4)
   patron.update(patron_5 + patron_4 + patron_3 + str(random.randint(0,10000000)))
   patron_6 = patron.hexdigest()
   patron_7 = base64.b64encode(patron_6 + str(math.random + random.randint(1,99999)))
   return hashlib.sha224(patron_7).hexdigest()
Example #2
 def hostname_to_ids(self, tenant_id, hostname):
     essex_hash  = hashlib.sha224(str(tenant_id) + str(hostname)).hexdigest()
     diablo_hash = hashlib.sha224(str(hostname)).hexdigest()
     print "hostname hashes for %s" % hostname
     print "essex = %s" % essex_hash
     print "diablo = %s" % diablo_hash
     return [essex_hash, diablo_hash]
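Example #2 (like most snippets on this page) passes str objects straight to hashlib.sha224(), which only works on Python 2; on Python 3 the digest constructors accept bytes only. A minimal Python 3 sketch of the same lookup, with a hypothetical helper name, assuming the same inputs:

import hashlib

def hostname_to_ids_py3(tenant_id, hostname):
    # hashlib digests require bytes on Python 3, so encode before hashing
    essex_hash = hashlib.sha224((str(tenant_id) + str(hostname)).encode("utf-8")).hexdigest()
    diablo_hash = hashlib.sha224(str(hostname).encode("utf-8")).hexdigest()
    return [essex_hash, diablo_hash]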
Example #3
def create_session(user):
	now = datetime.utcnow().replace(tzinfo=utc)
	session_key = hashlib.sha224(user.login.encode('utf-8') + user.password.encode('utf-8') + now.strftime(settings.DATE_FORMAT).encode('utf-8')).hexdigest()
	while cache.get(session_key) is not None:
		# refresh the timestamp so the recomputed key can change; otherwise a collision here would loop forever
		now = datetime.utcnow().replace(tzinfo=utc)
		session_key = hashlib.sha224(user.login.encode('utf-8') + user.password.encode('utf-8') + now.strftime(settings.DATE_FORMAT).encode('utf-8')).hexdigest()
	session_key = ("%03d" % user.id) + session_key
	return session_key
def find_records(soup, community, agency, county, url):
    global date_range
    records = soup.find_all("tr", {"class": "EventSearchGridRow"})
    v = soup.find("input", {"id": "__VIEWSTATE"})["value"]
    e = soup.find("input", {"id": "__EVENTVALIDATION"})["value"]
    v_e = {"__VIEWSTATE": v, "__EVENTVALIDATION": e}

    for record in records:
        #        new_record = [community, '', '', '', '', '', '', '', '', '', '', '', '', '']
        other_data = {"scrape_type": "search", "id_generate": "0"}
        data = {}
        id_and_type = {}
        record_fields = record.find_all("td")
        id_and_type["record_type"] = record_fields[2].string.strip()  # record type
        data["occurred_date"] = date_formatters.format_db_datetime(record_fields[1].string.strip())  # date
        data["address"] = re.sub(r" +", " ", record_fields[4].string.strip())
        if re.search("[A-Za-z]+", data["address"]) is not None:
            data["address"] = data["address"] + county_st
        if id_and_type["record_type"] == "Incident":
            data["reported_date"] = date_formatters.format_db_date(
                date_range["MasterPage$mainContent$txtDateFrom$txtDatePicker"]
            )
            data["date_reported"] = data["reported_date"]
            data["time_reported"] = ""
            data["on_date"] = data["occurred_date"]
        else:
            data["date_occurred"] = date_formatters.format_db_date_part(record_fields[1].string.strip())
            data["time_occurred"] = date_formatters.format_db_time_part(record_fields[1].string.strip())
        if id_and_type["record_type"] != "Accident":
            data["charge"] = remove_semicolon(
                record_fields[3].find_all("strong")[1].next_sibling.strip()
            )  # offense text
        else:
            data["charge"] = ""
        if id_and_type["record_type"] == "Arrest":
            data["name"] = record_fields[3].find_all("strong")[0].next_sibling.strip()  # arrestee
            id_and_type["record_id"] = hashlib.sha224(
                data["name"] + data["occurred_date"] + data["address"] + data["charge"]
            ).hexdigest()
            other_data["id_generate"] = "1"
        else:
            if len(record_fields[3].find_all("strong")) == 0:
                id_and_type["record_id"] = hashlib.sha224(data["occurred_date"] + data["address"]).hexdigest()
            else:
                id_and_type["record_id"] = record_fields[3].find_all("strong")[0].next_sibling.strip()  # case number
        # this is to download the pdf. not sure if we want to try that now.
        has_gif = record_fields[5].find("a").find("div")
        if has_gif is None:
            # there's no pdf
            # return ''
            data["pdf"] = ""
        else:
            data["pdf"] = dl_pdf(
                record_fields[5].find("a")["href"].strip().split("'")[1], id_and_type, agency, v_e, url
            )  # pdf stuff

        data = dict(
            data.items() + other_data.items() + id_and_type.items() + {"agency": agency, "county": county}.items()
        )
        scraper_commands.all_data[id_and_type["record_type"]].append(scraper_commands.check_data(data))
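The dict(data.items() + other_data.items() + ...) merge near the end of find_records is Python 2 only; dict_items objects cannot be concatenated with + on Python 3. A small sketch of the equivalent merge, assuming the same four mappings:

def merge_record(data, other_data, id_and_type, agency, county):
    # dict unpacking replaces dict(a.items() + b.items() + ...) on Python 3;
    # later mappings overwrite earlier keys, matching the original ordering.
    return {**data, **other_data, **id_and_type, "agency": agency, "county": county}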
Example #5
def update_file(oldfile, newfile, num_files, num_files_updated, obj):
    """Update a doc file."""
    try:
        with open(oldfile, 'r') as _file:
            shaold = hashlib.sha224(_file.read()).hexdigest()
    except IOError:
        shaold = ''
    try:
        with open(newfile, 'r') as _file:
            shanew = hashlib.sha224(_file.read()).hexdigest()
    except IOError:
        shanew = ''
    if shaold != shanew:
        if os.path.exists(oldfile):
            os.unlink(oldfile)
        os.rename(newfile, oldfile)
        num_files_updated['total1'] += 1
        num_files_updated['total2'] += 1
        num_files_updated[obj] += 1
    else:
        if os.path.exists(oldfile):
            os.unlink(newfile)
    num_files['total1'] += 1
    num_files['total2'] += 1
    num_files[obj] += 1
Example #6
 def challengeAccepted(self, check):
     """docstring for checkAuth"""
     log.msg(
         "Received %s from client. Checking with secret %s (%s)"
         % (check, self.factory.secret, hashlib.sha224(self.challenge + self.factory.secret).hexdigest())
     )
     return check == hashlib.sha224(self.challenge + self.factory.secret).hexdigest()
def login_accept(mother_win, username, psswrd):
	secure_connection = db.connect('secure.db')

	# Attempt secure user validation
	seed = hashlib.sha224(('%s%s' % (username, psswrd)).encode('utf-8')).hexdigest()
	fruit = hashlib.sha224(psswrd.encode('utf-8')).hexdigest()
	harvest_fruit = db.read(secure_connection, '002', seed)
	db.close(secure_connection)
	if harvest_fruit:
		harvest_fruit = harvest_fruit[0]
		if harvest_fruit[1] == fruit:
			# Update logs and update key
			fb = db.read(LOGS_CONNECTION, '003')[0][0]
			if fb == None:
				fb = 0
			else:
				fb += 1
			items = \
				'a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,t,u,v,w,x,y,z'.split(',')
			random.shuffle(items)
			rnd = ''.join(items)
			sd = (fb, str(datetime.date.today()), username, rnd)
			db.write(LOGS_CONNECTION, '003', sd)
			db.close(LOGS_CONNECTION)
			with open('data/key.dat', 'wt') as f:
				key = hashlib.sha224(('%s%s%s' % (sd[1], sd[2], sd[3])).\
					encode('utf-8')).hexdigest()
				print(key, file=f,sep='')
			mother_win.active[0].win.accept()
			mother_win.create_window('main')
		else:
			print('User not created by root, proceed to delete')
	else:
		print('Either user not existent or password incorrect')
Example #8
    def test_feedback(self):
        u = OTNUser(username='******', password='******')
        u.my_email = '*****@*****.**'
        u.pin = hashlib.sha224('5533').hexdigest()
        u.save()
        fb = FacebookProfile(user=u, facebook_id=1341341414)
        fb.save()
        f = Feedback(user=u, speed=2, size=3)
        f.save()

        f = Feedback(user=u, speed=1, size=4, comment="It was damn slow!!")
        f.save()

        pin = hashlib.sha224('5533').hexdigest()
        response = self.client.login(email='*****@*****.**', pin=pin)
        print response

        response = self.client.get("/legals/feedback/post/")
        print response 

        response = self.client.post("/legals/feedback/post/", {'speed':'2',
                                                'size': '1',
                                                'comment': "It's interesting"},
                                    HTTP_X_REQUESTED_WITH='XMLHttpRequest' )
        print json.dumps(json.loads(response.content), indent=2) 

        response = self.client.post("/legals/feedback/post/", {'speed':'2',
                                                'size': '1'},
                                    HTTP_X_REQUESTED_WITH='XMLHttpRequest' )
        print json.dumps(json.loads(response.content), indent=2) 
Example #9
 def lineReceived(self, line):
     """docstring for lineReceived"""
     # log.msg("Received %s" % line)
     if self.state == 0:
         if line == "TCPTS 0.1":
             self.state = 1
             log.msg("Connected to TCPTSServer")
         else:
             self.protocolMissmatch()
     elif self.state == 1:
         log.msg("Secret: %s" % self.factory.secret)
         log.msg(
             "Sending challenge reply: %s - %s - %s"
             % (line, self.factory.secret, hashlib.sha224(line + self.factory.secret).hexdigest())
         )
         self.sendLine(hashlib.sha224(line + self.factory.secret).hexdigest())
         self.state = 2
     elif self.state == 2:
         if line == "Authenticated":
             log.msg("Autenticated")
             self.state = 99
         else:
             log.err("Authentication not succesfull. Check secret on client/server")
             reactor.stop()
     else:
         log.err("Unknown data received from server")
         self.transport.loseConnection()
Example #10
def hash(string):
    '''
    Returns the sha224 hexdigest (twice) of a string + secret salt
    '''
    return hashlib.sha224(config.hashSalts[0]+\
                              hashlib.sha224(string).hexdigest()+\
                              config.hashSalts[1]).hexdigest()
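Example #10 wraps SHA-224 twice around a pair of secret salts read from config.hashSalts. A Python 3 sketch of the same construction, with hypothetical byte-string salts standing in for the real config values:

import hashlib

HASH_SALTS = (b"salt-before", b"salt-after")  # stand-ins; the original reads config.hashSalts

def salted_hash(string):
    # inner digest of the payload, then an outer digest sandwiched between the two salts
    inner = hashlib.sha224(string.encode("utf-8")).hexdigest()
    return hashlib.sha224(HASH_SALTS[0] + inner.encode("ascii") + HASH_SALTS[1]).hexdigest()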
Example #11
def scan_for_test_files(dirname, parser):
  for root, dirs, files in os.walk(dirname):
    for in_filename in files:
      if in_filename.endswith('in.js'):
        in_file_path = "%s/%s" % (dirname, in_filename)
        out_file_path = "%s/%s" % (dirname, "%sout.js" % (in_filename[:-5]))

        in_parsed = parser.parse(in_file_path)

        out_file = open(out_file_path, 'r')
        out_target_output = out_file.read()
        out_file.close()

        # Hopefully this doesn't come back to bite me, but I'm using a hash of the
        # output to compare it with the known TEST PASS state.  The odds of a false
        # positive are pretty slim...
        if (hashlib.sha224(out_target_output).hexdigest() == hashlib.sha224(in_parsed).hexdigest()):
          print "PASS [%s]" % (in_file_path)
        else:
          print "FAIL [%s]" % (in_file_path)

          if parser.save_expected_failures:
            # Write the expected output file for local diffing
            fout = open('%s_expected' % out_file_path, 'w')
            fout.write(in_parsed)
            fout.close()

          else:
            print "\n-- EXPECTED --\n%s" % (out_target_output)
            print "\n-- GOT --\n%s" % (in_parsed)

        parser.reset()
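The comment in Example #11 explains that the parser output is compared to the expected file via SHA-224 digests. On Python 3 the same comparison needs the text encoded first; a sketch, noting that a plain == on the strings gives the same PASS/FAIL answer:

import hashlib

def outputs_match(expected_text, parsed_text):
    # same digest-vs-digest comparison as above, with the Python 3 encode step;
    # comparing the strings directly with == would yield the same result
    return (hashlib.sha224(expected_text.encode("utf-8")).hexdigest()
            == hashlib.sha224(parsed_text.encode("utf-8")).hexdigest())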
Example #12
 def maak_wachtwoord1(self):
     self.nieuwwachtwoord1 = h.sha224(self.nieuwwachtwoord1.get())
     self.nieuwwachtwoord1 = self.nieuwwachtwoord1.digest()
     self.nieuwwachtwoord2 = h.sha224(self.nieuwwachtwoord2.get())
     self.nieuwwachtwoord2 = self.nieuwwachtwoord2.digest()
     if not self.oudwachtwoord == '0':
         self.oudwachtwoord2 = h.sha224(self.oudwachtwoord2.get())
         self.oudwachtwoord2 = self.oudwachtwoord2.digest()
         if not self.oudwachtwoord == self.oudwachtwoord2:
             tkMessageBox.showwarning("Verkeerd wachtwoord", "Het ingevoerde wachtwoord is incorrect!")
         elif self.nieuwwachtwoord1 == self.nieuwwachtwoord2:
             for lid in leden:
                 if lid.naam == root.gebruiker:
                     lid.wachtwoord = self.nieuwwachtwoord1
             tkMessageBox.showwarning("Voltooid", "Uw wachtwoord is veranderd!")
         else:
             tkMessageBox.showwarning("Verkeerd wachtwoord", "Het ingevoerde wachtwoord is incorrect!")
     else: 
         if self.nieuwwachtwoord1 == self.nieuwwachtwoord2:
             for lid in leden:
                 if lid.naam == root.gebruiker:
                     lid.wachtwoord = self.nieuwwachtwoord1
                     
             tkMessageBox.showwarning("Voltooid", "Uw wachtwoord is veranderd!")
         else:
             tkMessageBox.showwarning("Verkeerd wachtwoord", "Het ingevoerde wachtwoord is incorrect!")
             
     self.makewachtwoord.destroy()                          
Example #13
File: Hash.py Project: GLP90/Xbb
    def __init__(self, sample, minCut = '1', subCut = None, branches = None, splitFilesChunkSize = -1, debug = False, inputPath=None):
        
        debug = debug or ('XBBDEBUG_HASH' in os.environ)
        # basic cut(+part) hash
        self.hashKey = '%s_%s' % (sample, minCut)
        if splitFilesChunkSize > 0:
            self.hashKey += '_split%d' % (splitFilesChunkSize)
        if inputPath:
            self.hashKey += '_from:%s'%inputPath.strip().strip('/')
        self.hash = hashlib.sha224(self.hashKey).hexdigest()

        # including subcut
        if subCut:
            if debug:
                print ('DEBUG: hash function debug:')
                print (' > \x1b[32mKEY:', self.hashKey, '\x1b[0m')
            self.hashKey = '%s_[%s]' % (self.hash, subCut)
            self.hash = hashlib.sha224(self.hashKey).hexdigest()

        # including branchnames
        if branches:
            if debug:
                print ('DEBUG: hash function debug:')
                print (' > \x1b[32mKEY:', self.hashKey, '\x1b[0m')
            branchNames = ','.join(sorted(branches))
            self.hashKey = '%s_<%s>' % (self.hash, branchNames)
            self.hash = hashlib.sha224(self.hashKey).hexdigest()

        if debug:
            print ('DEBUG: hash function debug:')
            print (' > \x1b[32mKEY:', self.hashKey, '\x1b[0m')
            print (' > \x1b[33mHASH:', self.hash, '\x1b[0m')
Example #14
    def warehouse(self, brutefile, ifsplitTEXT="Cache"):
        if brutefile != "Default":
            with open(brutefile, "r") as brute:
                for i in brute.readlines():
                    if self.type == "md5":
                        storekey = "%s:%s" % (md5(i.strip()).hexdigest(), i.strip())
                    elif self.type == "sha1":
                        storekey = "%s:%s" % (sha1(i.strip()).hexdigest(), i.strip())
                    elif self.type == "sha224":
                        storekey = "%s:%s" % (sha224(i.strip()).hexdigest(), i.strip())
                    elif self.type == "sha256":
                        storekey = "%s:%s" % (sha256(i.strip()).hexdigest(), i.strip())
                    elif self.type == "sha384":
                        storekey = "%s:%s" % (sha384(i.strip()).hexdigest(), i.strip())
                    elif self.type == "sha512":
                        storekey = "%s:%s" % (sha512(i.strip()).hexdigest(), i.strip())
                    self.catch.append(storekey)
                self.bruteforce()

        else:
            for i in set(ifsplitTEXT):
                if self.type == "md5":
                    storekey = "%s:%s" % (md5(i.strip()).hexdigest(), i.strip())
                elif self.type == "sha1":
                    storekey = "%s:%s" % (sha1(i.strip()).hexdigest(), i.strip())
                elif self.type == "sha224":
                    storekey = "%s:%s" % (sha224(i.strip()).hexdigest(), i.strip())
                elif self.type == "sha256":
                    storekey = "%s:%s" % (sha256(i.strip()).hexdigest(), i.strip())
                elif self.type == "sha384":
                    storekey = "%s:%s" % (sha384(i.strip()).hexdigest(), i.strip())
                elif self.type == "sha512":
                    storekey = "%s:%s" % (sha512(i.strip()).hexdigest(), i.strip())
                self.catch.append(storekey)
            self.bruteforce()
Example #15
def register(request):
    if not request.method == 'POST':
        return render_to_response('people/register.html', {'emailform': EmailForm() })

    maillist=List(name=settings.MAILLIST_NAME, password=settings.MAILLIST_PASSWORD, email=settings.MAILLIST_EMAIL, main_url=settings.MAILLIST_URL, encoding='iso-8859-1')
    if models.Person.objects.filter(email__exact=request.POST.get('email')):
        ok = True
    else:
        ok = False
        for mail in maillist.get_all_members():
            if request.POST.get('email') in mail:
                ok = True
                break

    if not ok:
        return render_to_response('people/checkuser.html', {'error': True})

    user = User.objects.create_user(request.POST.get('email'), request.POST.get('email'), User.objects.make_random_password(length=8))
    user.is_active = False
    user.save()

    salt = hashlib.sha224(str(random.random())).hexdigest()[:5]
    activation_key = hashlib.sha224(salt+user.username).hexdigest()
    key_expires = datetime.datetime.today() + datetime.timedelta(2)

    profile = Profile(user = user, activation_key = activation_key, key_expires = key_expires)
    profile.save()


    email_subject = 'Confirmacao s.hal.vu'	
    email_body = 'Clica no link pra confirmar: http://s.hal.vu/confirm/'+profile.activation_key
    send_mail(email_subject, email_body,'*****@*****.**',[user.email])
    return render_to_response('people/checkuser.html')
def makeHoodNTriplesAnnotation(ligand_uri_dict, aPdbId, aRadius):
	rm = ''
	#make a hood uri
	hood_uri = base_uri+'/lighood_resource:'+hashlib.sha224(str(aPdbId)+str(aRadius)+str(random.random())).hexdigest()
	#type the hood
	rm += "<"+hood_uri+"> <"+rdf+"type> <"+base_uri+"/lighood_vocabulary:ligand_neighbourhood> .\n"
	rm += "<"+base_uri+"/lighood_vocabulary:ligand_neighbourhood> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://www.w3.org/2000/01/rdf-schema#Class>  .\n"
	rm += "<"+hood_uri+"> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://www.w3.org/2002/07/owl#NamedIndividual> .\n"
	#link it to the pdb structure
	rm += "<"+base_uri+"/pdb:"+aPdbId+"> <"+base_uri+"/lighood_vocabulary:has_neighborhood> <"+hood_uri+"> .\n"
	#add the radius 
	radius_uri = base_uri+'/lighood_resource:'+hashlib.sha224(str(aRadius)+str(random.random())).hexdigest()
	rm += "<"+hood_uri+"> <"+base_uri+"/lighood_vocabulary:has_attribute> <"+radius_uri+">. \n"
	rm += "<"+radius_uri+"> <"+rdf+"type> <"+base_uri+"/lighood_vocabulary:radius> .\n"
	rm += "<"+base_uri+"/lighood_vocabulary:radius> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://www.w3.org/2000/01/rdf-schema#Class>  .\n"
	rm += "<"+radius_uri+"> <"+"<http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://www.w3.org/2002/07/owl#NamedIndividual> .\n"
	rm += "<"+radius_uri+"> <"+base_uri+"/lighood_vocabulary:has_value> \""+str(aRadius)+"\". \n"
	for (ligand_uri, res_uri) in ligand_uri_dict.items():
		#add ligand 
		rm += "<"+hood_uri+"> <"+base_uri+"/lighood_vocabulary:has_member> <"+ligand_uri+"> .\n"
		#type the ligand
		rm += "<"+ligand_uri+"> <"+rdf+"type> <"+base_uri+"/lighood_vocabulary:ligand> .\n"
		rm += "<"+base_uri+"/lighood_vocabulary:ligand> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://www.w3.org/2000/01/rdf-schema#Class>  .\n"
		rm += "<"+ligand_uri+"> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://www.w3.org/2002/07/owl#NamedIndividual> .\n"
		for aru in res_uri:
			#add parts
			rm += "<"+hood_uri+"> <"+base_uri+"/lighood_vocabulary:has_member> <"+aru+"> .\n"
			#link ligand to neighbors
			rm += "<"+ligand_uri+"> <"+base_uri+"/lighood_vocabulary:has_neighbor> <"+aru+"> .\n"
			#type the neighbors
			rm += "<"+aru+"> <"+rdf+"type> <"+base_uri+"/lighood_vocabulary:neighbor> .\n"
			rm += "<"+base_uri+"/lighood_vocabulary:neighbor> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://www.w3.org/2000/01/rdf-schema#Class>  .\n"
			rm += "<"+aru+"> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://www.w3.org/2002/07/owl#NamedIndividual> .\n"

	return rm
Example #17
 def post(self):
   emailUsuario = self.session.get('email')
   if emailUsuario == None:
     self.redirect("/login")
   else:
    password = cgi.escape(self.request.get('password'),quote=True)
    passwordSHA = str(hashlib.sha224(password).hexdigest())
    usuario_cambiar = ndb.gql("SELECT * FROM User WHERE email = :1 AND password = :2", emailUsuario, passwordSHA)
         
    if usuario_cambiar.count() == 0:
       self.response.out.write(FORM_CAMBIAR_PASSWORD_HTML)
       self.response.out.write("<p>El password actual es incorrecto</p>")
    else:
       new_password1 = cgi.escape(self.request.get('new_password1'),quote=True)
       new_password2 = cgi.escape(self.request.get('new_password2'),quote=True)
       if(len(new_password1) < 6):
         self.response.out.write("--- La contrasena debe tener al menos 6 caracteres")
       else:  
           if new_password1 != new_password2:
            self.response.out.write(FORM_CAMBIAR_PASSWORD_HTML)
            self.response.out.write("<p>Los passwords nuevos no coinciden</p>")
           else:
             if password == new_password1:
                 self.response.out.write(FORM_CAMBIAR_PASSWORD_HTML)
                 self.response.out.write("<p>El nuevo password debe ser distinto al anterior</p>")
             else:
                 newPasswordSHA = str(hashlib.sha224(new_password1).hexdigest())
                 usuario = usuario_cambiar.get()
                 usuario.password = newPasswordSHA
                 usuario.put()  # persist the same entity that was modified; a second .get() would fetch a fresh copy
                 self.response.out.write(FORM_CAMBIAR_PASSWORD_HTML)
                 self.response.out.write("<p>El password se ha modificado correctamente!!</p>")       
Example #18
    def add_original_file(self, filep):
        orig_dir = self.path + '/original'
        if isinstance(filep, str):
            filep = [filep]
        self.load_originals()
        hashs = {}
        for iorig in self.original_file:
            rf = open(iorig, 'r')
            hashs[iorig] = hashlib.sha224(rf.read()).hexdigest()
            rf.close()

        for ifile in filep:
            assert(_os.path.isfile(ifile))
            rf = open(ifile, 'r')
            hash_ifile = hashlib.sha224(rf.read()).hexdigest()

            if hash_ifile in hashs.values():
                continue

            if ifile not in self.original_file:
                if not _os.path.isdir(orig_dir):
                    _os.mkdir(orig_dir)

                if not _os.path.isfile(orig_dir + '/' + _os.path.basename(ifile)):
                    _shutil.copy2(ifile, orig_dir)
                else:
                    i = 0
                    while True:
                        newfile = ifile + '_' + str(i)
                        if not _os.path.isfile(orig_dir + '/' + _os.path.basename(newfile)):
                            _shutil.copy(ifile, orig_dir + '/'+_os.path.basename(newfile))
                            break
                        else:
                            i += 1
        self.load_originals()
Example #19
def warehouse(brutefile, passwordtype, ifsplitTEXT="Cache"):
    catch = []
    if brutefile != "Default":
        if passwordtype == "md5":
            catch = [(md5(x.strip()).hexdigest() + ":" + x.strip()) for x in open(brutefile, "r").readlines()]
        elif passwordtype == "sha1":
            catch = [(sha1(x.strip()).hexdigest() + ":" + x.strip()) for x in open(brutefile, "r").readlines()]
        elif passwordtype == "sha224":
            catch = [(sha224(x.strip()).hexdigest() + ":" + x.strip()) for x in open(brutefile, "r").readlines()]
        elif passwordtype == "sha256":
            catch = [(sha256(x.strip()).hexdigest() + ":" + x.strip()) for x in open(brutefile, "r").readlines()]
        elif passwordtype == "sha384":
            catch = [(sha384(x.strip()).hexdigest() + ":" + x.strip()) for x in open(brutefile, "r").readlines()]
        elif passwordtype == "sha512":
            catch = [(sha512(x.strip()).hexdigest() + ":" + x.strip()) for x in open(brutefile, "r").readlines()]
        return catch

    else:
        if passwordtype == "md5":
            catch = [(md5(x.strip()).hexdigest() + ":" + x.strip()) for x in ifsplitTEXT]
        elif passwordtype == "sha1":
            catch = [(sha1(x.strip()).hexdigest() + ":" + x.strip()) for x in ifsplitTEXT]
        elif passwordtype == "sha224":
            catch = [(sha224(x.strip()).hexdigest() + ":" + x.strip()) for x in ifsplitTEXT]
        elif passwordtype == "sha256":
            catch = [(sha256(x.strip()).hexdigest() + ":" + x.strip()) for x in ifsplitTEXT]
        elif passwordtype == "sha384":
            catch = [(sha384(x.strip()).hexdigest() + ":" + x.strip()) for x in ifsplitTEXT]
        elif passwordtype == "sha512":
            catch = [(sha512(x.strip()).hexdigest() + ":" + x.strip()) for x in ifsplitTEXT]
        return catch
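Examples #14 and #19 select the digest function through an if/elif chain over the algorithm name. Since hashlib exposes each of those constructors by name, the chain can be collapsed with getattr; a sketch assuming passwordtype is one of the six names handled above:

import hashlib

def warehouse_py3(lines, passwordtype):
    # passwordtype: one of "md5", "sha1", "sha224", "sha256", "sha384", "sha512"
    hasher = getattr(hashlib, passwordtype)
    return ["%s:%s" % (hasher(x.strip().encode("utf-8")).hexdigest(), x.strip()) for x in lines]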
Example #20
    def save(self):
        new_user = User.objects.create_user(self.cleaned_data['primary_contact_email'], self.cleaned_data['primary_contact_email'], self.cleaned_data.get('password'))
        new_user.first_name = self.cleaned_data['primary_contact_first_name']
        new_user.last_name = self.cleaned_data['primary_contact_last_name']
        new_user.is_active = False
        new_user.save()
        
        salt = str(random.random())
        hash_salt = hashlib.sha224(salt).hexdigest()
        activation_key = hashlib.sha224(hash_salt + new_user.username).hexdigest()[:32]
        key_expires = datetime.datetime.today() + datetime.timedelta(days=1)
        
        key_obj = ActivationKey(user=new_user,activation_key=activation_key,key_expires=key_expires)
        key_obj.save()

        new_profile = UserProfile(user=new_user,
                                  account_type=UserProfile.ACCOUNT_ORGANIZATION,
                                  organization_name=self.cleaned_data['business_name'],
                                  organization_address=self.cleaned_data['business_address'],
                                  organization_city=self.cleaned_data['business_city'],
                                  organization_state=self.cleaned_data['business_state'],
                                  organization_postalzip=self.cleaned_data['business_zip'],
                                  organization_phone=self.cleaned_data['business_phone']
                        )
            
        new_profile.save()
        print new_profile
        
        return new_user
Example #21
	def classify(self, text_list):  #  takes in a str or list of email text, with no headers.
		if type(text_list) is str:
			text_list = [text_list]
		output, allHashes = "", []
		classifname = hashlib.sha224("".join(text_list)).hexdigest()[:16] + '.tmp'
		for text in text_list:
			p_text = " ".join(text.splitlines())
			p_hash = hashlib.sha224(text).hexdigest()
			allHashes.append(p_hash)
			output += p_hash + '\t' + p_text + os.linesep
		f = open(classifname, 'w')
		f.write(output)
		f.flush()
		os.fsync(f)
		f.close()
		self.currentcmd = self.cmd + classifname
		output = os.popen(self.currentcmd, 'r')
		out = {}
		for line in output:
			try:
				if line.split('\t')[0] in allHashes:
					hashval, classval, prob = line.strip().split('\t')
					out[hashval] = (classval, prob)
			except:
				pass
		output.close()
		os.remove(classifname)
		return out
Example #22
    def next_token (self):      # returns next valid token, or None if no match
        if self.token:
            self.token = hashlib.sha224 (`settings.shared_secret` + self.token).hexdigest()
        else:
            self.token = hashlib.sha224 (self.hashed_secret + `random.random()`).hexdigest()

        return self.token
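Example #22 relies on Python 2 backtick repr syntax (`x`), which was removed in Python 3. A sketch of the same rolling-token idea in Python 3, treating the shared secret and the initial hashed secret as given strings:

import hashlib
import random

class TokenChain:
    def __init__(self, shared_secret, hashed_secret):
        self.shared_secret = shared_secret
        self.hashed_secret = hashed_secret
        self.token = None

    def next_token(self):
        # chain each token off the previous one; seed the first from the hashed secret
        if self.token:
            seed = repr(self.shared_secret) + self.token
        else:
            seed = self.hashed_secret + repr(random.random())
        self.token = hashlib.sha224(seed.encode("utf-8")).hexdigest()
        return self.token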
Example #23
    def save(self):
        email = self.cleaned_data["email"]
        first_name = self.cleaned_data["first_name"]
        last_name = self.cleaned_data["last_name"]
        password = self.cleaned_data["password"]
        password_c = self.cleaned_data["password_c"]
        bio = self.cleaned_data["bio"]
        random_username = hashlib.sha224(email).hexdigest()[:30]
        activation_code = hashlib.sha224(email).hexdigest()[:50]
        
        user = User()
        user.username = random_username
        user.email = email
        user.first_name = first_name
        user.last_name = last_name
        user.is_active = False
        user.set_password(password)
        user.save()

        user_profile = UserProfile()
        user_profile.bio = bio
        user_profile.user = user
        user_profile.activation_code = activation_code
        user_profile.save()
        send_user_activation_mail.delay(activation_code, email)
Example #24
def generate_hash():
    import hashlib as h
    entry = [[e['entry']['id'], str(e['entry']['datetime']), 
              e['entry']['title'],
              '|'.join([h.sha1(e['entry']['description']).hexdigest(),
                        h.md5(e['entry']['description']).hexdigest(),
                        h.sha224(e['entry']['description']).hexdigest(),
                        h.sha256(e['entry']['description']).hexdigest(),
                        h.sha384(e['entry']['description']).hexdigest(),
                        h.sha512(e['entry']['description']).hexdigest()])]
              for e in cynotedb(cynotedb.entry.id>0).select(cynotedb.entry.id, 
                  cynotedb.entry.title, cynotedb.entry.datetime, 
                  cynotedb.entry.description).records]
    comment = [[e['comment']['id'], str(e['comment']['datetime']), 
                e['comment']['entry_id'],
                '|'.join([h.sha1(e['comment']['body']).hexdigest(),
                          h.md5(e['comment']['body']).hexdigest(),
                          h.sha224(e['comment']['body']).hexdigest(),
                          h.sha256(e['comment']['body']).hexdigest(),
                          h.sha384(e['comment']['body']).hexdigest(),
                          h.sha512(e['comment']['body']).hexdigest()])]
                for e in cynotedb(cynotedb.comment.id>0).select(cynotedb.comment.id, 
                    cynotedb.comment.entry_id, cynotedb.comment.datetime, 
                    cynotedb.comment.body).records]
    for e in entry:
        db.entry_hash.insert(eid=e[0], edatetime=e[1], etitle=e[2], ehash=e[3])
    for c in comment:
        db.comment_hash.insert(cid=c[0], cdatetime=c[1], eid=c[2], chash=c[3])
    db.log.insert(event='Entry hash generation. n=' + str(len(entry)), 
                  user=session.username)
    db.log.insert(event='Comment hash generation. n=' + str(len(comment)), 
                  user=session.username)
    return dict(entry=entry, comment=comment)
Example #25
def classify(text):
	if not text:
		return None
	batch, hashes = {}, {}
	if type(text) is str:
		text = [text]
	text_prob = list(text)  # copy: items are removed from text_prob while iterating over text below
	# Compute hashes / keys
	# Process the overrides first
	for t in text:
		p_hash = hashlib.sha224(t).hexdigest()
		hashes[p_hash] = t
		over = overrides(t)
		if over:
			batch[p_hash] = over
			text_prob.remove(t)
	# Call the Maxent classifier on the remaining items
	if text_prob:
		maxent = Pycla('419classifier.ser.gz')
		result = maxent.classify(text_prob)
		failed = []
		for elem in result:
			if result[elem][1] > 0.4:
				batch[elem] = result[elem][0]
			else:
				failed.append(hashes[elem])
		# Attempt fallback on low probability classifications
		# Return None for unsuccessful.
		for t in failed:
			p_hash = hashlib.sha224(t).hexdigest()
			batch[p_hash] = fallback(t)
	return batch
Example #26
File: hadd.py Project: GLP90/Xbb
    def __init__(self, fileNames, chunkNumber, submitTime='000000_000000', force=False, config=None, sampleIdentifier=None, inputDir=None, outputDir=None):
        self.fileNames = fileNames
        self.debug = 'XBBDEBUG' in os.environ
        self.submitTime = submitTime
        self.chunkNumber = chunkNumber
        self.config = config
        self.fileLocator = FileLocator(config=self.config)
        # -O option (reoptimizing baskets) leads to crashes...
        self.commandTemplate = "hadd -k  -ff {output} {inputs}"
        self.sampleIdentifier = sampleIdentifier
        self.force = force
        
        # use sampleTree class as replacement for hadd
        # this way baskets will be also optimized and unused branches can be stripped off
        self.useChain = True

        self.inputDir  = self.config.get('Directories', inputDir if inputDir else 'HADDin')
        self.outputDir = self.config.get('Directories', outputDir if outputDir else 'HADDout')
        self.scratchDir = self.config.get('Directories','scratch')

        treeHashes = []
        for fileName in self.fileNames: 
            treeHashes.append(hashlib.sha224(fileName).hexdigest())
        totalHash = hashlib.sha224('-'.join(sorted(treeHashes))).hexdigest()
        self.mergedFileName = '/'.join(self.fileNames[0].split('/')[:-4]) + '/' + totalHash + '/' + self.submitTime + '/0000/tree_%d.root'%chunkNumber
Example #27
def upload():

    type_request = request.form['Type']

    if type_request == "Internet":

        imei = request.form['IMEI']
        latitude = request.form['Latitude']
        longitude = request.form['Longitude']
        description = request.form['Description']
        number = request.form['Number']
        time = request.form['Time']
        address_json = request.form['Address']
        address = json.loads(address_json)
        country = address["Country"]
        area = address["Administrative Area"]
        locality = address["Locality"]
        id = hashlib.sha224(imei + time).hexdigest()
 
        report = Report(type_request, imei, description, number, time, country, area, locality, latitude, longitude)

        try:

            db.session.add(report)
            db.session.commit() 

        except Exception:

            # exc_type, exc_value, exc_traceback = sys.exc_info()
            # lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
            # print ''.join('!! ' + line for line in lines)

            print "Error in uploading data, rolling back session"
            db.session.rollback()

    elif type_request == "SMS":
        print "reached here"
        country = request.form['Country']
        print "reached here"
        description = request.form['Description']
        print "reached here"
        time = request.form['Time']
        print "reached here"
        imei = request.form['IMEI']
        print "reached here"
        number = request.form['Number']
        print "reached here"
        id = hashlib.sha224(imei + time).hexdigest()
        print "reached here"
        report = Report(type_request, imei, description, number, time, country)
        print "reached here"
        try:
            db.session.add(report)
            db.session.commit() 
        except Exception:
            print "Error in uploading data, rolling back session"
            db.session.rollback()


    return jsonify("")
Example #28
def cache_redis(sql, tupla=(), name_key='sql_cache', TTL=3600):  # 1hora
    global pool
    R_SERVER = redis.Redis(connection_pool=pool)
    # Create a hash key
    if len(tupla) == 0:
        hash = hashlib.sha224(sql).hexdigest()
    else:  # Se agrego la tupla para distingir el query por parametro
        hash = hashlib.sha224(sql + str(tupla)).hexdigest()
    key = name_key + ":" + hash

    # Check if data is in cache.
    if (R_SERVER.get(key)):
        data = cPickle.loads(R_SERVER.get(key))
        return {'status': 'OK', 'data': tratar_resultado_list_o_dict(data)}
    else:
        # Do MSSQL query
        try:
            conn = sqlconn_dic()
            cur = sqlcursor(conn)
            sqlexec(cur, sql, tupla)
            data = sqlfechall(cur)
            conn.close()

            # Put data into cache for 1 hour
            R_SERVER.set(key, cPickle.dumps(data))
            R_SERVER.expire(key, TTL);
            data = cPickle.loads(R_SERVER.get(key))
            return {'status': 'OK', 'data': tratar_resultado_list_o_dict(data)}
        except:
            return {'status': 'error', 'message': 'Error connectando a SQLServer.'}
    pool.release(R_SERVER)
Example #29
    def login(self,username,password):
        db = MySQLdb.connect(host,sql_server_user ,sql_server_password,database)
        cursor = db.cursor()
        print "::::::::::::::: db connected :::::::::::::::"

        # sql = """CREATE TABLE IF NOT EXISTS users(uid VARCHAR(40) PRIMARY KEY,pw VARCHAR(255) NOT NULL,stat BOOLEAN NOT NULL DEFAULT FALSE);INSERT INTO users (uid,pw,stat) VALUES (admin,%s,1) ;""" %

        self.username = username;
        self.password = password;
        sql = "SELECT * FROM blueid.users WHERE uid='%s';" % username

        try:
            cursor.execute(sql)
            results = cursor.fetchall()
            for row in results:
                uid = row[0]
                pwd = row[1]
                stat = row[2]

            hashy = uid + pwd + str(randint(0, 999))
            print "this is the session key: " + hashlib.sha224(hashy).hexdigest()
            return hashlib.sha224(hashy).hexdigest()

        except:
           print "Username not existent or data missing"
        db.close()
Example #30
 def maak_wachtwoord(self, event=None):
     if root.gebruiker.wachtwoord:
         self.oudwachtwoord = h.sha224(self.wachtwoord.get())
         self.oudwachtwoord = self.oudwachtwoord.digest()
         if not root.gebruiker.wachtwoord == self.oudwachtwoord:
             tkMessageBox.showwarning("Verkeerd wachtwoord", "Het ingevoerde wachtwoord is incorrect!")
             return
         if not (self.nieuwwachtwoord1.get() and self.nieuwwachtwoord2.get()):
             for lid in leden:
                 if lid.naam == root.gebruiker.naam:
                     lid.wachtwoord = ""
                     root.gebruiker.wachtwoord = lid.wachtwoord
             tkMessageBox.showwarning("Voltooid", "Uw wachtwoord is verwijderd!")
             root.show_frame(StreepScherm)
     if self.nieuwwachtwoord1.get() or self.nieuwwachtwoord2.get():
         if self.nieuwwachtwoord1.get() == self.nieuwwachtwoord2.get():
             self.nieuwwachtwoord = h.sha224(self.nieuwwachtwoord1.get())
             self.nieuwwachtwoord = self.nieuwwachtwoord.digest()
             for lid in leden:
                 if lid.naam == root.gebruiker.naam:
                     lid.wachtwoord = self.nieuwwachtwoord
                     root.gebruiker.wachtwoord = lid.wachtwoord
             tkMessageBox.showwarning("Voltooid", "Uw wachtwoord is veranderd!")
             root.show_frame(StreepScherm)
         else:
             tkMessageBox.showwarning("Verkeerd wachtwoord", "De ingevoerde wachtwoorden komen niet overeen!")
Example #31
def get_routing_table_version():
    return hashlib.sha224(
        os.environ.get('ODAHUB_RP_PATTERN',
                       'built-in').encode('utf-8')).hexdigest()[:16]
Example #32
def get_token():
    token = hashlib.sha224(pin.encode('utf-8'))
    return token.hexdigest()
 def get_hash_for_request(self, url_path, data):
     combined_data = {'url': url_path, 'data': data}
     return hashlib.sha224(str(combined_data).encode('utf-8')).hexdigest()
Example #34
def hash_password(password):
    return hashlib.sha224(str(password).encode("ascii")).hexdigest()
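Example #34 encodes the password as ASCII, which raises UnicodeEncodeError for any non-ASCII input. A sketch that hashes the UTF-8 bytes instead; note that a single unsalted SHA-224 is a fingerprint, not a password-storage scheme:

import hashlib

def hash_password_utf8(password):
    # utf-8 accepts any text; ascii would fail on non-ASCII passwords
    return hashlib.sha224(str(password).encode("utf-8")).hexdigest()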
Example #35
def make_hash(s):
    s += settings.SECRET_KEY
    return hashlib.sha224(s.encode()).hexdigest()[:8]
Example #36
def fingerprint(unpacked_data_stream):
    return _hashlib.sha224(unpacked_data_stream).hexdigest()
Example #37
def miner(q, privatekey_readable, public_key_hashed, address):
    from Cryptodome.PublicKey import RSA
    Random.atfork()
    rndfile = Random.new()
    tries = 0
    firstrun = True
    begin = time.time()
    key = RSA.importKey(privatekey_readable)



    if pool_conf == 1:
        #do not use pools public key to sign, signature will be invalid

        self_address = address
        address = pool_address

        #ask for diff percentage
        s_pool = socks.socksocket()
        s_pool.settimeout(0.3)
        if tor_conf == 1:
            s_pool.setproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 9050)
        s_pool.connect((pool_ip_conf, 8525))  # connect to pool
        print("Connected")

        print("Miner: Asking pool for share qualification difficulty requirement")
        connections.send(s_pool, "diffp", 10)
        pool_diff_percentage = int(connections.receive(s_pool, 10))
        print("Miner: Received pool for share qualification difficulty requirement: {}%".format(pool_diff_percentage))
        s_pool.close()
        #ask for diff percentage

    while True:
        try:

            # calculate new hash
            nonces = 0
            # calculate difficulty
            s_node = socks.socksocket()
            if tor_conf == 1:
                s_node.setproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 9050)
            s_node.connect((node_ip_conf, int(port)))  # connect to local node

            connections.send(s_node, "blocklast", 10)
            blocklast = connections.receive(s_node, 10)
            db_block_hash = blocklast[7]

            connections.send(s_node, "diffget", 10)
            diff = connections.receive(s_node, 10)
            s_node.close()

            diff = int(diff[1])

            diff_real = int(diff)

            if pool_conf == 0:
                diff = int(diff)


            else:  # if pooled
                diff_pool = diff_real
                diff = percentage(pool_diff_percentage, diff_real)

                if diff > diff_pool:
                    diff = diff_pool

            mining_condition = bin_convert(db_block_hash)[0:diff]


            # block_hash = hashlib.sha224(str(block_send) + db_block_hash).hexdigest()


            while tries < diff_recalc_conf:
                start = time.time()

                nonce = hashlib.sha224(rndfile.read(16)).hexdigest()[:32]
                mining_hash = bin_convert(hashlib.sha224((address + nonce + db_block_hash).encode("utf-8")).hexdigest())

                end = time.time()
                if tries % 2500 == 0: #limit output
                    try:
                        cycles_per_second = 1/(end - start)
                        print("Thread{} {} @ {:.2f} cycles/second, difficulty: {}({}), iteration: {}".format(q, db_block_hash[:10], cycles_per_second, diff, diff_real, tries))
                    except:
                        pass
                tries = tries + 1

                if mining_condition in mining_hash:
                    print("Thread {} found a good block hash in {} cycles".format(q, tries))
                    tries = 0

                    # serialize txs

                    block_send = []
                    del block_send[:]  # empty
                    removal_signature = []
                    del removal_signature[:]  # empty

                    s_node = socks.socksocket()
                    if tor_conf == 1:
                        s_node.setproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 9050)
                    s_node.connect((node_ip_conf, int(port)))  # connect to config.txt node
                    connections.send(s_node, "mpget", 10)
                    data = connections.receive(s_node, 10)
                    s_node.close()

                    if data != "[]":
                        mempool = data

                        for mpdata in mempool:
                            transaction = (
                                str(mpdata[0]), str(mpdata[1][:56]), str(mpdata[2][:56]), '%.8f' % float(mpdata[3]), str(mpdata[4]), str(mpdata[5]), str(mpdata[6]),
                                str(mpdata[7]))  # create tuple
                            # print transaction
                            block_send.append(transaction)  # append tuple to list for each run
                            removal_signature.append(str(mpdata[4]))  # for removal after successful mining

                    # claim reward
                    block_timestamp = '%.2f' % time.time()
                    transaction_reward = (str(block_timestamp), str(address[:56]), str(address[:56]), '%.8f' % float(0), "0", str(nonce))  # only this part is signed!
                    # print transaction_reward

                    h = SHA.new(str(transaction_reward).encode("utf-8"))
                    signer = PKCS1_v1_5.new(key)
                    signature = signer.sign(h)
                    signature_enc = base64.b64encode(signature)

                    if signer.verify(h, signature):
                        print("Signature valid")

                        block_send.append((str(block_timestamp), str(address[:56]), str(address[:56]), '%.8f' % float(0), str(signature_enc.decode("utf-8")), str(public_key_hashed.decode("utf-8")), "0", str(nonce)))  # mining reward tx
                        print("Block to send: {}".format(block_send))

                        if not any(isinstance(el, list) for el in block_send):  # if it's not a list of lists (only the mining tx and no others)
                            new_list = []
                            new_list.append(block_send)
                            block_send = new_list  # make it a list of lists

                        #  claim reward
                        # include data

                        tries = 0

                        # submit mined block to node

                        if sync_conf == 1:
                            check_uptodate(300)

                        if pool_conf == 1:
                            mining_condition = bin_convert(db_block_hash)[0:diff_real]
                            if mining_condition in mining_hash:
                                print("Miner: Submitting block to all nodes, because it satisfies real difficulty too")
                                nodes_block_submit(block_send)

                            try:
                                s_pool = socks.socksocket()
                                s_pool.settimeout(0.3)
                                if tor_conf == 1:
                                    s_pool.setproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 9050)
                                s_pool.connect((pool_ip_conf, 8525))  # connect to pool
                                print("Connected")

                                print("Miner: Proceeding to submit mined block to pool")

                                connections.send(s_pool, "block", 10)
                                connections.send(s_pool, self_address, 10)
                                connections.send(s_pool, block_send, 10)
                                s_pool.close()

                                print("Miner: Block submitted to pool")

                            except Exception as e:
                                print("Miner: Could not submit block to pool")
                                pass

                        if pool_conf == 0:
                            nodes_block_submit(block_send)
                    else:
                        print("Invalid signature")
            tries = 0

        except Exception as e:
            print(e)
            time.sleep(0.1)
            if debug_conf == 1:
                raise
            else:
                pass
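The inner loop of Example #37 draws a random nonce, hashes address + nonce + last block hash with SHA-224, and accepts the nonce once the first diff bits derived from the last block hash occur in the candidate's bit string. A condensed sketch of just that check, assuming bin_convert (not shown in the snippet) maps a hex digest to its bit string, and substituting the standard secrets module for the Cryptodome random file:

import hashlib
import secrets

def bin_convert(hex_digest):
    # assumption: hex digest -> zero-padded bit string
    return bin(int(hex_digest, 16))[2:].zfill(len(hex_digest) * 4)

def find_nonce(address, db_block_hash, diff):
    # proof-of-work condition: a diff-bit prefix of the last block hash must
    # appear somewhere in the candidate hash's bit string
    mining_condition = bin_convert(db_block_hash)[:diff]
    while True:
        nonce = hashlib.sha224(secrets.token_bytes(16)).hexdigest()[:32]
        candidate = hashlib.sha224((address + nonce + db_block_hash).encode("utf-8")).hexdigest()
        if mining_condition in bin_convert(candidate):
            return nonce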
Example #38
def generate_hash_key(salt, random_str_size=5):
    random_str = random_key(random_str_size)
    text = random_str + salt
    return hashlib.sha224(text.encode('utf-8')).hexdigest()
Example #39
     3. SHA224
     4. SHA256
     5. SHA384
     6. SHA512
     0. All the Above
 """)
 scheme_hash = int(input(" Kindly type [1 - 6]: "))
 if scheme_hash == 1:
     print("The MD5 Hash for", string_to_hash, "is",
           md5(str.encode(string_to_hash)).hexdigest())
 elif scheme_hash == 2:
     print("The SHA1 Hash for", string_to_hash, "is",
           sha1(str.encode(string_to_hash)).hexdigest())
 elif scheme_hash == 3:
     print("The SHA224 Hash for", string_to_hash, "is",
           sha224(str.encode(string_to_hash)).hexdigest())
 elif scheme_hash == 4:
     print("The SHA256 Hash for", string_to_hash, "is",
           sha256(str.encode(string_to_hash)).hexdigest())
 elif scheme_hash == 5:
     print("The SHA384 Hash for", string_to_hash, "is",
           sha384(str.encode(string_to_hash)).hexdigest())
 elif scheme_hash == 6:
     print("The SHA512 Hash for", string_to_hash, "is",
           sha512(str.encode(string_to_hash)).hexdigest())
 elif scheme_hash == 0:
     print("The MD5 Hash for", string_to_hash, "is",
           md5(str.encode(string_to_hash)).hexdigest())
     print("The SHA1 Hash for", string_to_hash, "is",
           sha1(str.encode(string_to_hash)).hexdigest())
     print("The SHA224 Hash for", string_to_hash, "is",
Example #40
    def compute_updates(self, lid, private, msg):
        """Determine what needs to be sent to the archiver.

        :param lid: The list id
        :param msg: The message object.

        :return None if the message could not be parsed
        """

        ojson = None
        if not lid:
            lid = normalize_lid(msg.get('list-id'))
        if self.cropout:
            crops = self.cropout.split(" ")
            # Regex replace?
            if len(crops) == 2:
                lid = re.sub(crops[0], crops[1], lid)
            # Standard crop out?
            else:
                lid = lid.replace(self.cropout, "")

        defaultEmptyString = lambda value: value and str(value) or ""
        msg_metadata = dict([(k, defaultEmptyString(msg.get(k)))
                             for k in self.keys])
        mid = hashlib.sha224(
            str("%s-%s" % (lid, msg_metadata['archived-at'])).encode(
                'utf-8')).hexdigest() + "@" + (lid if lid else "none")
        for key in ['to', 'from', 'subject', 'message-id']:
            try:
                hval = ""
                if msg_metadata.get(key):
                    for t in email.header.decode_header(msg_metadata[key]):
                        if t[1] == None or t[1].find("8bit") != -1:
                            hval += t[0].decode('utf-8') if type(
                                t[0]) is bytes else t[0]
                        else:
                            hval += t[0].decode(t[1], errors='ignore')
                    msg_metadata[key] = hval
            except Exception as err:
                print("Could not decode headers, ignoring..: %s" % err)
        mdate = None
        try:
            mdate = email.utils.parsedate_tz(msg_metadata.get('date'))
        except:
            pass
        if not mdate and msg_metadata.get('archived-at'):
            mdate = email.utils.parsedate_tz(msg_metadata.get('archived-at'))
        elif not mdate:
            print("Date (%s) seems totally wrong, setting to _now_ instead." %
                  mdate)
            mdate = time.gmtime()  # Get a standard 9-tuple
            mdate = mdate + (0, )  # Fake a TZ (10th element)

        # mdate calculations are all done, prepare the index entry
        epoch = email.utils.mktime_tz(mdate)
        mdatestring = time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime(epoch))
        body = self.msgbody(msg)
        try:
            if 'content-type' in msg_metadata and msg_metadata[
                    'content-type'].find("flowed") != -1:
                body = convertToWrapped(body, character_set="utf-8")
            if isinstance(body, str):
                body = body.encode('utf-8')
        except Exception as err:
            try:
                body = body.decode(chardet.detect(body)['encoding'])
            except Exception as err:
                try:
                    body = body.decode('latin-1')
                except:
                    try:
                        if isinstance(body, str):
                            body = body.encode('utf-8')
                    except:
                        body = None

        attachments, contents = self.msgfiles(msg)
        irt = ""
        if body is not None or attachments:
            pmid = mid
            try:
                if archiver_generator == "full":
                    mid = generators.full(msg, body, lid, attachments)
                elif archiver_generator == "medium":
                    mid = generators.medium(msg, body, lid, attachments)
                elif archiver_generator == "cluster":
                    mid = generators.cluster(msg, body, lid, attachments)
                else:
                    mid = generators.legacy(msg, body, lid, attachments)
            except Exception as err:
                if logger:
                    logger.warning("Could not generate MID: %s", err)
                mid = pmid
            if 'in-reply-to' in msg_metadata:
                try:
                    try:
                        irt = "".join(msg_metadata['in-reply-to'])
                    except:
                        irt = msg_metadata.get('in-reply-to').__str__()
                except:
                    irt = ""
            ojson = {
                'from_raw': msg_metadata['from'],
                'from': msg_metadata['from'],
                'to': msg_metadata['to'],
                'subject': msg_metadata['subject'],
                'message-id': msg_metadata['message-id'],
                'mid': mid,
                'cc': msg_metadata.get('cc'),
                'epoch': epoch,
                'list': lid,
                'list_raw': lid,
                'date': mdatestring,
                'private': private,
                'references': msg_metadata['references'],
                'in-reply-to': irt,
                'body': body.decode('utf-8', 'replace') if type(body) is bytes else body,
                'attachments': attachments
            }

        return ojson, contents, msg_metadata, irt
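compute_updates in Example #40 builds its fallback document id from SHA-224 over "<list-id>-<archived-at>" and then appends the list id. A self-contained sketch of just that id scheme:

import hashlib

def archive_doc_id(lid, archived_at):
    # digest of "<list-id>-<archived-at>", suffixed with the list id so ids stay scoped per list
    digest = hashlib.sha224(("%s-%s" % (lid, archived_at)).encode("utf-8")).hexdigest()
    return digest + "@" + (lid if lid else "none")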
Example #41
            print 'Cannot get RCRemoteProxy property.'
            status = 1

    except:
        traceback.print_exc()
        status = 1

    if status == 0:
        print argv
        host = argv[1]
        yakuakeTabName = argv[2]
        path = argv[3]
        binary = argv[4]
        arguments = argv[5:]
        print 'path', path
        print 'binary', binary
        print 'arguments', arguments
        randomStuff = random_stuff_generator()
        password = getPassFor(host)
        hashedPassword = hashlib.sha224(randomStuff + password).hexdigest()
        if rcremote_proxy.run(randomStuff, hashedPassword, path, binary,
                              arguments, yakuakeTabName):
            print 'ok'
            sys.exit(0)
        else:
            print 'error'
            sys.exit(-1)
    else:
        print 'probleemm'
        sys.exit(-1)
Example #42
    set_host_port_task, register_user_decorator, \
    confirmation_header, get_admin_csrf_token, \
    admin_csrf_token_mutex
from common import app


host = '0.0.0.0'
port = 8893
set_host_port_task(host, port, 3)


usernames = {} # From user_id to username
usernames_mutex = Lock()


calc_hash = lambda x: sha224(x.encode('utf-8')).hexdigest()


@app.route('/3', methods=['GET', 'POST'])
@register_user_decorator
def one():
    if flask.request.method == 'POST':
        with usernames_mutex:
            usernames[flask.request.cookies['user_id']] = flask.request.form['username']
    elif flask.request.cookies['user_id'] not in usernames.keys():
        return '<form method="POST"><p>Необходимо зарегистрироваться. Внимание! Крайне рекомендуем сделать ваше имя пользователя вашими именем и фамилией или какой-то кличкой или чем-то подобным, на английском языке, одним словом, но где имена собственные будут начинаиться с заглавных букв. Например, если вас зовут Никола Игрек, рекомендуем выбрать в качестве ника NikolaIgrek</p><p><input name="username" placeholder="Имя пользователя"></p><p><input type="submit" value="Зарегистрироваться"></p></form>'

    if solvers.get(flask.request.cookies.get('user_id'), False):
        return 'Вокруг начала играть музыка. Она то приближалась, то отдалялась, но в какой-то момент начала играть как будто в твоей голове. Ты пытался оглядываться, чтобы найти источник, но вокруг видел только черноту. За всем этим последовала яркая вспышка<br><br>Глава прочитана'
    return flask.render_template('3.html')
Beispiel #43
0
#   Service Identifiers
#

DEVICE_TO_DEVICE_SERVICE_ID = 0x01
ROUTER_TO_ROUTER_SERVICE_ID = 0x02
PROXY_SERVICE_ID = 0x03
LANE_SERVICE_ID = 0x04
CLIENT_SERVER_SERVICE_ID = 0x05

#
#   Hash Functions
#

_md5 = lambda x: hashlib.md5(x).digest()
_sha1 = lambda x: hashlib.sha1(x).digest()
_sha224 = lambda x: hashlib.sha224(x).digest()
_sha256 = lambda x: hashlib.sha256(x).digest()
_sha384 = lambda x: hashlib.sha384(x).digest()
_sha512 = lambda x: hashlib.sha512(x).digest()

hash_functions = (_md5, _sha1, _sha224, _sha256, _sha384, _sha512)
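
# A minimal usage sketch for the tuple above (the payload value is illustrative
# only): pick a digest helper by index and feed it a bytes message.
payload = b"example payload"
for idx, fn in enumerate(hash_functions):
    print(idx, fn(payload).hex())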

#
#   Result Codes
#

SUCCESS = 0
NO_DEVICE_TO_DEVICE_SERVICE = 1
NO_ROUTER_TO_ROUTER_SERVICE = 2
NO_PROXY_SERVICE = 3
NO_LANE_SERVICE = 4
Beispiel #44
0
def createTest(text):
    definition = {}

    tokens = list(parseParameters(text[len('processing.run('):-1]))
    cmdname = tokens[0]
    alg = QgsApplication.processingRegistry().algorithmById(cmdname)

    definition['name'] = 'Test ({})'.format(cmdname)
    definition['algorithm'] = cmdname

    params = {}
    results = {}

    i = 0
    for param in alg.parameters:
        if param.hidden:
            continue

        i += 1
        token = tokens[i]
        # Handle empty parameters that are optionals
        if param.optional and token is None:
            continue

        if isinstance(param, ParameterVector):
            schema, filepath = extractSchemaPath(token)
            p = {
                'type': 'vector',
                'name': filepath
            }
            if not schema:
                p['location'] = '[The source data is not in the testdata directory. Please use data in the processing/tests/testdata folder.]'

            params[param.name] = p
        elif isinstance(param, ParameterRaster):
            schema, filepath = extractSchemaPath(token)
            p = {
                'type': 'raster',
                'name': filepath
            }
            if not schema:
                p['location'] = '[The source data is not in the testdata directory. Please use data in the processing/tests/testdata folder.]'

            params[param.name] = p
        elif isinstance(param, ParameterTable):
            schema, filepath = extractSchemaPath(token)
            p = {
                'type': 'table',
                'name': filepath
            }
            if not schema:
                p['location'] = '[The source data is not in the testdata directory. Please use data in the processing/tests/testdata folder.]'

            params[param.name] = p
        elif isinstance(param, ParameterMultipleInput):
            multiparams = token.split(';')
            newparam = []

            # Handle datatype detection
            dataType = param.dataType()
            if dataType in ['points', 'lines', 'polygons', 'any vectors']:
                dataType = 'vector'
            else:
                dataType = 'raster'

            for mp in multiparams:
                schema, filepath = extractSchemaPath(mp)
                newparam.append({
                    'type': dataType,
                    'name': filepath
                })
            p = {
                'type': 'multi',
                'params': newparam
            }
            if not schema:
                p['location'] = '[The source data is not in the testdata directory. Please use data in the processing/tests/testdata folder.]'

            params[param.name] = p
        elif isinstance(param, ParameterFile):
            schema, filepath = extractSchemaPath(token)
            p = {
                'type': 'file',
                'name': filepath
            }
            if not schema:
                p['location'] = '[The source data is not in the testdata directory. Please use data in the processing/tests/testdata folder.]'

            params[param.name] = p
        elif isinstance(param, ParameterString):
            params[param.name] = token
        elif isinstance(param, ParameterBoolean):
            params[param.name] = token
        elif isinstance(param, ParameterNumber):
            if param.isInteger:
                params[param.name] = int(token)
            else:
                params[param.name] = float(token)
        else:
            if token[0] == '"':
                token = token[1:]
            if token[-1] == '"':
                token = token[:-1]
            params[param.name] = token

    definition['params'] = params

    for i, out in enumerate([out for out in alg.outputs if not out.hidden]):
        token = tokens[i - alg.getVisibleOutputsCount()]

        if isinstance(out, (OutputNumber, OutputString)):
            results[out.name] = str(out)
        elif isinstance(out, OutputRaster):
            if token is None:
                QMessageBox.warning(None,
                                    tr('Error'),
                                    tr('Seems some outputs are temporary '
                                       'files. To create test you need to '
                                       'redirect all algorithm outputs to '
                                       'files'))
                return

            dataset = gdal.Open(token, GA_ReadOnly)
            dataArray = nan_to_num(dataset.ReadAsArray(0))
            strhash = hashlib.sha224(dataArray.data).hexdigest()

            results[out.name] = {
                'type': 'rasterhash',
                'hash': strhash
            }
        elif isinstance(out, OutputVector):
            schema, filepath = extractSchemaPath(token)
            results[out.name] = {
                'type': 'vector',
                'name': filepath
            }
            if not schema:
                results[out.name]['location'] = '[The expected result data is not in the testdata directory. Please write it to processing/tests/testdata/expected. Prefer gml files.]'
        elif isinstance(out, OutputHTML) or isinstance(out, OutputFile):
            schema, filepath = extractSchemaPath(token)
            results[out.name] = {
                'type': 'file',
                'name': filepath
            }
            if not schema:
                results[out.name]['location'] = '[The expected result file is not in the testdata directory. Please redirect the output to processing/tests/testdata/expected.]'

    definition['results'] = results
    dlg = ShowTestDialog(yaml.dump([definition], default_flow_style=False))
    dlg.exec_()
Beispiel #45
0
import hashlib

m = hashlib.sha256()
m.update(b"Nobody inspects")
m.update(b" the spammish repetition")
print(m.digest())

print(hashlib.sha224(b"Nobody inspects the spammish repetition").hexdigest())
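
# Feeding the data incrementally with update() yields the same digest as hashing
# the whole message in one call; a small check of that property (same text as above):
whole = hashlib.sha256(b"Nobody inspects the spammish repetition").digest()
assert whole == m.digest()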

Beispiel #46
0
    def action_invoice_create(self,
                              cursor,
                              user,
                              ids,
                              journal_id=False,
                              group=False,
                              type='out_invoice',
                              context=None):
        invoice_dict = super(stock_picking,
                             self).action_invoice_create(cursor,
                                                         user,
                                                         ids,
                                                         journal_id,
                                                         group,
                                                         type,
                                                         context=context)

        for picking_key in invoice_dict:
            invoice = self.pool.get('account.invoice').browse(
                cursor, user, invoice_dict[picking_key], context=context)
            if not invoice.company_id.is_group_invoice_line:
                continue

            new_line_list = {}

            for line in invoice.invoice_line:

                # Build a key
                key = unicode(line.product_id.id) + ";" \
                    + unicode(line.discount) + ";" \
                    + unicode(line.price_unit) + ";" \
                    + line.name + ";"

                # Add the tax key part
                tax_tab = []
                for tax in line.invoice_line_tax_id:
                    tax_tab.append(tax.id)
                tax_tab.sort()
                for tax in tax_tab:
                    key = key + unicode(tax) + ";"

                # Add the sale order line part but check if the field exist because
                # it's install by a specific module (not from addons)
                if self.pool.get('ir.model.fields').search(
                        cursor,
                        user, [('name', '=', 'sale_order_lines'),
                               ('model', '=', 'account.invoice.line')],
                        context=context) != []:
                    order_line_tab = []
                    for order_line in line.sale_order_lines:
                        order_line_tab.append(order_line.id)
                    order_line_tab.sort()
                    for order_line in order_line_tab:
                        key = key + unicode(order_line) + ";"

                # Get the hash of the key
                hash_key = hashlib.sha224(key.encode('utf8')).hexdigest()

                # if the key doesn't already exist, we keep the invoice line
                # and we add the key to new_line_list
                if hash_key not in new_line_list:
                    new_line_list[hash_key] = {
                        'id': line.id,
                        'quantity': line.quantity,
                        'price_subtotal': line.price_subtotal,
                    }
                # if the key already exist, we update new_line_list and
                # we delete the invoice line
                else:
                    new_line_list[hash_key]['quantity'] += line.quantity
                    new_line_list[hash_key]['price_subtotal'] += line.price_subtotal
                    self.pool.get('account.invoice.line').unlink(
                        cursor, user, line.id, context=context)

            # Write modifications made on invoice lines
            for hash_key in new_line_list:
                line_id = new_line_list[hash_key]['id']
                del new_line_list[hash_key]['id']
                self.pool.get('account.invoice.line').write(
                    cursor,
                    user,
                    line_id,
                    new_line_list[hash_key],
                    context=context)

        return invoice_dict
Beispiel #47
0
def compute_password_digest(message):
    """Helper method to compute the message digest for the given string.
    """
    return hashlib.sha224(message).hexdigest()
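
# Usage sketch (the password value is invented): hashlib.sha224 hashes the
# argument as given, so on Python 3 the caller must pass bytes / encode first.
digest = compute_password_digest("correct horse".encode("utf-8"))
print(digest)  # 56-character hex string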
Beispiel #48
0
def createTest(text):
    definition = {}

    alg_id, parameters = splitAlgIdAndParameters(text)

    alg = QgsApplication.processingRegistry().createAlgorithmById(alg_id)

    definition['name'] = 'Test ({})'.format(alg_id)
    definition['algorithm'] = alg_id

    params = {}
    results = {}

    i = 0
    for param in alg.parameterDefinitions():
        if param.flags() & QgsProcessingParameterDefinition.FlagHidden or param.isDestination():
            continue

        if not param.name() in parameters:
            continue

        i += 1
        token = parameters[param.name()]
        # Handle empty parameters that are optionals
        if param.flags() & QgsProcessingParameterDefinition.FlagOptional and token is None:
            continue

        if isinstance(param, (QgsProcessingParameterVectorLayer,
                              QgsProcessingParameterFeatureSource)):
            schema, filepath = extractSchemaPath(token)
            p = {'type': 'vector', 'name': filepath}
            if not schema:
                p['location'] = '[The source data is not in the testdata directory. Please use data in the processing/tests/testdata folder.]'

            params[param.name()] = p
        elif isinstance(param, QgsProcessingParameterRasterLayer):
            schema, filepath = extractSchemaPath(token)
            p = {'type': 'raster', 'name': filepath}
            if not schema:
                p['location'] = '[The source data is not in the testdata directory. Please use data in the processing/tests/testdata folder.]'

            params[param.name()] = p
        elif isinstance(param, QgsProcessingParameterMultipleLayers):
            multiparams = token
            newparam = []

            # Handle datatype detection
            dataType = param.layerType()
            if dataType in [
                    QgsProcessing.TypeVectorAnyGeometry,
                    QgsProcessing.TypeVectorPoint,
                    QgsProcessing.TypeVectorLine,
                    QgsProcessing.TypeVectorPolygon, QgsProcessing.TypeVector
            ]:
                dataType = 'vector'
            else:
                dataType = 'raster'

            schema = None
            for mp in multiparams:
                schema, filepath = extractSchemaPath(mp)
                newparam.append({'type': dataType, 'name': filepath})
            p = {'type': 'multi', 'params': newparam}
            if not schema:
                p['location'] = '[The source data is not in the testdata directory. Please use data in the processing/tests/testdata folder.]'

            params[param.name()] = p
        elif isinstance(param, QgsProcessingParameterFile):
            schema, filepath = extractSchemaPath(token)
            p = {'type': 'file', 'name': filepath}
            if not schema:
                p['location'] = '[The source data is not in the testdata directory. Please use data in the processing/tests/testdata folder.]'

            params[param.name()] = p
        elif isinstance(param, QgsProcessingParameterString):
            params[param.name()] = token
        elif isinstance(param, QgsProcessingParameterBoolean):
            params[param.name()] = token
        elif isinstance(param, (QgsProcessingParameterNumber, QgsProcessingParameterDistance)):
            if param.dataType() == QgsProcessingParameterNumber.Integer:
                params[param.name()] = int(token)
            else:
                params[param.name()] = float(token)
        elif isinstance(param, QgsProcessingParameterEnum):
            if isinstance(token, list):
                params[param.name()] = [int(t) for t in token]
            else:
                params[param.name()] = int(token)
        elif isinstance(param, QgsProcessingParameterBand):
            params[param.name()] = int(token)
        elif token:
            if token[0] == '"':
                token = token[1:]
            if token[-1] == '"':
                token = token[:-1]
            params[param.name()] = token

    definition['params'] = params

    for i, out in enumerate([
            out for out in alg.destinationParameterDefinitions()
            if not out.flags() & QgsProcessingParameterDefinition.FlagHidden
    ]):
        if not out.name() in parameters:
            continue

        token = parameters[out.name()]

        if isinstance(out, QgsProcessingParameterRasterDestination):
            if token is None:
                QMessageBox.warning(
                    None, tr('Error'),
                    tr('Seems some outputs are temporary '
                       'files. To create test you need to '
                       'redirect all algorithm outputs to '
                       'files'))
                return

            dataset = gdal.Open(token, GA_ReadOnly)
            if dataset is None:
                QMessageBox.warning(
                    None, tr('Error'),
                    tr('Seems some outputs are temporary '
                       'files. To create test you need to '
                       'redirect all algorithm outputs to '
                       'files'))
                return

            dataArray = nan_to_num(dataset.ReadAsArray(0))
            strhash = hashlib.sha224(dataArray.data).hexdigest()

            results[out.name()] = {'type': 'rasterhash', 'hash': strhash}
        elif isinstance(out, (QgsProcessingParameterVectorDestination,
                              QgsProcessingParameterFeatureSink)):
            schema, filepath = extractSchemaPath(token)
            results[out.name()] = {'type': 'vector', 'name': filepath}
            if not schema:
                results[out.name()]['location'] = '[The expected result data is not in the testdata directory. Please write it to processing/tests/testdata/expected. Prefer gml files.]'
        elif isinstance(out, QgsProcessingParameterFileDestination):
            schema, filepath = extractSchemaPath(token)
            results[out.name()] = {'type': 'file', 'name': filepath}
            if not schema:
                results[out.name()]['location'] = '[The expected result file is not in the testdata directory. Please redirect the output to processing/tests/testdata/expected.]'

    definition['results'] = results
    dlg = ShowTestDialog(yaml.dump([definition], default_flow_style=False))
    dlg.exec_()
Beispiel #49
0
def hashForDictionary(metricsDictionary):
    arrToHash = []
    for key in sorted(metricsDictionary):
        arrToHash.append(key)
        arrToHash.append(metricsDictionary[key])
    return hashlib.sha224(str(arrToHash).encode('utf-8')).hexdigest()
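
# Because the keys are sorted before hashing, the digest does not depend on
# insertion order (the metric names and values below are made up for illustration):
a = hashForDictionary({'latency_ms': 12, 'errors': 0})
b = hashForDictionary({'errors': 0, 'latency_ms': 12})
assert a == b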
Beispiel #50
0
def s3_sync_code(config, dry=False):
    global S3_CODE_PATH
    if S3_CODE_PATH is not None:
        return S3_CODE_PATH
    base = config.AWS_CODE_SYNC_S3_PATH
    has_git = True

    if config.FAST_CODE_SYNC:
        try:
            current_commit = subprocess.check_output(
                ["git", "rev-parse", "HEAD"]).strip().decode("utf-8")
        except subprocess.CalledProcessError as _:
            print("Warning: failed to execute git commands")
            current_commit = None

        # Name the archive after the working directory, current commit and timestamp
        # so repeated syncs of different code states do not collide.
        file_name = str(timestamp) + "_" + hashlib.sha224(
            subprocess.check_output(["pwd"]) + str(current_commit).encode() +
            str(timestamp).encode()).hexdigest() + ".tar.gz"

        file_path = "/tmp/" + file_name

        tar_cmd = ["tar", "-zcvf", file_path, "-C", config.PROJECT_PATH]
        for pattern in config.FAST_CODE_SYNC_IGNORES:
            tar_cmd += ["--exclude", pattern]
        tar_cmd += ["-h", "."]

        remote_path = "%s/%s" % (base, file_name)

        upload_cmd = ["aws", "s3", "cp", file_path, remote_path]

        mujoco_key_cmd = [
            "aws", "s3", "sync", config.MUJOCO_KEY_PATH,
            "{}/.mujoco/".format(base)
        ]

        print(" ".join(tar_cmd))
        print(" ".join(upload_cmd))
        print(" ".join(mujoco_key_cmd))

        if not dry:
            subprocess.check_call(tar_cmd)
            subprocess.check_call(upload_cmd)
            subprocess.check_call(mujoco_key_cmd)

        S3_CODE_PATH = remote_path
        return remote_path
    else:
        try:
            current_commit = subprocess.check_output(
                ["git", "rev-parse", "HEAD"]).strip().decode("utf-8")
            clean_state = len(
                subprocess.check_output(["git", "status", "--porcelain"])) == 0
        except subprocess.CalledProcessError as _:
            print("Warning: failed to execute git commands")
            has_git = False
        dir_hash = base64.b64encode(subprocess.check_output(
            ["pwd"])).decode("utf-8")
        code_path = "%s_%s" % (
            dir_hash, (current_commit if clean_state else "%s_dirty_%s" %
                       (current_commit, timestamp)) if has_git else timestamp)
        full_path = "%s/%s" % (base, code_path)
        cache_path = "%s/%s" % (base, dir_hash)
        cache_cmds = ["aws", "s3", "cp", "--recursive"] + \
                     flatten(["--exclude", "%s" % pattern] for pattern in config.CODE_SYNC_IGNORES) + \
                     [cache_path, full_path]
        cmds = ["aws", "s3", "cp", "--recursive"] + \
               flatten(["--exclude", "%s" % pattern] for pattern in config.CODE_SYNC_IGNORES) + \
               [".", full_path]
        caching_cmds = ["aws", "s3", "cp", "--recursive"] + \
                       flatten(["--exclude", "%s" % pattern] for pattern in config.CODE_SYNC_IGNORES) + \
                       [full_path, cache_path]
        mujoco_key_cmd = [
            "aws", "s3", "sync", config.MUJOCO_KEY_PATH,
            "{}/.mujoco/".format(base)
        ]
        print(cache_cmds, cmds, caching_cmds, mujoco_key_cmd)
        if not dry:
            subprocess.check_call(cache_cmds)
            subprocess.check_call(cmds)
            subprocess.check_call(caching_cmds)
            try:
                subprocess.check_call(mujoco_key_cmd)
            except Exception:
                print('Unable to sync mujoco keys!')
        S3_CODE_PATH = full_path
        return full_path
Beispiel #51
0
# Build a dictionary whose keys are the names of the guaranteed hashing
# algorithms (md5, sha1, sha224, sha256, sha384, sha512) and whose values are
# the hash of the input in hexadecimal form, e.g. { 'sha1': 'd0b…', 'md5': '1f3…', … }.
# Print the result with separate print statements, one key/value pair per line,
# sorted by key in ascending order:
# md5 1f3…
# sha1 d0b…

import hashlib

s = input()

md5 = (hashlib.md5(s.encode())).hexdigest()
sha1 = (hashlib.sha1(s.encode())).hexdigest()
sha224 = (hashlib.sha224(s.encode())).hexdigest()
sha256 = (hashlib.sha256(s.encode())).hexdigest()
sha384 = (hashlib.sha384(s.encode())).hexdigest()
sha512 = (hashlib.sha512(s.encode())).hexdigest()

encoded_dict = {
    'md5': md5,
    'sha1': sha1,
    'sha224': sha224,
    'sha256': sha256,
    'sha384': sha384,
    'sha512': sha512
}

for key, value in sorted(encoded_dict.items()):
    print(key, value)
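
# A behaviour-equivalent alternative sketch: the same table can be built by
# looping over the algorithm names with hashlib.new() instead of spelling out
# each call by hand.
names = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512')
encoded_dict = {name: hashlib.new(name, s.encode()).hexdigest() for name in names}
for key, value in sorted(encoded_dict.items()):
    print(key, value)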
Beispiel #52
0
def transform_hash(data, scheme=None):
    scheme = scheme or {"hash": "default"}
    hash_object = hashlib.sha224(str(data) + scheme.get('hash'))
    hex_dig = hash_object.hexdigest()
    return hex_dig
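
# transform_hash above concatenates two text strings before hashing; on Python 3,
# hashlib.sha224 only accepts bytes, so a Python 3 flavoured variant of the same
# idea (a sketch, not the original code) encodes the payload first.
import hashlib

def transform_hash_py3(data, scheme=None):
    scheme = scheme or {"hash": "default"}
    payload = (str(data) + scheme.get('hash')).encode('utf-8')
    return hashlib.sha224(payload).hexdigest()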
Beispiel #53
0
 def __repr__(self):
     return '<{} [{}, new:{}, changed:{}, created:{} expired:{}] {!r}>'.format(
         self.__class__.__name__,
         hashlib.sha224(self._id.encode('utf-8')).hexdigest()
         if self._id is not None else "-",
         self._new, self._changed, self._created,
         self.is_expired(), self._mapping)
Beispiel #54
0
def sha_hash_dict(d):
    return hashlib.sha224(json.dumps(d)).hexdigest()
Beispiel #55
0

def sha224hashcrack():
    hash01 = raw_input(R + '[' + Y + '*' + R + ']' + W + ' Hash    ' + R +
                       ':' + Y + ' ')
    wordlist = raw_input(R + '[' + Y + '*' + R + ']' + W + ' Wordlist' + R +
                         ':' + Y + ' ')
    try:
        words = open(wordlist, 'r')
    except IOError, e:
        print("\n%s[%s!%s] ERROR: %s%s\n%s" % (R, Y, R, W, e, N))
        sys.exit()

    words = words.readlines()
    for word in words:
        hash = hashlib.sha224(word[:-1])
        value = hash.hexdigest()
        if hash01 == value:
            print(R + '\n[' + Y + '+' + R + ']' + W + ' Word' + R + ':' + W +
                  ' ' + word + '\n' + N)
            sys.exit()


def sha256hashcrack():
    hash01 = raw_input(R + '[' + Y + '*' + R + ']' + W + ' Hash    ' + R +
                       ':' + Y + ' ')
    wordlist = raw_input(R + '[' + Y + '*' + R + ']' + W + ' Wordlist' + R +
                         ':' + Y + ' ')
    try:
        words = open(wordlist, 'r')
    except IOError, e:
Beispiel #56
0
def get_hash(s):
  return hashlib.sha224(s.encode()).hexdigest()
Beispiel #57
0
def get_file_name(rest_def):
    return hashlib.sha224(rest_def.encode()).hexdigest()+".jpg"
Beispiel #58
0
 def stringtohash(self,val):
     return hashlib.sha224(val).hexdigest()
Beispiel #59
0
def sha224hash(suppliedhash):
    hashed = hashlib.sha224(suppliedhash).hexdigest()
    return hashed
Beispiel #60
0
def get_sha(text=None):
    return sha224(text.encode('utf-8')).hexdigest()
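
# Quick check (illustrative input): a SHA-224 hex digest is always 56 characters,
# and get_sha(None) would raise AttributeError since None has no encode().
digest = get_sha('example')
assert len(digest) == 56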