def auth_flickr(request):
    from agro.sources import utils
    api, secret, url = 'e22dd4a81125531e047036ed1ab2a9e7', '72a484d250375bdf', ''
    token = ''
    user_name, user_id = '', ''
    frob = request.GET.get('frob', '')
    if frob:
        api_sig = md5.new('%sapi_key%sfrob%smethodflickr.auth.getToken' % (secret, api, frob)).hexdigest()
        params = urllib.urlencode({'api_key': api, 'frob': frob, 'method': 'flickr.auth.getToken', 'api_sig': api_sig})
        res = utils.get_remote_data("http://api.flickr.com/services/rest/?" + params)
        if res.get("stat", "") == "fail":
            log.error("flickr retrieve failed.")
            log.error("%s" % res.get("stat"))
            return False
        #token = res.get('auth')
        auth_res = res.getchildren()[0]
        token = auth_res.find('token').text
        user = auth_res.find('user')
        user_name = user.get('username')
        user_id = user.get('nsid')
    else:
        if request.method == 'POST':
            perms = 'read'
            api_sig = md5.new('%sapi_key%sperms%s' % (secret, api, perms)).hexdigest()
            params = urllib.urlencode({'api_key': api, 'perms': perms, 'api_sig': api_sig})
            return HttpResponseRedirect('http://flickr.com/services/auth/?%s' % params)
        else:
            pass
    return render_to_response('flickr_auth.html',
                              {'api': api, 'secret': secret, 'user_name': user_name,
                               'user_id': user_id, 'token': token},
                              context_instance=RequestContext(request))
def create(user, path, name):
    """Insert a file 'name' into the FilesPool located at 'path'.

    The file belongs to the user 'user'.
    """
    db = bsddb.btopen(folder + 'index.db', 'c')
    if not os.path.exists(path):
        raise IOError("The file does not exist")
    basename = name
    basename = basename.replace('_', '')
    # strip the '_' characters from the user name
    user = user.replace('_', '')
    # generate the key for the Berkeley DB
    key = user + '_' + str(random.getrandbits(30)) + '_' + basename
    while db.get(key):
        key = user + '_' + str(random.getrandbits(30)) + '_' + basename
    # generate the path inside the directory tree
    hash = md5.new(key).hexdigest()
    hash = hash[:4] + '/' + hash[4:8] + '/' + hash[8:]
    while os.path.exists(folder + hash):
        hash = md5.new(key + str(random.getrandbits(30))).hexdigest()
        hash = hash[:4] + '/' + hash[4:8] + '/' + hash[8:]
    # create the file
    os.makedirs(folder + hash[:10])
    # print 'path=', path, 'folder+hash=', folder+hash
    # note: shutil.copy is problematic here, so use os.rename
    os.rename(path, folder + hash)
    # store the path in the Berkeley DB
    db[key] = hash
    db.close()
def crack(word):
    time.sleep(0.2)  # Edit according to your system.
    print ''.join(word), md5.new(''.join(word)).hexdigest()
    value = md5.new(''.join(word)).hexdigest()
    if sys.argv[1] == value:
        print "\n[!] Cracked:", ''.join(word), "\n"
        sys.exit(1)
def _unzipIterChunkyTest(self, compression, chunksize, lower, upper):
    """
    unzipIterChunky should unzip the given number of bytes per iteration.
    """
    junk = ' '.join([str(random.random()) for n in xrange(1000)])
    junkmd5 = md5.new(junk).hexdigest()

    tempdir = filepath.FilePath(self.mktemp())
    tempdir.makedirs()
    zfpath = tempdir.child('bigfile.zip').path
    self._makebigfile(zfpath, compression, junk)
    uziter = zipstream.unzipIterChunky(zfpath, tempdir.path, chunksize=chunksize)
    r = uziter.next()
    # test that the number of chunks is in the right ballpark;
    # this could theoretically be any number but statistically it
    # should always be in this range
    approx = lower < r < upper
    self.failUnless(approx)
    for r in uziter:
        pass
    self.assertEqual(r, 0)
    newmd5 = md5.new(
        tempdir.child("zipstreamjunk").open().read()).hexdigest()
    self.assertEqual(newmd5, junkmd5)
def update_feeds(): context = Context.objects.get(pk=1) feeds = Feed.objects.filter(enabled=True) for f in feeds: try: print "Starting %s..." % (f.title) stream = feedparser.parse(f.url) for e in stream['entries']: try: print "Getting entry id %s" % (e.id) if Item.objects.filter(md5sum=md5.new(e.link).hexdigest()).count() > 0: print 'Skipping duplicate item' else: e_date = e.updated_parsed if not e_date: e_date = datetime.datetime.utcnow() if hasattr(e, "summary"): e_content = e.summary.encode('ascii', 'xmlcharrefreplace') elif hasattr(e, "content"): e_content = e.content[0].value.encode('ascii', 'xmlcharrefreplace') i = Item(feed = f, md5sum = md5.new(e.link).hexdigest(), guid = e.id, title = e.title.encode('ascii', 'xmlcharrefreplace'), link = e.link, date = datetime.datetime.utcfromtimestamp(calendar.timegm(e_date)), content = e_content, context = context, ) i.save() print "%s - %s - Added!" % (f.title, i.title) except Exception, e: print e except Exception, e: print e
def encryptPasswordMD5(password, key):
    """returns a password hash"""
    m = md5.new()
    m.update(key)
    m.update(md5.new(password).digest())
    m.update("AOL Instant Messenger (SM)")
    return m.digest()
def find_hotels(city, country, state):
    service = "http://api.ean.com/ean-services/rs/hotel/"
    version = "v3/"
    method = "list"
    other_elements = "&cid=YOUR ACCOUNT NUMBER HERE&customerIpAddress=50.148.140.1&customerUserAgent=OSX10.9.5&customerSessionId=123456&minorRev=30&locale=en_US&currencyCode=USD"
    response_type = "json"
    API_KEY = "YOUR API KEY HERE"
    API_secret = "YOUR API SECRET HERE"
    hash = md5.new()
    timestamp = str(int(time.time()))
    signature = md5.new(API_KEY + API_secret + timestamp).hexdigest()
    city = "%s" % (city)
    countryCode = "%s" % (country)
    state = "%s" % (state)
    print '\n------------------------------\nCheck out this list of hotel suggestions\n------------------------------'
    hotel_url = (service + version + method + '?apiKey=' + API_KEY + '&sig=' + signature +
                 '&_type=' + response_type + other_elements + '&city=' + city +
                 '&countryCode=' + countryCode + '&stateProvinceCode=' + state)
    response = urlopen(hotel_url)
    json_response = load(response)
    firstSix = json_response['HotelListResponse']['HotelList']['HotelSummary'][0:6]
    for hotels in firstSix:
        print "\nHotel Name: ", hotels["name"]
        print "Address: ", hotels["address1"]
        print "Rating: ", hotels["hotelRating"]
        print "Location: ", hotels["locationDescription"]
    exit()
def makePrivateKeyString_openssh(obj, passphrase): keyType = objectType(obj) if keyType == 'ssh-rsa': keyData = '-----BEGIN RSA PRIVATE KEY-----\n' p,q=obj.p,obj.q if p > q: (p,q) = (q,p) # p is less than q objData = [0, obj.n, obj.e, obj.d, q, p, obj.d%(q-1), obj.d%(p-1),Util.number.inverse(p, q)] elif keyType == 'ssh-dss': keyData = '-----BEGIN DSA PRIVATE KEY-----\n' objData = [0, obj.p, obj.q, obj.g, obj.y, obj.x] else: raise BadKeyError('unknown key type %s' % keyType) if passphrase: iv = common.entropy.get_bytes(8) hexiv = ''.join(['%02X' % ord(x) for x in iv]) keyData += 'Proc-Type: 4,ENCRYPTED\n' keyData += 'DEK-Info: DES-EDE3-CBC,%s\n\n' % hexiv ba = md5.new(passphrase + iv).digest() bb = md5.new(ba + passphrase + iv).digest() encKey = (ba + bb)[:24] asn1Data = asn1.pack([objData]) if passphrase: padLen = 8 - (len(asn1Data) % 8) asn1Data += (chr(padLen) * padLen) asn1Data = DES3.new(encKey, DES3.MODE_CBC, iv).encrypt(asn1Data) b64Data = base64.encodestring(asn1Data).replace('\n','') b64Data = '\n'.join([b64Data[i:i+64] for i in range(0,len(b64Data),64)]) keyData += b64Data + '\n' if keyType == 'ssh-rsa': keyData += '-----END RSA PRIVATE KEY-----' elif keyType == 'ssh-dss': keyData += '-----END DSA PRIVATE KEY-----' return keyData
def PwCrypt(self, password):
    """Obfuscate password

    RADIUS hides passwords in packets by using an algorithm based on
    the MD5 hash of the packet authenticator and RADIUS secret. If no
    authenticator has been set before calling PwCrypt one is created
    automatically. Changing the authenticator after setting a password
    that has been encrypted using this function will not work.

    @param password: plaintext password
    @type password: string
    @return: obfuscated version of the password
    @rtype: string
    """
    if self.authenticator == None:
        self.authenticator = self.CreateAuthenticator()

    buf = password
    if len(password) % 16 != 0:
        buf += "\x00" * (16 - (len(password) % 16))

    hash = md5.new(self.secret + self.authenticator).digest()
    result = ""

    last = self.authenticator
    while buf:
        hash = md5.new(self.secret + last).digest()
        for i in range(16):
            result += chr(ord(hash[i]) ^ ord(buf[i]))
        last = result[-16:]
        buf = buf[16:]

    return result
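# For reference, a minimal Python 3 sketch of the same RFC 2865 section 5.2
# User-Password obfuscation, using hashlib instead of the removed md5 module.
# The function name and bytes parameters are illustrative, not part of the
# original class above.
import hashlib

def radius_pwcrypt(password, secret, authenticator):
    # Pad the plaintext with NULs up to a multiple of 16 bytes.
    if len(password) % 16 != 0:
        password += b"\x00" * (16 - len(password) % 16)
    result = b""
    last = authenticator
    for i in range(0, len(password), 16):
        block = hashlib.md5(secret + last).digest()
        chunk = bytes(a ^ b for a, b in zip(block, password[i:i + 16]))
        result += chunk
        last = chunk  # the next block is keyed on the previous ciphertext block
    return result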
def __init__(self, name, created):
    """ Setup the class """
    self.name = name
    self.folder = md5.new("%032x" % random.getrandbits(128)).hexdigest()
    self.share_code = md5.new("%032x" % random.getrandbits(128)).hexdigest()
    self.modified = datetime.now(pytz.utc)
    self.created = created
def continuity(url): import md5 format = '%25s: %s' # first fetch the file with the normal http handler opener = urllib2.build_opener() urllib2.install_opener(opener) fo = urllib2.urlopen(url) foo = fo.read() fo.close() m = md5.new(foo) print format % ('normal urllib', m.hexdigest()) # now install the keepalive handler and try again opener = urllib2.build_opener(HTTPHandler()) urllib2.install_opener(opener) fo = urllib2.urlopen(url) foo = fo.read() fo.close() m = md5.new(foo) print format % ('keepalive read', m.hexdigest()) fo = urllib2.urlopen(url) foo = '' while 1: f = fo.readline() if f: foo = foo + f else: break fo.close() m = md5.new(foo) print format % ('keepalive readline', m.hexdigest())
def save(self, *args, **kwargs):
    if not self.timestamp:
        self.timestamp = timezone.now()
    if not self.publish_key:
        self.publish_key = md5.new(''.join([random.choice(string.letters) for i in range(20)])).hexdigest()
        self.subscription_key = md5.new(self.publish_key + 'subscription_key').hexdigest()
    super(Stream, self).save(*args, **kwargs)
def api(self, site, method_name, GET={}, POST={}): if site == 'vk.com': GET['access_token'] = self.app_data[site]['access_token'] GET = urllib.urlencode(GET) POST = urllib.urlencode(POST) url = oauth_data[site]['url_api'] query = oauth_data[site]['query'] + method_name +'?'+ urllib.urlencode(self.settings_api) +'&'+ GET if POST != '': _POST = '&'+POST else: _POST = '' if acceessPermission[site]['nohttps']: sig = '&sig='+md5.new(query+_POST+self.app_data[site]['secret']).hexdigest() else: sig = '' res = self.openers[site].open(url+query+sig, POST) elif site == 'ok.ru': GET['application_key'] = self.user_data[site][3] GET['method'] = method_name keys = GET.keys() keys.sort() sig = '' for key in keys: sig += key +'='+ str(GET[key]) sig = md5.new(sig+self.app_data[site]['session_secret_key']).hexdigest().lower() GET['access_token'] = self.app_data[site]['access_token'] GET['sig'] = sig if self.app_data[site].has_key('api_server'): url = self.app_data[site]['api_server'] else: url = oauth_data[site]['url_api'] res = self.openers[site].open(url + oauth_data[site]['query'] + urllib.urlencode(GET)) elif site == 'disk.yandex.ru': pass return self._process_response(res, site)
def GET(self): dict = {} for i in range(random.randint(1,3)): # Access Points m = md5.new() ap = wifiobjects.accessPoint(self.randSent(8)) m.update(str(ap.fts)+ap.bssid) # Attached clients for z in range(random.randint(0,3)): client = wifiobjects.client(self.randSent(8)) # probes for x in range(random.randint(0,3)): client.updateProbes(self.randSent(8)) client.bssid = ap ap.addClients(client) dict[m.hexdigest()] = ap.__dict__ # Unattached Clients for y in range(random.randint(0,2)): m = md5.new() client = wifiobjects.client(self.randSent(8)) m.update(str(client.fts)+client.mac) # Unattached Client Probes for x in range(random.randint(0,3)): client.updateProbes(self.randSent(8)) dict[m.hexdigest()] = client.__dict__ # Give the JSON-encoded, dictionaried dictionary back pdb.set_trace() return json.dumps(dict)
def post(self): ip = self.request.get('ip') service = 'http://api.quova.com/' version = 'v1/' method = 'ipinfo/' apikey = '100.tkdykh8mvt7uut8ychhv' secret = 'Pd3c9pzT' hash = md5.new() timestamp = str(int(time.time())) sig = md5.new(apikey + secret + timestamp).hexdigest() url = service + version + method + ip + '?apikey=' + apikey + '&sig=' + sig + '&format=xml' xml = urllib2.urlopen(url).read() doc = parseString(xml) ip_address = doc.getElementsByTagName('ip_address')[0].firstChild.nodeValue organization = doc.getElementsByTagName('organization')[0].firstChild.nodeValue carrier = doc.getElementsByTagName('carrier')[0].firstChild.nodeValue sld = doc.getElementsByTagName('sld')[0].firstChild.nodeValue country = doc.getElementsByTagName('country')[0].firstChild.nodeValue state = doc.getElementsByTagName('state')[0].firstChild.nodeValue city = doc.getElementsByTagName('city')[0].firstChild.nodeValue postal_code = doc.getElementsByTagName('postal_code')[0].firstChild.nodeValue lat = doc.getElementsByTagName('latitude')[0].firstChild.nodeValue lon = doc.getElementsByTagName('longitude')[0].firstChild.nodeValue g = PyMap() g.key = "ABQIAAAAGcWIjwYvD9qHwmbKuSQEsxQ_LYszwfeN3sChNNHex23LZKwkgRTB3_7Qo5_EhYBGijp8h1khiBFjkg" g.maps[0].zoom = 12 s = [lat,lon, ip_address+'<br>'+organization+'<br>'+carrier+'<br>'+sld+'<br>'+country+'<br>'+state+'<br>'+city+'<br>'+postal_code+'<br>'+lat+'<br>'+lon] g.maps[0].setpoint(s) g.maps[0].center = (lat,lon) self.response.out.write(g.showhtml())
def _create_id(self): if hasattr(os, 'getpid'): pid = os.getpid() else: pid = '' self.id = md5.new( md5.new("%f%s%f%s" % (time.time(), id({}), random.random(), pid) ).hexdigest(), ).hexdigest() self.is_new = True if self.use_cookies: self.cookie[self.key] = self.id if self.cookie_domain: self.cookie[self.key]['domain'] = self.cookie_domain if self.secure: self.cookie[self.key]['secure'] = True self.cookie[self.key]['path'] = '/' if self.cookie_expires is not True: if self.cookie_expires is False: expires = datetime.fromtimestamp( 0x7FFFFFFF ) elif isinstance(self.cookie_expires, timedelta): expires = datetime.today() + self.cookie_expires elif isinstance(self.cookie_expires, datetime): expires = self.cookie_expires else: raise ValueError("Invalid argument for cookie_expires: %s" % repr(self.cookie_expires)) self.cookie[self.key]['expires'] = \ expires.strftime("%a, %d-%b-%Y %H:%M:%S GMT" ) self.request['cookie_out'] = self.cookie[self.key].output(header='') self.request['set_cookie'] = False
def hash_file(FILE_NAME):
    '''
    Description:
        This function will hash a file and return the hash object. The hash
        algorithm can be modified by changing the hashlib algorithm. This
        function should be able to hash objects of indefinite size.

    References:
        https://docs.python.org/2/library/hashlib.html
        http://www.pythoncentral.io/hashing-files-with-python/

    input args:
        FILE_NAME is the path and name of the file to be hashed in string format.

    output:
        hasher is the HASH object created by the hashlib function
    '''
    blockSize = 2 ** 16
    fileHash = md5.new()
    try:
        f = open(FILE_NAME, 'rb')
        buf = f.read(blockSize)
        while len(buf) > 0:
            fileHash.update(buf)
            buf = f.read(blockSize)
        f.close()
        return fileHash
    except IOError:
        return md5.new('bad_hash')
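# A hashlib-based equivalent of the block-wise file hashing above, as a rough
# Python 3 sketch. The function name is made up here; the IOError fallback
# mirrors the original's 'bad_hash' behaviour.
import hashlib

def hash_file_hashlib(file_name, block_size=2 ** 16):
    # Hash the file in fixed-size blocks so arbitrarily large files never
    # have to fit in memory; return the hash object, as above.
    file_hash = hashlib.md5()
    try:
        with open(file_name, 'rb') as f:
            for chunk in iter(lambda: f.read(block_size), b''):
                file_hash.update(chunk)
        return file_hash
    except IOError:
        return hashlib.md5(b'bad_hash')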
def hashes(self, username, realm, password):
    uri = '@'.join((username, realm))
    hash_src1 = ':'.join((username, realm, password))
    hash_src1b = ':'.join((uri, realm, password))
    ha1 = md5.new(hash_src1).hexdigest()
    ha1b = md5.new(hash_src1b).hexdigest()
    return (ha1, ha1b)
def manifest(request): import md5 import os from jakidy.settings import STATIC_URL path = os.path.join(os.path.dirname(__file__), 'static') filters = ['.js', '.css', '.gif', '.jpg', '.png'] hashes = [] files = [] data = "CACHE MANIFEST\n" for root, _, fnames in os.walk(path): for fn in fnames: if fn[fn.rfind('.'):] not in filters: continue with open(os.path.join(root, fn), 'rb') as f: hashes.append(md5.new(f.read()).hexdigest()) files.append('{}{}'.format(STATIC_URL, fn)) # index.html / homepage with open( os.path.join( os.path.dirname(os.path.dirname(__file__)), 'templates', 'index.html'), 'rb') as f: hashes.append(md5.new(f.read()).hexdigest()) data = "{}{}\n# hash: {}\n".format( data, "\n".join(files), md5.new(''.join(hashes)).hexdigest()) return HttpResponse(data, mimetype='text/cache-manifest')
def getTEAPass(q, p, v):
    # RSA public key
    pubkey = "F20CE00BAE5361F8FA3AE9CEFA495362FF7DA1BA628F64A347F0A8C012BF0B254A30CD92ABFFE7A6EE0DC424CB6166F8819EFA5BCCB20EDFB4AD02E412CCF579B1CA711D55B8B0B3AEB60153D5E0693A2A86F3167D7847A0CB8B00004716A9095D9BADC977CBB804DBDCBA6029A9710869A453F27DFDDF83C016D928B3CBF4C7"
    rsaPublickey = int(pubkey, 16)
    key = rsa.PublicKey(rsaPublickey, 3)
    # MD5 of the password
    p = md5.new(p).digest()
    # key for TEA
    m = md5.new(p + ("%0.16X" % q).decode('hex')).digest()
    # RSA encryption result
    n = rsa.encrypt(p, key)
    # length of the RSA result
    d = ("%0.4X" % len(n)).decode('hex')
    # RSA encryption result
    d += n
    # salt
    d += ("%0.16X" % q).decode('hex')
    # verification code length
    d += ("%0.4X" % len(v)).decode('hex')
    # verification code
    d += v.upper()
    # TEA-encrypt and Base64-encode
    r = base64.b64encode(encrypt(d, m))
    # replace special characters
    return r.replace('/', '-').replace('+', '*').replace('=', '_')
def ChangePassWord(request):
    u = Logged(request)
    if not(u):
        return HttpResponseRedirect('/books/login/')
    attempt = False  # second attempt?
    if request.method == 'POST':
        form = change_password_form(request.POST)
        if form.is_valid():
            f = form.cleaned_data
            try:
                if(f['NewPassWord'] != f['RepeatPassWord']):
                    raise Exception
                m = md5.new()
                m.update(str(f['CurrentPassWord']))
                if(str(u.PassWord) == str(m.hexdigest())):
                    m2 = md5.new()
                    m2.update(str(f['NewPassWord']))
                    u.PassWord = m2.hexdigest()
                    u.save()
                    return HttpResponseRedirect('/books/edituser/')
                else:
                    raise Exception
            except Exception as bob:
                # if user does not exist.
                attempt = True
    else:
        form = change_password_form()
    return render(request, 'password.html', {'form': form, 'user': u, 'bids': bids(u), 'attempt': attempt})
def check_pwd(self, pwd):
    if os.path.exists(self.keyfile):
        # the with-statement closes the file, so no explicit close is needed
        with open(self.keyfile, 'rb') as f:
            if md5.new(md5.new(((pwd).encode("base64")).encode("hex")).digest()).hexdigest() == f.read():
                return True
            else:
                return False
    else:
        return False
def mkpasswd(pwd, hash='ssha'):
    """Generate hashed passwords. Originated from mkpasswd in Luma
    """
    alg = {
        'ssha': 'Seeded SHA-1',
        'sha': 'Secure Hash Algorithm',
        'smd5': 'Seeded MD5',
        'md5': 'MD5',
        'crypt': 'Standard unix crypt'
    }
    # Don't add support for sambapasswords unless we're using it
    if (update_sambapassword):
        alg['lmhash'] = 'Lanman hash'
        alg['nthash'] = 'NT Hash'

    if hash not in alg.keys():
        return "Algorithm <%s> not supported in this version." % hash
    else:
        salt = getsalt()
        if hash == "ssha":
            return "{SSHA}" + base64.encodestring(sha.new(str(pwd) + salt).digest() + salt)
        elif hash == "sha":
            return "{SHA}" + base64.encodestring(sha.new(str(pwd)).digest())
        elif hash == "md5":
            return "{MD5}" + base64.encodestring(md5.new(str(pwd)).digest())
        elif hash == "smd5":
            return "{SMD5}" + base64.encodestring(md5.new(str(pwd) + salt).digest() + salt)
        elif hash == "crypt":
            return "{CRYPT}" + crypt.crypt(str(pwd), getsalt(length=2))
        # nt/lm-hash are used directly in their own password-attributes.. no need to prefix the hash
        elif hash == "lmhash":
            return smbpasswd.lmhash(pwd)
        elif hash == "nthash":
            return smbpasswd.nthash(pwd)
def test_eventteams_update(self): self.teams_auth.put() team_list = ['frc254', 'frc971', 'frc604'] request_body = json.dumps(team_list) request_path = '/api/trusted/v1/event/2014casj/team_list/update' sig = md5.new('{}{}{}'.format('321tEsTsEcReT', request_path, request_body)).hexdigest() response = self.testapp.post(request_path, request_body, headers={'X-TBA-Auth-Id': 'tEsT_id_0', 'X-TBA-Auth-Sig': sig}, expect_errors=True) self.assertEqual(response.status_code, 200) db_eventteams = EventTeam.query(EventTeam.event == self.event.key).fetch(None) self.assertEqual(len(db_eventteams), 3) self.assertTrue('2014casj_frc254' in [et.key.id() for et in db_eventteams]) self.assertTrue('2014casj_frc971' in [et.key.id() for et in db_eventteams]) self.assertTrue('2014casj_frc604' in [et.key.id() for et in db_eventteams]) team_list = ['frc254', 'frc100'] request_body = json.dumps(team_list) sig = md5.new('{}{}{}'.format('321tEsTsEcReT', request_path, request_body)).hexdigest() response = self.testapp.post(request_path, request_body, headers={'X-TBA-Auth-Id': 'tEsT_id_0', 'X-TBA-Auth-Sig': sig}, expect_errors=True) self.assertEqual(response.status_code, 200) db_eventteams = EventTeam.query(EventTeam.event == self.event.key).fetch(None) self.assertEqual(len(db_eventteams), 2) self.assertTrue('2014casj_frc254' in [et.key.id() for et in db_eventteams]) self.assertTrue('2014casj_frc100' in [et.key.id() for et in db_eventteams])
def __init__(self, *args, **kwargs): super(UserFile, self).__init__(*args, **kwargs) self.errors = [] self.errors_es = [] obj_list = {} for line in self.xml_text.split('\n'): num = self.xml_text.split('\n').index(line) + 1 if ('<map' in line): obj = MapElement(line, num) md5_obj = md5.new() md5_obj.update(obj.element_name) obj_hash = md5_obj.hexdigest() if obj_hash in obj_list.keys(): pass obj_list[obj_hash] = obj for item in obj.err_list: self.errors.append(item) for item in obj.err_list_es: self.errors_es.append(item) else: obj = XMLElement(line, num) md5_obj = md5.new() md5_obj.update(obj.element_name) obj_hash = md5_obj.hexdigest() obj_list[obj_hash] = obj for item in obj.err_list: self.errors.append(item) for item in obj.err_list_es: self.errors_es.append(item)
def info(model, msg):
    print '**** %s ****' % msg
    print 'md5(Wv) = ', md5.new(model.Wv.get_value()).hexdigest()
    print 'rstate = ', model.theano_rng.rstate
    print 'state_updates:'
    for s in model.theano_rng.state_updates:
        print '\t', md5.new(s[0].get_value()).hexdigest()
def test_awards_update(self): self.awards_auth.put() awards = [{'name_str': 'Winner', 'team_key': 'frc254'}, {'name_str': 'Winner', 'team_key': 'frc604'}, {'name_str': 'Volunteer Blahblah', 'team_key': 'frc1', 'awardee': 'Bob Bobby'}] request_body = json.dumps(awards) request_path = '/api/trusted/v1/event/2014casj/awards/update' sig = md5.new('{}{}{}'.format('321tEsTsEcReT', request_path, request_body)).hexdigest() response = self.testapp.post(request_path, request_body, headers={'X-TBA-Auth-Id': 'tEsT_id_4', 'X-TBA-Auth-Sig': sig}, expect_errors=True) self.assertEqual(response.status_code, 200) db_awards = Award.query(Award.event == self.event.key).fetch(None) self.assertEqual(len(db_awards), 2) self.assertTrue('2014casj_1' in [a.key.id() for a in db_awards]) self.assertTrue('2014casj_5' in [a.key.id() for a in db_awards]) awards = [{'name_str': 'Winner', 'team_key': 'frc254'}, {'name_str': 'Winner', 'team_key': 'frc604'}] request_body = json.dumps(awards) sig = md5.new('{}{}{}'.format('321tEsTsEcReT', request_path, request_body)).hexdigest() response = self.testapp.post(request_path, request_body, headers={'X-TBA-Auth-Id': 'tEsT_id_4', 'X-TBA-Auth-Sig': sig}, expect_errors=True) self.assertEqual(response.status_code, 200) db_awards = Award.query(Award.event == self.event.key).fetch(None) self.assertEqual(len(db_awards), 1) self.assertTrue('2014casj_1' in [a.key.id() for a in db_awards])
def _pkt_R(self): # # Startup Response # code = _unpack('!i', self.__read_bytes(4))[0] if code == 0: self.__authenticated = 1 #print 'Authenticated!' elif code == 1: raise InterfaceError('Kerberos V4 authentication is required by server, but not supported by this client') elif code == 2: raise InterfaceError('Kerberos V5 authentication is required by server, but not supported by this client') elif code == 3: self.__send(_pack('!i', len(self.__passwd)+5) + self.__passwd + '\0') elif code == 4: salt = self.__read_bytes(2) try: import crypt except: raise InterfaceError('Encrypted authentication is required by server, but Python crypt module not available') cpwd = crypt.crypt(self.__passwd, salt) self.__send(_pack('!i', len(cpwd)+5) + cpwd + '\0') elif code == 5: import md5 m = md5.new(self.__passwd + self.__userid).hexdigest() m = md5.new(m + self.__read_bytes(4)).hexdigest() m = 'md5' + m + '\0' self.__send(_pack('!i', len(m)+4) + m) else: raise InterfaceError('Unknown startup response code: R%d (unknown password encryption?)' % code)
def change_password(request, data, user):
    '''
    API to change password
    :param request:
    :param data:
    :param user:
    '''
    try:
        user_id = user.id
        old_password = data['old_password'].strip()
        new_password = data['new_password'].strip()
        if md5.new(old_password).hexdigest() != user.password:
            return custom_error(
                "The current password you have entered is incorrect.")
        user.password = md5.new(new_password).hexdigest()
        user.password_reset = False
        user.save()
        log.info("user : "******" : changed password")
        return json_response({"status": 1, "message": "Password changed successfully."})
    except Exception as error:
        log.error("Change password failed : " + error.message)
        return custom_error("Failed to change the password.")
def generate_digest(self, auth, handler, username, password):
    import md5
    import os
    # FIXME: This only does the MD5 algorithm (not MD5-sess) and
    # only the "auth" qop (quality of protection), not "auth-int"
    auth_info = parse_keqv_list(parse_http_list(auth))
    A1 = md5.new(username + ":" + auth_info["realm"] + ":" + password).hexdigest()
    A2 = md5.new("POST:" + handler).hexdigest()
    cnonce = md5.new("%s:%s:%s" % (str(self), str(os.getpid()), str(time.time()))).hexdigest()
    response = md5.new(A1 + ":" + auth_info["nonce"] + ":00000001:" + cnonce + ":auth:" + A2).hexdigest()
    self.__auth_data = 'Digest username="******", realm="%s", nonce="%s", ' \
                       'cnonce="%s", nc=00000001, qop=auth, uri="%s", ' \
                       'response="%s"' % (username, auth_info["realm"],
                                          auth_info["nonce"], cnonce, handler, response)
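# The FIXME above concerns RFC 2617 Digest authentication with algorithm=MD5
# and qop=auth; a small stand-alone sketch of that response calculation follows.
# Function and parameter names are illustrative, not part of the class above.
import hashlib

def digest_response(username, realm, password, method, uri, nonce, cnonce,
                    nc="00000001", qop="auth"):
    # HA1 = MD5(username:realm:password), HA2 = MD5(method:uri),
    # response = MD5(HA1:nonce:nc:cnonce:qop:HA2)  (RFC 2617, MD5 / qop=auth only)
    ha1 = hashlib.md5(("%s:%s:%s" % (username, realm, password)).encode()).hexdigest()
    ha2 = hashlib.md5(("%s:%s" % (method, uri)).encode()).hexdigest()
    return hashlib.md5(("%s:%s:%s:%s:%s:%s" %
                        (ha1, nonce, nc, cnonce, qop, ha2)).encode()).hexdigest()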
import hmac
import hashlib
import md5
import urllib2

from django.conf import settings
import phpserialize

from models import WpOptions, WpUsers, WpUsermeta

SITE_URL = WpOptions.objects.using('wordpress')\
    .get(option_name='siteurl').option_value
COOKIEHASH = md5.new(SITE_URL).hexdigest()
LOGIN_URL = SITE_URL + "/wp-login.php"


def _hmac(salt, data):
    return hmac.new(salt, msg=data, digestmod=hashlib.md5).hexdigest()


def _generate_auth_cookie(username, password, expires):
    expires = str(expires)
    wp_salt = settings.LOGGED_IN_KEY + settings.LOGGED_IN_SALT
    pass_fragment = password[8:12]
    wp_hash = _hmac(wp_salt, username + pass_fragment + "|" + expires)
    auth_cookie = _hmac(wp_hash, username + "|" + expires)
    return auth_cookie


def get_wordpress_user(request):
import string, md5, base64
'''
fab3e420d6d8a17b53b23ca4bb01866b
189f56eea9a9ba305dffa8425ba20048
2335667c646346b38c8f0f47b13fab13
f4709a7eef9d703920b910fc734b151c
b74e57f21f5a315550a9e2f6869d4e44
40abc257b6f0e0420dc9ae9ba19c8c8c
b74e57f21f5a315550a9e2f6869d4e44 AHzP
2335667c646346b38c8f0f47b13fab13 BcJH
f4709a7eef9d703920b910fc734b151c N8tK
40abc257b6f0e0420dc9ae9ba19c8c8c QmHY
189f56eea9a9ba305dffa8425ba20048 9aSY
HV15-9aSY-BcJH-N8tK-AHzP-QmHY
'''
v = string.ascii_uppercase + string.ascii_lowercase + string.digits
#v="HV15"
for i in v:
    for j in v:
        for k in v:
            for l in v:
                text = i + j + k + l
                h = md5.new(base64.b64encode(text)).hexdigest()
                if h == "189f56eea9a9ba305dffa8425ba20048" or h == "2335667c646346b38c8f0f47b13fab13" or h == "f4709a7eef9d703920b910fc734b151c" or h == "b74e57f21f5a315550a9e2f6869d4e44" or h == "40abc257b6f0e0420dc9ae9ba19c8c8c":
                    print h, text
def _sign(self, params):
    params = '&'.join(sorted(["%s=%s" % (k, v) for k, v in params.items()]))
    #print 's:', s
    return md5.new(params).hexdigest().lower()
interval = MINIMUM_INTERVAL # seconds try: url = config.get(section, 'url') except (ConfigParser.NoOptionError): logger.error('No URL found in section "%s", file "%s"' % (section, filename)) pass if feed_config.has_key(url): if feed_config[url]['interval'] > interval: feed_config[url]['interval'] = interval else: feed_config[url] = { 'interval': interval, 'md5': md5.new(url).hexdigest() } for url in feed_config: out_filename = os.path.join(DATAPATH, feed_config[url]['md5']) out_filename_temp = out_filename + '.tmp' last_modified = None last_etag = None if os.path.exists(out_filename): last_updated = time.time() - os.path.getmtime(out_filename) if last_updated < feed_config[url]['interval']: logger.info("Skipping fetch of %s, last updated %s seconds ago" % (url, int(last_updated))) continue
from cssdbpy import Connection
import md5

if __name__ == '__main__':
    conn = Connection('127.0.0.1', 8888)
    for i in xrange(0, 10000):
        md5word = md5.new('word{}'.format(i)).hexdigest()
        create = conn.execute('zset', 'ztest', md5word, i)
        value = conn.execute('zget', 'ztest', md5word)
        conn.execute('zincr', 'ztest', md5word)
        conn.execute('zincr', 'ztest', md5word, 10)
        exists = conn.execute('zexists', 'ztest', md5word)
        delete = conn.execute('zdel', 'ztest', md5word)
        print md5word, value, create, exists, delete

    print conn.execute('zscan', 'ztest', '', 0, 1000, 10)
    print conn.execute('zrscan', 'ztest', 1000, 0, 10)
    conn.execute('zclean', 'words')
def handleUniqueConfig(labidname, line, nametags, studentlabdir, container_list, logger): retval = True targetlines = None #print('line is %s' % line) logger.debug('line is %s' % line) (each_key, each_value) = line.split('=', 1) each_key = each_key.strip() #print each_key # Note: config file has been validated # Split into four parts or five parts # NOTE: Split using ' : ' - i.e., "space colon space" values = [x.strip() for x in each_value.split(' : ')] line_at = findLineIndex(values) num_splits = line_at + 1 # NOTE: Split using ' : ' - i.e., "space colon space" values = [x.strip() for x in each_value.split(' : ', num_splits)] newtargetfile = values[0].strip() logger.debug('line_at is %d newtargetvalue = %s, values: %s' % (line_at, newtargetfile, str(values))) #print('newtargetfile is %s' % newtargetfile) # <cfgcontainername>:<exec_program>.<type> containername = None if ':' in newtargetfile: cfgcontainername, targetfile = newtargetfile.split(':', 1) else: ''' default to first container? ''' #print('first cont is %s' % container_list[0]) containername = container_list[0] targetfile = newtargetfile # Construct proper containername from cfgcontainername if containername is None: containername = labidname + "." + cfgcontainername + ".student" result_home = '%s/%s/%s' % (studentlabdir, containername, ".local/result/") if targetfile.startswith('/'): targetfile = os.path.join(result_home, targetfile[1:]) #print('targetfile is %s containername is %s' % (targetfile, containername)) logger.debug('targetfile is %s, containername is %s' % (targetfile, containername)) if containername is not None and containername not in container_list: print "Config line (%s) containername %s not in container list (%s), skipping..." % ( line, containername, str(container_list)) logger.debug( "Config line (%s) containername %s not in container list (%s), skipping..." % (line, containername, str(container_list))) # set nametags - value pair to NONE nametags[targetfile] = "NONE" return False command = values[line_at].strip() targetfname_list = [] if targetfile.startswith('~/'): targetfile = targetfile[2:] targetfname = os.path.join(studentlabdir, containername, targetfile) #print "targetfname is (%s)" % targetfname #print "labdir is (%s)" % studentlabdir targetfname_list.append(targetfname) #print "Current targetfname_list is %s" % targetfname_list tagstring = "NONE" # Loop through targetfname_list for current_targetfname in targetfname_list: if not os.path.exists(current_targetfname): # If file does not exist, treat as can't find token token = "NONE" logger.debug("No %s file does not exist\n" % current_targetfname) nametags[targetfile] = token return False else: # Read in corresponding file targetf = open(current_targetfname, "r") targetlines = targetf.readlines() targetf.close() targetfilelen = len(targetlines) #print('current_targetfname %s' % current_targetfname) # command has been validated if command == 'CHECKSUM': ''' Create a checksum of the targetfile ''' mymd5 = md5.new() targetlinestring = "".join(targetlines) mymd5.update(targetlinestring) tagstring = mymd5.hexdigest() nametags[targetfile] = tagstring #print('tag string is %s for eachkey %s' % (tagstring, each_key)) return True else: # config file should have been validated # - if still unknown command, then should exit logger.error('unknown command %s' % command) sys.exit(1)
def md5sum(self, fileObj): return md5.new(fileObj.data).digest().encode("hex")
def main(ldata, pdata, rdata, tdata, gwid): # this is common code to process packet information provided by the main gateway script (i.e. post_processing_gw.py) # these information are provided in case you need them arr = map(int, pdata.split(',')) dst = arr[0] ptype = arr[1] src = arr[2] seq = arr[3] datalen = arr[4] SNR = arr[5] RSSI = arr[6] #compute the MD5 digest (hash) from the clear gw id provided by post_processing_gw.py global gw_id_md5 gw_id_md5 = md5.new(gwid).hexdigest() #LoRaWAN packet if dst == 256: src_str = "%0.8X" % src else: src_str = str(src) if (src_str in key_Orion.source_list) or (len(key_Orion.source_list) == 0): #remove any space in the message as we use '/' as the delimiter #any space characters will introduce error in the json structure and then the curl command will fail ldata = ldata.replace(' ', '') # this part depends on the syntax used by the end-device # we use: TC/22.4/HU/85... # # but we accept also a_str#b_str#TC/22.4/HU/85... to indicate a Fiware-Service and Fiware-ServicePath # or simply 22.4 in which case, the nomemclature will be DEF # get number of '#' separator nsharp = ldata.count('#') nslash = 0 # no separator if nsharp == 0: # will use default Fiware-Service and Fiware-ServicePath data = ['', ''] # get number of '/' separator on ldata nslash = ldata.count('/') # contains ['', '', "s1", s1value, "s2", s2value, ...] data_array = data + re.split("/", ldata) else: data_array = re.split("#", ldata) # only 1 separator if nsharp == 1: # insert '' to indicate default Fiware-Service # as we assume that the only parameter indicate the Fiware-ServicePath data_array.insert(0, '') # if the length is greater than 2 if len(data_array[1]) < 3: data_array[1] = '' # we have 2 separators if nsharp == 2: # if the length of a fields is greater than 2 then we take it into account if len(data_array[0]) < 3: data_array[0] = '' if len(data_array[1]) < 3: data_array[1] = '' # get number of '/' separator on data_array[2] # because ldata may contain '/' as Fiware-ServicePath name nslash = data_array[2].count('/') # then reconstruct data_array data_array = [data_array[0], data_array[1]] + re.split( "/", data_array[2]) # at the end data_array contains # ["Fiware-Service", "Fiware-ServicePath", "s1", s1value, "s2", s2value, ...] 
# just in case we have an ending CR or 0 data_array[len(data_array) - 1] = data_array[len(data_array) - 1].replace('\n', '') data_array[len(data_array) - 1] = data_array[len(data_array) - 1].replace('\0', '') nomenclatures = [] # data to send data = [] data.append(data_array[0]) #Fiware-service (if '' default) data.append(data_array[1]) #Fiware-servicePath (if '' default) if nslash == 0: # old syntax without nomenclature key, so insert only one key # we use DEF nomenclatures.append("DEF") data.append(data_array[2]) else: # completing nomenclatures and data i = 2 while i < len(data_array) - 1: nomenclatures.append(data_array[i]) data.append(data_array[i + 1]) i += 2 connected = test_network_available() #if we got a response from the server, send the data to it if (connected): print("Orion: uploading") #here we append the device's address to get for instance Sensor2 #if packet come from a LoRaWAN device with 4-byte devAddr then we will have for instance Sensor01020304 #where the devAddr is expressed in hex format send_data(data, key_Orion.sensor_name + src_str, nomenclatures, tdata) else: print("Orion: not uploading") if (CloudNoInternet_enabled): print("Using CloudNoInternet") from CloudNoInternet import store_internet_pending # we call store_internet_pending to store the message for future upload store_internet_pending(ldata, pdata, rdata, tdata, gwid) # update connection_failure value global connection_failure connection_failure = not connected else: print "Source is not is source list, not sending with CloudOrion.py"
def hashfn(self, k):
    m = md5.new(str(k))
    return base64.b64encode(m.digest())[:6]  # limiting to 6 for readability only
import md5  # imports the md5 module to generate a hash

password = '******'
# encrypt the password we provided as 32 character string
hashed_password = md5.new(password).hexdigest()
print hashed_password  # this will show you the hashed value
# 5f4dcc3b5aa765d61d8327deb882cf99 -> nice!
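# On Python 3 the md5 module is gone; a hashlib equivalent of the snippet above,
# assuming the plaintext was the string 'password' (the commented hash value is
# the well-known MD5 of that string).
import hashlib

password = 'password'  # assumed plaintext; its MD5 matches the value noted above
hashed_password = hashlib.md5(password.encode('utf-8')).hexdigest()
print(hashed_password)  # 5f4dcc3b5aa765d61d8327deb882cf99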
magic = "CRYPTROLO" if len(sys.argv) < 4: print("Usage: %s key outfile infile1 [infile2] [...infileN]" % (sys.argv[0])) sys.exit() key = sys.argv[1] outfn = sys.argv[2] filenames = sys.argv[3:] random.seed(key) with open(outfn, "wb") as outfile: outfile.write("CRYPTROLO") for filename in filenames: with open(filename, "rb") as infile: content = infile.read() outfile.write(struct.pack(">L", len(filename))) outfile.write(filename) outfile.write(md5.new(content).digest().encode("hex")) outfile.write(struct.pack(">L", len(content))) for pos in xrange(0, len(content), 4): outfile.write( struct.pack( ">L", int(content[pos:pos + 4].encode('hex').ljust(8, '0'), 16) ^ random.getrandbits(32)))
type=unicode, help='output directory') parser.add_argument('font', metavar='font', type=unicode, help='font name') parser.add_argument('types', metavar='types', type=lambda s: s.split(','), help='output types') parser.add_argument('--hashes', action='store_true', help='add hashes to file names') args = parser.parse_args() f = fontforge.font() f.encoding = 'UnicodeFull' m = md5.new() cp = 0xf100 files = [] KERNING = 15 for dirname, dirnames, filenames in os.walk(args.input_dir): for filename in filenames: name, ext = os.path.splitext(filename) filePath = os.path.join(dirname, filename) size = os.path.getsize(filePath) if ext in ['.svg', '.eps']: m.update(filename + str(size) + ';') glyph = f.createChar(cp) glyph.importOutlines(filePath)
def __init__(self, fn=None): self.package = None self.version = 'none' self.parsed_version = None self.architecture = None self.maintainer = None self.source = None self.description = None self.depends = None self.provides = None self.replaces = None self.conflicts = None self.recommends = None self.suggests = None self.section = None self.filename_header = None self.file_list = [] self.file_all_list = [] self.md5 = None self.size = None self.installed_size = None self.filename = None self.isdeb = 0 if fn: # see if it is deb format f = open(fn, "r") magic = f.read(4) f.close() if (magic == "!<ar"): self.isdeb = 1 # compute the MD5. f = open(fn, "r") sum = md5.new() while 1: data = f.read(1024) if not data: break sum.update(data) f.close() if sys.version[:1] > '2': # when using Python 2.0 or newer self.md5 = sum.hexdigest() else: self.md5 = string.join( map((lambda x: "%02x" % ord(x)), sum.digest()), '') stat = os.stat(fn) self.size = stat[ST_SIZE] self.filename = os.path.basename(fn) ## sys.stderr.write(" extracting control.tar.gz from %s\n"% (fn,)) if self.isdeb: control = os.popen( "ar p " + fn + " control.tar.gz | tar xfzO - '*control'", "r") else: control = os.popen( "tar xfzO " + fn + " '*control.tar.gz' | tar xfzO - '*control'", "r") line = control.readline() while 1: if not line: break line = string.rstrip(line) lineparts = re.match(r'([\w-]*?):\s*(.*)', line) if lineparts: name = string.lower(lineparts.group(1)) value = lineparts.group(2) while 1: line = control.readline() if not line: break if line[0] != ' ': break line = string.rstrip(line) value = value + '\n' + line # don't allow package to override its own filename if name == "filename": self.filename_header = value else: if self.__dict__.has_key(name): self.__dict__[name] = value else: line = control.readline() control.close() if self.isdeb: data = os.popen("ar p " + fn + " data.tar.gz | tar tfz -", "r") else: data = os.popen( "tar xfzO " + fn + " '*data.tar.gz' | tar tfz -", "r") while 1: line = data.readline() if not line: break self.file_list.append(string.rstrip(line)) data.close() # bigger listing with all permissions if self.isdeb: data = os.popen("ar p " + fn + " data.tar.gz | tar tvfz -", "r") else: data = os.popen( "tar xfzO " + fn + " '*data.tar.gz' | tar tvfz -", "r") while 1: line = data.readline() if not line: break self.file_all_list.append(string.rstrip(line)) data.close() self.scratch_dir = None self.file_dir = None self.meta_dir = None
def calc_md5(p):
    hash = new()
    hash.update(p)
    return hash.hexdigest()
def md5_from_files(file_objects):
    "note the plural"
    m = md5.new()
    for f in file_objects:
        update_md5_from_file(m, f)
    return m.hexdigest()
import md5, time, csv

counter = 0
print('### MD5 Dictionary Attack Tool ###')
passHash = str(raw_input('Input the password hash to crack: '))
salt = str(raw_input('Input salt (leave empty for no salt): '))
wordfile = open('wordlist.csv', 'rb')
reader = csv.reader(wordfile)
startTime = time.time()

for row in reader:
    counter += 1
    currentPass = row[0] + salt
    digest = md5.new()
    digest.update(currentPass)
    currentHash = digest.hexdigest()
    if currentHash == passHash:
        stopTime = time.time()
        timeTaken = stopTime - startTime
        print('Password\t: %s' % currentPass)
        print('Time Taken \t: %s' % timeTaken)
        print('Attempts\t: %s' % counter)
        break
def MD5(): return md5.new()
def md5_from_key(boto_key):
    m = md5.new()
    for byte in boto_key:
        m.update(byte)
    return m.hexdigest()
def init(self): self.hasher = md5.new()
def md5_from_file(file_object):
    m = md5.new()
    update_md5_from_file(m, file_object)
    return m.hexdigest()
def _get_offset_file_idstr(file_1, file_2): return md5.new(file_1 + file_2).hexdigest()
def new(algo):
    if algo == 'md5':
        return md5.new()
    if algo == 'sha1':
        return sha.new()
    raise ValueError, "Bad checksum type"
def _gen_nonce(self): return md5.new("%s:%s:%s" % (str(random.random()), str( time.gmtime()), str(os.getpid()))).hexdigest()
def token():
    m = md5.new()
    t = random.random()
    m.update(str(t))
    return m.hexdigest()[:8]
def md5(): return md5.new()
def GenModuleUNIEncodeFile(ModuleObject, UniFileHeader='', Encoding=DT.TAB_ENCODING_UTF16LE): GenUNIFlag = False OnlyLANGUAGE_EN_X = True BinaryAbstract = [] BinaryDescription = [] # # If more than one language code is used for any element that would be present in the MODULE_UNI_FILE, # then the MODULE_UNI_FILE must be created. # for (Key, Value) in ModuleObject.GetAbstract() + ModuleObject.GetDescription(): if Key == DT.TAB_LANGUAGE_EN_X: GenUNIFlag = True else: OnlyLANGUAGE_EN_X = False for UserExtension in ModuleObject.GetUserExtensionList(): if UserExtension.GetUserID() == DT.TAB_BINARY_HEADER_USERID \ and UserExtension.GetIdentifier() == DT.TAB_BINARY_HEADER_IDENTIFIER: for (Key, Value) in UserExtension.GetBinaryAbstract(): if Key == DT.TAB_LANGUAGE_EN_X: GenUNIFlag = True else: OnlyLANGUAGE_EN_X = False BinaryAbstract.append((Key, Value)) for (Key, Value) in UserExtension.GetBinaryDescription(): if Key == DT.TAB_LANGUAGE_EN_X: GenUNIFlag = True else: OnlyLANGUAGE_EN_X = False BinaryDescription.append((Key, Value)) if not GenUNIFlag: return elif OnlyLANGUAGE_EN_X: return else: ModuleObject.UNIFlag = True ContainerFile = GetUniFileName(os.path.dirname(ModuleObject.GetFullPath()), ModuleObject.GetBaseName()) if not os.path.exists(os.path.dirname(ModuleObject.GetFullPath())): os.makedirs(os.path.dirname(ModuleObject.GetFullPath())) Content = UniFileHeader + '\r\n' Content += '\r\n' Content += FormatUniEntry('#string ' + DT.TAB_INF_ABSTRACT, ModuleObject.GetAbstract(), ContainerFile) + '\r\n' Content += FormatUniEntry('#string ' + DT.TAB_INF_DESCRIPTION, ModuleObject.GetDescription(), ContainerFile) \ + '\r\n' BinaryAbstractString = FormatUniEntry( '#string ' + DT.TAB_INF_BINARY_ABSTRACT, BinaryAbstract, ContainerFile) if BinaryAbstractString: Content += BinaryAbstractString + '\r\n' BinaryDescriptionString = FormatUniEntry('#string ' + DT.TAB_INF_BINARY_DESCRIPTION, BinaryDescription, \ ContainerFile) if BinaryDescriptionString: Content += BinaryDescriptionString + '\r\n' if not os.path.exists(ContainerFile): File = codecs.open(ContainerFile, 'wb', Encoding) File.write(u'\uFEFF' + Content) File.stream.close() Md5Sigature = md5.new(__FileHookOpen__(str(ContainerFile), 'rb').read()) Md5Sum = Md5Sigature.hexdigest() if (ContainerFile, Md5Sum) not in ModuleObject.FileList: ModuleObject.FileList.append((ContainerFile, Md5Sum)) return ContainerFile
class GlobalVars: false_positives = [] whitelisted_users = [] blacklisted_users = [] ignored_posts = [] auto_ignored_posts = [] startup_utc = datetime.utcnow().strftime("%H:%M:%S") latest_questions = [] api_backoff_time = 0 charcoal_room_id = "11540" meta_tavern_room_id = "89" socvr_room_id = "41570" blockedTime = { "all": 0, charcoal_room_id: 0, meta_tavern_room_id: 0, socvr_room_id: 0 } experimental_reasons = [] # Don't widely report these non_socvr_reasons = [] # Don't report to SOCVR non_tavern_reasons = [ # Don't report in the Tavern "all-caps body", "all-caps answer", "repeating characters in body", "repeating characters in title", "repeating characters in answer", "few unique characters in body", "few unique characters in answer", "title has only one unique char", "phone number detected in title", "offensive body detected", "no whitespace in body", "no whitespace in answer", ] non_tavern_sites = ["stackoverflow.com"] parser = HTMLParser.HTMLParser() wrap = Client("stackexchange.com") wrapm = Client("meta.stackexchange.com") wrapso = Client("stackoverflow.com") privileged_users = { charcoal_room_id: [ "117490", # Normal Human "66258", # Andy "31768", # ManishEarth "103081", # hichris123 "73046", # Undo "88521", # ProgramFOX "59776", # Doorknob "31465", # Seth "88577", # Santa Claus "34124", # Andrew Leach "54229", # apnorton "20459", # S.L. Barth "32436", # tchrist "30477", # Brock Adams "58529", # ferrybig "145208", # Robert Longson "178825", # Ms Yvette "171800", # JAL "64978", # PeterJ "125141", # Jeffrey Bosboom "54902", # bummi "135450", # M.A.R. "145604", # Quill "60548", # rene "121401", # michaelpri "116218", # JamesENL "82927", # Braiam "11606", # bwDraco "19761", # Ilmari Karonen "108271", # Andrew T. "171054", # Magisch "190011", # Petter Friberg "165661", # Tunaki "145086", # Wai Ha Lee "137665", # ByteCommander "147884", # wythagoras "186395", # Åna "193364", # Ashish Ahuja "163686", # Gothdo "145827", # angussidney "244748", # Supreme Leader SnokeDetector (angussidney's sock) "121520", # ArtOfCode "244382", # Lt. A. Code (ArtOfCode's sock to test things with) "137388", # QPaysTaxes "212311", # Ryan Bemrose "172397", # Kyll "224538", # FrankerZ "61202", # OldSkool "56166", # Jan Dvorak "133966", # DavidPostill "22839", # djsmiley2k "97389", # Kaz Wolfe "144962", # DJMcMayhem "139423", # NobodyNada "62118", # tripleee "130558", # Registered User "128113", # arda "164318", # Glorfindel "175347", # Floern "180274" # Alexander O'Mara ], meta_tavern_room_id: [ "315433", # Normal Human "244519", # CRABOLO "244382", # TGMCians "194047", # Jan Dvorak "158100", # rene "178438", # Manishearth "237685", # hichris123 "215468", # Undo "229438", # ProgramFOX "180276", # Doorknob "161974", # Lynn Crumbling "186281", # Andy "266094", # Unihedro "245167", # Infinite Recursion "230261", # Jason C "213575", # Braiam "241919", # Andrew T. "203389", # backwards-Seth "202832", # Mooseman "160017", # bwDraco "201151", # bummi "188558", # Frank "229166", # Santa Claus "159034", # Kevin Brown "203972", # PeterJ "188673", # Alexis King "258672", # AstroCB "227577", # Sam "255735", # cybermonkey "279182", # Ixrec "271104", # James "220428", # Qantas 94 Heavy "153355", # tchrist "238426", # Ed Cottrell "166899", # Second Rikudo "287999", # ASCIIThenANSI "208518", # JNat "284141", # michaelpri "260312", # vaultah "244062", # SouravGhosh "152859", # Shadow Wizard "201314", # apnorton "280934", # M.A.Ramezani "200235", # durron597 "148310", # Awesome Poodles / Brock Adams "168333", # S.L. 
Barth "257207", # Unikitty "244282", # DroidDev "163250", # Cupcake "298265", # BoomsPlus "253560", # josilber "244254", # misterManSam "188189", # Robert Longson "174699", # Ilmari Karonen "202362", # chmod 666 telkitty "289717", # Quill "237813", # bjb568 "311345", # Simon Klaver "171881", # rekire "260388", # Pandya "310756", # Ms Yvette "262399", # Jeffrey Bosboom "242209", # JAL "280883", # ByteCommander "302251", # kos "262823", # ArtOfCode "215067", # Ferrybig "308386", # Magisch "285368" # angussidney ], socvr_room_id: [ "1849664", # Undo "2581872", # hichris123 "1198729", # Manishearth "3717023", # Normal Human aka 1999 "2619912", # ProgramFOX "578411", # rene "1043380", # gunr2171 "2246344", # Sam "2756409", # TylerH "1768232", # durron597 "359284", # Kevin Brown "258400", # easwee "3622940", # Unihedron "3204551", # Deduplicator "4342498", # NathanOliver "4639281", # Tiny Giant "3093387", # josilber "1652962", # cimmanon "1677912", # Mogsdad "656243", # Lynn Crumbling "3933332", # Rizier123 "2422013", # cybermonkey "3478852", # Nisse Engström "2302862", # Siguza "1324", # Paul Roub "1743880", # Tunaki "1663001", # DavidG "2415822", # JAL "4174897", # Kyll "5299236", # Kevin Guan "4050842", # Thaillie "1816093", # Drew "874188", # Triplee "880772", # approxiblue "1835379", # Cerbrus "3956566", # JamesENL "2357233", # Ms Yvette "3155639", # AlexanderOMara "462627", # Praveen Kumar "4490559", # intboolstring "1364007", # Wai Ha Lee "1699210", # bummi "563532", # Rob "5389107", # Magisch "4099593", # bhargav-rao "1542723", # Ferrybig "2025923", # Tushar "5292302", # Petter Friberg "792066", # Braiam "5666987", # Ian "3160466", # ArtOfCode "5735775", # Ashish Ahuja "3476191", # Nobody Nada "2227743", # Eric D "821878", # Ryan Bemrose "1413395", # Panta Rei "4875631", # FrankerZ "2958086", # Compass "499214", # JanDvorak "5647260", # Andrew L. 
"559745" # Floern ] } code_privileged_users = None smokeDetector_user_id = { charcoal_room_id: "120914", meta_tavern_room_id: "266345", socvr_room_id: "3735529" } censored_committer_names = {"3f4ed0f38df010ce300dba362fa63a62": "Undo1"} commit = os.popen('git log --pretty=format:"%h" -n 1').read() commit_author = os.popen('git log --pretty=format:"%an" -n 1').read() if md5.new(commit_author).hexdigest() in censored_committer_names: commit_author = censored_committer_names[md5.new( commit_author).hexdigest()] commit_with_author = os.popen('git log --pretty=format:"%h (' + commit_author + ': *%s*)" -n 1').read() on_master = os.popen( "git rev-parse --abbrev-ref HEAD").read().strip() == "master" charcoal_hq = None tavern_on_the_meta = None socvr = None s = "" s_reverted = "" specialrooms = [] apiquota = -1 bodyfetcher = None se_sites = [] users_chatting = { meta_tavern_room_id: [], charcoal_room_id: [], socvr_room_id: [] } why_data = [] why_data_allspam = [] notifications = [] listen_to_these_if_edited = [] multiple_reporters = [] api_calls_per_site = {} api_request_lock = threading.Lock() config = ConfigParser.RawConfigParser() if os.path.isfile('config'): config.read('config') else: config.read('config.ci') latest_smokedetector_messages = { meta_tavern_room_id: [], charcoal_room_id: [], socvr_room_id: [] } # environ_or_none defined in helpers.py bot_name = environ_or_none("SMOKEDETECTOR_NAME") or "SmokeDetector" bot_repository = environ_or_none( "SMOKEDETECTOR_REPO") or "//github.com/Charcoal-SE/SmokeDetector" chatmessage_prefix = "[{}]({})".format(bot_name, bot_repository) site_id_dict = {} post_site_id_to_question = {} location = config.get("Config", "location") print location metasmoke_ws = None try: metasmoke_host = config.get("Config", "metasmoke_host") print metasmoke_host except ConfigParser.NoOptionError: metasmoke_host = None print "metasmoke host not found. Set it as metasmoke_host in the config file. See https://github.com/Charcoal-SE/metasmoke." try: metasmoke_key = config.get("Config", "metasmoke_key") except ConfigParser.NoOptionError: metasmoke_key = "" print "No metasmoke key found, which is okay if both are running on the same host" try: metasmoke_ws_host = config.get("Config", "metasmoke_ws_host") except ConfigParser.NoOptionError: metasmoke_ws_host = "" print "No metasmoke websocket host found, which is okay if you're anti-websocket" try: github_username = config.get("Config", "github_username") github_password = config.get("Config", "github_password") except ConfigParser.NoOptionError: github_username = None github_password = None
def H(s): return md5.new(s).digest()
def set_instance_autosave_id():
    global instance_autosave_id_str
    instance_autosave_id_str = "_" + md5.new(str(os.urandom(32))).hexdigest()
#!/usr/bin/env python
import md5
import sys

doorid = sys.argv[1]
counter = 0
doorhash = md5.new(doorid + '0').hexdigest()
code = list('--------')

while '-' in code:
    while True:
        doorhash = md5.new(doorid + str(counter)).hexdigest()
        counter += 1
        if str(doorhash).startswith('00000'):
            break
    print doorhash[5], counter
    try:
        position = int(doorhash[5])
    except ValueError:
        continue
    if position > 7 or code[position] != '-':
        continue
    code[position] = doorhash[6]

print ''.join(code)