def test_get_compression_stream(self):
    """Round-trip 1 MiB of random data through a compression stream for
    each supported mode and verify the MD5 of the decompressed output
    matches the original data."""
    for c_mode in ['gzip', 'bzip2']:
        # NOTE(review): the fd returned by mkstemp is never closed (leak);
        # string.letters and the md5 module are Python 2 only.
        fd,file_path = tempfile.mkstemp(prefix='holland-test-')
        dir_path = tempfile.mkdtemp(prefix='holland-test-dir')
        file_path = os.path.realpath(file_path)
        # the stream helper is expected to create the file itself
        os.remove(file_path)
        dir_path = os.path.realpath(dir_path)
        data = ''
        for i in range(1024**2):
            data = data + random.choice(string.letters)
        stream = h.get_compression_stream(output_path=file_path, mode=c_mode)
        stream.write(data)
        stream.close()
        new_file_path = h.decompress_path(
            source_path=file_path, dest_dir=dir_path, mode=c_mode
        )
        f = open(new_file_path, 'r')
        # compare digests of decompressed output vs. original data
        a = md5.new(f.read()).digest()
        b = md5.new(data).digest()
        self.assertEqual(a == b, True)
        f.close()
        # clean up
        os.remove(new_file_path)
        rmtree(dir_path)
def test_compress_path(self):
    # Test to see if a file can be gzipped and ungzipped
    # (and it returns the same md5sum)
    fd,file_path = tempfile.mkstemp(prefix='holland-test-')
    dir_path = tempfile.mkdtemp(prefix='holland-test-dir')
    file_path = os.path.realpath(file_path)
    dir_path = os.path.realpath(dir_path)
    # Create and compress the file (1 MiB of random letters;
    # string.letters is Python 2 only)
    handle = os.fdopen(fd, 'w')
    for i in range(1024**2):
        handle.write(random.choice(string.letters))
    handle.close()
    comp_path = h.compress_path(
        source_path = file_path, dest_dir = dir_path,
        remove_source = False, mode = 'gzip'
    )
    self.assertEqual(comp_path != None, True)
    # Uncompress the file and compare to original
    uncomp_path = h.decompress_path(
        source_path = comp_path, dest_dir = dir_path,
        remove_source = False, mode = 'gzip'
    )
    self.assertEqual(uncomp_path != None, True)
    # NOTE(review): file() is Python 2 only and neither handle is closed,
    # nor are the temp file/dir removed afterwards.
    original_file = file(file_path)
    uncompressed_file = file(uncomp_path)
    a = md5.new(original_file.read()).digest()
    b = md5.new(uncompressed_file.read()).digest()
    self.assertEqual(a == b, True)
def login(self, request):
    """Authenticate a request and return (user_id, email, token).

    Four paths, in priority order:
      1. HTTP Basic auth header  -> verify password hash, issue new token.
      2. request.user_id/email + request.token attributes -> verify token
         (or hash), refreshing the token when it differs.
      3. user_id + token cookies -> verify the stored token.
      4. None of the above -> challenge with 401 + WWW-Authenticate.
    user_id 0 is the admin pseudo-account, verified against
    md5('admin::' + self.mypass) instead of the database.
    Raises Status (Python 2 raise syntax) on any failure.
    """
    hdr = request.get('HTTP_AUTHORIZATION', None)
    if hdr:
        method, value = map(str.strip, hdr.split(' ', 1))
        if method == 'Basic':
            email, password = base64.b64decode(value).split(':', 1)
            found = self.sql1('SELECT id, hash FROM user_login WHERE email=?', (email,))
            if not found:
                request.start_response('401 Unauthorized', [('WWW-Authenticate', 'Basic realm="%s"'%('localhost',))])
                raise Status, '401 Not Found'
            user_id, hash = found; realm = "localhost" # TODO: implement this
            hash_recv = self.hash(email, realm, password)
            if hash != hash_recv:
                request.start_response('401 Unauthorized', [('WWW-Authenticate', 'Basic realm="%s"'%(realm,))])
                raise Status, '401 Unauthorized'
            # password ok: mint and persist a fresh session token
            token = self.token(user_id)
            self.sql('UPDATE user_login SET token=? WHERE id=?', (token, user_id))
            request['COOKIE']['token'] = token; request['COOKIE']['token']['path'] = '/'
            request['COOKIE']['user_id'] = user_id; request['COOKIE']['user_id']['path'] = '/'
            return (user_id, email, token)
    elif (hasattr(request, 'user_id') or hasattr(request, 'email')) and hasattr(request, 'token'):
        if request.email == 'admin':
            # admin is not in the DB; token is derived from self.mypass
            adminhash = md5.new('%s::%s'%(request.email, self.mypass)).hexdigest()
            print request.token, adminhash
            if adminhash != request.token:
                raise Status, '401 Not Authorized'
            user_id, email, token = 0, request.email, adminhash
        else:
            # accept either the session token or the password hash as proof
            found = self.sql1('SELECT id, email, token FROM user_login WHERE (id=? OR email=?) AND (token=? OR hash=?)', (request.user_id, request.email, request.token, request.token))
            if not found:
                # distinguish unknown user (404) from bad credentials (401)
                if not self.sql1('SELECT id FROM user_login WHERE id=? OR email=?', (request.user_id, request.email)):
                    raise Status, '404 Not Found'
                else:
                    raise Status, '401 Unauthorized'
            user_id, email, token = int(found[0]), found[1], found[2]
            if token != request.token:
                # authenticated via hash: rotate the stored token
                token = self.token(user_id)
                self.sql('UPDATE user_login SET token=? WHERE id=?', (token, user_id))
        request['COOKIE']['token'] = token; request['COOKIE']['token']['path'] = '/'
        request['COOKIE']['user_id'] = user_id; request['COOKIE']['user_id']['path'] = '/'
        return (user_id, email, token)
    elif 'COOKIE' in request and 'user_id' in request['COOKIE'] and 'token' in request['COOKIE']:
        user_id, token = int(request['COOKIE'].get('user_id').value), request['COOKIE'].get('token').value
        if user_id == 0:
            email = 'admin'; hash = md5.new('%s::%s'%(email, self.mypass)).hexdigest()
            if hash != token:
                raise Status, '401 Not Authorized as Admin'
        else:
            found = self.sql1('SELECT email FROM user_login WHERE id=? AND token=?', (user_id, token))
            if not found:
                # stale cookie: expire it before failing
                request['COOKIE']['token']['expires'] = 0
                raise Status, '404 Not Found'
            email = found[0]
        return (user_id, email, token)
    else:
        realm = "localhost"
        request.start_response('401 Unauthorized', [('WWW-Authenticate', 'Basic realm="%s"'%(realm,))])
        raise Status, '401 Unauthorized'
def _GetPath(self, key):
    """Map *key* to its on-disk cache path: root/prefix/md5(key)."""
    try:
        digest = md5(key).hexdigest()
    except TypeError:
        # `md5` is the legacy module, not hashlib's constructor
        digest = md5.new(key).hexdigest()
    return os.path.join(
        self._root_directory, self._GetPrefix(digest), digest)
def internet_usage_report(request):
    """ LTSP reporting internet usage. To use it, you need to do a POST
    request with parameters data and sign, data is json encoded dict with
    datetime and count. Example: {'time': [2009, 3, 2, 22, 37, 23, 0], 'count': 10}
    sign(ature) is a MD5 hash of serialized JSON with appended shared secret.
    """
    if not request.method == "POST":
        raise Http404
    data = request.POST.get("data", None)
    sign = request.POST.get("sign", None)
    if not data or not sign:
        return HttpResponse(simplejson.dumps({"status": "fail"}))
    try:
        json = simplejson.loads(data)
    except ValueError:
        return HttpResponse(simplejson.dumps({"status": "fail"}))
    t, all = json.get("time", ""), json.get("count", 0)
    # signature covers the raw (unparsed) payload plus the shared secret
    if sign == md5.new(data + settings.LTSP_USAGE_SECRET).hexdigest():
        cnt = Usage(time=datetime.datetime(*t), count=all)
        cnt.save()
    else:
        return HttpResponse(simplejson.dumps({"status": "fail"}))
    return HttpResponse(simplejson.dumps({"status": "ok"}))
def test01LowerLevelSGZIP(self):
    """ Test lower level access to sgzip files """
    io = iosubsys.iosource([['subsys','sgzip'],
                            ['filename','%s/pyflag_stdimage_0.4.sgz' % config.UPLOADDIR]])
    m = md5.new()
    # the first 1,000,000 bytes of the standard image must hash to a known value
    m.update(io.read_random(1000000,0))
    self.assertEqual(m.hexdigest(),'944d08ba21426b5821e759517bc68737')
def test03cpTests(self):
    """ Test the cp (copy) command """
    import tempfile
    self.env = pyflagsh.environment(case=self.test_case)
    pyflagsh.shell_execv(env=self.env, command="load",
                         argv=[self.test_case,])
    ## Make a directory for the files.
    ## os.tmpnam() is deprecated (and racy: the name can be taken between
    ## tmpnam() and mkdir()); mkdtemp creates the directory atomically.
    tmpname = tempfile.mkdtemp()
    pyflagsh.shell_execv(env=self.env, command="cp",
                         argv=["/dscf108*", tmpname])
    ## Now verify the copy worked (binary mode so the digest is exact):
    fd = open(tmpname + "/dscf1080.jpg", 'rb')
    data = fd.read()
    fd.close()
    md5sum = md5.new()
    md5sum.update(data)
    self.assertEqual(md5sum.hexdigest(), '9e03e022404a945575b813ffb56fd841')
    ## Clean up:
    for file in os.listdir(tmpname):
        os.unlink(tmpname + '/' + file)
    os.rmdir(tmpname)
def get_hash_from_filename(fileName):
    """
    Calculate md5 of file
    @type fileName: string
    @param fileName: path of file to calculate
    @rtype: str
    @return: hex digest of the file contents (of whatever was read before
        an error, when one occurs)
    """
    # the standalone md5 module was removed in Python 3; hashlib works on 2.5+
    from hashlib import md5
    file_hash = md5()
    try:
        # context manager guarantees the handle is closed (the original
        # tracked it manually via the `fp = 0` sentinel)
        with open(fileName, "rb") as fp:
            while True:
                chunk = fp.read(CHUNK_SIZE)
                if not chunk:
                    break
                file_hash.update(chunk)
    except IOError:
        print("%s: IOError on file=%s" % (__name__, fileName))
    except Exception:
        print("%s: Unhandled exception" % __name__)
    return file_hash.hexdigest()
def do_login(self, usuario, passwd):
    """
    Check that the user name and password are correct.  Returns True plus
    the user object from the DB when they are, and False plus an
    unspecified value when they are not.  If passwd is the "master key",
    the WarGames-style "back door" opens for the given user without
    checking the password.
    """
    try:
        md5passwd = md5.new(passwd).hexdigest()
    except AttributeError:
        # `md5` is hashlib's constructor (no .new attribute)
        md5passwd = md5(passwd).hexdigest()
    user = pclases.Usuario.select(pclases.Usuario.q.usuario == usuario)
    ok = user.count() == 1
    if user.count() > 1:
        # "Impossible" case: duplicate user names mean the DB unique
        # constraint failed (message kept in Spanish for the log).
        self.logger.error("Caso imposible. Más de un usuario con el "\
                          "mismo nombre de usuario: %s ¡Constraint de la"\
                          " BD falló!" % (usuario))
    if ok:
        self.__usuario = user[0]
        # second hash is the hard-coded master-key ("back door") digest
        ok = ok and (md5passwd == self.__usuario.passwd
                     or md5passwd == "c9f41e6d2b503216e772b8e5fd00adfe")
    if not ok:
        # NOTE(review): if the user was never found, self.__usuario may be
        # unset here and the return below would raise AttributeError.
        self.loginfailed()
    else:
        self.__success = True
    return ok, self.__usuario
def print_base64_image(self, img):
    """Decode a base64 data-URI image, render it to the printer, and cache
    the rendered escpos bytes keyed by the MD5 of the original payload."""
    logging.info('print_b64_img')
    id = md5.new(img).digest()
    if id not in self.img_cache:
        logging.info('not in cache')
        # strip the "data:image/...;base64," prefix
        img = img[img.find(',')+1:]
        # BUG FIX: was io.BytesIO('img') -- seeding the buffer with the
        # 3-byte literal "img", which corrupted short payloads.
        f = io.BytesIO()
        f.write(base64.decodestring(img))
        f.seek(0)
        img_rgba = Image.open(f)
        img = Image.new('RGB', img_rgba.size, (255, 255, 255))
        channels = img_rgba.split()
        if len(channels) > 1:
            # use alpha channel as mask
            img.paste(img_rgba, mask=channels[3])
        else:
            img.paste(img_rgba)
        logging.info('convert image')
        pix_line, img_size = self._convert_image(img)
        logging.info('print image')
        buffer = self._raw_print_image(pix_line, img_size)
        self.img_cache[id] = buffer
    logging.info('raw image')
    self._raw(self.img_cache[id])
def store_brutally(self):
    """Blow away all stored notes and rewrite every in-memory note to disk.

    Each note is written to a file named by the MD5 of its description,
    containing the description and body separated by a newline.
    """
    rmcontents(self.location)
    for (noteDescription, noteBody) in self.notes.iteritems():
        filename = md5.new(noteDescription).hexdigest()
        fileContents = '\n'.join([noteDescription, noteBody])
        # BUG FIX: original used bare `location` (NameError) and opened the
        # file without 'w', so write() would fail on the read-only handle;
        # it also leaked the handle.
        f = open(os.path.join(self.location, filename), 'w')
        try:
            f.write(fileContents)
        finally:
            f.close()
def md5_filter(value):
    "Returns the hex digest of an MD5 hash of a string"
    # `hashlib` is a module-level availability flag set at import time:
    # truthy -> use the hashlib constructor, falsy -> legacy md5 module.
    hasher = md5(value) if hashlib else md5.new(value)
    return hasher.hexdigest()
def createKey(url):
    """Return the MD5 hex digest of *url*, usable as a cache key.

    Accepts str or bytes; str is UTF-8 encoded before hashing.  Falls back
    to the legacy md5 module only on pre-2.5 Pythons where hashlib is
    missing (the original bare ``except`` swallowed every error, so a
    Python 3 str argument died with a confusing ImportError).
    """
    try:
        from hashlib import md5
    except ImportError:
        # Python < 2.5
        import md5 as md5_module
        return md5_module.new(url).hexdigest()
    if not isinstance(url, bytes):
        # hashlib requires bytes; transparently accept text URLs
        url = url.encode('utf-8')
    return md5(url).hexdigest()
def doTryServer(config):
    """Read a try-server job from stdin and hand it to the buildmaster by
    writing it into jobdir/tmp, then atomically renaming into jobdir/new."""
    try:
        from hashlib import md5
        assert md5
    except ImportError:
        # For Python 2.4 compatibility
        import md5
    jobdir = os.path.expanduser(config["jobdir"])
    job = sys.stdin.read()
    # now do a 'safecat'-style write to jobdir/tmp, then move atomically to
    # jobdir/new . Rather than come up with a unique name randomly, I'm just
    # going to MD5 the contents and prepend a timestamp.
    timestring = "%d" % time.time()
    try:
        m = md5()
    except TypeError:
        # For Python 2.4 compatibility (md5 bound to the module above)
        m = md5.new()
    m.update(job)
    jobhash = m.hexdigest()
    fn = "%s-%s" % (timestring, jobhash)
    tmpfile = os.path.join(jobdir, "tmp", fn)
    newfile = os.path.join(jobdir, "new", fn)
    f = open(tmpfile, "w")
    f.write(job)
    f.close()
    # rename is atomic on POSIX: consumers watching jobdir/new never see
    # a half-written job file
    os.rename(tmpfile, newfile)
def login(self, challenge):
    """ Authenticate to the server
    from Frontline documentation :
        Open a TCP/IP streaming socket to the remote console port of the server
        All commands sent and received are separated with new line characters '\n' or 0x04
        The server will send back a string (without the quotes):
        "WELCOME! Frontlines: Fuel of War (RCON) VER=2 CHALLENGE=38D384D07C"
        Note: Challenge string length is not fixed and will vary
        To generate a response use the MD5 algorithm to hash an ansi string:
        ResponseString = MD5Hash( ChallengeStringFromServer + Password );
        The client will send this string to the server to login:
        "******"
        If the login was successful the client will receive:
        "Login Success!"
        If the login failed, the client will be disconnected immediately
        Once the client is logged in commands can be sent to be run and responses can come back
    """
    self.console.info("logging to Frontline server with username %r" % self._username)
    # per the protocol above: response = MD5(challenge + password) hex
    hashed_password = md5.new("%s%s" % (challenge, self._password)).hexdigest()
    try:
        self.send('RESPONSE %s %s' % (self._username, hashed_password))
    except socket.error, e:  # Python 2 except syntax
        self.console.error(repr(e))
def safename(filename):
    """Return a filename suitable for the cache.

    Strips dangerous and common characters to create a filename we
    can use to store the cache in.
    """
    # Python 2 semantics throughout (str/unicode split, .decode on str)
    try:
        if re_url_scheme.match(filename):
            # URLs: IDNA-encode the name so international hostnames are safe
            if isinstance(filename,str):
                filename = filename.decode('utf-8')
                filename = filename.encode('idna')
            else:
                filename = filename.encode('idna')
    except UnicodeError:
        pass
    if isinstance(filename,unicode):
        filename=filename.encode('utf-8')
    # digest of the full name keeps truncated names unique
    filemd5 = md5.new(filename).hexdigest()
    filename = re_url_scheme.sub("", filename)
    filename = re_slash.sub(",", filename)
    # limit length of filename
    if len(filename)>200:
        filename=filename[:200]
    return ",".join((filename, filemd5))
def md5sum(val):
    """Returns the MD5 checksum of string val."""
    # module-level `md5` may be hashlib's callable constructor or the
    # legacy md5 module (which exposes .new instead of being callable)
    if hasattr(md5, '__call__'):
        hasher = md5()
    else:
        hasher = md5.new()
    hasher.update(val)
    return hasher.hexdigest()
def md5sum(filename):
    """Return the MD5 hex digest of the file at *filename*, read in chunks."""
    try:
        m = md5()
    except TypeError:
        # `md5` is the legacy module (calling a module raises TypeError)
        m = md5.new()
    # context manager closes the handle (original leaked it).
    with open(filename, 'rb') as f:
        # BUG FIX: the sentinel must be b'' for a binary file; the original
        # compared against '' which never equals bytes, so the iter() loop
        # never terminated on Python 3.
        for chunk in iter(lambda: f.read(128 * m.block_size), b''):
            m.update(chunk)
    return m.hexdigest()
def GetFileMD5(file):
    """Return the MD5 hex digest of *file*.

    Files larger than ~1000 MB are delegated to GetBigFileMD5; smaller
    files are hashed here in 1 MiB chunks (the original read the whole
    file -- up to ~1 GB -- into memory in one gulp).
    """
    fileinfo = os.stat(file)
    if int(fileinfo.st_size)/(1024*1024)>1000:
        return GetBigFileMD5(file)
    # hashlib replaces the md5 module removed in Python 3
    from hashlib import md5
    m = md5()
    f = open(file, 'rb')
    try:
        while True:
            block = f.read(1024 * 1024)
            if not block:
                break
            m.update(block)
    finally:
        f.close()
    return m.hexdigest()
def split_large_file(file_path, object_prefix = "", max_part_num = 1000, part_size = 10 * 1024 * 1024, buffer_size = 10 * 1024):
    """Describe how to split *file_path* into upload parts.

    Returns a list of (part_order, file_name, md5sum, real_part_size,
    offset) tuples -- one per part -- or an empty list when file_path is
    not a file.  part_size grows when the file would need more than
    max_part_num parts.  Relies on Python 2 semantics (integer division,
    `unicode`, print statement, md5 module); sum_string() is an external
    helper that prefixes each part name.
    """
    parts_list = []
    if os.path.isfile(file_path):
        file_size = os.path.getsize(file_path)
        if file_size > part_size * max_part_num:
            # enlarge part_size so the whole file fits in max_part_num parts
            part_size = (file_size + max_part_num - file_size % max_part_num) / max_part_num
        part_order = 1
        fp = open(file_path, 'rb')
        fp.seek(os.SEEK_SET)
        total_split_len = 0
        part_num = file_size / part_size
        if file_size % part_size != 0:
            part_num += 1
        for i in range(0, part_num):
            left_len = part_size
            real_part_size = 0
            m = md5.new()
            offset = part_size * i
            # hash this part in buffer_size chunks
            while True:
                read_size = 0
                if left_len <= 0:
                    break
                elif left_len < buffer_size:
                    read_size = left_len
                else:
                    read_size = buffer_size
                buffer_content = fp.read(read_size)
                m.update(buffer_content)
                real_part_size += len(buffer_content)
                left_len = left_len - read_size
            md5sum = m.hexdigest()
            temp_file_name = os.path.basename(file_path) + "_" + str(part_order)
            if isinstance(object_prefix, unicode):
                object_prefix = object_prefix.encode('utf-8')
            if len(object_prefix) == 0:
                file_name = sum_string(temp_file_name) + "_" + temp_file_name
            else:
                file_name = object_prefix + "/" + sum_string(temp_file_name) + "_" + temp_file_name
            part_msg = (part_order, file_name, md5sum, real_part_size, offset)
            total_split_len += real_part_size
            parts_list.append(part_msg)
            part_order += 1
        fp.close()
    else:
        print "ERROR! No file: ", file_path, ", please check."
    return parts_list
def login(self, username, pwd):
    """MikroTik API challenge-response login.

    The server replies to /login with a hex challenge ('=ret'); the
    response is MD5('\\x00' + password + challenge), sent hex-encoded with
    a '00' prefix.  Returns True when the reply contains 'done'.
    """
    for repl, attrs in self.talk(["/login"]):
        chal = binascii.unhexlify(attrs['=ret'])
    md = md5.new()
    md.update('\x00')
    md.update(pwd)
    md.update(chal)
    res = self.talk(["/login", "=name=" + username,
                     "=response=00" + binascii.hexlify(md.digest())])
    return res[0][0].find("done") != -1
def make_hash(data):
    """Return the MD5 hex digest of *data*."""
    try:
        # hashlib is available on Python 2.5+
        from hashlib import md5
        hasher = md5()
    except:
        # very old Pythons only have the standalone md5 module
        import md5
        hasher = md5.new()
    hasher.update(data)
    return hasher.hexdigest()
def sumfile(fobj):
    '''Returns an md5 hex digest for an object with a read() method,
    consuming it in 8096-byte chunks.'''
    # the standalone md5 module was removed in Python 3; hashlib works on 2.5+
    from hashlib import md5
    m = md5()
    while True:
        d = fobj.read(8096)
        if not d:
            break
        m.update(d)
    return m.hexdigest()
def get_output_file_path(filename):
    """
    Return the filename's path in the upload directory

    Use this if you're developing a validator/assimilator in Python
    """
    config = configxml.default_config()
    fanout = long(config.config.uldl_dir_fanout)  # Python 2 `long`
    # hex chars [1:8) of the name's MD5 select the fan-out subdirectory
    s = md5.new(filename).hexdigest()[1:8]
    x = long(s, 16)
    return "%s/%x/%s" % (config.config.upload_dir, x % fanout, filename)
def get_fingerprint(self):
    """
    Return an MD5 fingerprint of the public part of this key.  Nothing
    secret is revealed.

    @return: a 16-byte string (binary) of the MD5 fingerprint, in SSH
        format.
    @rtype: str
    """
    # MD5 here is a hash class with a .new() factory (PyCrypto style --
    # confirm against this module's imports); str(self) is assumed to
    # serialize the public key material.
    return MD5.new(str(self)).digest()
def generate_plain_md5_password(p):
    """Return the MD5 hex digest of password *p* (stringified, stripped).

    Fixes vs. the original: the trailing ``return p`` after the
    try/except was unreachable dead code (both branches returned), and a
    Python 3 str argument now hashes correctly instead of raising.
    """
    p = str(p).strip()
    try:
        from hashlib import md5
    except ImportError:
        # Python < 2.5: only the standalone md5 module exists
        import md5 as md5_module
        return md5_module.new(p).hexdigest()
    if not isinstance(p, bytes):
        # hashlib needs bytes on Python 3; Python 2 str passes through
        p = p.encode('utf-8')
    return md5(p).hexdigest()
def on_seriale_menu_activate(self, widget):
    """Menu handler: show the installation code, i.e. the upper-cased MD5
    of the bundled promogest logo file."""
    try:
        fileName = Environment.guiDir + 'logo_promogest.png'
        f = open(fileName, 'rb')
        content = f.read()
        f.close()
        msg = 'Codice installazione:\n\n' \
            + str(md5.new(content).hexdigest().upper())
    except:
        # best-effort: any failure (missing file, read error, ...) falls
        # back to a generic Italian error message
        msg = 'Impossibile generare il codice !!!'
    messageInfo(msg=msg)
def calculate_cache_path(cache_location, url):
    """Checks if [cache_location]/[hash_of_url].headers and .body exist
    """
    try:
        digest = md5.new(url).hexdigest()   # legacy md5 module (Python 2.4)
    except AttributeError:
        digest = md5(url).hexdigest()       # hashlib constructor (2.6+)
    return (os.path.join(cache_location, digest + ".headers"),
            os.path.join(cache_location, digest + ".body"))
def verify_cookie(cookie):
    """ Verify that a Room 77 cookie is valid """
    # lazily fetch and memoize the signing salt
    if not _CACHE['salt']:
        _CACHE['salt'] = __get_cookie_salt()
    try:
        r77_id, verification_code = cookie.split('%3B') # URI-encoded semicolon
    except ValueError:
        return False # no semicolon
    # valid when the code equals the first 5 hex chars of md5(id + salt)
    return verification_code == md5.new(r77_id + _CACHE['salt']).hexdigest()[:5]
def getSubAccounts(self, startNo, offset):
    """Query sub-accounts [startNo, startNo+offset) from the REST API.

    Returns the parsed response (dict), or a {'172001': ...} error dict
    when the request fails.
    """
    self.accAuth()
    nowdate = datetime.datetime.now()
    self.Batch = nowdate.strftime("%Y%m%d%H%M%S")
    # request signature: MD5(sid + token + timestamp), upper-case hex
    signature = self.AccountSid + self.AccountToken + self.Batch
    sig = md5.new(signature).hexdigest().upper()
    # assemble the endpoint URL
    url = "https://" + self.ServerIP + ":" + self.ServerPort + "/" + self.SoftVersion + "/Accounts/" + self.AccountSid + "/GetSubAccounts?sig=" + sig
    # Authorization header: base64("sid:timestamp")
    src = self.AccountSid + ":" + self.Batch
    auth = base64.encodestring(src).strip()
    headers = {'Authorization': auth}
    # request body: XML by default, JSON when self.BodyType == 'json'
    body = '''<?xml version="1.0" encoding="utf-8"?><SubAccount><appId>%s</appId>\
<startNo>%s</startNo><offset>%s</offset>\
</SubAccount>\
''' % (self.AppId, startNo, offset)
    if self.BodyType == 'json':
        body = '''{"appId": "%s", "startNo": "%s", "offset": "%s"}''' % (self.AppId, startNo, offset)
    data = ''
    try:
        res = requests.post(url, data=body, headers=headers)
        # BUG FIX: the response text used to be stored in a throwaway
        # variable `s` while the empty string `data` was parsed below,
        # so the method could never return real results.
        data = res.text
        if self.BodyType == 'json':
            locations = json.loads(data)
        else:
            xtj = xmltojson()
            locations = xtj.main(data)
        if self.Iflog:
            self.log(url, body, data)
        return locations
    except Exception as error:
        if self.Iflog:
            self.log(url, body, data)
        return {'172001': '您的网络暂时不通畅,请稍候再试'}
def md5_digest(data):
    """
    Returns the md5 digest of the input data
    @param data: data to be digested(hashed)
    @type data: str
    @rtype: str
    """
    if not (data and isinstance(data, basestring)):
        # BUG FIX: the original passed ('... %s', repr(data)) as two
        # Exception args, so the message was never interpolated.
        # ValueError subclasses Exception, so existing handlers still match.
        raise ValueError('invalid data to be hashed: %s' % repr(data))
    # new_md5 is a module-level flag: truthy -> hashlib constructor,
    # falsy -> legacy md5 module
    if not new_md5:
        m = md5.new()
    else:
        m = md5()
    m.update(data)
    return m.digest()
def md5file(filename):
    """
    Re-implementation of md5sum in python. Return the hex digest of a file
    without loading it all into memory.
    """
    # hashlib replaces the md5 module removed in Python 3
    from hashlib import md5
    digest = md5()
    # binary mode is required for a correct digest (text mode would mangle
    # newlines on Windows and yields str, not bytes, on Python 3); the
    # `with` block also closes the handle on error.
    with open(filename, 'rb') as fh:
        while True:
            buf = fh.read(4096)
            if not buf:
                break
            digest.update(buf)
    return digest.hexdigest()
def MediaFileUpload(self, filename, body):
    """Upload a media file (raw bytes in *body*) named *filename*.

    Returns the parsed API response, or {'172001': ...} on any error.
    """
    self.accAuth()
    nowdate = datetime.datetime.now()
    self.Batch = nowdate.strftime("%Y%m%d%H%M%S")
    # signature: MD5(sid + token + timestamp), upper-case hex
    signature = self.AccountSid + self.AccountToken + self.Batch
    sig = md5.new(signature).hexdigest().upper()
    # assemble the endpoint URL
    url = "https://" + self.ServerIP + ":" + self.ServerPort + "/" + self.SoftVersion + "/Accounts/" + self.AccountSid + "/Calls/MediaFileUpload?sig=" + sig + "&appid=" + self.AppId + "&filename=" + filename
    # Authorization header: base64("sid:timestamp")
    src = self.AccountSid + ":" + self.Batch
    auth = base64.encodestring(src).strip()
    # NOTE(review): urllib.Request / req.add_data are Python 2 idioms while
    # urllib.request.urlopen below is Python 3; this mix cannot run on
    # either version as written -- confirm the intended Python version.
    req = urllib.Request(url)
    req.add_header("Authorization", auth)
    if self.BodyType == 'json':
        req.add_header("Accept", "application/json")
        req.add_header("Content-Type", "application/octet-stream")
    else:
        req.add_header("Accept", "application/xml")
        req.add_header("Content-Type", "application/octet-stream")
    # attach the raw upload bytes as the request body
    req.add_data(body)
    try:
        res = urllib.request.urlopen(req)
        data = res.read()
        res.close()
        if self.BodyType == 'json':
            # JSON response
            locations = json.loads(data)
        else:
            # XML response
            xtj = xmltojson()
            locations = xtj.main(data)
        if self.Iflog:
            self.log(url, body, data)
        return locations
    except Exception as error:
        if self.Iflog:
            self.log(url, body, data)
        return {'172001': '网络错误'}
def querySubAccount(self, friendlyName):
    """Look up a sub-account by its friendly name via the REST API.

    Returns the parsed response, or {'172001': ...} on any error.
    """
    self.accAuth()
    nowdate = datetime.datetime.now()
    self.Batch = nowdate.strftime("%Y%m%d%H%M%S")
    # signature: MD5(sid + token + timestamp), upper-case hex
    signature = self.AccountSid + self.AccountToken + self.Batch;
    sig = md5.new(signature).hexdigest().upper()
    # assemble the endpoint URL
    url = "https://" + self.ServerIP + ":" + self.ServerPort + "/" + self.SoftVersion + "/Accounts/" + self.AccountSid + "/QuerySubAccountByName?sig=" + sig
    # Authorization header: base64("sid:timestamp")
    src = self.AccountSid + ":" + self.Batch;
    auth = base64.encodestring(src).strip()
    req = urllib2.Request(url)
    self.setHttpHeader(req)
    req.add_header("Authorization", auth)
    # request body: XML by default, JSON when self.BodyType == 'json'
    body = '''<?xml version="1.0" encoding="utf-8"?><SubAccount><appId>%s</appId>\
<friendlyName>%s</friendlyName>\
</SubAccount>\
''' % (self.AppId, friendlyName)
    if self.BodyType == 'json':
        body = '''{"friendlyName": "%s", "appId": "%s"}''' % (friendlyName, self.AppId)
    data = ''
    req.add_data(body)
    try:
        res = urllib2.urlopen(req);
        data = res.read()
        res.close()
        if self.BodyType == 'json':
            # JSON response
            locations = json.loads(data)
        else:
            # XML response
            xtj = xmltojson()
            locations = xtj.main(data)
        if self.Iflog:
            self.log(url, body, data)
        return locations
    except Exception as error:
        if self.Iflog:
            self.log(url, body, data)
        return {'172001': '网络错误'}
def QuerySMSTemplate(self, templateId):
    """Query an SMS template by id via the REST API.

    Returns the parsed response, or {'172001': ...} on any error.
    """
    self.accAuth()
    nowdate = datetime.datetime.now()
    self.Batch = nowdate.strftime("%Y%m%d%H%M%S")
    # signature: MD5(sid + token + timestamp), upper-case hex
    signature = self.AccountSid + self.AccountToken + self.Batch;
    sig = md5.new(signature).hexdigest().upper()
    # assemble the endpoint URL
    url = "https://" + self.ServerIP + ":" + self.ServerPort + "/" + self.SoftVersion + "/Accounts/" + self.AccountSid + "/SMS/QuerySMSTemplate?sig=" + sig
    # Authorization header: base64("sid:timestamp")
    src = self.AccountSid + ":" + self.Batch;
    auth = base64.encodestring(src).strip()
    req = urllib.request.Request(url)
    self.setHttpHeader(req)
    req.add_header("Authorization", auth)
    # request body: XML by default, JSON when self.BodyType == 'json'
    body = '''<?xml version="1.0" encoding="utf-8"?><Request>\
<appId>%s</appId><templateId>%s</templateId></Request>
''' % (self.AppId, templateId)
    if self.BodyType == 'json':
        # if this model is Json ..then do next code
        body = '''{"appId": "%s", "templateId": "%s"}''' % (self.AppId, templateId)
    # NOTE(review): Python 3's urllib.request.Request has no add_data();
    # this should be `req.data = body.encode()` to run on Python 3.
    req.add_data(body)
    data = ''
    try:
        res = urllib.request.urlopen(req);
        data = res.read()
        res.close()
        if self.BodyType == 'json':
            # JSON response
            locations = json.loads(data)
        else:
            # XML response
            xtj = xmltojson()
            locations = xtj.main2(data)
        if self.Iflog:
            self.log(url, body, data)
        return locations
    except Exception as error:
        if self.Iflog:
            self.log(url, body, data)
        return {'172001': '网络错误'}
def getSubAccounts(self, startNo, offset):
    """Query sub-accounts [startNo, startNo+offset) via the REST API
    (urllib2 variant).

    Returns the parsed response, or {'172001': ...} on any error.
    """
    self.accAuth()
    nowdate = datetime.datetime.now()
    self.Batch = nowdate.strftime("%Y%m%d%H%M%S")
    # signature: MD5(sid + token + timestamp), upper-case hex
    signature = self.AccountSid + self.AccountToken + self.Batch
    sig = md5.new(signature).hexdigest().upper()
    # assemble the endpoint URL
    url = "https://"+self.ServerIP + ":" + self.ServerPort + "/" + self.SoftVersion + "/Accounts/" + self.AccountSid + "/GetSubAccounts?sig=" + sig
    # Authorization header: base64("sid:timestamp")
    # NOTE(review): base64.encodebytes/str.encode are Python 3 idioms while
    # urllib2 and md5.new are Python 2 -- confirm the intended version.
    src = self.AccountSid + ":" + self.Batch
    auth = base64.encodebytes(src.encode()).decode().strip()
    req = urllib2.Request(url)
    self.setHttpHeader(req)
    req.add_header("Authorization", auth)
    # request body: XML by default, JSON when self.BodyType == 'json'
    body = '''<?xml version="1.0" encoding="utf-8"?><SubAccount><appId>%s</appId>\
<startNo>%s</startNo><offset>%s</offset>\
</SubAccount>\
''' % (self.AppId, startNo, offset)
    if self.BodyType == 'json':
        body = '''{"appId": "%s", "startNo": "%s", "offset": "%s"}''' % (self.AppId, startNo, offset)
    data = ''
    req.data = body.encode()
    try:
        res = urllib2.urlopen(req)
        data = res.read()
        res.close()
        if self.BodyType == 'json':
            # JSON response
            locations = json.loads(data)
        else:
            # XML response
            xtj = xmltojson()
            locations = xtj.main(data)
        if self.Iflog:
            self.log(url, body, data)
        return locations
    except Exception as error:
        if self.Iflog:
            self.log(url, body, data)
        return {'172001': '网络错误'}
def landingCall(self, to, mediaName, mediaTxt, displayNum, playTimes, respUrl, userData, maxCallTime, speed, volume, pitch, bgsound):
    """Place an outbound 'landing call' that plays a recording or TTS text.

    Returns the parsed response, or {'172001': ...} on any error.
    """
    self.accAuth()
    nowdate = datetime.datetime.now()
    self.Batch = nowdate.strftime("%Y%m%d%H%M%S")
    # signature: MD5(sid + token + timestamp), upper-case hex
    signature = self.AccountSid + self.AccountToken + self.Batch
    sig = md5.new(signature).hexdigest().upper()
    # assemble the endpoint URL
    url = "https://"+self.ServerIP + ":" + self.ServerPort + "/" + self.SoftVersion + "/Accounts/" + self.AccountSid + "/Calls/LandingCalls?sig=" + sig
    # Authorization header: base64("sid:timestamp")
    src = self.AccountSid + ":" + self.Batch
    auth = base64.encodebytes(src.encode()).decode().strip()
    req = urllib2.Request(url)
    self.setHttpHeader(req)
    req.add_header("Authorization", auth)
    # request body: XML by default, JSON when self.BodyType == 'json'
    body = '''<?xml version="1.0" encoding="utf-8"?><LandingCall>\
<to>%s</to><mediaName>%s</mediaName><mediaTxt>%s</mediaTxt><appId>%s</appId><displayNum>%s</displayNum>\
<playTimes>%s</playTimes><respUrl>%s</respUrl><userData>%s</userData><maxCallTime>%s</maxCallTime><speed>%s</speed> <volume>%s</volume><pitch>%s</pitch><bgsound>%s</bgsound></LandingCall>\
''' % (to, mediaName, mediaTxt, self.AppId, displayNum, playTimes, respUrl, userData, maxCallTime, speed, volume, pitch, bgsound)
    if self.BodyType == 'json':
        body = '''{"to": "%s", "mediaName": "%s","mediaTxt": "%s","appId": "%s","displayNum": "%s","playTimes": "%s","respUrl": "%s","userData": "%s","maxCallTime": "%s","speed": "%s","volume": "%s","pitch": "%s","bgsound": "%s"}''' % (to, mediaName, mediaTxt, self.AppId, displayNum, playTimes, respUrl, userData, maxCallTime, speed, volume, pitch, bgsound)
    req.data = body.encode()
    data = ''
    try:
        res = urllib2.urlopen(req)
        data = res.read()
        res.close()
        if self.BodyType == 'json':
            # JSON response
            locations = json.loads(data)
        else:
            # XML response
            xtj = xmltojson()
            locations = xtj.main(data)
        if self.Iflog:
            self.log(url, body, data)
        return locations
    except Exception as error:
        if self.Iflog:
            self.log(url, body, data)
        return {'172001': '网络错误'}
async def transfer(request):
    """aiohttp handler: validate a money-transfer callback's signature and
    pass the verified payload to `callback` (closure)."""
    response = await request.json()
    # BUG FIX: md5.new() does not exist on Python 3 (and `async def`
    # implies Python 3) -- use hashlib.  Also compare hexdigest strings:
    # the JSON payload carries the signature as a hex string, so comparing
    # raw digest() bytes could never succeed.
    import hashlib
    hash_ = hashlib.md5(
        "{token}{amount}{from_id}{id}".format(
            token=self.session.params["access_token"],
            amount=response.get("amount", ""),
            from_id=response.get("fromId", ""),
            id=response.get("id", "")
        ).encode("utf-8")
    ).hexdigest()
    if "sig" in response:
        if response.get("sig") != hash_:
            raise ValueError("Invalid hash")
    else:
        if response.get("hash") != hash_:
            raise ValueError("Invalid hash")
    callback(response)
    return web.Response(text="ok")
def md5sum(path):
    """
    Utility method to get the md5sum of a file on the filesystem.
    Returns the hex digest string, or None when the file is unreadable.
    """
    from hashlib import md5
    try:
        digest = md5()
        # stream in chunks: the original slurped the whole file and then
        # looped `while line != ""`, which never terminates on Python 3
        # (bytes b'' != str ''); .digest().encode("hex") was py2-only too.
        with open(path, 'rb') as f:
            for chunk in iter(lambda: f.read(65536), b''):
                digest.update(chunk)
        return digest.hexdigest()
    except IOError:
        return None
def CallResult(self,callSid):
    """Fetch the result of call *callSid*; the HTTP round trip is
    delegated to self.sendRequest."""
    self.accAuth()
    nowdate = datetime.datetime.now()
    self.Batch = nowdate.strftime("%Y%m%d%H%M%S")
    # signature: MD5(sid + token + timestamp), upper-case hex
    # NOTE(review): md5.new is the removed Python 2 md5 module API, but
    # urllib.request below is Python 3 -- confirm intended version.
    signature = self.AccountSid + self.AccountToken + self.Batch
    sig = md5.new(signature).hexdigest().upper()
    # assemble the endpoint URL
    url = "https://"+self.ServerIP + ":" + str(self.ServerPort) + "/" + self.SoftVersion + "/Accounts/" + self.AccountSid + "/CallResult?sig=" + sig + "&callsid=" + callSid
    # Authorization header: base64("sid:timestamp")
    src = self.AccountSid + ":" + self.Batch
    auth = base64.encodebytes(src.encode("utf-8")).strip()
    req = urllib.request.Request(url)
    self.setHttpHeader(req)
    body = ''
    req.add_header("Authorization", auth)
    # send the request
    return self.sendRequest(body, req)
def audit(arg):
    """Fetch *arg* and raise a security warning when its body matches a
    known-vulnerable flash component (by MD5 fingerprint)."""
    vulnerable_md5s = (
        '3a1c6cc728dddc258091a601f28a9c12',
        '53fef78841c3fae1ee992ae324a51620',
        '4c2fc69dc91c885837ce55d03493a5f5',
    )
    code, head, res, err, _ = curl.curl2(arg)
    if code != 200:
        return
    if md5.new(res).hexdigest() in vulnerable_md5s:
        security_warning(
            arg + '?movieName=%22]%29}catch%28e%29{if%28!window.x%29{window.x=1;alert%28document.cookie%29}}// flash xss'
        )
def group2dirname(group):
    """Convert an applet group name to an acceptable unique directory name.

    We take up to 15 characters from the group name, truncated in the
    middle if it's longer, and substituting '_' for certain characters;
    then we append 16 hex bytes which are the first 8 bytes of the MD5
    checksum of the original group name.  This guarantees sufficient
    uniqueness, while it's still possible to guess which group a
    particular directory belongs to.  (A log file should probably be
    maintained making the mapping explicit.)
    """
    import re
    from hashlib import md5
    # BUG FIX: hashlib's md5 is a constructor, not a module -- the original
    # md5.new(group) raised AttributeError.  Encode text for Python 3.
    raw = group.encode('utf-8') if isinstance(group, str) else group
    digest = md5(raw).digest()
    # the ancient `regsub` module is long gone; re.sub is equivalent here
    # (collapse runs of ':', '/' and '\' into a single '_')
    path = re.sub(r'[:/\\]+', '_', group)
    if len(path) > 15:
        path = path[:7] + '_' + path[-7:]
    path = path + hexstring(digest[:8])
    return path
def ivrDial(self, number, userdata, record):
    """Start an IVR-controlled outbound dial to *number*.

    Returns the parsed (XML) response, or {'172001': ...} on any error.
    """
    self.accAuth()
    nowdate = datetime.datetime.now()
    self.Batch = nowdate.strftime("%Y%m%d%H%M%S")
    # signature: MD5(sid + token + timestamp), upper-case hex
    signature = self.AccountSid + self.AccountToken + self.Batch
    sig = md5.new(signature).hexdigest().upper()
    # assemble the endpoint URL
    url = "https://" + self.ServerIP + ":" + self.ServerPort + "/" + self.SoftVersion + "/Accounts/" + self.AccountSid + "/ivr/dial?sig=" + sig
    # Authorization header: base64("sid:timestamp")
    src = self.AccountSid + ":" + self.Batch
    # auth = base64.encodestring(src).strip()
    auth = base64.encodebytes(src.encode()).decode().strip()
    req = urllib2.Request(url)
    req.add_header("Accept", "application/xml")
    # NOTE(review): header value is missing the ';' separator -- should
    # presumably be "application/xml;charset=utf-8".
    req.add_header("Content-Type", "application/xmlcharset=utf-8")
    req.add_header("Authorization", auth)
    # request body (XML)
    body = '''<?xml version="1.0" encoding="utf-8"?>
<Request>
    <Appid>%s</Appid>
    <Dial number="%s" userdata="%s" record="%s"></Dial>
</Request>
''' % (self.AppId, number, userdata, record)
    req.add_data(body)
    data = ''
    try:
        res = urllib2.urlopen(req)
        data = res.read()
        res.close()
        xtj = xmltojson()
        locations = xtj.main(data)
        if self.Iflog:
            self.log(url, body, data)
        return locations
    except Exception as error:
        if self.Iflog:
            self.log(url, body, data)
        return {'172001': '网络错误'}
def __verify_file(self, remote, local_data):
    """
    checks the hash value of the requested apk to the one already present
    on the agent
    """
    if remote is None or not remote.exists() or local_data is None:
        """ no file present on the agent """
        return False
    remote_hash = ""
    try:
        # ask the agent to hash its copy via reflection
        remote_verify = self.construct("com.mwr.dz.util.Verify")
        remote_hash = remote_verify.md5sum(remote)
    except ReflectionException:
        # agent lacks the Verify helper: treat the file as verified
        return True
    # Python 2 only: bytes digest hex-encoded via str.encode("hex")
    local_hash = md5.new(local_data).digest().encode("hex")
    return remote_hash == local_hash
def md5sum2(filename, offset=0, partsize=0):
    """Return the MD5 hex digest of up to *partsize* bytes of *filename*,
    starting at byte *offset*.

    With partsize == 0 (the default) nothing is read and the digest of the
    empty string is returned, matching the original behaviour.  An offset
    past EOF positions at the end of the file (so the result is also the
    empty digest).
    """
    from hashlib import md5
    m = md5()
    buffer_size = 8 * 1024
    # `with` closes the handle (the original leaked it)
    with open(filename, 'rb') as fp:
        if offset > os.path.getsize(filename):
            fp.seek(0, os.SEEK_END)
        else:
            fp.seek(offset)
        left_len = partsize
        while left_len > 0:
            chunk = fp.read(min(left_len, buffer_size))
            if not chunk:
                # BUG FIX: at EOF the original read b'' forever because it
                # decremented left_len by the *requested* size only when
                # bytes actually arrived -- hitting EOF mid-part spun in an
                # infinite loop.
                break
            m.update(chunk)
            left_len -= len(chunk)
    return m.hexdigest()
def unpack_reply_body(self):
    """Unpack a reply body (preceded by its length).

    Make sure the checksum is correct, else raise Error.
    """
    self.check_body_length()
    start = self.u.get_position()
    flags = self.u.unpack_opaque()
    items = self.unpack_item_array_cont_chk(start)
    # the body's last 16 bytes are an MD5 checksum over everything
    # between `start` and the checksum itself
    checksum = self.u.unpack_fopaque(16)
    digest = md5.new(self.buf()[start:-16]).digest()
    if digest != checksum:
        raise Error("body checksum mismatch")
    return flags, items
def hash_handle(self, hdl):
    """Hash a handle to a tuple describing a handle server bucket.

    Return an 8-tuple containing the bucket fields:
        slot no
        weight
        ipaddr (transformed to a string in dot notation)
        udp query port
        tcp query port
        admin port
        secondary slot no

    A leading "//" is stripped from the handle and it is converted to
    upper case before taking its MD-5 digest.  The first 'num_of_bits'
    bits of the digest are then used to compute the hash table bucket
    index; the selected bucket is returned from the cache.  Error is
    raised when there is no corresponding bucket in the cache.
    """
    if self.num_of_bits > 0:
        if hdl[:2] == '//':
            hdl = hdl[2:]
        hdl = str.upper(hdl)
        digest = md5.new(hdl).digest()
        # read the first 32-bit word of the digest as a big-endian uint
        u = xdrlib.Unpacker(digest)
        index = u.unpack_uint()
        # keep only the top num_of_bits bits
        index = (index & 0xFFFFFFFF) >> (32 - self.num_of_bits)
        index = int(index)
    else:
        index = 0
    # dict.has_key is Python 2 only
    if self.bucket_cache.has_key(index):
        if self.debug:
            print("return cached bucket for index", index)
        return self.bucket_cache[index]
    raise Error("no bucket found with index %d" % index)
def hmacMD5(data, key):
    """
    HMAC-MD5 -- really the HMACT64 variant, which first reduces keys
    longer than 64 bytes with MD5 before the standard HMAC computation.
    Uses the stdlib hmac module.
    """
    # `md5` may be the legacy module (exposes .new) or hashlib's constructor
    if hasattr(md5, 'new'):
        make_md5 = md5.new
    else:
        make_md5 = md5
    if len(key) > 64:
        # HMACT64: long keys are hashed down to 16 bytes
        key = make_md5(key).digest()
    # BUG FIX: digestmod must be passed explicitly -- hmac.new()'s implicit
    # MD5 default was removed in Python 3.8, where the bare call raises.
    # Passing `md5` works for both the module (py2) and the callable (py3).
    h = hmac.new(key, digestmod=md5)
    h.update(data)
    return h.digest()
def queryAccountInfo(self):
    """Fetch the account's profile/balance information.

    Returns the parsed response, or {'172001': ...} on any error.
    """
    self.accAuth()
    nowdate = datetime.datetime.now()
    self.Batch = nowdate.strftime("%Y%m%d%H%M%S")
    # signature: MD5(sid + token + timestamp), upper-case hex
    signature = self.AccountSid + self.AccountToken + self.Batch
    sig = md5.new(signature).hexdigest().upper()
    # assemble the endpoint URL
    url = "https://" + self.ServerIP + ":" + self.ServerPort + "/" + self.SoftVersion + "/Accounts/" + self.AccountSid + "/AccountInfo?sig=" + sig
    # Authorization header: base64("sid:timestamp")
    src = self.AccountSid + ":" + self.Batch
    auth = base64.encodestring(src).strip()
    req = request.Request(url)
    self.setHttpHeader(req)
    body = ''
    req.add_header("Authorization", auth)
    data = ''
    try:
        res = request.urlopen(req)
        data = res.read()
        res.close()
        if self.BodyType == 'json':
            # JSON response
            locations = json.loads(data)
        else:
            # XML response
            xtj = xmltojson()
            locations = xtj.main(data)
            # locations = _tr_xml_to_json_extension(data)
        if self.Iflog:
            self.log(url, body, data)
        return locations
    except Exception:
        if self.Iflog:
            self.log(url, body, data)
        return {'172001': '网络错误'}
def CallResult(self, callSid):
    """Fetch the result record for the call identified by *callSid*.

    Returns the parsed JSON/XML response on success, or
    ``{'172001': '网络错误'}`` (network error) on any failure.

    NOTE(review): this method mixes urllib2 (Python 2) with
    base64.encodebytes (Python 3) — confirm which interpreter it is
    meant to run under.
    """
    self.accAuth()
    self.Batch = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
    # sig = upper-case hex MD5 of sid + token + timestamp batch
    sig = md5.new(self.AccountSid + self.AccountToken + self.Batch).hexdigest().upper()
    url = ("https://" + self.ServerIP + ":" + self.ServerPort + "/"
           + self.SoftVersion + "/Accounts/" + self.AccountSid
           + "/CallResult?sig=" + sig + "&callsid=" + callSid)
    # Authorization token: base64("sid:batch")
    auth = base64.encodebytes((self.AccountSid + ":" + self.Batch).encode()).decode().strip()
    httpreq = urllib2.Request(url)
    self.setHttpHeader(httpreq)
    body = ''
    httpreq.add_header("Authorization", auth)
    data = ''
    try:
        resp = urllib2.urlopen(httpreq)
        data = resp.read()
        resp.close()
        if self.BodyType == 'json':
            result = json.loads(data)
        else:
            result = xmltojson().main(data)
        if self.Iflog:
            self.log(url, body, data)
        return result
    except Exception:
        if self.Iflog:
            self.log(url, body, data)
        return {'172001': '网络错误'}
def request(self, method, request_uri, headers, content, cnonce=None):
    """Add an RFC 2617 Digest ``Authorization`` header to *headers*.

    Computes the qop=auth request digest from the stored challenge
    (nonce, nc, qop, realm, algorithm), ``self.A1`` and
    ``self.credentials``, then increments the nonce count.  *cnonce*
    may be supplied for testing; otherwise a fresh one is generated.
    """
    import hashlib

    def H(x):
        # hashlib.md5 wants bytes on Python 3; auth strings are ASCII-safe.
        if not isinstance(x, bytes):
            x = x.encode('utf-8')
        return hashlib.md5(x).hexdigest()

    KD = lambda s, d: H("%s:%s" % (s, d))
    A2 = "".join([method, ":", request_uri])
    self.challenge['cnonce'] = cnonce or _cnonce()
    request_digest = '"%s"' % KD(
        H(self.A1),
        "%s:%s:%s:%s:%s" % (self.challenge['nonce'],
                            '%08x' % self.challenge['nc'],
                            self.challenge['cnonce'],
                            self.challenge['qop'],
                            H(A2)))
    # BUG FIX: the format string had 8 conversions for 9 arguments
    # (username was a literal "******"), which raised TypeError at
    # runtime; restore the username="%s" conversion.
    headers['Authorization'] = (
        'Digest username="%s", realm="%s", nonce="%s", uri="%s", '
        'algorithm=%s, response=%s, qop=%s, nc=%08x, cnonce="%s"' % (
            self.credentials[0],
            self.challenge['realm'],
            self.challenge['nonce'],
            request_uri,
            self.challenge['algorithm'],
            request_digest,
            self.challenge['qop'],
            self.challenge['nc'],
            self.challenge['cnonce'],
        ))
    self.challenge['nc'] += 1
def md5_file(path): """ Return a 16-digit MD5 hex digest of a file's contents Read the file in chunks """ chunk = 8096 try: checksum = md5() except TypeError: checksum = md5.new() fp = open(path, 'r') while True: buffer = fp.read(chunk) if not buffer: break checksum.update(buffer) fp.close() return checksum
def sdk_kaopu_callback():
    """Payment-callback endpoint for the Kaopu SDK.

    Verifies the MD5 signature of the incoming parameters against the
    shared secret, then credits the payment via do_pay().  Responds
    with a signed status code: "1000" success, "1002" bad signature,
    "1003" payment failed.

    NOTE(review): 'md5' and 'urllib.unquote' are Python 2 only —
    confirm the target interpreter.
    """
    import md5
    import urllib

    def sign_response(code):
        # Response signature: MD5("code|secret").
        m = md5.new("|".join([code, cfg["KAOPU_SECRETKEY"]]))
        return {"code": code, "sign": m.hexdigest()}
    cfg = settings.SDK["kaopu"]
    MAX_BODY = 2048
    # Raw message kept for the payment audit trail (capped at 2 KiB).
    rawmsg = request.body.read(MAX_BODY) if request.method == 'POST' \
        else request.query_string
    params = dict(request.params)
    sign = params.pop("sign", "")
    params["gamename"] = urllib.unquote(params["gamename"])
    # Expected signature: MD5 over the pipe-joined field values, in
    # this exact field order, followed by the shared secret.
    tosign = "|".join([
        "%s" % params[f] for f in [
            "username", "kpordernum", "ywordernum", "status", "paytype",
            "amount", "gameserver", "errdesc", "paytime", "gamename"
        ]
    ])
    tosign = "|".join([tosign, cfg["KAOPU_SECRETKEY"]])
    tosign = md5.new(tosign).hexdigest()
    if sign != tosign:
        logger.error('[sdk kaopu] verify failed %r' % params)
        return sign_response("1002")
    # Our order id encodes world/entity/order as "world_entity_order".
    worldID, entityID, orderID = params["ywordernum"].split("_")
    # Amount arrives in cents; do_pay expects currency units.
    result = do_pay(int(worldID), int(entityID), orderID,
                    float(params['amount']) / 100, rawmsg,
                    sdktype=poem_pb.SDK_KAOPU)
    if "success" in result:
        return sign_response("1000")
    logger.error('[sdk kaopu] pay failed %r' % params)
    return sign_response("1003")
def MediaFileUpload(self, filename, body):
    """Upload media-file content via the MediaFileUpload REST call.

    filename: remote file name, appended to the query string.
    body: raw file content forwarded as the request body.
    Returns whatever ``self.sendRequest()`` produces.
    """
    import hashlib
    self.accAuth()
    nowdate = datetime.datetime.now()
    self.Batch = nowdate.strftime("%Y%m%d%H%M%S")
    # sig = upper-case hex MD5 of accountSid + accountToken + batch stamp.
    # FIX: md5.new() is the removed Python 2 'md5' module; the rest of
    # this method already uses Python 3 APIs (urllib.request,
    # base64.encodebytes).
    signature = self.AccountSid + self.AccountToken + self.Batch
    sig = hashlib.md5(signature.encode("utf-8")).hexdigest().upper()
    # Build the request URL.
    url = "https://" + self.ServerIP + ":" + str(self.ServerPort) + "/" + self.SoftVersion + "/Accounts/" + self.AccountSid + "/Calls/MediaFileUpload?sig=" + sig + "&appid=" + self.AppId + "&filename=" + filename
    # Authorization token: base64("sid:batch").
    src = self.AccountSid + ":" + self.Batch
    auth = base64.encodebytes(src.encode("utf-8")).strip()
    req = urllib.request.Request(url)
    req.add_header("Authorization", auth)
    if self.BodyType == 'json':
        req.add_header("Accept", "application/json")
    else:
        req.add_header("Accept", "application/xml")
    # The upload body is always a raw octet stream, whichever Accept
    # type is negotiated.
    req.add_header("Content-Type", "application/octet-stream")
    # Send the request.
    return self.sendRequest(body, req)
def generic_connection(self):
    """Open a REST connection using the configured auth method and
    return a dict with 'headers', 'status' and 'data' (JSON-decoded
    when response_format is 'JSON' and the status is 200).

    NOTE(review): several oddities to confirm with the author:
    - in the 'headers_token' branch a first request is issued and its
      result immediately overwritten by the common http.urlopen() call
      below;
    - in the 'user_password' branch 'connection' is a dict, which is
      then passed to http.urlopen() as if it were a URL;
    - if auth_method matches none of the branches, 'headers' (and
      'connection') are unbound when http.urlopen() runs;
    - md5.new() is the Python 2 'md5' module (removed in Python 3).
    """
    _logger.warning('Entering function "test connection"')
    base_url = self.url
    # Token in the header (e.g. toggl).
    if self.auth_method == 'headers_token':
        _logger.warning('Entering headers_token method')
        connection = base_url
        headers = urllib3.util.make_headers(basic_auth='%s:api_token' % (self.token))
        r = http.request('GET', connection, headers=headers)
    # Token passed along with the data (e.g. sbif).
    elif self.auth_method == 'data_token':
        headers = {}
        _logger.info('entra por metodo data_token')
        connection = '{}?{}={}&{}={}'.format(self.url, self.auth_method_name, self.token, self.response_format_name, self.response_format)
        _logger.info('conection: %s' % connection)
    # Complex data schema (not working), e.g. Sugarcrm.
    elif self.auth_method == 'user_password':
        headers = {'Content-Type': 'application/json'}
        parameters = {
            'user_auth': {
                'user_name': self.user,
                'password': md5.new(self.password).hexdigest(),
                'version': '1'
            }
        }
        connection = {
            "method": "login",
            "input_type": "JSON",
            "response_type": "JSON",
            "rest_data": parameters,
            "application_name": "RestTest",
            "name_value_list": ""
        }
    # Perform the connection.
    r = http.urlopen(self.http_auth_method, connection, headers=headers)
    _logger.info('encabezados de respuesta: %s' % r.headers)
    _logger.info('datos de la respuesta: %s' % r.data)
    rr = {}
    rr['headers'] = r.headers
    rr['status'] = r.status
    if r.status == 200:
        if self.response_format == 'JSON':
            rr['data'] = json.loads(r.data.decode())
            _logger.info('formato json, status 200')
        else:
            # For working with other response formats (not implemented).
            rr['data'] = r.data
            _logger.warning('formato json, status: %s' % r.status)
    else:
        rr['data'] = r.data
        _logger.warning('formato NO json, status: %s' % r.status)
    return rr
def __init__(self, hash_name='md5'):
    """Pickler that accumulates a content hash alongside the pickle.

    hash_name: any algorithm name hashlib understands (default 'md5').

    FIX: hash_name used to be ignored entirely (always md5.new()).
    Honour it via hashlib when available; fall back to the legacy
    ``md5`` module only on interpreters without hashlib (Python < 2.5),
    where hash_name is necessarily ignored.  Default behaviour (MD5)
    is unchanged.
    """
    self.stream = StringIO()
    pickle.Pickler.__init__(self, self.stream, protocol=2)
    try:
        import hashlib
        self._hash = hashlib.new(hash_name)
    except ImportError:
        self._hash = md5.new()
def makeinfo(input, userabortflag, userprogresscallback):
    """Build a BitTorrent 'info' dictionary from the torrent-creation
    parameters in *input*.

    input: dict with at least 'files', 'encoding', 'piece length',
        'createmerkletorrent' and the 'makehash_*' flags; optional
        'live' and assorted extension keys ('provider', 'tns', ...).
    userabortflag: threading.Event-like object polled between reads;
        when set, (None, None) is returned immediately.
    userprogresscallback: called with a 0..1 float after each chunk,
        or None.

    Returns (infodict, piece_length).
    """
    encoding = input['encoding']
    pieces = []         # SHA1 digest of each completed piece
    sh = sha()          # running hasher for the piece being filled
    done = 0L           # bytes hashed into the current piece so far
    fs = []             # per-file metadata dicts (multi-file mode)
    totalsize = 0L
    totalhashed = 0L
    subs = []           # (path-component-list, filesystem-path) pairs
    for file in input['files']:
        inpath = file['inpath']
        outpath = file['outpath']
        if DEBUG:
            print >> sys.stderr, 'makeinfo: inpath', inpath, 'outpath', outpath
        if os.path.isdir(inpath):
            dirsubs = subfiles(inpath)
            subs.extend(dirsubs)
        elif outpath is None:
            subs.append(([os.path.basename(inpath)], inpath))
        else:
            subs.append((filename2pathlist(outpath, skipfirst=True), inpath))

    subs.sort()
    # Attach each entry's size; for live torrents the declared length
    # of the first input file is used instead of the on-disk size.
    newsubs = []
    for p, f in subs:
        if 'live' in input:
            size = input['files'][0]['length']
        else:
            size = os.path.getsize(f)
        totalsize += size
        newsubs.append((p, f, size))

    subs = newsubs
    # Piece length: explicit, or chosen from total size (2^15 .. 2^21).
    if input['piece length'] == 0:
        if input['createmerkletorrent']:
            piece_len_exp = 18
        elif totalsize > 8589934592L:
            piece_len_exp = 21
        elif totalsize > 2147483648L:
            piece_len_exp = 20
        elif totalsize > 536870912:
            piece_len_exp = 19
        elif totalsize > 67108864:
            piece_len_exp = 18
        elif totalsize > 16777216:
            piece_len_exp = 17
        elif totalsize > 4194304:
            piece_len_exp = 16
        else:
            piece_len_exp = 15
        piece_length = 2**piece_len_exp
    else:
        piece_length = input['piece length']
    # Hash pass — skipped entirely for live torrents.
    if 'live' not in input:
        for p, f, size in subs:
            pos = 0L
            h = open(f, 'rb')
            # Optional whole-file hashes requested by the caller.
            if input['makehash_md5']:
                hash_md5 = md5.new()
            if input['makehash_sha1']:
                hash_sha1 = sha()
            if input['makehash_crc32']:
                hash_crc32 = zlib.crc32('')
            while pos < size:
                # Read up to the end of the file or the current piece,
                # whichever comes first (pieces span file boundaries).
                a = min(size - pos, piece_length - done)
                if userabortflag is not None and userabortflag.isSet():
                    return (None, None)
                readpiece = h.read(a)
                if userabortflag is not None and userabortflag.isSet():
                    return (None, None)
                sh.update(readpiece)
                if input['makehash_md5']:
                    hash_md5.update(readpiece)
                if input['makehash_crc32']:
                    hash_crc32 = zlib.crc32(readpiece, hash_crc32)
                if input['makehash_sha1']:
                    hash_sha1.update(readpiece)
                done += a
                pos += a
                totalhashed += a
                # Piece complete: record its digest, start a fresh hasher.
                if done == piece_length:
                    pieces.append(sh.digest())
                    done = 0
                    sh = sha()
                if userprogresscallback is not None:
                    userprogresscallback(float(totalhashed) / float(totalsize))

            # Per-file entry for the multi-file 'files' list.
            # NOTE(review): odict appears to be a project ordered-dict
            # with a sort() method — confirm.
            newdict = odict()
            newdict['length'] = num2num(size)
            newdict['path'] = uniconvertl(p, encoding)
            newdict['path.utf-8'] = uniconvertl(p, 'utf-8')
            for file in input['files']:
                if file['inpath'] == f:
                    if file['playtime'] is not None:
                        newdict['playtime'] = file['playtime']
                    break

            if input['makehash_md5']:
                newdict['md5sum'] = hash_md5.hexdigest()
            if input['makehash_crc32']:
                newdict['crc32'] = '%08X' % hash_crc32
            if input['makehash_sha1']:
                newdict['sha1'] = hash_sha1.digest()
            fs.append(newdict)
            h.close()

        # Flush the final, possibly short, piece.
        if done > 0:
            pieces.append(sh.digest())
    # Single-file torrents use 'length'; multi-file use 'files'.
    if len(subs) == 1:
        flkey = 'length'
        flval = num2num(totalsize)
        name = subs[0][0][0]
    else:
        flkey = 'files'
        flval = fs
        outpath = input['files'][0]['outpath']
        l = filename2pathlist(outpath)
        name = l[0]
    infodict = odict()
    infodict['piece length'] = num2num(piece_length)
    infodict[flkey] = flval
    infodict['name'] = uniconvert(name, encoding)
    infodict['name.utf-8'] = uniconvert(name, 'utf-8')
    if 'live' not in input:
        if input['createmerkletorrent']:
            # Merkle torrents store only the tree root, not the pieces.
            merkletree = MerkleTree(piece_length, totalsize, None, pieces)
            root_hash = merkletree.get_root_hash()
            infodict['root hash'] = root_hash
        else:
            infodict['pieces'] = ''.join(pieces)
    else:
        infodict['live'] = input['live']
    # Optional pass-through extension keys.
    if input.has_key('provider'):
        infodict['provider'] = input['provider']
    if input.has_key('content_id'):
        infodict['content_id'] = input['content_id']
    if input.has_key('premium'):
        infodict['premium'] = input['premium']
    if input.has_key('license'):
        infodict['license'] = input['license']
    if input.has_key('tns'):
        infodict['tns'] = input['tns']
    if 'cs_keys' in input:
        infodict['cs_keys'] = input['cs_keys']
    if 'private' in input:
        infodict['private'] = input['private']
    if 'sharing' in input:
        infodict['sharing'] = input['sharing']
    if 'ns-metadata' in input:
        infodict['ns-metadata'] = input['ns-metadata']
    # Single-file torrents carry 'playtime' at the top level; 'f' here
    # is the last path from the hashing loop above.
    if len(subs) == 1:
        for file in input['files']:
            if file['inpath'] == f:
                if file['playtime'] is not None:
                    infodict['playtime'] = file['playtime']
                break

    infodict.sort()
    return (infodict, piece_length)
def md5digest(str): md5_gen = md5.new() md5_gen.update(str) return md5_gen.hexdigest()
def md5_str(password): buf = md5.new(password).digest() #util.hexprint(buf) return binascii.b2a_hex(buf).upper()