class ProductionConfig(BaseConfig):
    """Production specified configuration class."""

    DEBUG = False
    TESTING = False

    # Derived from a fixed phrase so the IV stays stable when the application
    # is recreated (device users may not be reachable at that time).
    AES_IV = md5("univerdustry".encode("utf-8")).hexdigest()[:16].encode("utf-8")

    # Deterministic secret key to avoid token invalidation: every time the
    # application is built a random key would change, and a changed key
    # invalidates previously issued tokens.
    SECRET_KEY = md5("univerdustry".encode("utf-8")).hexdigest()
def get_md5(text):
    """Hash a password for storage.

    :param text: the plain-text password
    :return: hex MD5 digest of the password plus the site-wide salt
    """
    salted = (text + 'DomainManagerSystem').encode('utf-8')
    return md5(salted).hexdigest()
def md5(val: str) -> str:
    """Hex MD5 digest of *val*, marked as a non-security use of MD5.

    >>> md5('abc0')
    '577571be4de9dcce85a041ba0410f29f'
    """
    # pylint: disable=unexpected-keyword-arg
    digest = _md5.md5(val.encode(), usedforsecurity=False)
    return digest.hexdigest()
def send_msg(request):
    """Send an SMS verification code through the jufeng SMS gateway.

    POST parameters: mobile, imageCode, type.  On success the gateway's check
    number is stored in session["check_num"][type][mobile] and a JSON status
    is returned; on failure status 500 plus the gateway description is
    returned.  Non-POST requests render signup.html.
    """
    # Initialized before the branch: the original only set `ret` inside the
    # POST branch, so a GET request hit a NameError at render_to_response.
    ret = {"status": 200}
    if request.method == "POST":
        mobile = request.POST.get("mobile")
        image_code = request.POST.get("imageCode")
        sms_type = request.POST.get("type")  # renamed: don't shadow builtin type()
        url = "http://hzl-api.jufeng.co/api"
        # NOTE(review): credentials hard-coded in source — move to settings/env.
        appid = "1493037951465"
        appsecret = "aa5009503ead3a60180779f1d1db3227"
        token = _md5.md5((appid + appsecret).encode()).hexdigest()
        payload = {"mobile": mobile, "type": sms_type, "token": token,
                   "tokenUserId": 1, "imageCode": image_code}
        parameters = {
            "action": "smsService.zbbSend",
            # The gateway expects the JSON payload base64-encoded.
            "jsonString": base64.b64encode(
                json.dumps(payload).encode(encoding='utf_8')),
        }
        resp = json.loads(requests.post(url, parameters).text)
        print(resp)
        if resp["isSuccess"]:
            print(resp["data"])
            request.session["check_num"] = {str(sms_type): {mobile: resp["data"]}}
        else:
            ret["status"] = 500
            ret["msg"] = resp["description"]
            print(ret["msg"])
        return HttpResponse(json.dumps(ret))
    return render_to_response("signup.html", ret)
def process_item(self, item, spider):
    """Forward a scraped article to the ingestion endpoint, then pass it on."""
    time = datetime.now().microsecond
    authKey = 'whahaha'
    # Auth secret derives from the key plus the timestamp shifted back 1000.
    authSecret = md5((authKey + str(time - 1000)).encode('utf-8')).hexdigest()
    payload = {
        'time': str(time),
        'authKey': authKey,
        'authSecret': authSecret,
        'md5url': 'w2w2d323232323',
    }
    # Copy the item fields verbatim into the request body.
    for field in ('url', 'originUrl', 'originName', 'title', 'keywords',
                  'abstracts', 'content', 'type', 'group', 'status', 'pageUrls'):
        payload[field] = item[field]
    response = requests.post(self.url, data=json.dumps(payload),
                             headers=self.headers)
    print(response.text)
    return item
def calculate_hash(ip, port, server_id):
    """Request a file from (ip, port) and return the MD5 of everything received.

    @param ip: The Server IP
    @param port: The Port the File is served on
    @param server_id: The database server_id, sent as the request line
    @return: the hex digest of the streamed data
    """
    logger.info('Calculating hash for: %s ' % server_id)
    sock = socket.socket()
    try:
        sock.connect((ip, port))
        # noinspection PyArgumentList
        sock.send(bytes('%s\n' % server_id, encoding='ascii'))
        # NOTE(review): settimeout() takes seconds — 5000 s looks like it was
        # meant to be milliseconds; confirm the intended timeout.
        sock.settimeout(5000)
        _hash = md5()
        while True:
            data = sock.recv(2 ** 14)
            _hash.update(data)
            if data == bytes():  # empty recv == peer closed the connection
                break
    finally:
        # Close on every path — the original leaked the socket when connect()
        # or send() raised, since its try/finally only wrapped the recv loop.
        sock.close()
    return _hash.hexdigest()
def dispatch_packet(self, packet, sql=None):
    """Forward *packet* upstream, then relay response packets back until EOF/OK.

    If the dump_packet_to_file setting is "1" and *sql* is given, the SQL text
    and every response packet are also written under dump/<md5(sql)>/.
    """
    self.upstream.send(packet)
    logger.debug(f"== Send packet ==")
    dump(packet)
    if self.server.settings["dump_packet_to_file"] == "1" and sql:
        # One dump directory per distinct statement, keyed by md5 of the SQL.
        dir_name = self.dir_name + "/dump/" + md5(sql.encode()).hexdigest()
        if not os.path.isdir(dir_name):
            os.mkdir(dir_name)
        with open(dir_name + "/" + "sql.txt", "w") as sql_write:
            sql_write.write(sql)
    while True:
        # Wire format (MySQL-style, presumably): 3-byte little-endian payload
        # length, 1-byte sequence id, then the payload; the 5th header byte is
        # the first payload byte, used as the packet type.
        _header = self.upstream.recv(5)
        _length = struct.unpack("<I", (_header[0:3] + b"\x00"))[0]
        _sequenceId = struct.unpack("<B", _header[3:4])[0]
        _packetType = struct.unpack("<B", _header[4:])[0]
        # length - 1: the type byte was already consumed as part of _header.
        _payload = self.upstream.recv(_length - 1)
        _packet = _header + _payload
        logger.debug(f"== Read packet of {_sequenceId} ==")
        dump(_packet)
        self.send_packet(_packet)
        if self.server.settings["dump_packet_to_file"] == "1" and sql:
            cap_file = dir_name + "/" + str(_sequenceId) + ".cap"
            with open(cap_file, "wb") as wf:
                wf.write(_packet)
        if _packetType == Flags.EOF or _packetType == Flags.OK:
            return
def md5(string):
    """Hex MD5 digest of a UTF-8 encoded string.

    >>> md5("test")
    '098f6bcd4621d373cade4e832627b4f6'
    """
    encoded = string.encode('utf-8')
    return _md5.md5(encoded).hexdigest()
def save_image(html):
    """Write *html* (image bytes) to <cwd>/<md5(content)>.jpg.

    The MD5 digest of the content is used as the file name, so identical
    content is never written twice.
    """
    file_path = '{}/{}.{}'.format(os.getcwd(), md5(html).hexdigest(), 'jpg')
    print(file_path)
    if not os.path.exists(file_path):
        with open(file_path, 'wb') as f:
            f.write(html)
        # removed the redundant f.close(): the with-block already closed it
def temporary(cls, request, image):
    """Store an uploaded image temporarily, applying rotation and crop.

    The upload is saved under media/temporary/ with an md5-derived unique
    name, rotated by 360 - request's `rotate` degrees (presumably the client
    reports clockwise degrees while PIL rotates counter-clockwise — confirm),
    cropped to the crop_x1/crop_y1/crop_x2/crop_y2 box, reduced via
    helper.create_medium_image, and re-saved as an optimized progressive
    image.  Returns the public URL path of the stored file.
    """
    from _md5 import md5
    file_obj = image
    # Unique-enough name: fixed prefix plus the current timestamp, hashed below.
    name = 'temporary_for_event' + str(datetime.datetime.now())
    filename = md5(name.encode(
        'utf8')).hexdigest() + '.' + file_obj.name.split('.')[-1]
    from django.core.files.storage import default_storage
    with default_storage.open('temporary/' + filename, 'wb+') as destination:
        # Stream the upload chunk by chunk to avoid loading it all in memory.
        for chunk in file_obj.chunks():
            destination.write(chunk)
    if request.POST['rotate'] == 'undefined':
        angle = 0
    else:
        angle = 360 - int(request.POST['rotate'])
    reduced = Image.open('media/temporary/' + filename)
    new_response = reduced.rotate(angle).crop(
        (float(request.POST['crop_x1']), float(request.POST['crop_y1']),
         float(request.POST['crop_x2']), float(request.POST['crop_y2'])))
    quality_val = 85
    reduced = helper.create_medium_image(new_response)
    reduced.save('media/temporary/' + filename, quality=quality_val,
                 optimize=True, progressive=True)
    return '/media/temporary/' + filename
def download_save_image(url):
    """Download an image and save it under ./images/<md5(content)>.jpg.

    The MD5 of the content names the file, so duplicate images are stored
    only once.  Returns None on any failure (best-effort download).
    """
    try:
        response = requests.get(url, headers=HEADERS)
        # .content for binary image data (.text would be for HTML pages).
        content = response.content
        # md5-based file name: identical images map to the same path.
        file_path = '{0}/images/{1}.{2}'.format(
            os.getcwd(), md5(content).hexdigest(), 'jpg')
        # renamed from `dir` to avoid shadowing the builtin
        img_dir = '{0}/images'.format(os.getcwd())
        if not os.path.exists(img_dir):
            os.mkdir(img_dir)
        if not os.path.exists(file_path):
            with open(file_path, 'wb') as f:
                f.write(content)
            print('图片保存成功:' + url)
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate; the download itself stays best-effort.
        return None
def save_image(content):
    """Persist image bytes to <cwd>/<md5(content)>.jpg if not already present."""
    digest = md5(content).hexdigest()
    file_path = '{0}/{1}.{2}'.format(os.getcwd(), digest, 'jpg')
    if os.path.exists(file_path):
        return
    with open(file_path, 'wb') as f:
        f.write(content)
def insert_pagina(mongodb, png_image, numero_dta: str, filename: str,
                  npagina: int) -> (ObjectId, bool):
    """Insert a PNG into fs.files (GridFS).

    If a file with the same md5 AND the same filename already exists, the
    insert is treated as a duplicate and the existing document's _id is
    returned instead.

    :param mongodb: MongoDB connection/database
    :param png_image: image content
    :param numero_dta: metadata.numero_dta
    :param filename: filename
    :param npagina: metadata.npagina
    :return: (ObjectId, True|False)
        the newly generated ObjectId, or the _id of the existing file;
        True if the file already existed, False if a new document was created
    """
    fs = GridFS(mongodb)
    content = png_image
    m = md5()
    m.update(content)
    # GridFS records the md5 of each stored file, so look it up first.
    grid_out = fs.find_one({'md5': m.hexdigest()})
    if grid_out:
        if grid_out.filename == filename:
            logger.warning(' Arquivo %s Pagina %s MD5 %s '
                           'tentativa de inserir pela segunda vez!!' %
                           (filename, npagina, m.hexdigest()))
            # File exists, abort!
            return grid_out._id, True
    # Insert File
    params = {'numero_dta': numero_dta, 'pagina': npagina}
    return fs.put(content, filename=filename, metadata=params), False
class BaseConfig:
    """Base configuration class for application."""

    DEBUG = False
    ENCODING = "utf-8"
    DATETIME_FORMAT = "%d-%m-%Y"
    LANGUAGES = ["en", "tr"]
    RESTFUL_JSON = {'cls': CustomJsonEncoder}
    # Deterministic key (derived from a fixed phrase) so restarts do not
    # invalidate previously issued tokens.
    SECRET_KEY = md5("univerdustry".encode(ENCODING)).hexdigest()
    # Enable CSRF tokens in the Forms.
    WTF_CSRF_ENABLED = True
    FILES_PATH = os.path.join("application", "files")
    FLOWER_APP_URL = os.getenv("FLOWER_URL")
    CELERY_BROKER_URL = os.getenv("CELERY_BROKER_URL")
    # NOTE(review): reads CELERY_BROKER_URL — confirm this is intentional
    # (result backend shares the broker) and not a typo for a dedicated
    # CELERY_RESULT_BACKEND environment variable.
    CELERY_RESULT_BACKEND = os.getenv("CELERY_BROKER_URL")
    MONGO_REST = os.getenv("MONGO_REST")
    SCHOLAR_REST = os.getenv("SCHOLAR_REST")
    ELASTICSEARCH = os.getenv("ELASTICSEARCH")
    VECTORIZER = os.getenv("VECTORIZER")
    APACHE_TIKA = os.getenv("APACHE_TIKA")
    DB_HISTORY_DAYS_LIMIT = 30
def get_response(self, word):
    """POST *word* to the translate endpoint and return the decoded body."""
    ts = "" + str(int(time.time() * 1000))
    salt = ts + str(int(random.random() * 10))
    # Signature mirrors the web client: md5(client + word + salt + secret).
    sign_source = "fanyideskweb" + word + salt + "mmbP%A-r6U3Nw(n]BjuEU"
    hasher = _md5.md5()
    hasher.update(sign_source.encode())
    sign = hasher.hexdigest()
    form_data = {
        "i": word,
        "from": "AUTO",
        "to": "AUTO",
        "smartresult": "dict",
        "client": "fanyideskweb",
        "salt": salt,
        "sign": sign,
        "ts": ts,
        "bv": "02a6ad4308a3443b3732d855273259bf",
        "doctype": "json",
        "version": "2.1",
        "keyfrom": "fanyi.web",
        "action": "FY_BY_REALTlME",
    }
    header = self.headers[random.randint(0, len(self.headers) - 1)]
    response = requests.post(url=self.url, data=form_data, headers=header)
    return response.content.decode()
def __init__(
    self,
    *,
    port: Optional[int] = None,
    user: Optional[str] = None,
    password: Optional[str] = None,
    host: Optional[str] = None,
    database: Optional[str] = None,
    pool_size: int = 5,
    overflow: int = 10,
    conn_str: Optional[str] = None,
) -> None:
    """Create a database connection pool.

    Either pass `conn_str` alone, or pass all of port/user/password/host/
    database — the two modes are mutually exclusive.

    :param pool_size: base number of pooled connections
    :param overflow: extra connections allowed beyond the pool size
    :raises ValueError: if neither a complete set of parts nor a bare
        conn_str is supplied, or if both are.
    """
    if conn_str is None:
        if any(arg is None for arg in (port, user, password, host, database)):
            raise ValueError(
                "If conn_str is not given then all of the arguments port, user, password, host, database must be provided."
            )
        else:
            # The password may contain characters that must be percent-encoded
            # inside a URL.
            conn_str = (
                f"postgresql://{user}:{urlquote(password)}@{host}:{port}/{database}"
            )
    else:
        if any(arg is not None for arg in (port, user, password, host, database)):
            raise ValueError(
                "If conn_str is given, none of the arguments port, user, password, host, database are allowed."
            )
    self.app_name = "flowmachine"
    try:
        # Tag the connection with the OS user when available; os.getlogin()
        # can fail in daemonized/containerised environments.
        self.app_name = "-".join((self.app_name, os.getlogin()))
    except (FileNotFoundError, OSError):
        logger.info(
            f"Couldn't get username for application name, using '{self.app_name}'"
        )
    connect_args = {"application_name": self.app_name}
    # NOTE(review): strategy="threadlocal" was removed in SQLAlchemy 1.4 —
    # this pins the code to the 1.3 line; confirm before upgrading.
    self.engine = sqlalchemy.create_engine(
        conn_str,
        echo=False,
        strategy="threadlocal",
        pool_size=pool_size,
        max_overflow=overflow,
        pool_timeout=None,
        connect_args=connect_args,
    )
    # Unique-to-db id for this connection, to allow use of a common redis instance with
    # multiple databases
    conn_id = md5(str(self.engine.url.host).encode())
    conn_id.update(str(self.engine.url.port).encode())
    conn_id.update(str(self.engine.url.database).encode())
    self.conn_id = conn_id.hexdigest()
    self.max_connections = pool_size + overflow
    if self.max_connections > os.cpu_count():
        warnings.warn(
            f"Maximum number of connections (pool size + overflow = {self.max_connections}) is greater than the available cpu cores ({os.cpu_count()})."
        )
    self.__check_flowdb_version()
def get_file_name(self):
    """Build the system file name from the MD5 digest of the file contents.

    :return: str
    """
    self.file.seek(0)
    contents = self.file.read()
    # Rewind so subsequent readers of the file object start from the top.
    self.file.seek(0)
    return md5(contents).hexdigest()
def calc_file_hash(filepath):
    """
    Calculates the MD5 hash of a file.

    :param filepath: path to the file
    :return: hash as string
    """
    digest = md5()
    with open(filepath, 'rb') as f:
        # Stream in 64 KiB chunks so large files are not read into memory
        # in one piece (the original did f.read() of the whole file).
        for chunk in iter(lambda: f.read(1 << 16), b''):
            digest.update(chunk)
    return digest.hexdigest()
def saveImage(content):
    """Save image bytes under the fixed download folder, named by MD5 digest."""
    path = r'C:\Users\Leon\Desktop\jiepai'
    # File = directory + name + extension; the md5-derived name keeps
    # identical images from being written twice.
    filePath = '{0}/{1}.{2}'.format(path, md5(content).hexdigest(), 'jpg')
    if os.path.exists(filePath):
        return
    with open(filePath, 'wb') as f:
        f.write(content)
def adjacent(z0, path):
    """Yield (neighbor, extended_path) for every open door out of *z0*.

    Positions are complex numbers on the 4x4 square [0,3]x[0,3].  A door is
    open when the corresponding md5(data + path) hex character is one of
    'bcdef' (one character per direction U, D, L, R).
    """
    moves = {"U": -1j, "D": 1j, "L": -1, "R": 1}
    digest = md5((data + path).encode()).hexdigest()
    for (step, delta), ch in zip(moves.items(), digest):
        if ch not in "bcdef":
            continue
        z = z0 + delta
        if 0 <= z.real <= 3 and 0 <= z.imag <= 3:
            yield z, path + step
def byte_after_leading_zeroes(leading_zeroes_count):
    """Yield hex-character pairs that follow a run of leading zeros.

    For each n = 0, 1, 2, ... whose md5(INPUT + str(n)) digest starts with
    `leading_zeroes_count` zeros, yield the digest characters at positions
    count and count + 1.
    """
    prefix = "0" * leading_zeroes_count
    for n in count():
        hasher = md5()
        hasher.update(INPUT + str(n).encode())
        digest = hasher.hexdigest()
        if digest.startswith(prefix):
            yield digest[leading_zeroes_count], digest[leading_zeroes_count + 1]
def sign(params):
    '''Build the WeChat Pay MD5 signature for *params*.

    https://pay.weixin.qq.com/wiki/doc/api/tools/cash_coupon.php?chapter=4_3

    Empty values are dropped, the remaining key=value pairs are sorted and
    joined with '&', the API key is appended, and the upper-case MD5 hex
    digest of the result is returned.
    '''
    # .items(): .iteritems() is Python-2-only and raises AttributeError on 3.
    params = [(str(key), str(val)) for key, val in params.items() if val]
    sorted_params_string = '&'.join('='.join(pair) for pair in sorted(params))
    sign = '{}&key={}'.format(sorted_params_string, settings.WEIXIN_PAY_API_KEY)
    # Encode before hashing: hashlib requires bytes on Python 3.
    return md5(sign.encode('utf-8')).hexdigest().upper()
def test_getUser_api(self):
    """GET /api/admin2/<pw> should return the md5 hex digest of the password."""
    with self.app.test_client() as client:
        # Use the client created by the context manager — the original called
        # self.client, leaving the `with` client unused.
        response = client.get('/api/admin2/1234567',
                              follow_redirects=True,
                              content_type='application/json')
        print(response.data)
        # assertEqual: assertEquals is a deprecated alias.
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.data.decode('utf-8'),
                         md5('1234567'.encode()).hexdigest())
def save_video(content):
    """Save video bytes to ./mp4/<today>/<md5(content)>.mp4.

    The MD5 digest names the file so identical content maps to one path;
    the per-day directory is created on demand.
    """
    save_path = os.getcwd() + '/mp4/' + str(datetime.date.today())
    filepath = '{0}/{1}.{2}'.format(save_path, md5(content).hexdigest(), 'mp4')
    if not os.path.isdir(save_path):
        os.makedirs(save_path)
    with open(filepath, 'wb') as f:
        f.write(content)
    # removed the redundant f.close(): the with-statement already closed it
    print('保存视频文件成功------------------------')
def part_two(self, data: str) -> Union[int, str]:
    """Solve part two: lowest index whose md5 digest opens with three zero bytes."""
    for index in itertools.count():
        digest = md5(f'{data}{index}'.encode()).digest()
        # Three zero bytes == six leading zero hex characters.
        if digest[:3] == b'\x00\x00\x00':
            return index
    return 0
def part_one(self, data: str) -> Union[int, str]:
    """Solve part one: lowest index whose md5 digest has five leading zero hex digits."""
    limit = bytearray([0, 0, 16])
    for index in itertools.count():
        # First two bytes zero and third byte < 0x10 <=> five zero hex chars.
        if md5(f'{data}{index}'.encode('ascii')).digest()[0:3] < limit:
            return index
    return 0
def count(self):
    """Total row count for the queryset, cached for one hour.

    The cache key is the md5 of the rendered SQL, so identical queries
    share one cached count.
    """
    sql, params = self.object_list.query.sql_with_params()
    sql = sql % params
    cache_key = md5(sql.encode('utf-8')).hexdigest()
    rows_count = cache.get(cache_key)
    if rows_count is None:
        # `is None` rather than truthiness: a cached count of 0 is a valid
        # hit (the original re-ran the query on every call for empty sets).
        rows_count = self.object_list.count()
        cache.set(cache_key, rows_count, 60 * 60)  # expire after one hour
    return int(rows_count)
def md5_password(password):
    """Obfuscate *password*: MD5 it, then XOR each hex digit with 's'.

    NOTE: this is reversible obfuscation, not a secure password hash.
    """
    hexdigest = _md5.md5(password.encode()).hexdigest()
    return "".join(chr(ord(ch) ^ ord("s")) for ch in hexdigest)
def verify(challenge, phone):
    """Ask the Geetest API to verify *challenge* for *phone*; return the success flag."""
    seccode = md5((Geetest.key + challenge).encode()).hexdigest()
    query = {
        # NOTE(review): `id` here resolves to the builtin function unless a
        # module-level `id` exists — looks like it should be Geetest.id; confirm.
        "id": id,
        "seccode": seccode,
        "idType": "1",
        "idValue": md5(phone.encode()).hexdigest(),
        "challenge": challenge,
        "user_ip": "1.2.3.4",
        "timestamp": time.time(),
        "crash": "0",
    }
    print("query:", query)
    resp = requests.post(Geetest.API_URL, data=query)
    print("response:", resp)
    result = resp.content
    print("result:", )
    pprint(json.loads(result.decode()))
    # Fixed: `result` is bytes and has no .json() — decode via the Response
    # object (the original raised AttributeError here on every call).
    return resp.json()['success']
def get_smallest_adventcoin_num_sub(z, zeros, start, step):
    """Search n = start, start+step, ... for an md5 digest with a zero prefix.

    Returns the first n for which md5(z + str(n)) begins with `zeros`
    '0' hex characters.
    """
    prefix = "0" * zeros
    key = z.encode('utf8')
    n = start
    while True:
        digest = _md5.md5(b"%s%i" % (key, n)).hexdigest()
        if digest.startswith(prefix):
            return n
        n += step
def mine(data, difficulty, start=0):
    """Find the first integer i > start where md5(data + str(i)) begins with
    `difficulty` leading zero hex digits (AdventCoin-style mining)."""
    prefix = "0" * difficulty
    # Hash the fixed key once; every candidate clones this partial state
    # instead of re-hashing the key.
    base = md5(data.encode()).copy
    i = start
    while True:
        i += 1
        candidate = base()
        candidate.update(b"%d" % i)
        if candidate.hexdigest().startswith(prefix):
            return i
def now_playing_last_fm(artist, track):
    """Tell Last.fm the given track is now playing (track.updateNowPlaying)."""
    # api_sig: md5 of the alphabetically ordered name+value pairs plus secret.
    sig_source = ("api_key" + r.API_KEY +
                  "artist" + artist +
                  "method" + "track.updateNowPlaying" +
                  "sk" + r.SK +
                  "track" + track +
                  r.SECRET)
    update_now_playing_sig = md5(sig_source.encode('utf-8')).hexdigest()
    url = ("http://ws.audioscrobbler.com/2.0/?method=track.updateNowPlaying" +
           "&api_key=" + r.API_KEY +
           "&api_sig=" + update_now_playing_sig +
           "&artist=" + artist +
           "&format=json" +
           "&sk=" + r.SK +
           "&track=" + track)
    req = requests.post(url).text
    json_obj = json.loads(req)
def scrobble(artist, track):
    """Submit a Last.fm scrobble for (artist, track) at the current time."""
    # Current time as a Unix timestamp.  NOTE(review): time.time() is a float
    # and the original comment claimed it was cast to int — the API expects an
    # integer timestamp; confirm whether int(ts) is intended.
    ts = time.time()
    scrobbling_sig = md5(("api_key" + r.API_KEY +
                          "artist" + artist +
                          "method" + "track.scrobble" +
                          "sk" + r.SK +
                          "timestamp" + str(ts) +
                          "track" + track +
                          r.SECRET).encode('utf-8')).hexdigest()
    req = requests.post(
        "http://ws.audioscrobbler.com/2.0/?method=track.scrobble" +
        "&api_key=" + r.API_KEY +
        "&api_sig=" + scrobbling_sig +
        "&artist=" + artist +
        "&format=json" +
        "&sk=" + r.SK +
        # Fixed: the original URL contained "×tamp=" — "&timestamp" with the
        # "&times" prefix mangled into the × entity — so the timestamp
        # parameter never reached the API.
        "&timestamp=" + str(ts) +
        "&track=" + track).text
    json_obj = json.loads(req)
def md5_digest(string):
    """Return the hex MD5 digest of *string* (default-encoded to UTF-8)."""
    encoded = string.encode()
    return md5(encoded).hexdigest()
# Resolve the concrete C-implementation hash types so isinstance() checks can
# work across interpreter versions (the public hashlib wrappers hide these
# types behind factory functions).
if PY2:
    import _md5
    import _sha
    import _sha256
    import _sha512
    # Python 2 C modules expose constructors via new()/named factories.
    md5 = type(_md5.new())
    sha1 = type(_sha.new())
    sha224 = type(_sha256.sha224())
    sha256 = type(_sha256.sha256())
    sha384 = type(_sha512.sha384())
    sha512 = type(_sha512.sha512())
elif PY33:
    import _md5
    import _sha1
    import _sha256
    import _sha512
    # Python 3.3+ renamed the modules and constructors (_sha -> _sha1,
    # new() -> md5(), etc.).
    md5 = type(_md5.md5())
    sha1 = type(_sha1.sha1())
    sha224 = type(_sha256.sha224())
    sha256 = type(_sha256.sha256())
    sha384 = type(_sha512.sha384())
    sha512 = type(_sha512.sha512())
    # Generic HASH type from the OpenSSL-backed _hashlib module.
    HASH = type(_hashlib.new('md5'))