def _create_data(self, arr): """ Create data buffer from array. @param arr: input array. @type arr: numpy.ndarray @return: the created data buffer. @rtype: buffer """ from struct import pack from base64 import standard_b64encode from zlib import compress data = arr.tostring() if self.compressor == 'gz': osize = len(data) data = compress(data) zsize = len(data) if self.encoding == 'base64': data = standard_b64encode(data) if self.compressor == 'gz': size = pack('iiii', 1, osize, osize, zsize) else: size = pack('i', len(data)) if self.encoding == 'base64': size = standard_b64encode(size) return b''.join([size, data])
def authenticate(self, username, password):
    """SMTP AUTH LOGIN over a STARTTLS-upgraded connection.

    Returns True on success (and sets self.auth), False on a 535
    rejection, and None if any intermediate handshake step fails.
    """
    self.status = ""
    self.conn.write("HELO 127.0.0.1")
    self.status = self.parse(self.conn.read())
    if self.status == "250":
        self.conn.write("STARTTLS")
        self.status = self.parse(self.conn.read())
        if self.status == "220":
            # Upgrade the socket to TLS, then redo the HELO handshake.
            self.conn.SSL()
            self.status = ""
            self.conn.write("HELO 127.0.0.1")
            self.status = self.parse(self.conn.read())
            if self.status == "250":
                self.conn.write("AUTH LOGIN")
                self.status = self.parse(self.conn.read())
                # AUTH LOGIN: base64-encoded username, then password.
                self.conn.write(base64.standard_b64encode(username))
                self.status = self.parse(self.conn.read())
                self.conn.write(base64.standard_b64encode(password))
                self.status = self.parse(self.conn.read())
                # NOTE(review): any status other than 535 is treated as
                # success here — confirm that is intended.
                if self.status == "535":
                    return False
                else:
                    self.auth = True
                    return True
    return
def _make_response(self, nonce, salt, iteration_count):
    """Make a response for the first challenge from the server.

    :return: the response or a failure indicator.
    :returntype: `sasl.Response` or `sasl.Failure`
    """
    # RFC 5802: SaltedPassword := Hi(Normalize(password), salt, i)
    self._salted_password = self.Hi(self.Normalize(self.password), salt,
                                    iteration_count)
    self.password = None # not needed any more
    # "c=" carries the base64 GS2 header, plus the channel-binding data
    # when channel binding is in use.
    if self.channel_binding:
        channel_binding = b"c=" + standard_b64encode(self._gs2_header
                                                     + self._cb_data)
    else:
        channel_binding = b"c=" + standard_b64encode(self._gs2_header)
    # pylint: disable=C0103
    client_final_message_without_proof = (channel_binding + b",r=" + nonce)
    client_key = self.HMAC(self._salted_password, b"Client Key")
    stored_key = self.H(client_key)
    # AuthMessage := client-first-bare "," server-first ","
    #                client-final-without-proof
    auth_message = (self._client_first_message_bare + b"," +
                    self._server_first_message + b"," +
                    client_final_message_without_proof)
    self._auth_message = auth_message
    client_signature = self.HMAC(stored_key, auth_message)
    # ClientProof := ClientKey XOR ClientSignature
    client_proof = self.XOR(client_key, client_signature)
    proof = b"p=" + standard_b64encode(client_proof)
    client_final_message = (client_final_message_without_proof + b"," + proof)
    return Response(client_final_message)
def authorized_sfk(appid, token):
    """ Request Userbase by API """
    import base64
    import requests
    # authorize scp in userbase
    # NOTE(review): on Python 3 standard_b64encode returns bytes, so the
    # "%s" interpolation below would embed "b'...'" — presumably Python 2.
    b64_uid = base64.standard_b64encode(CONFIG['scp_userbase']['uid'])
    b64_token = base64.standard_b64encode(CONFIG['scp_userbase']['token'])
    headers = {
        'Authorization' : 'Nptv %s:%s' % (b64_uid, b64_token),
        'Content-Type' : 'application/json'
    }
    # Fall back to the staging host when no host is configured.
    userbase_host = CONFIG['scp_userbase']['host'] if CONFIG['scp_userbase']['host'] else 'userbase.staging.nptv.home'
    url = 'http://%s/v2/applications/%s/auth.json' % (userbase_host, appid)
    data = json.dumps({'token' : token})
    try:
        resp = requests.post(url, data=data, headers=headers)
    except Exception as e:
        LOGGER.error('Authorization request fails: %s; appid %s; token %s' % (str(e), appid, token))
        return False
    # 204 No Content signals a successful authorization.
    if int(resp.status_code) == 204:
        LOGGER.info('Authorization passed')
        return True
    else:
        LOGGER.info('Authorization rejected: appid %s; token %s' % (appid, token))
        return False
def test_int8array(self):
    """Round-trip char/uint8 array messages through populate_instance,
    both as raw byte-strings and as base64 input (Python 2 semantics)."""
    def test_int8_msg(rostype, data):
        # Populate a fresh message instance from a dict, validate it, and
        # return its data field.
        msg = {"data": data}
        inst = ros_loader.get_message_instance(rostype)
        c.populate_instance(msg, inst)
        self.validate_instance(inst)
        return inst.data
    for msgtype in ["TestChar", "TestUInt8"]:
        rostype = "rosbridge_library/" + msgtype
        int8s = range(0, 256)
        ret = test_int8_msg(rostype, int8s)
        self.assertEqual(ret, str(bytearray(int8s)))
        # base64-encoded input must decode back to the same byte-string.
        str_int8s = str(bytearray(int8s))
        b64str_int8s = standard_b64encode(str_int8s)
        ret = test_int8_msg(rostype, b64str_int8s)
        self.assertEqual(ret, str_int8s)
    for msgtype in ["TestUInt8FixedSizeArray16"]:
        rostype = "rosbridge_library/" + msgtype
        int8s = range(0, 16)
        ret = test_int8_msg(rostype, int8s)
        self.assertEqual(ret, str(bytearray(int8s)))
        str_int8s = str(bytearray(int8s))
        b64str_int8s = standard_b64encode(str_int8s)
        ret = test_int8_msg(rostype, b64str_int8s)
        self.assertEqual(ret, str_int8s)
def contact_admin():
    """
    Contact Admin - This can allow anonymous users to post spam, so for
    them, I'll add some "poor man's captcha"
    """
    view_info = {}
    view_info['errors'] = []
    # Generate a throwaway challenge; its base64 form is embedded in the
    # page (req.c) and compared against the user's response.
    captcha = stackhelper.gen_pwd()
    view_info['anon_captcha'] = captcha
    view_info['anon_captcha_base64'] = base64.standard_b64encode(captcha)
    req = request.vars
    if req.form_submitted:
        if req.send_b:
            if req.subject and req.message:
                # Authenticated users skip the captcha check.
                if auth_user.is_auth() or\
                   (not auth_user.is_auth() and\
                    base64.standard_b64encode(req.captcha_response) == req.c):
                    db.admin_messages.insert(
                        auth_user_id=auth_user.get_user_id(),
                        subject=parse_content(req.subject),
                        message=parse_content(req.message),
                        creation_date=request.now,
                        read_flag=False)
                    redirect(URL(r=request, c='default', f='index'))
                else:
                    view_info['errors'].append(
                        'Invalid humanity challenge response, please try '
                        'again')
                    return dict(request=request, view_info=view_info)
            else:
                view_info['errors'].append(
                    'Both Subject and Message are required fields')
                return dict(request=request, view_info=view_info)
        else:
            # Cancel button: back to the index.
            redirect(URL(r=request, c='default', f='index'))
    else:
        return dict(request=request, view_info=view_info)
def user_fs_event(self,event):
    """Log a filesystem event, base64-encoding the path(s) involved."""
    parts = ["SRC=%s" % base64.standard_b64encode(event.src_path_rel)]
    if event.event_type == 'moved':
        # A move carries a destination path as well.
        parts.append("DEST=%s" % base64.standard_b64encode(event.dest_path_rel))
    self.log.info("EVENT=%s %s" % (event.event_type, " ".join(parts)))
def requestAsString(self):
    """Fetch self.path from self.server:self.port over plain HTTP.

    Returns a dict with 'code' (0 on success, HTTP status or 404 on
    failure) plus either 'responce' (body; key spelling kept for
    backward compatibility) or 'error' (failure reason).
    """
    output = {'code': 0}
    timeout = 60
    con = httplib.HTTPConnection(self.server, self.port, True, timeout)
    headers = {"User-Agent": "vmcatcher"}
    if (self.username != None) and (self.password != None):
        # Only build the Basic credentials when both parts are supplied
        # (the original computed this unconditionally — dead work).
        auth = base64.standard_b64encode("%s:%s" % (self.username, self.password))
        headers["Authorization"] = "Basic %s" % (auth)
    try:
        con.request("GET", self.path, headers=headers)
    except socket.gaierror as exp:
        output['error'] = exp.strerror
        output['code'] = 404
        # Fixed: log code and error message (the original logged the code twice).
        log.error("Failed to connect to %s:%s error %s:%s" % (self.server, self.port, output['code'], output['error']))
        return output
    except socket.error as exp:
        output['error'] = exp.strerror
        output['code'] = 404
        log.error("Failed to connect to %s:%s error %s:%s" % (self.server, self.port, output['code'], output['error']))
        return output
    responce = con.getresponse()
    httpstatus = responce.status
    if httpstatus == 200:
        output['responce'] = responce.read()
    else:
        output['error'] = responce.reason
        output['code'] = httpstatus
    return output
def _query_api(self, clip):
    """Send *clip* to the IBM Watson speech-to-text API and return the
    parsed JSON result (requested with word timestamps).

    Adapted from SpeechRecognition source code, modified to get text onsets.
    """
    # The API wants at least 16 kHz / 16-bit FLAC; only convert when below.
    flac_data = clip.get_flac_data(
        convert_rate = None if clip.sample_rate >= 16000 else 16000,
        convert_width = None if clip.sample_width >= 2 else 2
    )
    model = "{0}_BroadbandModel".format("en-US")
    url = "https://stream.watsonplatform.net/speech-to-text/api/v1/recognize?{0}".format(urlencode({
        "profanity_filter": "false",
        "continuous": "true",
        "model": model,
        "timestamps": "true",
    }))
    request = Request(url, data = flac_data, headers = {
        "Content-Type": "audio/x-flac",
        "X-Watson-Learning-Opt-Out": "true",
    })
    # str.encode exists on Python 3 / unicode-literal builds only.
    if hasattr("", "encode"): # Python 2.6 compatibility
        authorization_value = base64.standard_b64encode("{0}:{1}".format(self.username, self.password).encode("utf-8")).decode("utf-8")
    else:
        authorization_value = base64.standard_b64encode("{0}:{1}".format(self.username, self.password))
    request.add_header("Authorization", "Basic {0}".format(authorization_value))
    try:
        response = urlopen(request, timeout=None)
    except HTTPError as e:
        raise Exception("recognition request failed: {0}".format(getattr(e, "reason", "status {0}".format(e.code))))
    except URLError as e:
        raise Exception("recognition connection failed: {0}".format(e.reason))
    response_text = response.read().decode("utf-8")
    result = json.loads(response_text)
    return result
def fetch(self, parser=None):
    """Download self.orig_uri.url, sending per-URL Basic auth taken from
    the engine's URL config when available.

    @return: (None, body) tuple; body is None if opening failed.
    """
    file = None; buffer = None
    try:
        # first get login, password
        if parser and parser.engine:
            urlopts = parser.engine.urlcfgfile.urlopts(self.orig_uri.url) or {}
        else:
            urlopts = {}
        login = urlopts.get('login', '') #.encode('utf8')
        password = urlopts.get('password', '') #.encode('utf8')
        # Create request instance
        request = core.UrlRequest(self.orig_uri.url)
        auth = '%s:%s'%(login, password)
        try:
            # Python 2 path: b64encode accepts str directly.
            auth = base64.standard_b64encode(auth)
        except TypeError:
            # Python 3 path: encode to bytes first, decode the result.
            auth = base64.standard_b64encode(auth.encode()).decode()
        request.add_header("Authorization", "Basic %s"%auth)
        file = core.urlopen(request)
        buffer = file.read()
    finally:
        # Best-effort close; ignore secondary errors during cleanup.
        if file:
            try:
                file.close()
            except:
                pass
    return (None, buffer)
def to_dict(self):
    """Serialise this payload: binary fields base64-encoded, plus
    version and cipher-name metadata."""
    b64 = base64.standard_b64encode
    result = {"cipher": "aes-256-cbc"}
    result["encrypted_data"] = b64(self.encrypt())
    result["iv"] = b64(self.iv)
    result["version"] = self.VERSION
    return result
def user_view(request): # passed
    """
    get the user information which included name, infor, Pic.
    Params:
    -----------
    GET # uid: int
    Return:
    -------
    user_json: json |
    example: { uid:1, username:"...", intro:"xxx", avatar:<base64>,
    city:"xxx", contact:"xxx",
    pic_list:[{pid:122, time:20140503, galname:"xx", content:<base64>}, ...]}
    """
    if request.method=='GET':
        user_list=User.objects.filter(uid=int(request.GET['uid']))
        if len(user_list)==0:
            return HttpResponse(dumps({'code':'user not exist'}))
        user=user_list[0]
        # Each picture's binary content is base64-embedded in the JSON.
        json_pic_list=[{'pid':pic.pid,'time':pic.time,
                        'galname':pic.gal.galname,
                        'content':standard_b64encode(pic.content.read())}
                       for pic in Pic.objects.filter(user=user)]
        return HttpResponse(dumps({'uid':user.uid,'username':user.username,'intro':user.intro,
                                   'avatar':standard_b64encode(user.avatar.read()),
                                   'city':user.city,'contact':user.contact,'pic_list':json_pic_list}))
    else:
        # Only GET is supported.
        return HttpResponse(dumps({'code':'undefined'}))
def ldappassword(secret, schema='SHA', salt=None):
    '''Return password hash to be used as userPassword value'''
    # Map LDAP storage schemes to hashlib digest names; the S* variants
    # are the salted forms.
    schema_to_digest = {
        'SHA': 'sha1',
        'SSHA': 'sha1',
        'MD5': 'md5',
        'SMD5': 'md5',
    }
    if schema not in schema_to_digest:
        raise errors.AnsibleFilterError(
            'Unknown/unsupported storage schema: {}'.format(schema))
    digest = hashlib.new(schema_to_digest[schema])
    digest.update(secret.encode())
    if schema in ('SSHA', 'SMD5'):
        # Salted variant: hash secret+salt and append the salt to the value.
        if salt is None:
            salt = base64.standard_b64encode(os.urandom(4))
        digest.update(salt)
    else:
        salt = ''.encode()
    encoded = base64.standard_b64encode(digest.digest() + salt)
    return '{{{}}}{}'.format(schema, encoded.decode())
def _signatureGenerator(self, path, httpMethod, query, payload, dTime):
    """ Signature Generator, used to authenticate user in portal """
    # NOTE(review): Python 2 style — sha256.update() is fed str objects;
    # this would raise TypeError on Python 3.
    m = hashlib.sha256()
    queryStr = ""
    if len(query):
        # Build "k=v&k=v" and drop the trailing '&'.
        for k,v in query.items():
            queryStr += k + "=" + str(v) + "&"
        queryStr = queryStr[0:-1]
    m.update(payload)
    payloadHash = base64.standard_b64encode(m.digest())
    # Signature input: credentials, method, host, path, query, timestamp
    # and payload hash, newline-separated, then SHA-256 + base64.
    toBeHashed = (self.username+"\n"+
                  self.password+ "\n"+
                  httpMethod+"\n"+
                  self.host+"\n"+
                  path+"\n"+
                  queryStr+"\n"+
                  dTime+"\n"+
                  payloadHash)
    m2 = hashlib.sha256()
    m2.update(toBeHashed)
    return base64.standard_b64encode(m2.digest())
def generalize(self, inst, encoding='jpg', width=160, height=120, qual=30):
    """Recursively convert a ROS message into JSON-serialisable data,
    turning Image messages into resized data-URI dicts.

    NOTE(review): Python 2 code (xrange, StringIO, Image.fromstring, old
    cv API).
    """
    if isinstance(inst, self.imgCls):
        if (len(inst.data) == 0):
            return {}
        cvImg = self.bridge.imgmsg_to_cv(inst,"rgb8")
        size = cv.GetSize(cvImg)
        img = Image.fromstring("RGB", size, cvImg.tostring())
        # Resize only when the requested size differs from the source.
        if width != size[0] or height != size[1]:
            img = img.resize((width,height),Image.NEAREST)
        buf = StringIO.StringIO()
        if encoding == 'png':
            img.save(buf, 'PNG', optimize=1)
        else:
            img.save(buf, 'JPEG', quality=qual)
        data = buf.getvalue()
        buf.close()
        # Embed the image inline as a base64 data URI.
        return {'uri':'data:image/' + encoding + ';base64,' + standard_b64encode(data)}
    elif hasattr(inst,'__slots__'):
        # Generic ROS message: recurse over each slot.
        obj = {}
        for i in xrange(len(inst.__slots__)):
            field = inst.__slots__[i]
            # uint8[] fields carry raw bytes; base64 them before recursing.
            if (hasattr(inst,'_slot_types') and inst._slot_types[i] == 'uint8[]'):
                obj[field] = self.generalize(standard_b64encode(getattr(inst,field)), encoding, width, height, qual)
            else:
                obj[field] = self.generalize(getattr(inst,field), encoding, width, height, qual)
        return obj
    elif isinstance(inst,tuple) or isinstance(inst,list):
        return [self.generalize(x, encoding, width, height, qual) for x in inst]
    else:
        # Plain scalar: pass through.
        return inst
def get(self, *args):
    """Serve one page of image metadata for ?class_name=...&page=...,
    cached per (class_name, width) in a redis hash; the JSON payload is
    base64-wrapped between uuid fragments (Python 2 handler).
    """
    class_name = self.get_query_argument("class_name")
    print (class_name)
    width = Img.WIDTH
    uuid_str = gen_uuid_32()
    key = class_name + ":" + str(width)
    try:
        page = int(self.get_query_argument("page"))
    except:
        page = 1
    try:
        res_str = self._redis.hget(key, page)
    except:
        res_str = ""
    if res_str:
        # Cache hit: obfuscate with uuid prefix/suffix and return.
        res_b64 = base64.standard_b64encode(res_str)
        encrypt_str = uuid_str[:13] + res_b64 + uuid_str[:22]
        print "from redis", page
        self.set_header("Content-Type", "text/plain")
        self.write(encrypt_str)
    else:
        l = LeanCloudApi(class_name)
        limit_num = Img.LIMIT_NUM
        obj_list = l.get_skip_obj_list(page - 1, limit_num=limit_num)
        result = []
        for i in obj_list:
            img_ID = i.get("ID")
            img_url = i.get("File").url
            img_href = img_url.split("/")[-1].replace(".", "_")
            # GIFs get a first-frame thumbnail; everything else a plain one.
            if "gif" in img_href.lower():
                img_url = img_url + "?vframe/jpg/offset/1|imageMogr2/thumbnail/%sx/interlace/1" % width
            else:
                img_url = img_url + "?imageMogr2/thumbnail/%sx/interlace/1" % width
            ori_width = i.get("width")
            ori_height = i.get("height")
            try:
                # Scale height to keep the aspect ratio at the fixed width.
                height = width * ori_height / ori_width
                each_res = {"href": img_href, "id": img_ID, "image": img_url, "width": width, "height": height}
            except TypeError:
                # Missing dimensions: fall back to a random default entry.
                each_res = random.choice(default_res)
            result.append(each_res)
        res = {"total": limit_num, "result": result}
        res_str = json_encode(res)
        # Best-effort cache write.
        try:
            self._redis.hset(key, page, res_str)
        except:
            pass
        res_b64 = base64.standard_b64encode(res_str)
        encrypt_str = uuid_str[:13] + res_b64 + uuid_str[:22]
        self.set_header("Content-Type", "text/plain")
        self.write(encrypt_str)
def tonzbget(self, args, hname):
    """Push an NZB (fetched via the warp proxy) to a configured NZBGet
    server over XML-RPC (Python 2).

    Returns 1 on success (or when no NZBGet URL is configured), 0 on
    missing data or any download/connection failure.
    """
    if ('data' not in args):
        return 0
    if ('nzbget_url' in self.cgen):
        if (len(self.cgen['nzbget_url'])):
            # Credentials are embedded in the XML-RPC URL.
            rq_url = 'http://' + self.cgen['nzbget_user'] + ':' + self.cgen['nzbget_pwd'] + '@' + self.cgen[
                'nzbget_url'] + '/xmlrpc'
            print rq_url
            try:
                server = ServerProxy(rq_url)
            except Exception as e:
                print 'Error contacting NZBGET ' + str(e)
                return 0
            try:
                myrq = args['data'].replace("warp?", "")
                pulrlparse = dict(urlparse.parse_qsl(myrq))
                if ('m' in args):
                    pulrlparse['m'] = args['m']
                # ~ print pulrlparse
                res = self.wrp.beam(pulrlparse)
                # ~ print res.headers
                if ('Location' in res.headers):
                    # ~ for redirect
                    log.info('tonzbget: Warp is treated as 302 redirector')
                    geturl_rq = res.headers['Location']
                    r = requests.get(geturl_rq, verify=False)
                    nzbname = 'nzbfromNZBmegasearcH'
                    # Prefer the filename from Content-Disposition when present.
                    if ('content-disposition' in r.headers):
                        rheaders = r.headers['content-disposition']
                        idxsfind = rheaders.find('=')
                        if (idxsfind != -1):
                            nzbname = rheaders[idxsfind + 1:len(rheaders)].replace('"', '')
                    # NZBGet's append() expects the NZB body base64-encoded.
                    nzbcontent64 = standard_b64encode(r.content)
                    server.append(nzbname, '', False, nzbcontent64)
                else:
                    # ~ for downloaded
                    log.info('tonzbget: Warp gets full content')
                    nzbname = 'nzbfromNZBmegasearcH'
                    if ('content-disposition' in res.headers):
                        rheaders = res.headers['content-disposition']
                        idxsfind = rheaders.find('=')
                        if (idxsfind != -1):
                            nzbname = rheaders[idxsfind + 1:len(rheaders)].replace('"', '')
                    # ~ print res.data
                    nzbcontent64 = standard_b64encode(res.data)
                    server.append(nzbname, '', False, nzbcontent64)
            except Exception as e:
                # ~ print 'Error connecting server or downloading nzb '+str(e)
                log.info('Error connecting server or downloading nzb: ' + str(e))
                return 0
    return 1
def generateUtdid():
    """Generate a UTDID-style device token: base64 of the packed fields
    plus an HMAC-derived checksum."""
    # Timestamp shifted by -8h (UTC+8 offset baked into the protocol).
    ts = int(time.time()) - 8 * 3600
    rand31 = random.randrange(1 << 31)
    pseudo_imei = hashCode(str(rand31))
    payload = struct.pack('!2i2bi', ts, rand31, 3, 0, pseudo_imei)
    secret = b'd6fc3a4a06adbde89223bvefedc24fecde188aaa9161'
    # Checksum = hashCode of the base64 HMAC-SHA1 over the payload.
    mac = hmac.new(secret, payload, hashlib.sha1).digest()
    checksum = struct.pack('!i', hashCode(base64.standard_b64encode(mac)))
    return base64.standard_b64encode(payload + checksum)
def authenticate_scram_sha1(self, database_name, username, password):
    """Run the SCRAM-SHA-1 SASL conversation against MongoDB.

    Generator-based coroutine (``yield``s each command).  Raises
    MongoAuthenticationError on any protocol or verification failure.
    NOTE(review): Python 2 str/bytes semantics throughout.
    """
    # Totally stolen from pymongo.auth
    # '=' and ',' must be escaped in the SCRAM username.
    user = username.encode("utf-8").replace('=', "=3D").replace(',', "=2C")
    nonce = base64.standard_b64encode(str(SystemRandom().random())[2:].encode("utf-8"))
    first_bare = "n={0},r={1}".format(user, nonce)
    cmd = SON([("saslStart", 1),
               ("mechanism", "SCRAM-SHA-1"),
               ("autoAuthorize", 1),
               ("payload", Binary("n,," + first_bare))])
    result = yield self.__run_command(database_name, cmd)
    server_first = result["payload"]
    parsed = auth._parse_scram_response(server_first)
    iterations = int(parsed['i'])
    salt = parsed['s']
    rnonce = parsed['r']
    # The server nonce must extend the client nonce.
    if not rnonce.startswith(nonce):
        raise MongoAuthenticationError("Server returned an invalid nonce.")
    without_proof = "c=biws,r=" + rnonce
    salted_pass = auth._hi(auth._password_digest(username, password).encode("utf-8"),
                           base64.standard_b64decode(salt),
                           iterations)
    client_key = hmac.HMAC(salted_pass, "Client Key", sha1).digest()
    stored_key = sha1(client_key).digest()
    auth_msg = ','.join((first_bare, server_first, without_proof))
    client_sig = hmac.HMAC(stored_key, auth_msg, sha1).digest()
    # ClientProof := ClientKey XOR ClientSignature
    client_proof = "p=" + base64.standard_b64encode(auth._xor(client_key, client_sig))
    client_final = ','.join((without_proof, client_proof))
    server_key = hmac.HMAC(salted_pass, "Server Key", sha1).digest()
    server_sig = base64.standard_b64encode(
        hmac.HMAC(server_key, auth_msg, sha1).digest())
    cmd = SON([("saslContinue", 1),
               ("conversationId", result["conversationId"]),
               ("payload", Binary(client_final))])
    result = yield self.__run_command(database_name, cmd)
    if not result["ok"]:
        raise MongoAuthenticationError("Authentication failed")
    parsed = auth._parse_scram_response(result["payload"])
    # Verify the server's signature so the server is authenticated to us.
    if parsed['v'] != server_sig:
        raise MongoAuthenticationError("Server returned an invalid signature.")
    # Depending on how it's configured, Cyrus SASL (which the server uses)
    # requires a third empty challenge.
    if not result["done"]:
        cmd = SON([("saslContinue", 1),
                   ("conversationId", result["conversationId"]),
                   ("payload", Binary(''))])
        result = yield self.__run_command(database_name, cmd)
        if not result["done"]:
            raise MongoAuthenticationError("SASL conversation failed to complete.")
def post(self):
    # NOTE(review): this block appears corrupted — the "******" token is a
    # secret-redaction artifact that replaced part of the original filter
    # expression and condition; preserved as-is pending recovery of the
    # original source.  Intent (from the remaining code): return the
    # user's salt when the user exists, otherwise a random 16-byte value
    # so that probing for usernames is not trivially possible.
    username = self.request.get("username")
    query = User.all()
    query.filter("username ="******"new")) and user:
        self.response.out.write(base64.standard_b64encode(user.salt))
    else:
        self.response.out.write(base64.standard_b64encode(os.urandom(16)))
def auth(environ, start_response):
    """WSGI endpoint: verify a client's HMAC over a previously-issued
    challenge and, on success, issue a 30-second session key."""
    # Retrieve values of POST fields
    form = cgi.FieldStorage(fp=environ['wsgi.input'], environ=environ)
    challenge_b64 = form['challenge'].value
    challenge = base64.standard_b64decode(challenge_b64)
    mac = form['mac'].value
    db = sqlite3.connect(params.base_dir+ 'db/authserver-app.sqlite')
    db.text_factory = str
    cursor = db.cursor()
    # Look the user up by the challenge previously handed out.
    cursor.execute("""SELECT userid, challenge, phone_serial_number FROM users WHERE challenge=?""", (sqlite3.Binary(challenge),))
    response = cursor.fetchone()
    userid = response[0]
    serial_number = response[2]
    # Recompute the expected HMAC-SHA256 keyed on the phone serial number.
    hmac_server = hmac.new(serial_number, msg=challenge, digestmod=hashlib.sha256).digest()
    hmac_server_b64 = base64.standard_b64encode(hmac_server)
    hmac_server_b64 = hmac_server_b64.strip()
    mac = mac.strip()
    # NOTE(review): non-constant-time comparison of MAC values;
    # hmac.compare_digest would avoid a timing side channel.
    if hmac_server_b64 == mac:
        server_respone = "ok"
        # Generate session_key
        session_key = os.urandom(64)
        session_key_b64 = base64.standard_b64encode(session_key)
        current_time = datetime.datetime.now()
        available_time = datetime.timedelta(seconds=30)
        session_key_exp = current_time + available_time
        # Store session_key and session_key_exp in db
        cursor = db.cursor()
        cursor.execute("""UPDATE users SET session_key=?, session_key_exp=? WHERE userid=?""", (sqlite3.Binary(session_key), session_key_exp, userid))
        db.commit()
    else:
        server_respone = "fail"
    # NOTE(review): the response echoes the expected MAC back to the
    # client — confirm this debugging aid is intended in production.
    response_body = "<html>"+ hmac_server_b64+ "<br />"
    response_body += mac.strip() + "<br />"+ server_respone +"</html>"
    status = '200 OK'
    response_headers = [
        ('Content-Type', 'text/html'),
        ('Content-Length', str(len(response_body)))
    ]
    start_response(status, response_headers)
    return [response_body]
def put_ssec(sse_c_key, sse_c_algo, **kwargs):
    """Upload an object with SSE-C (customer-supplied key) headers.

    NOTE(review): Python 2 only — relies on str.decode("hex").
    """
    sse_c_key_b64 = standard_b64encode(sse_c_key)
    # MD5 of the raw key, base64-encoded, as the SSE-C API requires;
    # hexdigest().decode("hex") round-trips back to raw digest bytes.
    sse_c_key_md5_b64 = standard_b64encode(
        hashlib.md5(sse_c_key).hexdigest().decode("hex"))
    session = create_session()
    obj, res = session.meta.client.put_object(
        SSECustomerKey=sse_c_key_b64,
        SSECustomerKeyMD5=sse_c_key_md5_b64,
        SSECustomerAlgorithm=sse_c_algo,
        **kwargs)
def toDict(self):
    """Export the crypto session keys as a dict of base64 values."""
    self.kodi_helper.log(msg='Provide RSA Keys to dict')
    encode = base64.standard_b64encode
    # The RSA key is exported (DER) before being encoded.
    return {
        "encryption_key": encode(self.encryption_key),
        'sign_key': encode(self.sign_key),
        'rsa_key': encode(self.rsa_key.exportKey()),
    }
def b64encode_file(filename, data):
    """Pack a filename and blob into one ASCII token of the form
    "filenameb64:<b64>;datab64:<b64>".

    NOTE(review): Python 2 only (`unicode` type).
    """
    # encode filename and data using the standard alphabet, so that ";" can be
    # used as delimiter.
    if isinstance(filename, unicode):
        filename = filename.encode('utf-8')
    # A None/empty filename encodes as the empty string.
    filenameb64 = base64.standard_b64encode(filename or '')
    datab64 = base64.standard_b64encode(data)
    filename = "filenameb64:%s;datab64:%s" % (
        filenameb64, datab64
    )
    return filename.encode('ascii')
def get_posts(keyword):
    """Return rendered posts matching *keyword*, with a 1-hour memcache
    layer keyed on the base64 of the keyword."""
    cache = ac_core.get_memc()
    cache_key = "search_posts_" + base64.standard_b64encode(keyword)
    cached = cache.get(cache_key)
    if cached:
        return print_posts(cached)
    found = find_posts(keyword)
    if not found:
        return 'NO RELATED POSTS!'
    cache.set(cache_key, found, 3600)
    return print_posts(found)
def encrypt( self, raw ):
    """
    Returns the encrypted value encoded in b64 with salt, iv and cipher
    text separated by #.
    """
    raw = pad(raw)
    # Fresh random salt and IV for every encryption.
    salt = Random.new().read(SALT_LENGTH)
    iv = Random.new().read(BS)
    # Derive the AES key from the passphrase via PBKDF2.
    key = pbkdf2(self.passphrase, salt, ITERATION_COUNT, keylen=KEY_LENGTH)
    cipher = AES.new( key, AES.MODE_CBC, iv )
    cipher_text = cipher.encrypt( raw )
    # NOTE(review): concatenating b64 output with '\n' implies Python 2
    # (str + str); on Python 3 this would be bytes + str.
    return ( '#'.join([base64.standard_b64encode(salt)+'\n',
                       base64.standard_b64encode(iv)+'\n',
                       base64.standard_b64encode(cipher_text)+'\n']) )
def recognize_ibm(self, audio_data, username, password, language = "en-US", show_all = False):
    """
    Performs speech recognition on ``audio_data`` (an ``AudioData`` instance), using the IBM Speech to Text API.

    The IBM Speech to Text username and password are specified by ``username`` and ``password``; these require a Bluemix account with a "Speech To Text" Watson service instance.  Usernames are strings of the form XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX; passwords are mixed-case alphanumeric strings.

    The recognition language is determined by ``language``, an RFC5646 language tag with a dialect like ``"en-US"`` or ``"zh-CN"``, defaulting to US English.  Supported values are listed under the ``model`` parameter of the audio recognition API documentation, in the form ``LANGUAGE_BroadbandModel``.

    Returns the most likely transcription if ``show_all`` is false (the default).  Otherwise, returns the raw API response as a JSON dictionary.

    Raises a ``speech_recognition.UnknownValueError`` exception if the speech is unintelligible.  Raises a ``speech_recognition.RequestError`` exception if the speech recognition operation failed, if the key isn't valid, or if there is no internet connection.
    """
    assert isinstance(audio_data, AudioData), "Data must be audio data"
    assert isinstance(username, str), "`username` must be a string"
    assert isinstance(password, str), "`password` must be a string"
    flac_data = audio_data.get_flac_data(
        convert_rate = None if audio_data.sample_rate >= 16000 else 16000, # audio samples should be at least 16 kHz
        convert_width = None if audio_data.sample_width >= 2 else 2 # audio samples should be at least 16-bit
    )
    model = "{0}_BroadbandModel".format(language)
    url = "https://stream.watsonplatform.net/speech-to-text/api/v1/recognize?{0}".format(urlencode({
        "profanity_filter": "false",
        "continuous": "true",
        "model": model,
    }))
    request = Request(url, data = flac_data, headers = {"Content-Type": "audio/x-flac"})
    # str.encode exists on Python 3 / unicode builds only.
    if hasattr("", "encode"):
        authorization_value = base64.standard_b64encode("{0}:{1}".format(username, password).encode("utf-8")).decode("utf-8")
    else:
        authorization_value = base64.standard_b64encode("{0}:{1}".format(username, password))
    request.add_header("Authorization", "Basic {0}".format(authorization_value))
    try:
        response = urlopen(request)
    except HTTPError as e:
        raise RequestError("recognition request failed: {0}".format(getattr(e, "reason", "status {0}".format(e.code)))) # use getattr to be compatible with Python 2.6
    except URLError as e:
        raise RequestError("recognition connection failed: {0}".format(e.reason))
    response_text = response.read().decode("utf-8")
    result = json.loads(response_text)
    # return results
    if show_all:
        return result
    if "results" not in result or len(result["results"]) < 1 or "alternatives" not in result["results"][0]:
        raise UnknownValueError()
    # Join the best alternative of each utterance into one transcript.
    transcription = []
    for utterance in result["results"]:
        if "alternatives" not in utterance:
            raise UnknownValueError()
        for hypothesis in utterance["alternatives"]:
            if "transcript" in hypothesis:
                transcription.append(hypothesis["transcript"])
    return "\n".join(transcription)
def checkPassword(self, encoded_password, password):
    """Compare *password* against a stored password hash, accepting both
    the current format and legacy hex-encoded variants.

    NOTE(review): Python 2 (`unicode` type).
    """
    if isinstance(encoded_password, unicode):
        encoded_password = encoded_password.encode('ascii')
    if self.match(encoded_password):
        # Strip the "{SCHEME}" prefix.
        encoded = encoded_password[encoded_password.find(b'}') + 1:]
        if len(encoded) > 28:
            # Backwards compatible, hexencoded sha1 and bogus salt
            encoded = standard_b64encode(a2b_hex(encoded[-40:]))
        # [5:] drops the "{SHA}"-style prefix from the freshly encoded value.
        return encoded == self.encodePassword(password)[5:]
    # Backwards compatible, hexdigest and no prefix
    encoded_password = standard_b64encode(a2b_hex(encoded_password[-40:]))
    return encoded_password == self.encodePassword(password)[5:]
def to_dict(self):
    """Serialise this caveat into its wire-format dict (cid/vid/cl)."""
    try:
        cid = convert_to_string(self.caveat_id)
    except UnicodeEncodeError:
        # Fall back to base64 when the id is not representable as text.
        cid = convert_to_string(standard_b64encode(self.caveat_id_bytes))
    vid = None
    if self.verification_key_id:
        vid = standard_b64encode(self.verification_key_id)
    return {'cid': cid, 'vid': vid, 'cl': self.location}
def get(self, class_name=None):
    """Serve one page of image metadata for this handler's class name,
    cached per (class, width) in a redis hash; the JSON payload is
    base64-wrapped between uuid fragments."""
    width = Img.WIDTH
    uuid_str = gen_uuid_32()
    key = self._class_name + ':' + str(width)
    try:
        page = int(self.get_query_argument('page'))
    except:
        page = 1
    try:
        res_str = self._redis.hget(key, page)
    except:
        res_str = ''
    if res_str:
        # Cache hit: obfuscate with uuid prefix/suffix and return.
        res_b64 = base64.standard_b64encode(res_str)
        encrypt_str = uuid_str[:13] + res_b64 + uuid_str[:22]
        self.write(encrypt_str)
    else:
        l = self._leancloud_api
        limit_num = Img.LIMIT_NUM
        obj_list = l.get_skip_obj_list(page-1, limit_num=limit_num)
        result = []
        for i in obj_list:
            img_ID = i.get('ID')
            img_url = i.get('File').url
            img_url = img_url + '?imageMogr2/thumbnail/%sx' % width
            ori_width = i.get('width')
            ori_height = i.get('height')
            try:
                # Scale height to keep the aspect ratio at the fixed width.
                height = width*ori_height/ori_width
                each_res = {'id': img_ID, 'image': img_url, 'width': width, 'height': height}
            except TypeError:
                # Missing dimensions: fall back to a random default entry.
                each_res = random.choice(default_res)
            result.append(each_res)
        res = {'total': limit_num, 'result': result}
        res_str = json_encode(res)
        # Best-effort cache write.
        try:
            self._redis.hset(key, page, res_str)
        except:
            pass
        res_b64 = base64.standard_b64encode(res_str)
        encrypt_str = uuid_str[:13] + res_b64 + uuid_str[:22]
        self.set_header("Content-Type", "text/plain")
        self.write(encrypt_str)
def test_from_api_repr_bigtable(self):
    """ExternalConfig round-trip for a BIGTABLE source: from_api_repr
    must populate BigtableOptions (families/columns, base64 qualifier)
    and to_api_repr must reproduce the resource exactly."""
    qualifier_encoded = base64.standard_b64encode(b'q').decode('ascii')
    resource = _copy_and_update(self.BASE_RESOURCE, {
        'sourceFormat': 'BIGTABLE',
        'bigtableOptions': {
            'ignoreUnspecifiedColumnFamilies': True,
            'readRowkeyAsString': False,
            'columnFamilies': [
                {
                    'familyId': 'familyId',
                    'type': 'type',
                    'encoding': 'encoding',
                    'columns': [
                        {
                            'qualifierString': 'q',
                            'fieldName': 'fieldName1',
                            'type': 'type1',
                            'encoding': 'encoding1',
                            'onlyReadLatest': True,
                        },
                        {
                            'qualifierEncoded': qualifier_encoded,
                            'fieldName': 'fieldName2',
                            'type': 'type2',
                            'encoding': 'encoding2',
                        },
                    ],
                    'onlyReadLatest': False,
                }
            ],
        },
    })
    ec = external_config.ExternalConfig.from_api_repr(resource)
    self._verify_base(ec)
    self.assertEqual(ec.source_format, 'BIGTABLE')
    self.assertIsInstance(ec.options, external_config.BigtableOptions)
    self.assertEqual(ec.options.ignore_unspecified_column_families, True)
    self.assertEqual(ec.options.read_rowkey_as_string, False)
    self.assertEqual(len(ec.options.column_families), 1)
    fam1 = ec.options.column_families[0]
    self.assertIsInstance(fam1, external_config.BigtableColumnFamily)
    self.assertEqual(fam1.family_id, 'familyId')
    self.assertEqual(fam1.type_, 'type')
    self.assertEqual(fam1.encoding, 'encoding')
    self.assertEqual(len(fam1.columns), 2)
    self.assertFalse(fam1.only_read_latest)
    # First column was given as a plain qualifier string.
    col1 = fam1.columns[0]
    self.assertEqual(col1.qualifier_string, 'q')
    self.assertEqual(col1.field_name, 'fieldName1')
    self.assertEqual(col1.type_, 'type1')
    self.assertEqual(col1.encoding, 'encoding1')
    self.assertTrue(col1.only_read_latest)
    self.assertIsNone(col1.qualifier_encoded)
    # Second column used the base64-encoded qualifier; it must decode
    # back to the raw bytes.
    col2 = ec.options.column_families[0].columns[1]
    self.assertEqual(col2.qualifier_encoded, b'q')
    self.assertEqual(col2.field_name, 'fieldName2')
    self.assertEqual(col2.type_, 'type2')
    self.assertEqual(col2.encoding, 'encoding2')
    # Serialising back must reproduce the input resource exactly.
    got_resource = ec.to_api_repr()
    self.assertEqual(got_resource, resource)
def add_supported_app(
    app_config: Dict[str, Any],
    source_app: AppInfo,
    target_apps: Sequence[AppInfo],
) -> None:
    """Clone the BTT activation-group condition entry for *source_app*
    onto each app in *target_apps*, rewriting plist UID references, and
    write the re-encoded condition back into *app_config* in place.
    """
    def fix_uuids(
        copy_from: int,
        copy_to: int,
        previous_length: int,
        key: Any,
        value: Any,
    ) -> plistlib.UID:
        # Remap a UID in a copied object so it points at the copy's new
        # position in $objects.  Mutates in place; the declared UID
        # return type is unused (always returns None).
        if isinstance(value, plistlib.UID):
            value.data = compute_new_uid(copy_from, copy_to, previous_length, value.data)
        return None
    activation_group_cond = app_config["BTTActivationGroupCondition"]
    # Condition is a base64-encoded binary plist
    parsed_plist = plistlib.loads(
        base64.urlsafe_b64decode(activation_group_cond))
    # Note to future self: I have no clue how plist's work - just what I gathered
    # from reading and reversing the existing file
    #
    # figure out the right operators
    # Locate the anchor string for the source app; prefer the bundle id.
    try:
        center = parsed_plist["$objects"].index(source_app.bundle_name)
        use_bundle = True
    except:
        center = parsed_plist["$objects"].index(source_app.app_name)
        use_bundle = False
    # start searching back from the located bundle / app name
    # keep track of any index with a forward ref to our bundle/app name
    # or a forward ref to another item that has one to it (transitive)
    idxs_to_search = [center]
    for i in range(center, 0, -1):
        obj_at_pos = parsed_plist["$objects"][i]
        if not isinstance(obj_at_pos, dict):
            continue
        for k, v in obj_at_pos.items():
            # if the curre
            if isinstance(v, plistlib.UID):
                if v.data in idxs_to_search:
                    # Keep track of the new forward ref
                    idxs_to_search.append(i)
    copy_from = idxs_to_search[-1]
    # The first item in our predicate will have forward refs to all the required bits
    # including the predicate
    # transitively search for it
    copy_to = max((v.data
                   for k, v in parsed_plist["$objects"][copy_from].items()
                   if isinstance(v, plistlib.UID)))
    # search for potential forward refs from copy_to onwards
    while True:
        if not isinstance(parsed_plist["$objects"][copy_to], dict):
            break
        new_max = max((v.data
                       for k, v in parsed_plist["$objects"][copy_to].items()
                       if isinstance(v, plistlib.UID)))
        if new_max <= copy_to:
            break
        copy_to = new_max
    root_levels_to_add = []
    for target_app in target_apps:
        # Deep-copy the source app's object span and remap its UIDs to
        # the end of $objects, swapping in the target app's name.
        new_items = copy.deepcopy(parsed_plist["$objects"][copy_from:(copy_to + 1)])
        previous_length = len(parsed_plist["$objects"])
        print(f"Adding {target_app} - starting at ID {previous_length}")
        root_levels_to_add.append(previous_length)
        fix_callable = partial(fix_uuids, copy_from, copy_to, previous_length)
        for item_pos, item in enumerate(new_items):
            if isinstance(item, plistlib.UID):
                fix_callable(None, item)
            elif isinstance(item, (list, dict)):
                recursive_modify_collection(item, fix_callable)
            elif isinstance(item, str):
                if use_bundle and item == source_app.bundle_name:
                    new_items[item_pos] = target_app.bundle_name
                elif not use_bundle and item == source_app.app_name:
                    new_items[item_pos] = target_app.app_name
        parsed_plist["$objects"].extend(new_items)
    # find top level pointer to all the apps
    for item_pos, item in enumerate(parsed_plist["$objects"]):
        if not isinstance(item, dict):
            continue
        if "NS.objects" not in item:
            continue
        # search for our minimum range - i.e. the first item that denoted the app entry we copied
        if plistlib.UID(copy_from) in item["NS.objects"]:
            for new_root in root_levels_to_add:
                item["NS.objects"].append(plistlib.UID(new_root))
            break
    else:
        raise ValueError(
            "Could not append new app - could not locate root level list")
    print("Added to root tree - dumping plist and we'll be done")
    # NOTE(review): decoded with urlsafe_b64decode but re-encoded with
    # standard_b64encode — confirm both alphabets are acceptable to BTT.
    app_config["BTTActivationGroupCondition"] = base64.standard_b64encode(
        plistlib.dumps(
            parsed_plist,
            fmt=plistlib.FMT_BINARY,
            sort_keys=True,
        )).decode("ascii")
def md5_sum(content):
    """Return the MD5 digest of *content* (bytes), base64-encoded as bytes."""
    digest = hashlib.md5(content).digest()
    return base64.standard_b64encode(digest)
def encrypt(data):
    """Pad *data* via sanitize(), encrypt it with the module-level cipher,
    and return the ciphertext base64-encoded as a text string."""
    padded = sanitize(data)
    ciphertext = cipher.encrypt(padded)
    return standard_b64encode(ciphertext).decode('utf-8')
def sanitize(data):
    """PKCS#7-pad *data* (str) to a multiple of the module-level block_size.

    :param data: plaintext string.
    :return: UTF-8 encoded, padded bytes ready for a block cipher.
    """
    data1 = data.encode('utf-8')
    pad = block_size - len(data1) % block_size
    print("pad=" + str(pad))
    data1 = data1 + (pad * chr(pad)).encode('utf-8')
    # BUG FIX: data1 is bytes; "str" + bytes raises TypeError on Python 3.
    print("data:=" + str(data1))
    return data1


def encrypt(data):
    """Pad *data*, encrypt with the module-level cipher, return base64 text."""
    data = sanitize(data)
    return standard_b64encode(cipher.encrypt(data)).decode('utf-8')
    #return standard_b64encode(cipher.encrypt(data)).decode('utf-8')


key = "B374A26A71490437AA024E4FADD5B497"
print("key:=" + key)
# BUG FIX: modern PyCrypto/PyCryptodome require the key as bytes, not str.
cipher = AES.new(key.encode('utf-8'), AES.MODE_ECB)
block_size = 16
data = "\x00" * 14 + 'ab'
data = sanitize(data)
print(standard_b64encode(cipher.encrypt(data)).decode('utf-8'))

# NOTE(review): MODE_CFB without an explicit IV lets the library pick one
# (PyCryptodome generates a random IV), so this output is non-deterministic
# — confirm that is intended.
cipher = AES.new(key.encode('utf-8'), AES.MODE_CFB)
block_size = 16
data = "\x00" * 14 + 'ab'
data = sanitize(data)
print(standard_b64encode(cipher.encrypt(data)).decode('utf-8'))
def _authenticate_scram(credentials, sock_info, mechanism):
    """Authenticate using SCRAM (RFC 5802).

    Runs the SCRAM-SHA-1 or SCRAM-SHA-256 conversation over `sock_info`
    against the `credentials.source` database, verifying the server's
    signature at the end.  Raises OperationFailure on any protocol or
    verification error.
    """
    username = credentials.username
    if mechanism == 'SCRAM-SHA-256':
        digest = "sha256"
        digestmod = hashlib.sha256
        # SHA-256 uses the SASLprep-normalized password directly.
        data = saslprep(credentials.password).encode("utf-8")
    else:
        digest = "sha1"
        digestmod = hashlib.sha1
        # Legacy SHA-1 path hashes MongoDB's user:password digest first.
        data = _password_digest(username, credentials.password).encode("utf-8")
    source = credentials.source
    cache = credentials.cache

    # Make local
    _hmac = hmac.HMAC

    # Per RFC 5802, '=' and ',' in the username must be escaped.
    user = username.encode("utf-8").replace(b"=", b"=3D").replace(b",", b"=2C")
    # Client nonce: decimal digits of a SystemRandom float (leading "0." stripped).
    nonce = standard_b64encode(
        (("%s" % (SystemRandom().random(),))[2:]).encode("utf-8"))
    first_bare = b"n=" + user + b",r=" + nonce

    cmd = SON([('saslStart', 1),
               ('mechanism', mechanism),
               ('payload', Binary(b"n,," + first_bare)),
               ('autoAuthorize', 1)])
    res = sock_info.command(source, cmd)

    server_first = res['payload']
    parsed = _parse_scram_response(server_first)
    iterations = int(parsed[b'i'])
    # RFC 7677 / driver spec: reject weak iteration counts.
    if iterations < 4096:
        raise OperationFailure("Server returned an invalid iteration count.")
    salt = parsed[b's']
    rnonce = parsed[b'r']
    # Server nonce must extend the client nonce, else it's not our session.
    if not rnonce.startswith(nonce):
        raise OperationFailure("Server returned an invalid nonce.")

    # "biws" is base64 of the gs2 header "n,," (no channel binding).
    without_proof = b"c=biws,r=" + rnonce
    keys = cache.data
    if keys:
        # Reuse previously derived keys to skip the expensive PBKDF2 step.
        client_key, server_key = keys
    else:
        salted_pass = _hi(digest, data, standard_b64decode(salt), iterations)
        client_key = _hmac(salted_pass, b"Client Key", digestmod).digest()
        server_key = _hmac(salted_pass, b"Server Key", digestmod).digest()
        cache.data = (client_key, server_key)
    stored_key = digestmod(client_key).digest()
    auth_msg = b",".join((first_bare, server_first, without_proof))
    client_sig = _hmac(stored_key, auth_msg, digestmod).digest()
    client_proof = b"p=" + standard_b64encode(_xor(client_key, client_sig))
    client_final = b",".join((without_proof, client_proof))

    # Pre-compute the signature the server must present back to us.
    server_sig = standard_b64encode(
        _hmac(server_key, auth_msg, digestmod).digest())

    cmd = SON([('saslContinue', 1),
               ('conversationId', res['conversationId']),
               ('payload', Binary(client_final))])
    res = sock_info.command(source, cmd)

    parsed = _parse_scram_response(res['payload'])
    # Constant-time compare: mutual authentication of the server.
    if not compare_digest(parsed[b'v'], server_sig):
        raise OperationFailure("Server returned an invalid signature.")

    # Depending on how it's configured, Cyrus SASL (which the server uses)
    # requires a third empty challenge.
    if not res['done']:
        cmd = SON([('saslContinue', 1),
                   ('conversationId', res['conversationId']),
                   ('payload', Binary(b''))])
        res = sock_info.command(source, cmd)
        if not res['done']:
            raise OperationFailure('SASL conversation failed to complete.')
import json import requests import os import base64 import polyline as poly #Username for PTV username=os.environ.get("PTV_username") #API password for PTV password=os.environ.get("PTV_password") #API key for Tollguru Tolls_Key = os.environ.get("TOLLGURU_API_KEY") #'Authorization' parameter takes "Basic " followed by base64 encodes form of username:password #Sample : 'Authorization' : 'Basic eHRvazofSZGrghdtyc56TRkZGEtYrDGFREGTgvbeQxZGI0Njg=' autho = base64.standard_b64encode(bytes(f"{username}:{password}",'utf-8')).decode('utf-8') header={'Content-Type' : 'application/json','Authorization' : f"Basic {autho}"} '''Fetching geocodes from PTV''' def get_geocode_from_ptv(address): address_actual=address #storing the actual address before CGI encoding address=address.replace(" ", "%20").replace(",","%2C") ptv_geocoding_url = f"https://xserver2-europe-eu-test.cloud.ptvgroup.com/services/rest/XLocate/locations/{address}" res=requests.get(ptv_geocoding_url,headers=header).json() return(res['results'][0]['location']['referenceCoordinate']) # Returns a dictionary {'x':long,'y':lat} eg:{'x': -72.470237792, 'y': 42.174369817} '''Fetching Polyline from PTV''' def get_polyline_from_ptv(source_geocode_dict,destination_geocode_dict): ptv_url="https://xserver2-europe-eu-test.cloud.ptvgroup.com/services/rs/XRoute/experimental/calculateRoute" payload= {
def sendNZB(nzb, proper=False):  # pylint: disable=too-many-locals, too-many-statements, too-many-branches, too-many-return-statements
    '''
    Sends NZB to NZBGet client

    :param nzb: nzb object
    :param proper: True if this is a Proper download, False if not. Defaults to False
    :return: True if the NZB was queued successfully, False otherwise.
    '''
    if sickbeard.NZBGET_HOST is None:
        logger.log(
            'No NZBget host found in configuration. Please configure it.',
            logger.WARNING)
        return False

    addToTop = False
    nzbgetprio = 0
    category = sickbeard.NZBGET_CATEGORY
    if nzb.show.is_anime:
        category = sickbeard.NZBGET_CATEGORY_ANIME

    # Credentials are embedded in the XML-RPC URL.
    url = 'http{0}://{1}:{2}@{3}/xmlrpc'.format(
        's' if sickbeard.NZBGET_USE_HTTPS else '',
        sickbeard.NZBGET_USERNAME,
        sickbeard.NZBGET_PASSWORD,
        sickbeard.NZBGET_HOST)

    nzbGetRPC = xmlrpclib.ServerProxy(url)
    # writelog doubles as a connectivity check before we try to queue anything.
    try:
        if nzbGetRPC.writelog(
                'INFO',
                'SickRage connected to drop off {0} any moment now.'.format(
                    nzb.name + '.nzb')):
            logger.log('Successful connected to NZBget', logger.DEBUG)
        else:
            logger.log(
                'Successful connected to NZBget, but unable to send a message',
                logger.WARNING)
    except httplib.socket.error:
        logger.log(
            'Please check your NZBget host and port (if it is running). NZBget is not responding to this combination',
            logger.WARNING)
        return False
    except xmlrpclib.ProtocolError as e:
        if e.errmsg == 'Unauthorized':
            logger.log('NZBget username or password is incorrect.',
                       logger.WARNING)
        else:
            logger.log('Protocol Error: ' + e.errmsg, logger.ERROR)
        return False

    dupekey = ''
    dupescore = 0
    # if it aired recently make it high priority and generate DupeKey/Score
    for curEp in nzb.episodes:
        if dupekey == '':
            # indexer 1 == TVDB, 2 == TVRage (legacy naming below).
            if curEp.show.indexer == 1:
                dupekey = 'SickRage-' + str(curEp.show.indexerid)
            elif curEp.show.indexer == 2:
                dupekey = 'SickRage-tvr' + str(curEp.show.indexerid)
        # One "-season.episode" suffix is appended per episode in the NZB.
        dupekey += '-' + str(curEp.season) + '.' + str(curEp.episode)
        if datetime.date.today() - curEp.airdate <= datetime.timedelta(days=7):
            addToTop = True
            nzbgetprio = sickbeard.NZBGET_PRIORITY
        else:
            # Older episodes go to the backlog category instead.
            category = sickbeard.NZBGET_CATEGORY_BACKLOG
            if nzb.show.is_anime:
                category = sickbeard.NZBGET_CATEGORY_ANIME_BACKLOG

    if nzb.quality != Quality.UNKNOWN:
        dupescore = nzb.quality * 100
    if proper:
        dupescore += 10

    nzbcontent64 = None
    if nzb.resultType == 'nzbdata':
        # We already hold the raw NZB payload; NZBget wants it base64-encoded.
        data = nzb.extraInfo[0]
        nzbcontent64 = standard_b64encode(data)

    logger.log('Sending NZB to NZBget')
    logger.log('URL: ' + url, logger.DEBUG)

    try:
        # Find out if nzbget supports priority (Version 9.0+), old versions beginning with a 0.x will use the old command
        nzbget_version_str = nzbGetRPC.version()
        nzbget_version = try_int(
            nzbget_version_str[:nzbget_version_str.find('.')])
        if nzbget_version == 0:
            if nzbcontent64:
                nzbget_result = nzbGetRPC.append(nzb.name + '.nzb', category,
                                                 addToTop, nzbcontent64)
            else:
                # Old API cannot queue by URL; download the NZB ourselves
                # and submit its content.
                if nzb.resultType == 'nzb':
                    if not nzb.provider.login():
                        return False
                    data = nzb.provider.get_url(nzb.url, returns='content')
                    if data is None:
                        return False
                    nzbcontent64 = standard_b64encode(data)
                nzbget_result = nzbGetRPC.append(nzb.name + '.nzb', category,
                                                 addToTop, nzbcontent64)
        elif nzbget_version == 12:
            if nzbcontent64 is not None:
                nzbget_result = nzbGetRPC.append(nzb.name + '.nzb', category,
                                                 nzbgetprio, False,
                                                 nzbcontent64, False, dupekey,
                                                 dupescore, 'score')
            else:
                nzbget_result = nzbGetRPC.appendurl(nzb.name + '.nzb',
                                                    category, nzbgetprio,
                                                    False, nzb.url, False,
                                                    dupekey, dupescore,
                                                    'score')
        # v13+ has a new combined append method that accepts both (url and content)
        # also the return value has changed from boolean to integer
        # (Positive number representing NZBID of the queue item. 0 and negative numbers represent error codes.)
        elif nzbget_version >= 13:
            nzbget_result = nzbGetRPC.append(
                nzb.name + '.nzb',
                nzbcontent64 if nzbcontent64 is not None else nzb.url,
                category, nzbgetprio, False, False, dupekey, dupescore,
                'score') > 0
        else:
            if nzbcontent64 is not None:
                nzbget_result = nzbGetRPC.append(nzb.name + '.nzb', category,
                                                 nzbgetprio, False,
                                                 nzbcontent64)
            else:
                nzbget_result = nzbGetRPC.appendurl(nzb.name + '.nzb',
                                                    category, nzbgetprio,
                                                    False, nzb.url)

        if nzbget_result:
            logger.log('NZB sent to NZBget successfully', logger.DEBUG)
            return True
        else:
            logger.log(
                'NZBget could not add {0} to the queue'.format(nzb.name +
                                                               '.nzb'),
                logger.WARNING)
            return False
    except Exception:
        # Any RPC/transport failure is reported as a queueing failure.
        logger.log(
            'Connect Error to NZBget: could not add {0} to the queue'.format(
                nzb.name + '.nzb'), logger.WARNING)
        return False
def setUp(self):
    """Build fixture SNS messages and sign them with the test private key.

    Creates unsigned and signed SubscriptionConfirmation payloads plus a
    signed Notification payload, attaching a base64 RSA/SHA1 signature
    computed over each message's canonical string-to-sign.
    """
    self.confirmation = {
        'Timestamp': '2014-12-11T19:21:18.852Z',
        'SubscribeURL': 'https://example-subscribe-url.com/path/to/verify_endpoint?verify_token=long_token&challenge=challenge',
        'Token': 'very_long_token',
        'Message': 'You have chosen to subscribe to the topic arn:aws:sns:us-east-1:<number>:topic.\nTo confirm the subscription, visit the SubscribeURL included in this message.',
        'Type': 'SubscriptionConfirmation',
        'TopicArn': 'arn:aws:sns:us-east-1:<number>:topic'
    }
    self.signed_confirmation = {
        'Timestamp': '2014-12-11T19:21:18.852Z',
        'SubscribeURL': 'https://example-subscribe-url.com/path/to/verify_endpoint?verify_token=long_token&challenge=challenge',
        'Token': 'very_long_token',
        'Message': 'You have chosen to subscribe to the topic arn:aws:sns:us-east-1:<number>:topic.\nTo confirm the subscription, visit the SubscribeURL included in this message.',
        'MessageId': 'message-id',
        'Type': 'SubscriptionConfirmation',
        'SignatureVersion': '1',
        'SigningCertURL': 'https://sns.us-east-1.amazonaws.com/SimpleNotificationService-d6d679a1d18e95c2f9ffcf11f4f9e198.pem',
        'TopicArn': 'arn:aws:sns:us-east-1:<number>:topic'
    }
    self.notification = {
        "Type": "Notification",
        "MessageId": "message-id",
        "TopicArn": "topic-arn",
        "Message": "[{\"type\":\"goals\",\"id\":\"scrubbed\",\"ownerId\":\"scrubbed\",\"action\":\"updated\",\"updatedAt\":\"2014-12-11T20:23:43Z\"}]",
        "Timestamp": "2014-12-11T20:23:44.182Z",
        "SignatureVersion": "1",
        "SigningCertURL": "https://sns.us-east-1.amazonaws.com/SimpleNotificationService-d6d679a1d18e95c2f9ffcf11f4f9e198.pem",
        "UnsubscribeURL": "https://sns.us-east-1.amazonaws.com/?Action=Unsubscribe&SubscriptionArn=subscription-arn"
    }

    # Generate a signature for the necessary messages
    # privkey.pem created with the following command:
    # openssl req -nodes -sha1 -x509 -newkey rsa:2048 -days 999999 \
    #     -keyout tests/files/privkey.pem -out tests/files/certificate.pem
    # and given a password of 'test'
    with open('tests/files/privkey.pem') as key_file:
        key_content = key_file.read().encode('utf8')
    # Key is unencrypted (-nodes), hence password=None.
    key = default_backend().load_pem_private_key(key_content, None)
    strings_to_sign = {
        # Hardcode these so our test suite is less of an echo chamber
        'signed_confirmation': 'Message\nYou have chosen to subscribe to the topic arn:aws:sns:us-east-1:<number>:topic.\nTo confirm the subscription, visit the SubscribeURL included in this message.\nMessageId\nmessage-id\nSubscribeURL\nhttps://example-subscribe-url.com/path/to/verify_endpoint?verify_token=long_token&challenge=challenge\nTimestamp\n2014-12-11T19:21:18.852Z\nToken\nvery_long_token\nTopicArn\narn:aws:sns:us-east-1:<number>:topic\nType\nSubscriptionConfirmation\n',
        'notification': 'Message\n[{"type":"goals","id":"scrubbed","ownerId":"scrubbed","action":"updated","updatedAt":"2014-12-11T20:23:43Z"}]\nMessageId\nmessage-id\nTimestamp\n2014-12-11T20:23:44.182Z\nTopicArn\ntopic-arn\nType\nNotification\n'
    }
    for message, string_to_sign in strings_to_sign.items():
        # BUG FIX: the streaming signer() API was removed in cryptography 2.0;
        # sign() is the one-shot replacement with identical output.
        signature = standard_b64encode(
            key.sign(string_to_sign.encode('utf8'), PKCS1v15(), hashes.SHA1()))
        getattr(self, message)['Signature'] = signature.decode('utf8')
import base64

# Flag bits are smuggled into the padding-adjacent character of each
# base64-encoded quote (a simple steganographic encoding).
flag = "011010010110110001101100010111110110110101101001011100110111001101011111011110010110111101110101"
alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
outfile = open("encrypted_lines.txt", "w")
with open("quotes.txt") as f:
    for line in f:
        # BUG FIX (py3): bytes(str) needs an encoding, and the result of
        # b64encode is bytes - decode it so the str comparisons/concatenation
        # below work instead of raising TypeError.
        encoded = base64.standard_b64encode(
            line.rstrip().encode("utf-8")).decode("ascii")
        if (encoded.rstrip().endswith("==") and flag != ""):
            # Two padding chars leave 4 free bits in the last data char.
            shift = flag[:4]
            flag = flag[4:]
            charToChange = encoded[-3:-2]
            offset = int(shift, 2)
            newChar = (alphabet[alphabet.find(charToChange[:1]) + offset])
            encoded = encoded[:-3] + newChar + "=="
        elif (encoded.rstrip().endswith("=") and flag != ""):
            # One padding char leaves 2 free bits.
            shift = flag[:2]
            flag = flag[2:]
            charToChange = encoded[-2:-1]
            offset = int(shift, 2)
            newChar = (alphabet[alphabet.find(charToChange[:1]) + offset])
            encoded = encoded[:-2] + newChar + "="
        outfile.write(encoded + "\n")
outfile.close()
def draw(self, path, start_x, start_y, width, height):
    """Display the image at *path* in a kitty terminal at the given cell
    position, scaled to fit a width x height cell box.

    Transmits the image via kitty's graphics protocol either inline
    (base64 RGB(A) data) or through a temporary PNG file, then reads
    kitty's reply and raises ImageDisplayError on anything but OK.
    """
    self.image_id += 1
    # dictionary to store the command arguments for kitty
    # a is the display command, with T going for immediate output
    # i is the id entifier for the image
    cmds = {'a': 'T', 'i': self.image_id}
    # sys.stderr.write('{0}-{1}@{2}x{3}\t'.format(
    #     start_x, start_y, width, height))

    # finish initialization if it is the first call
    if self.needs_late_init:
        self._late_init()

    with warnings.catch_warnings(record=True):  # as warn:
        warnings.simplefilter('ignore',
                              self.backend.DecompressionBombWarning)
        image = self.backend.open(path)
        # TODO: find a way to send a message to the user that
        # doesn't stop the image from displaying
        # if warn:
        #     raise ImageDisplayError(str(warn[-1].message))

    # Target box in pixels (cells * pixels-per-cell).
    box = (width * self.pix_row, height * self.pix_col)

    if image.width > box[0] or image.height > box[1]:
        # Downscale preserving aspect ratio; never upscale.
        scale = min(box[0] / image.width, box[1] / image.height)
        image = image.resize(
            (int(scale * image.width), int(scale * image.height)),
            self.backend.LANCZOS)

    # kitty only accepts RGB/RGBA raw data.
    if image.mode != 'RGB' and image.mode != 'RGBA':
        image = image.convert('RGB')
    # start_x += ((box[0] - image.width) // 2) // self.pix_row
    # start_y += ((box[1] - image.height) // 2) // self.pix_col
    if self.stream:
        # encode the whole image as base64
        # TODO: implement z compression
        # to possibly increase resolution in sent image
        # t: transmissium medium, 'd' for embedded
        # f: size of a pixel fragment (8bytes per color)
        # s, v: size of the image to recompose the flattened data
        # c, r: size in cells of the viewbox
        cmds.update({
            't': 'd',
            'f': len(image.getbands()) * 8,
            's': image.width,
            'v': image.height,
        })
        payload = base64.standard_b64encode(bytearray().join(
            map(bytes, image.getdata())))
    else:
        # put the image in a temporary png file
        # t: transmissium medium, 't' for temporary file (kitty will delete it for us)
        # f: size of a pixel fragment (100 just mean that the file is png encoded,
        #    the only format except raw RGB(A) bitmap that kitty understand)
        # c, r: size in cells of the viewbox
        cmds.update({
            't': 't',
            'f': 100,
        })
        with NamedTemporaryFile(prefix='ranger_thumb_', suffix='.png',
                                delete=False) as tmpf:
            image.save(tmpf, format='png', compress_level=0)
            # kitty receives the *path* to the temp file, base64-encoded.
            payload = base64.standard_b64encode(
                tmpf.name.encode(self.fsenc))

    with temporarily_moved_cursor(int(start_y), int(start_x)):
        for cmd_str in self._format_cmd_str(cmds, payload=payload):
            self.stdbout.write(cmd_str)
    # catch kitty answer before the escape codes corrupt the console
    resp = b''
    while resp[-2:] != self.protocol_end:
        resp += self.stdbin.read(1)
    if b'OK' in resp:
        return
    else:
        raise ImageDisplayError('kitty replied "{r}"'.format(r=resp))
def upload_file(self, filename):
    """Upload the documentation archive *filename* to the configured
    repository via a hand-rolled HTTP POST with Basic authentication.

    Builds a multipart body for the 'doc_upload' action and reports the
    server's response through announce(); a 301 is treated as success.
    """
    with open(filename, "rb") as f:
        content = f.read()
    meta = self.distribution.metadata
    data = {
        ":action": "doc_upload",
        "name": meta.get_name(),
        "content": (os.path.basename(filename), content),
    }
    # set up the authentication
    credentials = _encode(self.username + ":" + self.password)
    credentials = standard_b64encode(credentials)
    # On Python 3 the header value must be text, not bytes.
    if not six.PY2:
        credentials = credentials.decode("ascii")
    auth = "Basic " + credentials

    body, ct = self._build_multipart(data)

    msg = "Submitting documentation to %s" % (self.repository)
    self.announce(msg, log.INFO)

    # build the Request
    # We can't use urllib2 since we need to send the Basic
    # auth right with the first request
    schema, netloc, url, params, query, fragments = urllib.parse.urlparse(
        self.repository
    )
    assert not params and not query and not fragments
    if schema == "http":
        conn = http_client.HTTPConnection(netloc)
    elif schema == "https":
        conn = http_client.HTTPSConnection(netloc)
    else:
        raise AssertionError("unsupported schema " + schema)

    data = ""
    try:
        # Manual request assembly so the Authorization header accompanies
        # the very first request (no 401 round-trip).
        conn.connect()
        conn.putrequest("POST", url)
        content_type = ct
        conn.putheader("Content-type", content_type)
        conn.putheader("Content-length", str(len(body)))
        conn.putheader("Authorization", auth)
        conn.endheaders()
        conn.send(body)
    except socket.error as e:
        self.announce(str(e), log.ERROR)
        return

    r = conn.getresponse()
    if r.status == 200:
        msg = "Server response (%s): %s" % (r.status, r.reason)
        self.announce(msg, log.INFO)
    elif r.status == 301:
        # Redirect after upload means success; derive the docs URL.
        location = r.getheader("Location")
        if location is None:
            location = "https://pythonhosted.org/%s/" % meta.get_name()
        msg = "Upload successful. Visit %s" % location
        self.announce(msg, log.INFO)
    else:
        msg = "Upload failed (%s): %s" % (r.status, r.reason)
        self.announce(msg, log.ERROR)
    if self.show_response:
        print("-" * 75, r.read(), "-" * 75)
def test_from_api_repr_bigtable(self):
    """Round-trip a BIGTABLE external config: parse an API resource into
    an ExternalConfig, verify every Bigtable option/column-family/column
    field, then confirm to_api_repr() reproduces the original resource."""
    qualifier_encoded = base64.standard_b64encode(b"q").decode("ascii")
    resource = _copy_and_update(
        self.BASE_RESOURCE,
        {
            "sourceFormat": "BIGTABLE",
            "bigtableOptions": {
                "ignoreUnspecifiedColumnFamilies": True,
                "readRowkeyAsString": False,
                "columnFamilies": [
                    {
                        "familyId": "familyId",
                        "type": "type",
                        "encoding": "encoding",
                        "columns": [
                            {
                                "qualifierString": "q",
                                "fieldName": "fieldName1",
                                "type": "type1",
                                "encoding": "encoding1",
                                "onlyReadLatest": True,
                            },
                            {
                                # Same qualifier, but supplied base64-encoded.
                                "qualifierEncoded": qualifier_encoded,
                                "fieldName": "fieldName2",
                                "type": "type2",
                                "encoding": "encoding2",
                            },
                        ],
                        "onlyReadLatest": False,
                    }
                ],
            },
        },
    )

    ec = external_config.ExternalConfig.from_api_repr(resource)

    self._verify_base(ec)
    self.assertEqual(ec.source_format, "BIGTABLE")
    self.assertIsInstance(ec.options, external_config.BigtableOptions)
    self.assertEqual(ec.options.ignore_unspecified_column_families, True)
    self.assertEqual(ec.options.read_rowkey_as_string, False)
    self.assertEqual(len(ec.options.column_families), 1)
    fam1 = ec.options.column_families[0]
    self.assertIsInstance(fam1, external_config.BigtableColumnFamily)
    self.assertEqual(fam1.family_id, "familyId")
    self.assertEqual(fam1.type_, "type")
    self.assertEqual(fam1.encoding, "encoding")
    self.assertEqual(len(fam1.columns), 2)
    self.assertFalse(fam1.only_read_latest)
    col1 = fam1.columns[0]
    self.assertEqual(col1.qualifier_string, "q")
    self.assertEqual(col1.field_name, "fieldName1")
    self.assertEqual(col1.type_, "type1")
    self.assertEqual(col1.encoding, "encoding1")
    self.assertTrue(col1.only_read_latest)
    # qualifierString and qualifierEncoded are mutually exclusive.
    self.assertIsNone(col1.qualifier_encoded)
    col2 = ec.options.column_families[0].columns[1]
    # The encoded qualifier is surfaced as decoded raw bytes.
    self.assertEqual(col2.qualifier_encoded, b"q")
    self.assertEqual(col2.field_name, "fieldName2")
    self.assertEqual(col2.type_, "type2")
    self.assertEqual(col2.encoding, "encoding2")

    # Serialization must be the exact inverse of parsing.
    got_resource = ec.to_api_repr()
    self.assertEqual(got_resource, resource)
def to_base64(self, namespace, lookup):
    """Resolve the referenced secret file and return its 'public' field,
    base64-encoded.

    NOTE(review): json.loads() is handed the ReadFile context object
    directly - presumably ReadFile.__enter__ yields the file's text;
    confirm against the ReadFile implementation.
    """
    referenced = lookup.find(self.ref_secret, self.ref_value)
    with ReadFile(referenced.filename(namespace)) as contents:
        parsed = json.loads(contents)
        return base64.standard_b64encode(parsed['public'])
def _convert_from_ros_binary(field_type, field_value): field_value = base64.standard_b64encode(field_value) return field_value
# Load the constants from the YAML file.
logger.info('Récupération des constantes depuis le YAML...')
CYCLOS_CONSTANTS = None
with open("/cyclos/cyclos_constants.yml", 'r') as cyclos_stream:
    try:
        # SECURITY FIX: yaml.load() without a Loader can instantiate
        # arbitrary Python objects; safe_load() is sufficient for this
        # plain constants file.
        CYCLOS_CONSTANTS = yaml.safe_load(cyclos_stream)
    except yaml.YAMLError as exc:
        # NOTE(review): assert is stripped under `python -O`; consider
        # raising explicitly if this must always abort.
        assert False, exc

# Print (mint) eusko banknotes via a system-to-system Cyclos payment.
logger.info('Impression billets eusko...')
logger.debug(str(CYCLOS_CONSTANTS['payment_types']['impression_de_billets_d_eusko']) + "\r\n" +
             str(CYCLOS_CONSTANTS['currencies']['eusko']) + "\r\n" +
             str(CYCLOS_CONSTANTS['account_types']['compte_de_debit_eusko_billet']) + "\r\n" +
             str(CYCLOS_CONSTANTS['account_types']['stock_de_billets']))
r = requests.post(
    eusko_web_services + 'payment/perform',
    # Demo credentials, Basic-auth encoded per RFC 7617.
    headers={'Authorization': 'Basic {}'.format(base64.standard_b64encode(b'demo:demo').decode('utf-8'))},  # noqa
    json={
        'type': CYCLOS_CONSTANTS['payment_types']['impression_de_billets_d_eusko'],
        'amount': 126500,
        'currency': CYCLOS_CONSTANTS['currencies']['eusko'],
        'from': 'SYSTEM',
        'to': 'SYSTEM',
    })
logger.info('Impression billets eusko... Terminé !')
logger.debug(r.json())

logger.info('Fin du script !')
def http_request(
    host_creds, endpoint, retries=3, retry_interval=3, max_rate_limit_interval=60, **kwargs
):
    """
    Makes an HTTP request with the specified method to the specified hostname/endpoint. Ratelimit
    error code (429) will be retried with an exponential back off (1, 2, 4, ... seconds) for at
    most `max_rate_limit_interval` seconds.  Internal errors (500s) will be retried up to
    `retries` times , waiting `retry_interval` seconds between successive retries. Parses the API
    response (assumed to be JSON) into a Python object and returns it.

    :param host_creds: A :py:class:`mlflow.rest_utils.MlflowHostCreds` object containing
        hostname and optional authentication.
    :return: Parsed API response
    """
    hostname = host_creds.host
    auth_str = None
    # Prefer Basic auth when a username/password pair is available;
    # otherwise fall back to a Bearer token.
    if host_creds.username and host_creds.password:
        basic_auth_str = ("%s:%s" % (host_creds.username, host_creds.password)).encode("utf-8")
        auth_str = "Basic " + base64.standard_b64encode(basic_auth_str).decode("utf-8")
    elif host_creds.token:
        auth_str = "Bearer %s" % host_creds.token

    headers = dict(_DEFAULT_HEADERS)
    if auth_str:
        headers["Authorization"] = auth_str

    # `verify` is either a bool (toggle TLS verification) or a CA bundle path.
    if host_creds.server_cert_path is None:
        verify = not host_creds.ignore_tls_verification
    else:
        verify = host_creds.server_cert_path

    if host_creds.client_cert_path is not None:
        kwargs["cert"] = host_creds.client_cert_path

    def request_with_ratelimit_retries(max_rate_limit_interval, **kwargs):
        # Retry 429s with exponential backoff (1, 2, 4, ... seconds), for at
        # most `max_rate_limit_interval` seconds in total.
        response = requests.request(**kwargs)
        time_left = max_rate_limit_interval
        sleep = 1
        while response.status_code == 429 and time_left > 0:
            # BUG FIX: the message previously contained a literal, never
            # interpolated "{path}" placeholder mixed with %-style args.
            _logger.warning(
                "API request to %s returned status code 429 (Rate limit exceeded). "
                "Retrying in %d seconds. "
                "Will continue to retry 429s for up to %d seconds.",
                kwargs.get("url"),
                sleep,
                time_left,
            )
            time.sleep(sleep)
            time_left -= sleep
            response = requests.request(**kwargs)
            sleep = min(time_left, sleep * 2)  # sleep for 1, 2, 4, ... seconds
        return response

    cleaned_hostname = strip_suffix(hostname, "/")
    url = "%s%s" % (cleaned_hostname, endpoint)
    for i in range(retries):
        response = request_with_ratelimit_retries(
            max_rate_limit_interval, url=url, headers=headers, verify=verify, **kwargs
        )
        # Anything below 500 (including 4xx client errors) is returned to the
        # caller; only server-side errors are retried here.
        if response.status_code >= 200 and response.status_code < 500:
            return response
        else:
            _logger.error(
                "API request to %s failed with code %s != 200, retrying up to %s more times. "
                "API response body: %s",
                url,
                response.status_code,
                retries - i - 1,
                response.text,
            )
            time.sleep(retry_interval)
    raise MlflowException(
        "API request to %s failed to return code 200 after %s tries" % (url, retries)
    )
def generateBinary(self, ofile=None):
    """Assemble the firmware image for the compiled code objects.

    Builds the binary header (magic, counts, offset tables), the exception
    tables, the resource table and the serialized code objects, then returns
    a (bin, codereprs) tuple where `bin` is a JSON-friendly dict whose
    binary sections are base64-encoded strings.

    :param ofile: optional pre-linked native object blob to embed as "cobjs".
    """
    bin = {}
    self.env.buildExceptionTable()
    codereprs = []
    # Resolve per-code-object exceptions and build their debug representations.
    for co in self.codeobjs:
        co.resolveExceptions(self.env)
        cr = CodeRepr()
        cr.makeFromCode(co)
        codereprs.append(cr)

    # Generate Code Image
    objbuf = []
    buf = bytearray()
    lmap = {}
    for co in self.codeobjs:
        bcf = co.toBytes()
        objbuf.append(bcf)

    #Generate Header
    #Magic Number
    buf += (struct.pack("=B", ord('G')))  #GGGD
    buf += (struct.pack("=B", ord('G')))  #GGGD
    buf += (struct.pack("=B", ord('G')))  #GGGD
    buf += (struct.pack("=B", ord('D')))  #GGGD
    #Flags
    buf += (struct.pack("=B", 0))
    #NModules
    buf += (struct.pack("=B", len(self.modules)))
    #Nobjs
    buf += (struct.pack("=H", len(objbuf)))
    #Exceptions
    etable, emtable, emtablelen = self.env.getBinaryExceptionTable()
    rtable = self.generateResourceTable()
    buf += struct.pack("=H", len(etable))
    #Unused --> now is num of cnatives
    buf += struct.pack("=H", len(self.cnatives))
    #ram_start
    buf += struct.pack("=I", 0)
    #data_start
    buf += struct.pack("=I", 0)
    #data_end
    buf += struct.pack("=I", 0)
    #data_bss
    buf += struct.pack("=I", 0)
    # Offset of the first code object: fixed header (+4 for res_table slot),
    # the code-object offset table, exception tables and cnative slots.
    cobsz = 4 * len(objbuf) + (len(buf) + 4) + (
        len(etable) * 8 + emtablelen) + 4 * len(self.cnatives)
    #res_table
    if rtable:
        buf += struct.pack("=I", cobsz)
        cobsz += len(rtable)
    else:
        buf += struct.pack("=I", 0)
    #CodeObjs table
    cobsztable = []
    pyobjtablestart = len(buf)
    for cob in objbuf:
        buf += (struct.pack("=I", cobsz))
        cobsztable.append(cobsz)
        cobsz += len(cob)
    pyobjtableend = len(buf)
    #add space for c natives addresses
    for i in range(0, len(self.cnatives)):
        buf += (struct.pack("=I", i))
    #exception table
    etablestart = len(buf)
    for e in etable:
        buf += struct.pack("=H", e[0])  #name
        buf += struct.pack("=H", e[1])  #parent
        buf += struct.pack("=I", e[2])  #msg offs
        #print("etable entry:",e[0],e[1],e[2])
    pckd = 0
    # Exception message strings: length-prefixed latin-1 bytes.
    for e in emtable:
        buf += struct.pack("=H", e[0])  #len
        buf += struct.pack("=" + str(e[0]) + "s", e[1].encode("latin1"))  #str
        pckd += 2 + e[0]
    # Pad the message block to a 4-byte boundary.
    ssz = (len(buf)) % 4
    if ssz:
        ssz = 4 - ssz
        while ssz > 0:
            buf += struct.pack("=B", 0)  #pad
            pckd += 1
            ssz -= 1
    etableend = len(buf)
    #resource table
    buf += rtable
    bin["info"] = {
        "nmodules": len(self.modules),
        "npyobjs": len(objbuf),
        "pyobjtable_start": pyobjtablestart,
        "pyobjtable_end": pyobjtableend,
        "ncnatives": len(self.cnatives),
        "etable_start": etablestart,
        "etable_end": etableend,
        "rtable_start": etableend,
        "rtable_elements": len(self.resources),
        "header_size": len(buf),
        "version": env.var.version,
        "target": self.board.target
    }
    bin["header"] = str(base64.standard_b64encode(buf), 'utf-8')
    buf = bytearray()
    #Store CodeObjs
    for ic, cob in enumerate(objbuf):
        buf += cob
    bin["pyobjs"] = str(base64.standard_b64encode(buf), 'utf-8')
    bin["info"]["pyobjs_size"] = len(buf)
    bin["cobjs"] = None
    bin["modules"] = self.moduletable
    # Invert the name->index map so cnatives are listed in slot order.
    onatives = {v: k for k, v in self.cnatives.items()}
    bin["cnatives"] = [onatives[i] for i in range(0, len(onatives))]
    if ofile:
        bin["cobjs"] = str(base64.standard_b64encode(ofile), 'utf-8')
    #TODO: add proper stats
    bin["stats"] = {}
    bin["stats"]["modules"] = {
    }  #{k.replace(homepath,""):v for k,v in self.moduletable.items() }
    bin["stats"]["natives"] = self.cnatives
    bin["stats"]["cfiles"] = list(self.cfiles)
    bin["stats"]["target"] = self.board.target
    bin["lmap"] = lmap
    return (bin, codereprs)  #, self.codeobjs)
def base64encode(b, urlsafe=False, padding=True):
    """Base64-encode *b* and return text.

    :param b: bytes to encode.
    :param urlsafe: use the URL-safe alphabet (``-`` and ``_``) when True.
    :param padding: strip trailing ``=`` padding when False.
    """
    codec = base64.urlsafe_b64encode if urlsafe else base64.standard_b64encode
    text = codec(b).decode('ascii')
    return text if padding else text.rstrip('=')
def auth_client_header(auth_client):
    """Build an HTTP Basic-Auth header dict for *auth_client*.

    :param auth_client: object with `id` and `secret` attributes.
    :return: ``{"Authorization": "Basic <base64(id:secret)>"}``
    """
    # BUG FIX: the format string had no placeholders, so the supplied
    # client_id/secret kwargs were silently ignored and the literal string
    # was encoded instead. RFC 7617 Basic auth is base64("<id>:<secret>").
    user_pass = "{client_id}:{secret}".format(
        client_id=auth_client.id, secret=auth_client.secret
    )
    encoded = base64.standard_b64encode(user_pass.encode("utf-8"))
    return {"Authorization": "Basic {creds}".format(creds=encoded.decode("ascii"))}
def encode_base64(buff: bytes) -> str:
    """Convert bytes to base64 as text string (no newlines)."""
    encoded = base64.standard_b64encode(buff)
    return encoded.decode("ascii", "ignore")
elif curEp.show.indexer == 2: dupekey = "SickBeard-tvr" + str(curEp.show.indexerid) dupekey += "-" + str(curEp.season) + "." + str(curEp.episode) if datetime.date.today() - curEp.airdate <= datetime.timedelta(days=7): addToTop = True nzbgetprio = sickbeard.NZBGET_PRIORITY if nzb.quality != Quality.UNKNOWN: dupescore = nzb.quality * 100 if proper: dupescore += 10 nzbcontent64 = None if nzb.resultType == "nzbdata": data = nzb.extraInfo[0] nzbcontent64 = standard_b64encode(data) logger.log(u"Sending NZB to NZBget") logger.log(u"URL: " + url, logger.DEBUG) try: # Find out if nzbget supports priority (Version 9.0+), old versions beginning with a 0.x will use the old command nzbget_version_str = nzbGetRPC.version() nzbget_version = helpers.tryInt(nzbget_version_str[:nzbget_version_str.find(".")]) if nzbget_version == 0: if nzbcontent64 is not None: nzbget_result = nzbGetRPC.append(nzb.name + ".nzb", sickbeard.NZBGET_CATEGORY, addToTop, nzbcontent64) else: if nzb.resultType == "nzb": genProvider = GenericProvider("") data = genProvider.getURL(nzb.url)
def send_glucose_reading(self, attempt_or_reading):
    """Forward a glucose reading to this API partner over HTTP POST.

    Accepts either a GlucoseReading (a forward attempt is created for it)
    or an existing APIReadingForwardAttempt.  Skips readings that were
    already committed or exhausted their retry budget, posts the JSON
    payload with Basic auth, and records the outcome in an APILogRecord.

    :param attempt_or_reading: GlucoseReading or APIReadingForwardAttempt.
    :raises ValueError: for any other argument type.
    :raises Exception: if the reading's patient is not linked to this partner.
    """
    from genesishealth.apps.readings.models import GlucoseReading
    if isinstance(attempt_or_reading, GlucoseReading):
        reading = attempt_or_reading
        attempt, _ = APIReadingForwardAttempt.objects.get_or_create(
            reading=reading, partner=self)
    elif isinstance(attempt_or_reading, APIReadingForwardAttempt):
        attempt = attempt_or_reading
    else:
        raise ValueError("Unknown type for attempt_or_reading")
    # Already delivered, or retry budget exhausted: nothing to do.
    if (attempt.committed or
            attempt.attempts >= self.maximum_send_attempts):
        return
    if self not in attempt.reading.patient.patient_profile.partners.all():
        raise Exception('Attempting to send reading to wrong partner.')
    log = APILogRecord(is_inbound=False, datetime=utcnow(),
                       url=self.get_url('push_data'),
                       action_type='push_data', for_partner=self,
                       reading=attempt.reading)
    reading_data = self.generate_reading_data(attempt.reading)
    encoded_data = json.dumps(reading_data)
    log.data = encoded_data
    # Send the reading
    from genesishealth.apps.gdrives.models import \
        GDriveTransmissionLogEntry
    # Mark the device transmission log (if any) as forwarded before sending.
    try:
        attempt.reading.log_entry
    except GDriveTransmissionLogEntry.DoesNotExist:
        pass
    else:
        attempt.reading.log_entry.sent_to_api = True
        attempt.reading.log_entry.save()
    attempt.attempts += 1
    push_location = self.get_url('push_data')
    request = Request(push_location)
    if self.api_version == "2.1":
        request.add_header("x-api-key", self.outgoing_api_key)
    # NOTE(review): standard_b64encode() on a str raises TypeError on
    # Python 3, and "Basic %s" % bytes would embed a b'...' repr — this
    # path looks Python-2 only; confirm the runtime before reusing.
    base64string = base64.standard_b64encode(
        "%s:%s" % (self.outgoing_username, self.outgoing_password))
    request.add_header("Authorization", "Basic %s" % base64string)
    request.add_header("Content-type", "application/json")
    # TLS verification deliberately disabled via private ssl API.
    context = ssl._create_unverified_context()
    try:
        response = urlopen(request, encoded_data, 10, context=context)
    except HTTPError as e:
        log.status = 'connection_error'
        log.response = str(e.read())
    except URLError as e:
        log.status = 'connection_error'
        log.response = e.reason
    except Exception as e:
        log.status = 'connection_error'
        log.response = str(e)
    else:
        response_contents = response.read()
        log.response = response_contents
        # Interpret response
        try:
            decoded_response = json.loads(response_contents)
        except ValueError:
            log.status = 'json_error'
        else:
            if decoded_response.get('success'):
                log.status = 'complete'
                attempt.committed = True
                try:
                    attempt.reading.log_entry
                except GDriveTransmissionLogEntry.DoesNotExist:
                    pass
                else:
                    attempt.reading.log_entry.received_by_api = True
                    attempt.reading.log_entry.save()
            else:
                log.status = 'returned_error'
    attempt.save()
    log.save()
def getBase64Value(self, userid, passwd):
    """Return base64("userid:passwd") as text, e.g. for HTTP Basic auth.

    :param userid: user name.
    :param passwd: password.
    :return: base64-encoded "userid:passwd" string.
    """
    # BUG FIX: the previous literal had no %s conversion specifiers, so
    # '...' % (userid, passwd) raised "not all arguments converted".
    # Basic-auth credentials are joined as "user:password" (RFC 7617).
    userpass = '%s:%s' % (userid, passwd)
    base64string = base64.standard_b64encode(userpass.encode('utf-8'))
    return base64string.decode('utf-8')
def DER_cert_to_PEM_cert(der_cert_bytes):
    """Takes a certificate in binary DER format and returns the
    PEM version of it as a string."""
    encoded = base64.standard_b64encode(der_cert_bytes).decode('ASCII')
    # PEM bodies are wrapped at 64 characters per line.
    wrapped = textwrap.fill(encoded, 64)
    return '\n'.join([PEM_HEADER, wrapped, PEM_FOOTER]) + '\n'
def test_to_api_repr_bigtable(self):
    """Build a BIGTABLE ExternalConfig object graph by hand and verify
    to_api_repr() serializes it to the expected API resource dict."""
    ec = external_config.ExternalConfig('BIGTABLE')
    options = external_config.BigtableOptions()
    options.ignore_unspecified_column_families = True
    options.read_rowkey_as_string = False
    ec._options = options
    fam1 = external_config.BigtableColumnFamily()
    fam1.family_id = 'familyId'
    fam1.type_ = 'type'
    fam1.encoding = 'encoding'
    fam1.only_read_latest = False
    col1 = external_config.BigtableColumn()
    col1.qualifier_string = 'q'
    col1.field_name = 'fieldName1'
    col1.type_ = 'type1'
    col1.encoding = 'encoding1'
    col1.only_read_latest = True
    col2 = external_config.BigtableColumn()
    # Raw-bytes qualifier: must serialize base64-encoded (see below).
    col2.qualifier_encoded = b'q'
    col2.field_name = 'fieldName2'
    col2.type_ = 'type2'
    col2.encoding = 'encoding2'
    fam1.columns = [col1, col2]
    options.column_families = [fam1]

    # The API representation carries qualifierEncoded as base64 text.
    qualifier_encoded = base64.standard_b64encode(b'q').decode('ascii')
    exp_resource = {
        'sourceFormat': 'BIGTABLE',
        'bigtableOptions': {
            'ignoreUnspecifiedColumnFamilies': True,
            'readRowkeyAsString': False,
            'columnFamilies': [
                {
                    'familyId': 'familyId',
                    'type': 'type',
                    'encoding': 'encoding',
                    'columns': [
                        {
                            'qualifierString': 'q',
                            'fieldName': 'fieldName1',
                            'type': 'type1',
                            'encoding': 'encoding1',
                            'onlyReadLatest': True,
                        },
                        {
                            'qualifierEncoded': qualifier_encoded,
                            'fieldName': 'fieldName2',
                            'type': 'type2',
                            'encoding': 'encoding2',
                        },
                    ],
                    'onlyReadLatest': False,
                }
            ],
        },
    }

    got_resource = ec.to_api_repr()

    self.assertEqual(got_resource, exp_resource)
def decrypt_image(self, img, input_type="jpg", output_type="textual", ocr=False, timeout=120):
    """
    Load a captcha image and decrypt it with OCR or an interactive client.

    :param img: raw image data (bytes)
    :param input_type: image format of `img` (e.g. "jpg", "png")
    :param output_type: 'textual' if text is written on the captcha
        or 'positional' for captchas where the user has to click
        on a specific region of the image
    :param ocr: if True, builtin OCR is used; if a string, it names the
        OCR plugin to load
    :param timeout: maximum seconds to wait for an interactive result
        (raised to at least 50)
    :return: result of decrypting, or None
    """
    result = None
    # Time-derived suffix keeps concurrent temp file names unique.
    time_ref = "{:.2f}".format(time.time())[-6:].replace(".", "")

    with open(
        os.path.join(
            self.pyload.tempdir,
            "captcha_image_{}_{}.{}".format(
                self.pyfile.plugin.__name__, time_ref, input_type),
        ),
        "wb",
    ) as img_f:
        img_f.write(img)

    if ocr:
        self.log_info(self._("Using OCR to decrypt captcha..."))
        if isinstance(ocr, str):
            _OCR = self.pyload.plugin_manager.load_class(
                "anticaptcha", ocr)  #: Rename `captcha` to `ocr` in 0.6.x
            result = _OCR(self.pyfile).recognize(img_f.name)
        else:
            result = self.recognize(img_f.name)

        if not result:
            self.log_warning(self._("No OCR result"))

    # Fall back to interactive decrypting when OCR is off or failed.
    if not result:
        captcha_manager = self.pyload.captcha_manager
        timeout = max(timeout, 50)
        try:
            params = {
                # BUG FIX: standard_b64encode() returns bytes; formatting
                # bytes into a str embedded "b'...'" inside the data URI.
                # Decode to ASCII so the URI carries valid base64 text.
                "src": "data:image/{};base64,{}".format(
                    input_type,
                    base64.standard_b64encode(img).decode("ascii")),
                "file": img_f.name,
                "captcha_plugin": self.__name__,
                "plugin": self.pyfile.plugin.__name__,
            }
            self.task = captcha_manager.new_task(input_type, params, output_type)
            captcha_manager.handle_captcha(self.task, timeout)
            # Poll until the task completes; check_status lets the plugin
            # abort (e.g. on user cancel) while we wait.
            while self.task.is_waiting():
                self.pyfile.plugin.check_status()
                time.sleep(1)
        finally:
            # Always unregister the task, even if polling raised.
            captcha_manager.remove_task(self.task)

        result = self.task.result

        if self.task.error:
            if not self.task.handler and not self.pyload.is_client_connected():
                self.log_warning(
                    self._("No Client connected for captcha decrypting"))
                self.fail(
                    self._("No Client connected for captcha decrypting"))
            else:
                self.pyfile.plugin.retry_captcha(msg=self.task.error)
        elif self.task.result:
            self.log_info(self._("Captcha result: `{}`").format(result))
        else:
            self.pyfile.plugin.retry_captcha(msg=self._(
                "No captcha result obtained in appropriate timing ({}s)").
                format(timeout))

    # Keep the temp image around in debug mode for inspection.
    if not self.pyload.debug:
        self.remove(img_f.name, try_trash=False)

    return result
def encode_base64_filter(string):
    """Template filter: Base64-encode *string* and return UTF-8 text."""
    encoded = standard_b64encode(string)
    return encoded.decode('utf-8')
def sign(self, message):
    """Return the Base64-encoded HMAC-SHA256 signature of *message*.

    :param message: text to sign (str); encoded as UTF-8 before hashing
    :return: signature as Base64 text (str)
    """
    digest = HMAC.new(self.sign_key, message.encode('utf-8'), SHA256).digest()
    return base64.standard_b64encode(digest).decode('utf-8')
def test_to_api_repr_bigtable(self):
    """to_api_repr() emits the full Bigtable options resource dict."""
    config = external_config.ExternalConfig("BIGTABLE")

    bigtable_opts = external_config.BigtableOptions()
    bigtable_opts.ignore_unspecified_column_families = True
    bigtable_opts.read_rowkey_as_string = False
    config._options = bigtable_opts

    col_family = external_config.BigtableColumnFamily()
    col_family.family_id = "familyId"
    col_family.type_ = "type"
    col_family.encoding = "encoding"
    col_family.only_read_latest = False

    # First column uses a string qualifier, second an encoded one.
    first_col = external_config.BigtableColumn()
    first_col.qualifier_string = "q"
    first_col.field_name = "fieldName1"
    first_col.type_ = "type1"
    first_col.encoding = "encoding1"
    first_col.only_read_latest = True

    second_col = external_config.BigtableColumn()
    second_col.qualifier_encoded = b"q"
    second_col.field_name = "fieldName2"
    second_col.type_ = "type2"
    second_col.encoding = "encoding2"

    col_family.columns = [first_col, second_col]
    bigtable_opts.column_families = [col_family]

    # The API representation base64-encodes binary qualifiers.
    b64_qualifier = base64.standard_b64encode(b"q").decode("ascii")
    want = {
        "sourceFormat": "BIGTABLE",
        "bigtableOptions": {
            "ignoreUnspecifiedColumnFamilies": True,
            "readRowkeyAsString": False,
            "columnFamilies": [
                {
                    "familyId": "familyId",
                    "type": "type",
                    "encoding": "encoding",
                    "columns": [
                        {
                            "qualifierString": "q",
                            "fieldName": "fieldName1",
                            "type": "type1",
                            "encoding": "encoding1",
                            "onlyReadLatest": True,
                        },
                        {
                            "qualifierEncoded": b64_qualifier,
                            "fieldName": "fieldName2",
                            "type": "type2",
                            "encoding": "encoding2",
                        },
                    ],
                    "onlyReadLatest": False,
                }
            ],
        },
    }

    got = config.to_api_repr()
    self.assertEqual(got, want)