def test_subscriptions_delete(self):
    """Subscribe to PVs, verify them, DELETE all subscriptions, verify none remain."""
    pvnames = ("test:long1", "test:long2")
    modedefs = ({"mode": "Monitor", "delta": 1.1},
                {"mode": "Scan", "period": 0.1})
    self._subscribe_to_many(pvnames, modedefs)

    # Confirm all subscriptions are present before deleting.
    response = self.fetch('/subscriptions/')
    self.assertEqual(response.code, 200)
    self.assertFalse(response.error)
    responses = json_decode(response.body)['response']
    self.assertEqual(len(responses), len(pvnames))
    self.assertItemsEqual(responses.keys(), pvnames)
    for pvname, modedef in zip(pvnames, modedefs):
        self.assertEqual(responses[pvname]['name'], pvname)
        self.assertEqual(responses[pvname]['mode'],
                         SubscriptionMode.parse(modedef))

    # delete now
    response = self.fetch('/subscriptions/', method='DELETE')

    # all subscriptions should be gone now
    response = self.fetch('/subscriptions/')
    self.assertEqual(response.code, 200)
    self.assertFalse(response.error)
    responses = json_decode(response.body)['response']
    self.assertEqual(len(responses), 0)
def test_subscriptions_put(self):
    """Create a subscription via PUT and verify it; then PUT a new mode and re-verify."""
    # create subscription, check it. Then modify it and recheck
    pvname = "test:long1"
    mode1 = {"mode": "Monitor", "delta": 1.1}
    mode2 = {"mode": "Scan", "period": 1.1}

    def put_and_verify(modedef):
        # PUT the mode, then GET the subscription back and compare.
        reqbody = json_encode({'mode': modedef})
        response = self.fetch('/subscriptions/' + pvname,
                              method='PUT', body=reqbody)
        self.assertEqual(response.code, 200)
        self.assertFalse(response.error)

        response = self.fetch('/subscriptions/' + pvname)
        self.assertEqual(response.code, 200)
        self.assertFalse(response.error)
        res = json_decode(response.body)['response']
        self.assertEqual(pvname, res['name'])
        self.assertEqual(res['mode'], SubscriptionMode.parse(modedef))

    put_and_verify(mode1)
    # modify it
    put_and_verify(mode2)
def test_get_statuses(self):
    """GET /statuses/ is empty initially, then reports connected, timely PVs."""
    # without pvname glob
    response = self.fetch('/statuses/')
    self.assertEqual(response.code, 200)
    self.assertFalse(response.error)
    body = json_decode(response.body)
    self.assertTrue(body['status']['success'])
    self.assertEqual(body['response'], [])

    # now with some data
    pvnames = ("test:long1", "test:double1")
    modedefs = ({"mode": "Monitor", "delta": 1.1},
                {"mode": "Scan", "period": 0.1})
    self._subscribe_to_many(pvnames, modedefs)

    response = self.fetch('/statuses/')
    self.assertEqual(response.code, 200)
    self.assertFalse(response.error)
    body = json_decode(response.body)
    self.assertTrue(body['status']['success'])

    # Status timestamps are in microseconds — presumably; verify against server code.
    ts = time.time() * 1e6
    statuses = body['response']
    self.assertEqual(len(statuses), len(pvnames))
    for status in statuses:
        last = status[-1]
        self.assertTrue(last['connected'])
        self.assertGreaterEqual(ts, last['timestamp'])
def template_get(self, template):
    """Fetch the full Template matching *template*'s account/name.

    Returns a new Template built from the API detail response, or None when
    no match exists or a network error occurs (errors are printed).

    Raises:
        TypeError: if *template* is not a Template instance.
    """
    if not isinstance(template, Template):
        # BUG FIX: the TypeError was constructed but never raised, so the
        # type check silently did nothing.
        raise TypeError('template is not of type Template')
    query = {
        'account': template.user,
        'name': template.name,
    }
    try:
        r = urllib.request.Request('%s/api/templates.json?%s' % (
            self._urlbase, urllib.parse.urlencode(query)))
        u = self._opener.open(r)
        template_summary = json_decode(u.read().decode('utf-8'))
        if len(template_summary):
            # we only have one returned since template names are unique per account
            r = urllib.request.Request('%s/api/template/%d.json' % (
                self._urlbase, template_summary[0]['id']))
            u = self._opener.open(r)
            return Template(template=json_decode(u.read().decode('utf-8')))
    # BUG FIX: HTTPError is a subclass of URLError, so it must be caught
    # first — the original order made the HTTPError clause unreachable.
    except urllib.error.HTTPError as e:
        print(e)
    except urllib.error.URLError as e:
        print(e)
    return None
def _decryptV1(self):
    """Decrypt a version-1 (SJCL) paste, populating text and optional attachment."""
    from sjcl import SJCL
    from json import loads as json_decode

    password = self.__preparePassKey()
    cipher_text = json_decode(self._data['data'])
    if self._debug:
        print("Text:\t{}\n".format(cipher_text))

    plaintext = SJCL().decrypt(cipher_text, password)
    if len(plaintext):
        if self._debug:
            print("Decoded Text:\t{}\n".format(plaintext))
        self._text = self.__decompress(plaintext.decode())

    # An attachment is only present when both fields are in the payload.
    if 'attachment' in self._data and 'attachmentname' in self._data:
        cipherfile = json_decode(self._data['attachment'])
        cipherfilename = json_decode(self._data['attachmentname'])
        if self._debug:
            print("Name:\t{}\nData:\t{}".format(cipherfilename, cipherfile))
        attachment = SJCL().decrypt(cipherfile, password)
        attachmentname = SJCL().decrypt(cipherfilename, password)
        self._attachment = self.__decompress(
            attachment.decode('utf-8')).decode('utf-8')
        self._attachment_name = self.__decompress(
            attachmentname.decode('utf-8')).decode('utf-8')
def test_subscriptions_get(self):
    """GET /subscriptions returns an empty envelope, then every subscribed PV."""
    # this first fetch with a trailing slash
    response = self.fetch('/subscriptions/')
    self.assertEqual(response.code, 200, response.error)
    # empty
    self.assertEqual(json_decode(response.body),
                     envelope(True, 200, 'ok', {}))

    # test alphabetical ordering of returned pvs
    pvnames = ("test:long1", "test:double1", "test:double2", "test:long3")
    modedefs = (
        {"mode": "Monitor", "delta": 1.1},
        {"mode": "Scan", "period": 0.1},
        {"mode": "Scan", "period": 0.2},
        {"mode": "Scan", "period": 0.3},
    )
    self._subscribe_to_many(pvnames, modedefs)

    # second fetch WITHOUT a trailing slash
    response = self.fetch('/subscriptions')
    self.assertEqual(response.code, 200)
    self.assertFalse(response.error)
    responses = json_decode(response.body)['response']
    self.assertEqual(len(responses), len(pvnames))
    self.assertItemsEqual(responses.keys(), pvnames)
    for pvname, modedef in zip(pvnames, modedefs):
        self.assertEqual(responses[pvname]['name'], pvname)
        self.assertEqual(responses[pvname]['mode'],
                         SubscriptionMode.parse(modedef))
def repository_find(self, stub=None, arch=None, version=None, url=None): query = {} if stub is not None: query['s'] = stub try: r = urllib2.Request('%s/api/repositories.json?%s' % ( self._urlbase, urllib.urlencode(query) )) u = self._opener.open(r) repo_summary = json_decode( u.read() ) if len( repo_summary ): query = {} if arch is not None: query['a'] = arch if version is not None: query['v'] = version if url is not None: query['u'] = url r = urllib2.Request('%s/api/repository/%d.json?%s' % ( self._urlbase, repo_summary[0]['id'], urllib.urlencode(query) )) u = self._opener.open(r) repo = Repository() repo.fromObject( json_decode( u.read() ) ) return repo except urllib2.URLError, e: print e
def decrypt(self):
    """Decrypt the paste payload held in ``self._data``.

    Version 2 pastes use AES-GCM parameters carried in ``adata``; older
    pastes use SJCL with an optionally password-strengthened key.  Sets
    ``self._text`` and, when present, ``self._attachment`` /
    ``self._attachment_name``.
    """
    from json import loads as json_decode

    if self._version == 2:
        iv = b64decode(self._data['adata'][0][0])
        salt = b64decode(self._data['adata'][0][1])
        key = self.__deriveKey(salt)

        # Get compression type from received paste
        self._compression = self._data['adata'][0][7]

        cipher = self.__initializeCipher(key, iv, self._data['adata'])
        # Cut the cipher text into message and tag
        cipher_text_tag = b64decode(self._data['ct'])
        cipher_text = cipher_text_tag[:-CIPHER_TAG_BYTES]
        cipher_tag = cipher_text_tag[-CIPHER_TAG_BYTES:]
        cipher_message = json_decode(
            self.__decompress(
                cipher.decrypt_and_verify(cipher_text, cipher_tag)).decode())

        self._text = cipher_message['paste'].encode()
        if 'attachment' in cipher_message and 'attachment_name' in cipher_message:
            self._attachment = cipher_message['attachment']
            self._attachment_name = cipher_message['attachment_name']
    else:
        from hashlib import sha256
        from sjcl import SJCL

        if self._password:
            digest = sha256(self._password.encode("UTF-8")).hexdigest()
            password = b64encode(self._key) + digest.encode("UTF-8")
        else:
            password = b64encode(self._key)

        cipher_text = json_decode(self._data['data'])
        if self._debug:
            # BUG FIX: this previously printed an undefined name `data`,
            # raising NameError whenever debug output was enabled.
            print("Text:\t{}\n".format(cipher_text))

        text = SJCL().decrypt(cipher_text, password)
        if len(text):
            self._text = self.__decompress(text.decode())

        if 'attachment' in self._data and 'attachmentname' in self._data:
            cipherfile = json_decode(self._data['attachment'])
            cipherfilename = json_decode(self._data['attachmentname'])
            if self._debug:
                print("Name:\t{}\nData:\t{}".format(
                    cipherfilename, cipherfile))
            attachment = SJCL().decrypt(cipherfile, password)
            attachmentname = SJCL().decrypt(cipherfilename, password)
            self._attachment = self.__decompress(
                attachment.decode('utf-8')).decode('utf-8')
            self._attachment_name = self.__decompress(
                attachmentname.decode('utf-8')).decode('utf-8')
def request(path, content=None, method='GET'):
    """Call the almar object API for the dotted *path*; POST when *content* is given.

    Returns the decoded JSON body; on HTTP errors the decoded error body.
    NOTE: Python 2 code (urllib2).
    """
    from os.path import join
    from urllib2 import urlopen, HTTPError

    request_url = join(ALMAR_SERVER, 'object', *(path.split('.')))
    try:
        if content:
            response = urlopen(request_url, content)
        else:
            response = urlopen(request_url)
        return json_decode(response.read())
    except HTTPError as e:
        # Error responses also carry a JSON body.
        return json_decode(e.read())
def _call(self, cmd, payload=None, headers=None):
    """Send *cmd* (optionally with a form payload) to the API and return
    the decoded JSON response.

    Raises AccessDeniedException (403), ValueError (400/413) or
    OverflowError (503); other HTTP errors yield an empty dict.
    """
    if headers is None:
        headers = {}
    headers['Accept'] = HTTP_RESPONSE_TYPE
    headers['User-Agent'] = API_VERSION
    if hasattr(payload, 'items'):
        # Mapping payloads are form-encoded before sending.
        payload = urllib.parse.urlencode(payload)
        self._log('SEND', '%s %d %s' % (cmd, len(payload), payload))
    if payload is not None:
        headers['Content-Length'] = len(payload)
    try:
        api_request = urllib.request.Request(
            HTTP_BASE_URL + '/' + cmd.strip('/'),
            data=payload, headers=headers)
        response = self.opener.open(api_request).read()
    except urllib.error.HTTPError as err:
        if 403 == err.code:
            raise AccessDeniedException(
                'Access denied, please check your credentials and/or balance')
        elif 400 == err.code or 413 == err.code:
            raise ValueError(
                "CAPTCHA was rejected by the service, check if it's a valid image")
        elif 503 == err.code:
            raise OverflowError(
                "CAPTCHA was rejected due to service overload, try again later")
    else:
        self._log('RECV', '%d %s' % (len(response), response))
        try:
            return json_decode(str(response, 'utf-8'))
        except Exception:
            raise RuntimeError('Invalid API response')
    return {}
def _send(self, channel, message):
    """Decode the queued *message* body as JSON and relay its "message"
    field to *channel*; invalid JSON is logged and dropped.
    """
    try:
        body = json_decode(message.content.body)
        # FIX: use a new local instead of clobbering the `message` parameter,
        # so the except handler always sees the original queue message.
        encoded = self._encode(channel, body["message"])
        self.msg(channel, encoded)
    except ValueError:
        # FIX: logging.warn is a deprecated alias; use warning() with
        # lazy %-style arguments.
        logging.warning("invalid incoming message: %s", message.content.body)
def _call(self, cmd, payload=None, headers=None):
    """Issue an API request for *cmd* and return the decoded JSON body.

    HTTP 403/400/413/503 map to AccessDeniedException / ValueError /
    OverflowError; any other HTTP error returns an empty dict.
    """
    if headers is None:
        headers = {}
    headers['Accept'] = HTTP_RESPONSE_TYPE
    headers['User-Agent'] = API_VERSION
    if hasattr(payload, 'items'):
        payload = urllib.parse.urlencode(payload)
        self._log('SEND', '%s %d %s' % (cmd, len(payload), payload))
    if payload is not None:
        headers['Content-Length'] = len(payload)

    url = HTTP_BASE_URL + '/' + cmd.strip('/')
    try:
        response = self.opener.open(
            urllib.request.Request(url, data=payload, headers=headers)).read()
    except urllib.error.HTTPError as err:
        code = err.code
        if code == 403:
            raise AccessDeniedException(
                'Access denied, please check your credentials and/or balance')
        elif code in (400, 413):
            raise ValueError(
                "CAPTCHA was rejected by the service, check if it's a valid image")
        elif code == 503:
            raise OverflowError(
                "CAPTCHA was rejected due to service overload, try again later")
    else:
        self._log('RECV', '%d %s' % (len(response), response))
        try:
            return json_decode(str(response, 'utf-8'))
        except Exception:
            raise RuntimeError('Invalid API response')
    return {}
def process_message(record):
    """Turn one queue record into a list of Timestream-style measure dicts.

    Records missing the Date/Time attributes yield an empty list.
    """
    message = json_decode(record['body'])

    # Skip messages that miss the required attributes
    if 'Date' not in message or 'Time' not in message:
        return []

    parsed = datetime.strptime(
        '{} {} {}'.format(message['Date'], message['Time'],
                          CURRENT_UTC_OFFSET),
        TIME_FORMAT)
    milliseconds = int(parsed.timestamp() * 1000)

    return [{
        'Dimensions': [{
            'Name': dimension['Name'],
            'Value': message[dimension['Name']],
            'DimensionValueType': dimension['Type'],
        } for dimension in DIMENSION_ATTRIBUTES
            if dimension['Name'] in message],
        'MeasureName': measure['Name'],
        'MeasureValue': message[measure['Name']],
        'MeasureValueType': measure['Type'],
        'Time': str(milliseconds),
        'TimeUnit': 'MILLISECONDS',
    } for measure in MEASURE_ATTRIBUTES if measure['Name'] in message]
def STATIC(ctx, path, cache={}, prefix=STATIC_PATH):
    """Resolve *path* through assets.json to a protocol-relative asset URL.

    NOTE: the mutable default `cache` is intentional — it memoizes lookups
    across calls so assets.json is read at most once per path.
    """
    try:
        return cache[path]
    except KeyError:
        pass
    assets = json_decode(read('assets.json'))
    return cache.setdefault(
        path, "//%s%s%s" % (ctx.host, prefix, assets[path]))
def update_post(_id):
    """Update a post body after validating content type, body and checksum."""
    if request.content_type != 'application/json':
        return jsonify({'error': 1, 'message': 'invalid content type'}), 400
    data = json_decode(request.data)

    if 'body' not in data or not isinstance(data['body'], str):
        return jsonify({'error': 1, 'message': 'invalid content'}), 400
    _body = data['body']

    if 'checksum' not in data or not isinstance(data['checksum'], str)\
            or match('^[0-9a-f]{20}$', data['checksum']) == None:
        return jsonify({'error': 1, 'message': 'invalid checksum'}), 400
    _checksum = data['checksum']

    with dbconn() as db:
        c = db.cursor()
        c.execute('SELECT body FROM post WHERE id=?;', (_id, ))
        post = c.fetchone()
        if hash(post[0]) == _checksum:
            #raise Exception # RESTler's imaginary subtle bug
            c.execute('UPDATE post SET body=? WHERE id=?;', (_body, _id))
            if c.rowcount != 1:
                # should not be reach
                return jsonify({'error': 1, 'message': 'update error'}), 500
        else:
            return jsonify({'error': 1, 'message': 'invalid checksum'}), 400
    return jsonify({'error': 0}), 201
def main():
    """Answer the cryptohack encoding challenge until the flag arrives."""
    cnx = remote("socket.cryptohack.org", 13377)

    # One decoder per encoding type announced by the server.
    decoders = {
        "base64": lambda enc: b64decode(enc).decode(),
        "hex": lambda enc: unhexlify(enc).decode(),
        "bigint": lambda enc: long_to_bytes(int(enc, base=16)).decode(),
        "rot13": lambda enc: codecs.decode(enc, "rot_13"),
        "utf-8": lambda enc: "".join([chr(i) for i in enc]),
    }

    while True:
        remote_input = json_decode(cnx.recvline().decode().strip("\n"))
        if "flag" in remote_input:
            print(remote_input["flag"])
            break
        if "type" in remote_input:
            print(remote_input)
            kind = remote_input["type"]
            if kind in decoders:
                cnx.send(get_response(decoders[kind](remote_input["encoded"])))
def extract_header(in_path):
    """Extracts the gma header, closing the file and returning the information."""
    header, handle = get_header(in_path)
    handle.close()
    # Decode the embedded JSON metadata in place.
    header['in_json'] = json_decode(header['info'])
    return header
def hyperion_profile_update(application, account):
    """Resolve (or mint) the hyperion id for (application, account) and
    refresh the stored demographics and keywords from the request payload.
    NOTE: Python 2 code (iteritems/basestring).
    """
    hyperion_id = db.hget('al:%s' % application, account)

    if hyperion_id is None:
        # Try to find this user through any of the linked services.
        log.debug('Starting reverse lookup for (%s,%s)', application, account)
        for service, meta in (request.json or {}).iteritems():
            uid = meta.get('id')
            hyperion_id = db.hget('sl:%s' % service, uid)
            if hyperion_id is not None:
                log.debug('Found match of (%s,%s) to %s',
                          service, uid, hyperion_id)
                break
        if hyperion_id is None:
            hyperion_id = uuid4()
        db.hset('al:%s' % application, account, hyperion_id)
        db.hset('hm:%s' % hyperion_id, application, account)

    demographics = {'timestamp': int(time())}
    keywords = set()
    for service, meta in (request.json or {}).iteritems():
        uid = meta.get('id')
        # Decode values that look like JSON-quoted strings; leave others as-is.
        meta = dict(
            (k, json_decode(v)
             if isinstance(v, basestring) and v != '' and (v[0] in ('\'', '"'))
             else v)
            for k, v in meta.iteritems())
        db.hset('am:%s:%s' % (application, account), service, uid)
        db.hset('sl:%s' % service, uid, hyperion_id)
        demographics.update(
            ServiceRegistry.get(service, 'noop').normalize(meta))
        keywords.update(ServiceRegistry.get(service, 'noop').keywords(meta))

    db.hmset('dd:%s:%s' % (application, account), demographics)
    if keywords:
        db.delete('kw:%s:%s' % (application, account))
        db.sadd('kw:%s:%s' % (application, account), *keywords)
    return Response(status=200)
def test_subscriptions_post(self):
    """POST several subscriptions and verify envelope status and per-PV modes."""
    pvnames = ("test:long1", "test:long2")
    modedefs = ({"mode": "Monitor", "delta": 1.1},
                {"mode": "Scan", "period": 0.1})
    self._subscribe_to_many(pvnames, modedefs)

    # read them back
    response = self.fetch('/subscriptions/')
    self.assertEqual(response.code, 200)
    self.assertFalse(response.error)

    res = [{'pvname': pvname, 'mode': SubscriptionMode.parse(modedef)}
           for pvname, modedef in zip(pvnames, modedefs)]
    expected = envelope(True, 200, 'ok', res)
    decodedbody = json_decode(response.body)
    self.assertEqual(decodedbody['status'], expected['status'])

    by_name = dict(zip(pvnames, modedefs))
    responses = decodedbody['response']
    self.assertEqual(len(responses), len(pvnames))
    for pvname, pvdict in responses.iteritems():
        self.assertEqual(pvname, pvdict['name'])
        self.assertIn(pvdict['name'], pvnames)
        self.assertEqual(pvdict['mode'],
                         SubscriptionMode.parse(by_name[pvdict['name']]))
def parse_action_requirement(ar):
    """
    Parses an action-requirement mapping based on the type of requirement.

    Returns a one-entry dict keyed by the requirement name, holding the
    condition and the required value(s) (bool, int, or list of enum values).
    """
    from dream.engine.soccer.models import Requirement

    requirement_type = ar.requirement.type
    required_values = None

    if requirement_type == Requirement.TYPE_BOOL:
        # IDIOM FIX: `True if cond else False` is just the comparison itself.
        required_values = (ar.value == Requirement.VAL_BOOL_TRUE)
    elif requirement_type == Requirement.TYPE_INT:
        required_values = int(ar.value)
    elif requirement_type == Requirement.TYPE_ENUM:
        from json import loads as json_decode
        # IDs of values that are required
        value_ids = json_decode(ar.value)
        # Getting the actual values from IDs
        enum_values = ar.requirement.enum_values(value_ids)
        required_values = [ev.value for ev in enum_values]

    req_key = ar.requirement.name
    req_data = {
        'condition': ar.condition,
        'required_values': required_values,
    }
    return {req_key: req_data}
def control_panel(request, username):
    """Handle password-change POSTs for authenticated users; otherwise
    render the control panel with the user's points."""
    msg = ''
    if request.user.is_authenticated:
        if request.method == "POST":
            content = json_decode(request.body)
            old_password = content['old_password']
            new_password = content['new_password']
            confirm_password = content['confirm_password']

            if is_ratelimited(request):
                msg = "You're submitting too fast."
            elif old_password == '' or new_password == '':
                msg = "Passwords can't be blank."
            elif old_password != new_password and new_password == confirm_password:
                if request.user.check_password(old_password):
                    request.user.set_password(new_password)
                    request.user.save()
                    # Keep the current session valid after the change.
                    update_session_auth_hash(request, request.user)
                    msg = "Password updated successfully."
                else:
                    msg = "Password does not match your old password."
            elif old_password == new_password:
                msg = "New password is the same as the old password."
            elif new_password != confirm_password:
                msg = "New passwords must match."
            return JsonResponse({'ok': True, 'msg': msg})

    points = User.objects.get(username=username).points
    return render(request, 'control_panel.html',
                  {'points': points, 'username': username})
def hint_admin(request, challenge_name):
    """Create/edit a hint for a challenge (POST) or list its hints (GET).

    Requires a staff/superuser account with two-factor auth enabled.
    """
    if not (request.user.is_staff or request.user.is_superuser):
        return JsonResponse({'OK': False})
    elif not request.user.tfa_enabled:
        return JsonResponse({'OK': False})

    if request.method == "POST":
        content = json_decode(request.body)
        if content['id'] == 0:
            # id == 0 signals "create a new hint".
            hint = Hints.objects.create(
                description=content['description'],
                level=content['level'],
                challenge_id=Challenges.objects.get(name=challenge_name).id,
            )
            hint.save()
        else:
            hint = Hints.objects.get(pk=content['id'])
            hint.description = content['description']
            hint.level = content['level']
            hint.save()
        return JsonResponse({'OK': True})

    challenge_hints = Hints.objects.filter(challenge__name=challenge_name)
    num_hints = challenge_hints.count()
    return JsonResponse({'hints': jsonify_queryset(challenge_hints),
                         'len': num_hints})
def awaitUnpackResponse(self, response):
    """Validate an RPC response, interactively fetch any remaining parts,
    reassemble them, and return the decoded result."""
    def validateInput(raw):
        # Must decode to a JSON object; anything else aborts the exchange.
        try:
            decoded = json_decode(raw)
            assert type(decoded) is dict
        except:
            #raise RuntimeError("Request cancelled or invalid response.")
            raise RuntimeError(raw)
        return decoded

    response = validateInput(response)
    rpcId = response['id']
    rpc = response['rpc'] if 'rpc' in response else {}

    if 'firstPart' in rpc:
        # Multi-part response: keep requesting chunks until none remain.
        command = {"method": "rpc.nextPart", "params": {"rpcId": rpcId}}
        parts = [response['result']]
        while rpc['partsRemaining'] > 0:
            self.send(command, None, True)
            partialResponse = validateInput(input(
                'Got partial response, awaiting more... (Press <enter> to cancel)'
            ))
            parts.append(partialResponse['result'])
            rpc = partialResponse['rpc']
        response = json_decode("".join(parts))

    return self.decodeResponse(response)
def _call(self, cmd, payload=None, headers=None, files=None):
    """POST (when a payload/files is present) or GET *cmd* via requests
    and return the decoded JSON response.

    Raises:
        AccessDeniedException: HTTP 403.
        ValueError: HTTP 400/413 (rejected CAPTCHA).
        OverflowError: HTTP 503 (service overload).
        RuntimeError: any other non-OK status or undecodable body.
    """
    if headers is None:
        headers = {}
    if not payload:
        payload = {}
    headers['Accept'] = HTTP_RESPONSE_TYPE
    headers['User-Agent'] = API_VERSION
    self._log('SEND', '%s %d %s' % (cmd, len(payload), payload))

    url = HTTP_BASE_URL + '/' + cmd.strip('/')
    if payload:
        response = requests.post(url, data=payload, files=files,
                                 headers=headers)
    else:
        response = requests.get(url, headers=headers)

    status = response.status_code
    if 403 == status:
        raise AccessDeniedException('Access denied, please check'
                                    ' your credentials and/or balance')
    elif status in (400, 413):
        raise ValueError("CAPTCHA was rejected by the service, check"
                         " if it's a valid image")
    elif 503 == status:
        raise OverflowError("CAPTCHA was rejected due to service"
                            " overload, try again later")
    if not response.ok:
        raise RuntimeError('Invalid API response')

    self._log('RECV', '%d %s' % (len(response.text), response.text))
    try:
        return json_decode(response.text)
    except Exception:
        # FIX: removed the unreachable trailing `return {}` — every path
        # here either returns the decoded body or raises.
        raise RuntimeError('Invalid API response')
def login_with_oauth(request, client_id, client_secret, oauth_url):
    """Drive the OAuth flow based on which request params are present.

    Returns a token URL (code leg), the decoded resource info or False
    (token leg), or the authorize URL (initial leg).
    NOTE: Python 2 Django code (request.REQUEST / has_key).
    """
    if request.REQUEST.has_key('code'):
        # Leg 2: exchange the authorization code for a token.
        data = {
            'client_id': client_id,
            'client_secret': client_secret,
            'grant_type': 'authorization_code',
            'code': request.REQUEST['code'],
        }
        return oauth_url + '/token.php?' + http_build_query(data)

    if request.REQUEST.has_key('access_token'):
        # Leg 3: use the access token to fetch the protected resource.
        access_token = request.REQUEST['access_token']
        req = Request(oauth_url + '/resource.php',
                      urlencode({'oauth_token': access_token}))
        info = json_decode(urlopen(req).read())
        if info:
            return info
        return False

    # Leg 1: send the user to the authorization endpoint.
    array = {'client_id': client_id, 'response_type': 'code'}
    return oauth_url + '/authorize.php?' + http_build_query(array)
def hyperion_profile_update(application, account):
    """Look up (or create) the hyperion id for this app/account pair, then
    rebuild the stored demographics and keyword sets from request.json.
    NOTE: Python 2 code (iteritems/basestring).
    """
    account_key = 'al:%s' % application
    hyperion_id = db.hget(account_key, account)

    if hyperion_id is None:
        log.debug('Starting reverse lookup for (%s,%s)', application, account)
        for service, meta in (request.json or {}).iteritems():
            uid = meta.get('id')
            hyperion_id = db.hget('sl:%s' % service, uid)
            if hyperion_id is not None:
                log.debug('Found match of (%s,%s) to %s',
                          service, uid, hyperion_id)
                break
        if hyperion_id is None:
            # No linked service knew this user — mint a fresh id.
            hyperion_id = uuid4()
        db.hset(account_key, account, hyperion_id)
        db.hset('hm:%s' % hyperion_id, application, account)

    demographics = {'timestamp': int(time())}
    keywords = set()
    for service, meta in (request.json or {}).iteritems():
        uid = meta.get('id')
        # Unwrap values that arrive as JSON-quoted strings.
        meta = dict(
            (k, json_decode(v)
             if isinstance(v, basestring) and v != '' and (v[0] in ('\'', '"'))
             else v)
            for k, v in meta.iteritems())
        db.hset('am:%s:%s' % (application, account), service, uid)
        db.hset('sl:%s' % service, uid, hyperion_id)
        normalizer = ServiceRegistry.get(service, 'noop')
        demographics.update(normalizer.normalize(meta))
        keywords.update(ServiceRegistry.get(service, 'noop').keywords(meta))

    db.hmset('dd:%s:%s' % (application, account), demographics)
    if keywords:
        keyword_key = 'kw:%s:%s' % (application, account)
        db.delete(keyword_key)
        db.sadd(keyword_key, *keywords)
    return Response(status=200)
def create_new_club(self, query):
    """Create a demo club account from the 'new-club' form payload in *query*.

    Returns {'is_error': False} on success, or an error dict with 'message'.
    """
    from django.utils.html import escape
    from django.utils.text import slugify
    from dream.site.services import SignupService

    form_data = json_decode(query['new-club'])

    manager_name = escape(form_data['manager-name'])
    username = slugify(manager_name).replace('-', '_')
    data = {
        'manager_name': manager_name,
        'username': username,
        'email': username.replace('-', '') + '@dream11.io',
        'club_name': escape(form_data['club-name']),
        'country': 1,  # Assumed to be "intl"
        'password': '******',
    }

    response = {'is_error': False}
    try:
        service = SignupService()
        service.create_account(data)
    except Exception as e:
        response['is_error'] = True
        response['message'] = str(e)
    return response
def scrape_issuers(cusip, soup):
    """Extract issuer/issue rows from the JSON embedded in the page script.

    Returns a list of OrderedDicts, or None when the JSON cannot be found
    or parsed.
    """
    PATTERN = r'(?P<a>pdata.issuerIssuesJson)( = )(?P<b>\[.*\])'
    try:
        groups = re_search(PATTERN, soup.text).groupdict()
        issuer_json = json_decode(groups.get('b'))
    except Exception as e:
        # Page layout changed or JSON is malformed — nothing to scrape.
        return None

    results = []
    for issue in issuer_json:
        entry = OrderedDict()
        entry['issuer_name'] = soup.find('div', {
            'class': ['card', 'grey-band', 'grey-header']
        }).find('h3').text
        entry['issuer_cusip'] = cusip
        entry['Issue_ID'] = issue['IID']
        entry['issue_desc'] = issue['IDES']
        entry['issue_date'] = issue['DDT']
        entry['maturity_dates'] = issue['MDR']
        # Add dict to temporary storage
        results.append(entry)
    return results
def validateInput(response):
    """Decode *response* as JSON and require the result to be a dict.

    Returns the decoded dict.

    Raises:
        RuntimeError: when decoding fails or the payload is not a JSON object.
    """
    try:
        response = json_decode(response)
        assert type(response) is dict
    # FIX: a bare `except:` also swallowed SystemExit/KeyboardInterrupt;
    # narrowed to Exception so interpreter-exit signals still propagate.
    except Exception:
        raise RuntimeError("Request cancelled or invalid response.")
    return response
def _call(self, cmd, payload=None, headers=None, files=None):
    """Call the HTTP API via requests and return the decoded JSON response.

    POSTs when a payload/files is given, otherwise GETs.

    Raises:
        AccessDeniedException: HTTP 403.
        ValueError: HTTP 400/413 (rejected CAPTCHA).
        OverflowError: HTTP 503 (service overload).
        RuntimeError: any other non-OK status or undecodable body.
    """
    if headers is None:
        headers = {}
    if not payload:
        payload = {}
    headers['Accept'] = HTTP_RESPONSE_TYPE
    headers['User-Agent'] = API_VERSION
    self._log('SEND', '%s %d %s' % (cmd, len(payload), payload))

    if payload:
        response = requests.post(HTTP_BASE_URL + '/' + cmd.strip('/'),
                                 data=payload, files=files, headers=headers)
    else:
        response = requests.get(HTTP_BASE_URL + '/' + cmd.strip('/'),
                                headers=headers)

    status = response.status_code
    if 403 == status:
        raise AccessDeniedException('Access denied, please check'
                                    ' your credentials and/or balance')
    elif status in (400, 413):
        raise ValueError("CAPTCHA was rejected by the service, check"
                         " if it's a valid image")
    elif 503 == status:
        raise OverflowError("CAPTCHA was rejected due to service"
                            " overload, try again later")
    if not response.ok:
        raise RuntimeError('Invalid API response')

    self._log('RECV', '%d %s' % (len(response.text), response.text))
    try:
        return json_decode(response.text)
    except Exception:
        # FIX: dropped the dead `return {}` that followed — this try block
        # always returns or raises, so it could never be reached.
        raise RuntimeError('Invalid API response')
def connect(self):
    """
    attempts to connect to the Omegle server. returns a deferred that
    will fire when we've established a connection
    """
    if self.status != DISCONNECTED:
        raise AlreadyRunningError()

    self.userAgent = getRandomUserAgent()
    self.status = CONNECTING
    """
    print 'connecting to omegle...'
    homePage = yield self.getPage('http://omegle.com/')
    print 'got page, searching for server'
    with open('markup.html', 'w') as f:
        f.write(homePage)
    match = self._serverRegex.search(homePage)
    if not match:
        raise ValueError("Could not find a server to connect to!")
    else:
        self.server = match.group(1)
    """
    # Server discovery (above) is disabled; a fixed front-end is used instead.
    self.server = 'http://front2.omegle.com/'
    id = yield self.getPage(
        "start?rcs=1&spid=&randid=%s" % self._get_rand_id())
    self.id = json_decode(id)
    self.status = WAITING
    self.doEvents()
    returnValue((self.id, self.server))
def unpackResponse(self, response):
    """Accumulate a possibly multi-part RPC response.

    Returns the next `rpc.nextPart` command while parts remain, otherwise
    the fully decoded response.
    """
    response = json_decode(response)
    rpc = response.get('rpc', {})

    if 'partsRemaining' not in rpc:
        # Single-part response — decode it directly.
        return self.decodeResponse(response)

    rpcId = response['id']
    if 'firstPart' in rpc:
        self.partialResponses[rpcId] = [response['result']]
    else:
        self.partialResponses[rpcId].append(response['result'])

    if rpc['partsRemaining'] > 0:
        # Ask the server for the next chunk.
        return {"method": "rpc.nextPart", "params": {"rpcId": rpcId}}

    # Complete!
    parts = self.partialResponses.pop(rpcId)
    return self.decodeResponse(json_decode("".join(parts)))
def deploy_dataset(model_bim: Mapping, dataset_name: str = None, workspace: str = None):
    """Create a pushable dataset (or update the metadata and schema for existing
    tables) in Power BI Service by a `Tabular Model
    <https://github.com/otykier/TabularEditor/wiki/Power-BI-Desktop-Integration>`__
    ``.bim`` file.
    """
    if isinstance(model_bim, str):
        # Accept either a path to a .bim file or raw JSON text.
        looks_like_path = (8 < len(model_bim) < 200
                           and model_bim[-4:].lower() == '.bim'
                           and os_path.exists(model_bim))
        if looks_like_path:
            with open(model_bim, 'r') as bim_file:
                model_bim = bim_file.read()
        model_bim = json_decode(model_bim)

    if not isinstance(model_bim, Mapping):
        raise ValueError(f"model_bim argument is not a valid JSON")
    if not dataset_name:
        dataset_name = model_bim.get("name")
    if not dataset_name:
        raise ValueError(f"dataset_name argument is missing")

    access_token = get_accesstoken()
    pd_mgmt = PushDatasetsMgmt(access_token)
    return pd_mgmt.deploy_dataset(model_bim, dataset_name, workspace)
def recv_data(client):
    """Read one websocket frame from *client*, unmask it, and return the
    JSON-decoded payload (or the escaped text when it is not JSON).
    NOTE: Python 2 code (byte-string indexing, cgi.escape).
    """
    data = client.recv(1024)
    if not data:
        return None

    # Payload length: 7 bits, with 16/64-bit extended forms.
    length = ord(data[1]) & 127
    p = 2
    if length == 126:
        length = struct.unpack(">H", data[p:p + 2])[0]
        p += 2
    elif length == 127:
        length = struct.unpack(">Q", data[p:p + 8])[0]
        p += 8

    masks = [ord(byte) for byte in data[p:p + 4]]
    p += 4

    # XOR-unmask the payload.
    decoded = ""
    for char in data[p:p + length]:
        decoded += chr(ord(char) ^ masks[len(decoded) % 4])
    decoded = cgi.escape(decoded)
    try:
        return json_decode(decoded)
    except:
        return decoded
def _decryptV2(self):
    """Decrypt a version-2 (AES-GCM) paste using parameters carried in adata."""
    from json import loads as json_decode

    adata = self._data['adata'][0]
    iv = b64decode(adata[0])
    salt = b64decode(adata[1])
    self._iteration_count = adata[2]
    self._block_bits = adata[3]
    self._tag_bits = adata[4]
    cipher_tag_bytes = int(self._tag_bits / 8)
    key = self.__deriveKey(salt)

    # Get compression type from received paste
    self._compression = adata[7]

    cipher = self.__initializeCipher(key, iv, self._data['adata'],
                                     cipher_tag_bytes)
    # Cut the cipher text into message and tag
    cipher_text_tag = b64decode(self._data['ct'])
    cipher_text = cipher_text_tag[:-cipher_tag_bytes]
    cipher_tag = cipher_text_tag[-cipher_tag_bytes:]
    cipher_message = json_decode(
        self.__decompress(
            cipher.decrypt_and_verify(cipher_text, cipher_tag)).decode())

    self._text = cipher_message['paste'].encode()
    if 'attachment' in cipher_message and 'attachment_name' in cipher_message:
        self._attachment = cipher_message['attachment']
        self._attachment_name = cipher_message['attachment_name']
def add():
    """
    Add message to outbox.

    Validates optional JSON headers and required fields; on success the
    message is queued and a JSON status string is returned.
    """
    data = {
        'subject': request.params.get('subject', None),
        'from': request.params.get('from', None),
        'to': request.params.get('to', None),
        'text': request.params.get('text', None),
        'html': request.params.get('html', None),
    }

    headers = request.params.get('headers', None)
    if headers:
        try:
            data['headers'] = json_decode(headers)
        # FIX: was a bare `except:` — only a JSON decode error means bad
        # input; anything else should propagate.
        except ValueError:
            rejected.next()
            return error_msg('headers contains invalid json data')

    required_fields = ['from', 'to', 'subject', 'text']
    for field in required_fields:
        # FIX: dict.has_key() is removed in Python 3; .get() truthiness is
        # the equivalent of `has_key(field) and data[field]`.
        if not data.get(field):
            rejected.next()
            return error_msg('missing field %s' % field)

    outbox.put(data)
    queued.next()
    resp = {'status': 'queued', 'message': 'ok'}
    return json_encode(resp)
def recv_data(client):
    """Receive and decode a single masked websocket frame from *client*.

    Returns the JSON-decoded payload when possible, the escaped text
    otherwise, or None on a closed connection.
    NOTE: Python 2 code (byte-string indexing, cgi.escape).
    """
    data = client.recv(1024)
    if not data:
        return None

    # Decode the frame's payload length (7-bit, or 16/64-bit extended).
    length = ord(data[1]) & 127
    p = 2
    if length == 126:
        length = struct.unpack(">H", data[p:p+2])[0]
        p += 2
    elif length == 127:
        length = struct.unpack(">Q", data[p:p+8])[0]
        p += 8

    masks = [ord(byte) for byte in data[p:p+4]]
    p += 4

    # XOR-unmask each payload byte against the rotating 4-byte mask.
    chars = []
    for i, char in enumerate(data[p:p+length]):
        chars.append(chr(ord(char) ^ masks[i % 4]))
    decoded = cgi.escape("".join(chars))
    try:
        return json_decode(decoded)
    except:
        return decoded
def load_previously_stored_values(self):
    """Load previously saved values"""
    cache_path = '.rkd/' + self.filename
    if os.path.isfile(cache_path):
        with open(cache_path, 'rb') as handle:
            self.answers = json_decode(handle.read())
    # Snapshot the current environment so it can be restored later.
    self.to_env = deepcopy(os.environ)
def _call(self, cmd, data=None):
    """Send *cmd* over the socket API (logging in first when needed) and
    return the decoded response.

    Retries once on IOError; raises AccessDeniedException / ValueError /
    OverflowError / RuntimeError based on the API error field.
    """
    if data is None:
        data = {}
    data['cmd'] = cmd
    data['version'] = API_VERSION
    request = json_encode(data)

    response = None
    for attempt in range(2):
        # Ensure we are logged in before any non-login command.
        if not self.socket and cmd != 'login':
            self._call('login', self.userpwd.copy())
        self.socket_lock.acquire()
        try:
            sock = self.connect()
            response = self._sendrecv(sock, request)
        except IOError as err:
            sys.stderr.write(str(err) + "\n")
            self.close()
        except socket.error as err:
            sys.stderr.write(str(err) + "\n")
            self.close()
            raise IOError('Connection refused')
        else:
            break
        finally:
            self.socket_lock.release()

    if response is None:
        raise IOError('Connection lost timed out during API request')
    try:
        response = json_decode(response)
    except Exception:
        raise RuntimeError('Invalid API response')

    if not response.get('error'):
        return response

    # Map API error strings onto exception types.
    error = response['error']
    if error in ('not-logged-in', 'invalid-credentials'):
        raise AccessDeniedException('Access denied, check your credentials')
    elif 'banned' == error:
        raise AccessDeniedException('Access denied, account is suspended')
    elif 'insufficient-funds' == error:
        raise AccessDeniedException('CAPTCHA was rejected due to low balance')
    elif 'invalid-captcha' == error:
        raise ValueError('CAPTCHA is not a valid image')
    elif 'service-overload' == error:
        raise OverflowError(
            'CAPTCHA was rejected due to service overload, try again later')
    else:
        self.socket_lock.acquire()
        self.close()
        self.socket_lock.release()
        raise RuntimeError('API server error occured: %s' % error)
def loadcache(p):
    """Return the cache dict stored in ``p/__cache__.json``.

    When the module-level CLEAR flag is set, or no cache file exists,
    a fresh empty cache structure for location *p* is returned instead.
    """
    if CLEAR:
        return {"source": {}, "error": {}, "location": p}
    cache_file = path.join(p, "__cache__.json")
    if path.exists(cache_file):
        # BUG FIX: the original opened the file without ever closing it;
        # a context manager guarantees the handle is released.
        with open(cache_file, "r") as f:
            return json_decode(f.read())
    return {"source": {}, "error": {}, "location": p}
def test_subscriptions_put(self):
    """Bulk PUT on /subscriptions/ : after subscribing two PVs, PUT a list
    of three new PVs and expect those three to be the only remaining
    subscriptions (per the length/keys assertions below)."""
    pvnames = ("test:long1", "test:long2")
    modedefs = ({"mode": "Monitor", "delta": 1.1},
                {"mode": "Scan", "period": 0.1})
    self._subscribe_to_many(pvnames, modedefs)
    response = self.fetch('/subscriptions/')
    self.assertEqual(response.code, 200)
    self.assertFalse(response.error)
    responses = json_decode(response.body)['response']
    self.assertEqual(len(responses), len(pvnames))
    self.assertItemsEqual(responses.keys(), pvnames)
    for pvname, modedef in zip(pvnames, modedefs):
        self.assertEqual(responses[pvname]['name'], pvname)
        self.assertEqual(responses[pvname]['mode'],
                         SubscriptionMode.parse(modedef))
    # up to here, same as get's test

    pvnames = ("test:double1", "test:double2", "test:double3")
    modedefs = ({"mode": "Monitor", "delta": 2.1},
                {"mode": "Scan", "period": 3.1},
                {"mode": "Monitor", "period": 4.1},
                )
    to_sub = [{'pvname': pvname, 'mode': modedef}
              for pvname, modedef in zip(pvnames, modedefs)]
    reqbody = json_encode(to_sub)
    response = self.fetch('/subscriptions/', method='PUT', body=reqbody)
    self.assertEqual(response.code, 200)
    self.assertFalse(response.error)

    # only the PVs named in the PUT body should be listed now
    response = self.fetch('/subscriptions/')
    self.assertEqual(response.code, 200)
    self.assertFalse(response.error)
    responses = json_decode(response.body)['response']
    self.assertEqual(len(responses), len(pvnames))
    self.assertItemsEqual(responses.keys(), pvnames)
    for pvname, modedef in zip(pvnames, modedefs):
        self.assertEqual(responses[pvname]['name'], pvname)
        self.assertEqual(responses[pvname]['mode'],
                         SubscriptionMode.parse(modedef))
def test_45_policy_merge_3(self):
    """ Different order of products for test_44
    - 3 policy groups
    - 2 base options from different warehouses
    """
    self.product_both.property_planning_policy_id = self.policy_other
    self.product_12.property_planning_policy_id = self.policy_closest
    self.product_22.property_planning_policy_id = self.policy_other
    # Add one order line per product, in this exact order.
    line_model = self.env['sale.order.line']
    for product in (self.product_1, self.product_2, self.product_12,
                    self.product_22, self.product_both):
        line_model.create({
            'order_id': self.so.id,
            'product_id': product.id,
            'name': 'demo',
        })
    both_wh_ids = self.both_wh_ids()
    planner = self.env['sale.order.make.plan'].with_context(
        warehouse_domain=[('id', 'in', both_wh_ids)],
        skip_plan_shipping=True).create({'order_id': self.so.id})
    self.assertTrue(planner.planning_option_ids,
                    'Must have one or more plans.')
    self.assertEqual(planner.planning_option_ids.warehouse_id,
                     self.warehouse_1)
    self.assertTrue(planner.planning_option_ids.sub_options)
    sub_options = json_decode(planner.planning_option_ids.sub_options)
    _logger.error(sub_options)
    # Expected split of products across the two warehouses.
    wh_1_ids = sorted([self.product_1.id, self.product_12.id])
    wh_2_ids = sorted(
        [self.product_both.id, self.product_2.id, self.product_22.id])
    self.assertEqual(
        sorted(sub_options[str(self.warehouse_1.id)]['product_ids']),
        wh_1_ids)
    self.assertEqual(
        sorted(sub_options[str(self.warehouse_2.id)]['product_ids']),
        wh_2_ids)
def test_43_policy_merge(self):
    """ Scenario that will make a complicated scenario
    specifically:
    - 3 policy groups
    - 2 base options with sub_options (all base options with same warehouse)
    """
    self.product_both.property_planning_policy_id = self.policy_closest
    self.product_12.property_planning_policy_id = self.policy_other
    self.product_22.property_planning_policy_id = self.policy_other
    # One order line per product, preserving the original creation order.
    products = (self.product_both, self.product_1, self.product_2,
                self.product_12, self.product_22)
    for product in products:
        self.env['sale.order.line'].create({
            'order_id': self.so.id,
            'product_id': product.id,
            'name': 'demo',
        })
    both_wh_ids = self.both_wh_ids()
    planner = self.env['sale.order.make.plan'].with_context(
        warehouse_domain=[('id', 'in', both_wh_ids)],
        skip_plan_shipping=True).create({'order_id': self.so.id})
    self.assertTrue(planner.planning_option_ids,
                    'Must have one or more plans.')
    self.assertEqual(planner.planning_option_ids.warehouse_id,
                     self.warehouse_1)
    self.assertTrue(planner.planning_option_ids.sub_options)
    sub_options = json_decode(planner.planning_option_ids.sub_options)
    _logger.error(sub_options)
    # Expected split of products across the two warehouses.
    wh_1_ids = sorted(
        [self.product_both.id, self.product_1.id, self.product_12.id])
    wh_2_ids = sorted([self.product_2.id, self.product_22.id])
    self.assertEqual(
        sorted(sub_options[str(self.warehouse_1.id)]['product_ids']),
        wh_1_ids)
    self.assertEqual(
        sorted(sub_options[str(self.warehouse_2.id)]['product_ids']),
        wh_2_ids)
def _call(self, cmd, data=None):
    """Issue API command *cmd* (with optional *data*) and decode the reply.

    A login is performed first when the socket is not yet open; transient
    IOErrors trigger one retry. Known API error codes are translated into
    AccessDeniedException / ValueError / OverflowError; anything else
    becomes IOError or RuntimeError.
    """
    if data is None:
        data = {}
    data['cmd'] = cmd
    data['version'] = API_VERSION
    request = json_encode(data)
    response = None
    # Up to two send attempts under the socket lock.
    for i in range(2):
        if not self.socket and cmd != 'login':
            # Establish a session before sending the real command.
            self._call('login', self.userpwd.copy())
        self.socket_lock.acquire()
        try:
            sock = self.connect()
            response = self._sendrecv(sock, request)
        except IOError as err:
            # Recoverable: log, drop the socket, retry on next iteration.
            sys.stderr.write(str(err) + "\n")
            self.close()
        except socket.error as err:
            # Fatal: log, drop the socket, and re-raise as IOError.
            sys.stderr.write(str(err) + "\n")
            self.close()
            raise IOError('Connection refused')
        else:
            break
        finally:
            self.socket_lock.release()
    if response is None:
        raise IOError('Connection lost timed out during API request')
    try:
        response = json_decode(response)
    except Exception:
        raise RuntimeError('Invalid API response')
    if not response.get('error'):
        return response
    error = response['error']
    # Translate documented error codes into dedicated exceptions.
    if error in ('not-logged-in', 'invalid-credentials'):
        raise AccessDeniedException('Access denied, check your credentials')
    elif 'banned' == error:
        raise AccessDeniedException('Access denied, account is suspended')
    elif 'insufficient-funds' == error:
        raise AccessDeniedException(
            'CAPTCHA was rejected due to low balance')
    elif 'invalid-captcha' == error:
        raise ValueError('CAPTCHA is not a valid image')
    elif 'service-overload' == error:
        raise OverflowError(
            'CAPTCHA was rejected due to service overload, try again later')
    else:
        # Unknown error: tear the connection down before raising.
        self.socket_lock.acquire()
        self.close()
        self.socket_lock.release()
        raise RuntimeError('API server error occured: %s' % error)
def test_options(self):
    """Custom recaptcha options must round-trip through the JSON property."""
    options = {'key1': 'value', 'key2': 3}
    client = RecaptchaClient(
        _FAKE_PRIVATE_KEY,
        _FAKE_PUBLIC_KEY,
        recaptcha_options=options,
    )
    eq_(options, json_decode(client.recaptcha_options_json))
def get_info_from_ldap(access_token, oauth_url):
    """POST the OAuth token to the resource endpoint and return the decoded
    account info, or False when the server returned an empty result."""
    params = urlencode({'oauth_token': access_token, 'getinfo': True})
    request = Request(oauth_url + '/resource.php', params)
    info = json_decode(urlopen(request).read())
    return info if info else False
def prepare(self):
    """
    handler init - set self.POST from request body, decode json if request
    is json
    """
    self.db = self.settings['db']
    self.POST = {}
    has_body = self.request.method in ['POST', 'PUT'] and self.request.body
    if has_body:
        try:
            self.POST = P(json_decode(self.request.body))
        except Exception:
            # Body is not valid JSON: fall back to form-encoded arguments.
            self.POST = P(self.request.arguments)
def __init__(self, cdxline=b''):
    """Parse one CDX index line — either the CDXJ form (urlkey, timestamp,
    JSON blob) or a classic space-separated record — into this ordered
    mapping of field name -> native-string value."""
    OrderedDict.__init__(self)
    cdxline = cdxline.rstrip()
    self._from_json = False
    self._cached_json = None

    # Allows for filling the fields later or in a custom way
    if not cdxline:
        self.cdxline = cdxline
        return

    # Split off at most the first two fields; the remainder may be JSON.
    fields = cdxline.split(b' ', 2)
    # Check for CDX JSON
    if fields[-1].startswith(b'{'):
        self[URLKEY] = to_native_str(fields[0], 'utf-8')
        self[TIMESTAMP] = to_native_str(fields[1], 'utf-8')
        json_fields = json_decode(to_native_str(fields[-1], 'utf-8'))
        for n, v in six.iteritems(json_fields):
            n = to_native_str(n, 'utf-8')
            # Normalize alternate field spellings to canonical names.
            n = self.CDX_ALT_FIELDS.get(n, n)
            if n == 'url':
                try:
                    v.encode('ascii')
                except UnicodeEncodeError:
                    # Percent-encode non-ASCII URLs, keeping ':' and '/'.
                    v = quote(v.encode('utf-8'), safe=':/')
            if n != 'filename':
                v = to_native_str(v, 'utf-8')
            self[n] = v
        self.cdxline = cdxline
        self._from_json = True
        return

    # Classic CDX: re-split the remainder into individual fields.
    more_fields = fields.pop().split(b' ')
    fields.extend(more_fields)

    # Select the known format whose field count matches this line.
    cdxformat = None
    for i in self.CDX_FORMATS:
        if len(i) == len(fields):
            cdxformat = i

    if not cdxformat:
        msg = 'unknown {0}-field cdx format: {1}'.format(
            len(fields), fields)
        raise CDXException(msg)

    for header, field in zip(cdxformat, fields):
        self[header] = to_native_str(field, 'utf-8')

    self.cdxline = cdxline
def get_info_from_ldap(access_token, oauth_url):
    """Query the OAuth resource endpoint for account info.

    Returns the decoded info on success, False when the reply is empty.
    """
    payload = {'oauth_token': access_token, 'getinfo': True}
    response = urlopen(Request(oauth_url + '/resource.php',
                               urlencode(payload)))
    info = json_decode(response.read())
    if not info:
        return False
    return info
def template_list(self):
    """ Check if the korora template exists """
    # GET the template index and return the decoded JSON payload.
    # On a URL error the exception is printed and None is returned
    # implicitly (deliberate best-effort behavior).
    try:
        r = urllib2.Request('%s/api/templates.json' % (
            self._urlbase
        ))
        u = self._opener.open(r)
        return json_decode(u.read())
    except urllib2.URLError, e:
        print e
def gotEvents(response):
    """Decode a batch of server events, dispatch each one, and poll again.

    NOTE(review): `self` is a free variable here — this is a closure
    defined inside an enclosing method.
    """
    events = json_decode(response)
    if events is None:
        # Undecodable/null payload: treat the connection as dead.
        self.disconnect()
    else:
        for event in events:
            # Each event is [name, arg1, arg2, ...].
            event, params = event[0], event[1:]
            # Dispatch to an EVENT_<name> handler if one exists.
            callback = getattr(self, 'EVENT_%s' % event, None)
            if callback:
                callback(params)
        # Schedule the next event poll.
        self.doEvents()
def template_delete(self, template_id): try: r = urllib2.Request('%s/api/template/%d.json' % ( self._urlbase, template_id )) r.get_method = lambda: 'DELETE' u = self._opener.open(r) o = json_decode( u.read() ) return True except urllib2.URLError, e: print e
def __init__(self, cdxline=b''):
    """Build the field mapping for one CDX line, accepting either the
    CDXJ layout (urlkey, timestamp, JSON object) or a classic
    space-separated record matched against CDX_FORMATS."""
    OrderedDict.__init__(self)
    cdxline = cdxline.rstrip()
    self._from_json = False
    self._cached_json = None

    # Allows for filling the fields later or in a custom way
    if not cdxline:
        self.cdxline = cdxline
        return

    # At most three chunks: urlkey, timestamp, and the rest.
    fields = cdxline.split(b' ', 2)
    # Check for CDX JSON
    if fields[-1].startswith(b'{'):
        self[URLKEY] = to_native_str(fields[0], 'utf-8')
        self[TIMESTAMP] = to_native_str(fields[1], 'utf-8')
        json_fields = json_decode(to_native_str(fields[-1], 'utf-8'))
        for n, v in six.iteritems(json_fields):
            n = to_native_str(n, 'utf-8')
            # Canonicalize alternate field names.
            n = self.CDX_ALT_FIELDS.get(n, n)
            if n == 'url':
                try:
                    v.encode('ascii')
                except UnicodeEncodeError:
                    # Percent-encode non-ASCII URLs, keeping ':' and '/'.
                    v = quote(v.encode('utf-8'), safe=':/')
            if n != 'filename':
                v = to_native_str(v, 'utf-8')
            self[n] = v
        self.cdxline = cdxline
        self._from_json = True
        return

    # Classic CDX: expand the remainder into individual fields.
    more_fields = fields.pop().split(b' ')
    fields.extend(more_fields)

    # Choose the known format whose length matches this record.
    cdxformat = None
    for i in self.CDX_FORMATS:
        if len(i) == len(fields):
            cdxformat = i

    if not cdxformat:
        msg = 'unknown {0}-field cdx format'.format(len(fields))
        raise CDXException(msg)

    for header, field in zip(cdxformat, fields):
        self[header] = to_native_str(field, 'utf-8')

    self.cdxline = cdxline
def __getitem__(self, uid):
    """Fetch and JSON-decode the resource for *uid*.

    Retries up to 5 times on request timeouts and HTTP 429 (rate limit);
    other HTTP errors raise immediately via raise_for_status(). Raises a
    generic Exception once all attempts are exhausted.
    """
    url = self._getfileurl(uid)
    for _ in range(5):
        try:
            r = self.session.get(url, timeout=self.timeout)
        except requests.exceptions.Timeout:
            # Timed out: try again (no backoff between attempts).
            continue
        if r.status_code == 429:
            # Rate limited: retry.
            continue
        r.raise_for_status()
        return json_decode(r.text)
    raise Exception('server is too busy')
def analyze_file(s3_bucket, file_name):
    """Classify the S3-hosted image *file_name* with the Watson demo API and
    record any recognized face identity in the module-level `faces` dict.
    """
    # BUG FIX: the original built the URL from the undefined name
    # `file_location`; the parameter is `file_name`.
    image_url = 'https://s3-%s.amazonaws.com/%s/%s' % (
        ec2_region, s3_bucket, file_name)
    watson_url = 'https://visual-recognition-demo.mybluemix.net/api/classify'
    r = requests.post(watson_url, data={'url': image_url})
    try:
        identity = json_decode(
            r.text)['images'][0]['faces'][0]['identity']['name']
    except (ValueError, KeyError, IndexError, TypeError):
        # No face/identity in the response, or a non-JSON body — the
        # original bare `except:` hid every other error too.
        identity = None
    if identity is not None:
        faces[identity] = ''
def update_streams(self, cat):
    """Fetch the shoutcast station list for genre *cat*.

    Returns a list of normalized stream dicts, or an empty list when the
    category is unknown or the HTTP/JSON step fails.
    """
    if cat not in self.catmap:
        log.ERR("Category not in known map.", cat)
        return []
    # page
    url = "http://www.shoutcast.com/Home/BrowseByGenre"
    params = {"genrename": cat}
    referer = None
    try:
        json = ahttp.get(url, params=params, referer=referer, post=1, ajax=1)
        json = json_decode(json)
    except Exception:
        # FIX: was a bare `except:` which also swallowed KeyboardInterrupt
        # and SystemExit; also dropped the unused `id = self.catmap[cat]`
        # local that shadowed the builtin.
        log.ERR(
            "HTTP request or JSON decoding failed. Outdated python/requests perhaps."
        )
        return []
    self.parent.status(0.75)
    # remap JSON entries into the internal stream-row schema
    entries = []
    for e in json:
        entries.append({
            "id": int(e.get("ID", 0)),
            "genre": str(e.get("Genre", "")),
            "title": str(e.get("Name", "")),
            "playing": str(e.get("CurrentTrack", "")),
            "bitrate": int(e.get("Bitrate", 0)),
            "listeners": int(e.get("Listeners", 0)),
            "url": "http://yp.shoutcast.com/sbin/tunein-station.pls?id=%s"
                   % e.get("ID", "0"),
            "homepage": "",
            "format": "audio/mpeg"
        })
    #log.DATA(entries)
    return entries
def test_upload_image_w_pid(self):
    """POSTing base64 image data with an explicit parent id stores exactly
    one img document carrying that pid and writes the decoded bytes to a
    .jpg file under UPLOAD_PATH."""
    self.sync_db.img.drop()
    # 'dGVzdCBkYXRhIGFnYWlu' is base64 for 'test data again'.
    body = 'data=,dGVzdCBkYXRhIGFnYWlu&id=5781d84e2fe7e60000000000'
    response = self.fetch('/upload/', method='POST', body=body)
    self.assertEqual(response.code, 200)
    json = json_decode(response.body)
    _id = ObjectId(json['data']['id'])
    self.assertEqual(self.sync_db.img.count(), 1)
    obj = self.sync_db.img.find_one({'_id': _id})
    # presumably 'fs' is the stored payload size — TODO confirm with handler
    self.assertEqual(obj['fs'], 20)
    self.assertEqual(obj['pid'], ObjectId('5781d84e2fe7e60000000000'))
    file_data = open('{}/{}.jpg'.format(self.settings['UPLOAD_PATH'],
                                        _id), 'rb').read()
    self.assertEqual(file_data, 'test data again')
def _handle_data(self, topic, payload):
    """Parse location/device from *topic*, decode the oneM2M payload, and
    push each reading into its per-sensor target container.

    Invalid topics and damaged payloads are logged and dropped.
    """
    # get location and device
    try:
        parts = topic.split('/')
        location = parts[self.topic_index_location]
        device = parts[self.topic_index_device]
    except (AttributeError, ValueError, IndexError):
        # BUG FIX: a topic with too few '/' segments raises IndexError,
        # which the original did not catch despite intending to drop
        # invalid topics here. Also split only once.
        self.logger.error("Topic '%s' not valid. Dropping." % topic)
        return
    # check payload: JSON envelope -> base64 'con' field -> JSON readings
    try:
        readings = json_decode(
            base64decode(json_decode(payload)['m2m:cin']['con']))
    except (ValueError, KeyError, TypeError):
        self.logger.error('Damaged payload; discarding')
        return
    # push data, grouped per sensor name 'n', ordered by timestamp 't'
    # NOTE(review): groupby assumes readings are pre-sorted by 'n' — verify
    # against the producer.
    for _, values in groupby(readings, key=lambda x: x['n']):
        sensor_cnt = self._get_target_container(location, device, 'number')
        for value in sorted(values, key=lambda x: x['t']):
            self.push_content(sensor_cnt, [value])