def test_delete_bookmarks(self):
    """Bookmarks can be created, listed and deleted; deleting again raises."""
    episode_title1 = 'Colombians to deliver their verdict on peace accord'
    episode_id1 = episodes_dao.\
        get_episode_by_title(episode_title1, self.user1.uid).id
    episode_title2 = 'Battle of the camera drones'
    episode_id2 = episodes_dao\
        .get_episode_by_title(episode_title2, self.user1.uid).id

    # No bookmark row exists for the episode before the POST.
    bookmark = Bookmark.query.filter(Bookmark.episode_id == episode_id1).first()
    self.assertIsNone(bookmark)

    self.user1.post('api/v1/bookmarks/{}/'.format(episode_id1))
    self.user1.post('api/v1/bookmarks/{}/'.format(episode_id2))
    response = self.user1.get('api/v1/bookmarks/')
    data = json.loads(response.data)
    # assertEqual: assertEquals is a deprecated alias.
    self.assertEqual(len(data['data']['bookmarks']), 2)

    self.user1.delete('api/v1/bookmarks/{}/'.format(episode_id1))
    self.user1.delete('api/v1/bookmarks/{}/'.format(episode_id2))
    response = self.user1.get('api/v1/bookmarks/')
    data = json.loads(response.data)
    self.assertEqual(len(data['data']['bookmarks']), 0)

    # BUG FIX: assertRaises must be given the callable and its arguments
    # separately. The original invoked delete() eagerly and passed the
    # response object, so assertRaises only "passed" because calling a
    # non-callable raised TypeError — the API behaviour was never checked.
    # NOTE(review): if the API returns an error response instead of raising
    # on a missing bookmark, these should assert on the response — confirm.
    self.assertRaises(
        Exception,
        self.user1.delete,
        'api/v1/bookmarks/{}/'.format(episode_id1),
    )
    self.assertRaises(
        Exception,
        self.user1.delete,
        'api/v1/bookmarks/{}/'.format(episode_id2),
    )
def loads(s):
    """Deserialize a task payload, normalising 'args'/'kwargs' and casting values."""
    payload = json.loads(s)
    # Missing or falsy args/kwargs are normalised to empty containers.
    if not payload.get('args', None):
        payload['args'] = []
    if not payload.get('kwargs', None):
        payload['kwargs'] = {}
    # Positional arguments: cast any dict entries in place.
    for item in payload['args']:
        if isinstance(item, dict):
            cast_item(item)
    kw = payload['kwargs']
    # kwargs may itself arrive as a JSON-encoded string; decode it first.
    if isinstance(kw, str):
        payload['kwargs'] = json.loads(kw)
        kw = payload['kwargs']
    # Keyword arguments: cast strings, lists of strings, and nested dicts.
    for key, value in kw.items():
        if isinstance(value, str):
            kw[key] = try_cast(value)
        if isinstance(value, list):
            kw[key] = [try_cast(entry) for entry in value]
        if isinstance(value, dict):
            cast_item(value)
    return payload
def test_get_new_episdoes(self):
    """New-episode notifications are returned newest-first and flagged unread."""
    series_ids = [series.id for series in Series.query.limit(3).all()]

    # Not signed up for any notifications yet -> empty result.
    response = self.user1.get('api/v1/notifications/episodes/' + \
        '?offset={}&max={}'.format(0, 10))
    result = json.loads(response.data)['data']
    self.assertTrue(len(result['episodes']) == 0)

    # Subscribed to a single series.
    self.user1.post('api/v1/subscriptions/{}/'.format(series_ids[0]))
    self.user1.post('api/v1/notifications/episodes/{}/'.format(series_ids[0]))
    response = self.user1.get('api/v1/notifications/episodes/' + \
        '?offset={}&max={}'.format(0, 10))
    result = json.loads(response.data)['data']
    self.assertTrue(0 < len(result['episodes']) <= 10)
    latest = convert_to_datetime(result['episodes'][0]['pub_date'])
    for episode in result['episodes']:
        self.assertTrue(episode['unread_notifcation'])
        self.assertTrue(convert_to_datetime(episode['pub_date']) <= latest)
        latest = convert_to_datetime(episode['pub_date'])

    # Subscribed to multiple series, with a paging offset.
    self.user1.post('api/v1/subscriptions/{}/'.format(series_ids[1]))
    self.user1.post('api/v1/notifications/episodes/{}/'.format(series_ids[1]))
    response = self.user1.get('api/v1/notifications/episodes/' + \
        '?offset={}&max={}'.format(2, 10))
    result = json.loads(response.data)['data']
    latest = convert_to_datetime(result['episodes'][0]['pub_date'])
    self.assertTrue(0 < len(result['episodes']) <= 10)
    for episode in result['episodes']:
        self.assertTrue(episode['unread_notifcation'])
        self.assertTrue(convert_to_datetime(episode['pub_date']) <= latest)
        latest = convert_to_datetime(episode['pub_date'])
def search(): q = request.args.get('q', '') page = request.args.get('page', '0') works = [] if q: base_url = "http://bibliographica.org/search.json?q=%s&page=%s" target_url = base_url % (q, page) data = urllib2.urlopen(target_url).read() solrdata = json.loads(data) response = solrdata['response'] data = response["docs"] works = [] for item in data: uri = item["uri"].replace("<", "").replace(">", "") d = urllib2.urlopen(uri + ".json").read(); item["work"] = json.loads(d) out = Bibliographica(item).data work = pdcalc.work.Work(out) work.uri = uri try: result = pdcalc.get_pd_status(work) work.pd_status = {'error': '', 'results': result} except Exception, inst: if app.debug: raise work.pd_status = { 'error': 'Failed to calculate status: %s' % inst, 'results': [] } works.append(work) count = response['numFound']
def test_jsonp(self):
    """jsonp() wraps the payload in the callback when one is requested, else plain JSON."""
    with self.app.test_request_context('/?callback=callback'):
        kwargs = {'lang': 'en-us', 'query': 'python'}
        r = jsonp(**kwargs)
        expected = (
            u'callback({\n "%s": "%s",\n "%s": "%s"\n});' % (
                'lang', kwargs['lang'], 'query', kwargs['query'])
        ).encode('utf-8')
        self.assertEqual(expected, r.get_data())

    with self.app.test_request_context('/'):
        param1, param2 = 1, 2
        # All three payload forms (kwargs, dict, pair list) behave identically.
        for r in (
            jsonp(param1=param1, param2=param2),
            jsonp({'param1': param1, 'param2': param2}),
            jsonp([('param1', param1), ('param2', param2)]),
        ):
            resp = json.loads(r.response[0])
            self.assertEqual(resp['param1'], param1)
            self.assertEqual(resp['param2'], param2)
def test_app_delete(self):
    """Deleting a created app makes its URL return 404."""
    created = json.loads(self.create_app().data)
    deleted = self.app.delete(created['url'])
    json.loads(deleted.data)  # delete response body must still be valid JSON
    fetched = self.app.get(created['url'])
    assert fetched.status_code == 404
def do_learning():
    """Learn extraction rules from a project's markup and persist them as rules.json.

    Expects a JSON POST body with 'project_folder'. Writes the learned rules to
    <project>/learning/rules.json and returns them as JSON; aborts with 404 for
    non-POST requests.
    """
    if request.method == 'POST':
        data = request.get_json(force=True)
        project_folder = data['project_folder']
        directory = os.path.join(app.static_folder, 'project_folders', project_folder)

        markup_file = os.path.join(directory, 'learning', 'markup.json')
        with codecs.open(markup_file, "r", "utf-8") as myfile:
            markup = json.loads(myfile.read().encode('utf-8'))

        pageManager = PageManager()
        for key in markup['__URLS__']:
            page_file = os.path.join(directory, key)
            with codecs.open(page_file, "r", "utf-8") as myfile:
                pageManager.addPage(key, myfile.read().encode('utf-8'))

        # Bookkeeping entries are not part of the markup to learn from.
        markup.pop("__SCHEMA__", None)
        markup.pop("__URLS__", None)

        pageManager.learnStripes(markup)
        rule_set = pageManager.learnRulesFromMarkup(markup)

        # Parse the rules once and reuse for both the file and the response
        # (the original re-serialized rule_set.toJson() twice). The redundant
        # explicit close() inside the `with` block was also removed.
        rules = json.loads(rule_set.toJson())
        rules_file = os.path.join(directory, 'learning', 'rules.json')
        with codecs.open(rules_file, "w", "utf-8") as myfile:
            myfile.write(json.dumps(rules, sort_keys=True, indent=2,
                                    separators=(',', ': ')))
        return jsonify(rules=rules)
    abort(404)
def reconcile():
    """OpenRefine-style reconciliation endpoint: single query, batch, or metadata."""
    # Single query: either a bare search string, or (when it starts with "{")
    # a JSON object holding the search string as its 'query' member.
    query = request.form.get('query')
    if query:
        if query.startswith("{"):
            query = json.loads(query)['query']
        return jsonpify({"result": search(query)})

    # Batch mode: 'queries' maps keys to query objects; answer per key.
    queries = request.form.get('queries')
    if queries:
        batch = json.loads(queries)
        results = {}
        for (key, query) in batch.items():
            results[key] = {"result": search(query['query'])}
        return jsonpify(results)

    # Neither parameter supplied: serve the service metadata.
    return jsonpify(metadata)
def get_song_ids_for_years(year_start, year_end): r = get_redis() key = "cache:years:%s:%s" % (year_start, year_end) if r.exists(key): return json.loads(r.get(key)) params = [ ('api_key', 'IILIWPF9XK31O9BLS'), ('bucket', 'songs'), ('artist_start_year_before', year_start), ('artist_end_year_after', year_end), ('sort', 'familiarity-desc') ] _ = "http://developer.echonest.com/api/v4/artist/search?%s" % urllib.urlencode(params) print _ fp = urllib2.urlopen(_) data = fp.read() fp.close() resp = json.loads(data) song_ids = [] for artist in resp['response']['artists']: if artist['songs']: song_ids.extend(map(lambda x: x['id'], artist['songs'][0:1])) r.set(key, json.dumps(song_ids)) return song_ids
def project_folder():
    """Create a project folder from the _blank template if needed, then load its data."""
    if request.method == 'POST':
        payload = request.get_json(force=True)
        folder_name = payload['project_folder']
        directory = os.path.join(app.static_folder, 'project_folders', folder_name)
        markup = {}
        if not os.path.exists(directory):
            # Seed a brand-new project from the blank template.
            template = os.path.join(app.static_folder, 'project_folders', '_blank')
            shutil.copytree(template, directory)
        markup_file = os.path.join(directory, 'learning', 'markup.json')
        with codecs.open(markup_file, "r", "utf-8") as handle:
            markup = json.loads(handle.read().encode('utf-8'))
        rules_file = os.path.join(directory, 'learning', 'rules.json')
        with codecs.open(rules_file, "r", "utf-8") as handle:
            rules = json.loads(handle.read().encode('utf-8'))
        return jsonify(project_folder=folder_name, markup=markup, rules=rules)
    abort(404)
def test_get_audit_event_for_missing_object_returns_404(self):
    """Querying audit events for an unknown object id yields a 404 JSON response."""
    self.add_audit_events_with_db_object()
    response = self.client.get(
        '/audit-events?object-type=suppliers&object-id=100000')
    json.loads(response.get_data())  # body must parse as JSON
    assert_equal(response.status_code, 404)
def get_aggregates():
    """Compute aggregates from query-string parameters and return them as JSON.

    Reads VALUE_FIELDS, GROUPING_FIELDS, FILTERS and BASE_FILTERS as
    JSON-encoded lists, plus the WITH_UNFILTERED flag ('TRUE'/'FALSE'),
    and delegates to results_services.get_aggregates.
    """
    def _json_list_arg(name, default='[]'):
        # Each list parameter arrives JSON-encoded; empty/missing means [].
        raw = request.args.get(name, default)
        return json.loads(raw) if raw else []

    value_fields = _json_list_arg('VALUE_FIELDS', '')
    grouping_fields = _json_list_arg('GROUPING_FIELDS', '')
    filters = _json_list_arg('FILTERS')
    base_filters = _json_list_arg('BASE_FILTERS')
    with_unfiltered = request.args.get('WITH_UNFILTERED', 'FALSE') == 'TRUE'

    aggregates = results_services.get_aggregates(
        value_fields=value_fields,
        grouping_fields=grouping_fields,
        filters=filters,
        with_unfiltered=with_unfiltered,
        base_filters=base_filters
    )
    return Response(json.dumps(aggregates, indent=2), mimetype='application/json')
def test_is_following(self):
    """is_following is reflected in user lookups and toggled by follow/unfollow."""
    follower = User.query \
        .filter(User.google_id == constants.TEST_USER_GOOGLE_ID1).first()
    followed_id = User.query \
        .filter(User.google_id == constants.TEST_USER_GOOGLE_ID2).first().id

    # Initially not following.
    target = users_dao.get_user_by_id(follower.id, followed_id)
    self.assertFalse(target.is_following)
    resp = self.user1.get('api/v1/users/{}/'.format(followed_id))
    body = json.loads(resp.data)
    self.assertFalse(body['data']['user']['is_following'])

    # Follow: flag flips on in the DAO and in both API views.
    self.user1.post('api/v1/followings/{}/'.format(followed_id))
    target = users_dao.get_user_by_id(follower.id, followed_id)
    self.assertTrue(target.is_following)
    resp = self.user1.get('api/v1/followings/show/{}/'.format(follower.id))
    body = json.loads(resp.data)
    self.assertTrue(body['data']['followings'][0]['followed']['is_following'])
    resp = self.user1.get('api/v1/users/{}/'.format(followed_id))
    body = json.loads(resp.data)
    self.assertTrue(body['data']['user']['is_following'])

    # Unfollow: flag flips back off.
    self.user1.delete('api/v1/followings/{}/'.format(followed_id))
    target = users_dao.get_user_by_id(follower.id, followed_id)
    self.assertFalse(target.is_following)
def instrument_command(device_type, instrument_device_id, agent_command, cap_type=None, session_type=None):
    """Dispatch an instrument agent command and return its response as JSON."""
    # The query string always wins over the cap_type argument.
    cap_type = request.args.get('cap_type')
    if request.method in ('POST', 'PUT'):
        if agent_command == 'set_agent':
            command_response = ServiceApi.set_agent(
                instrument_device_id, json.loads(request.data))
        elif agent_command == 'set_resource':
            command_response = ServiceApi.set_resource(
                instrument_device_id, json.loads(request.data))
        elif agent_command == 'start':
            command_response = ServiceApi.instrument_agent_start(instrument_device_id)
        elif agent_command == 'stop':
            command_response = ServiceApi.instrument_agent_stop(instrument_device_id)
        else:
            # Direct-access sessions carry an extra session_type parameter.
            if agent_command == 'RESOURCE_AGENT_EVENT_GO_DIRECT_ACCESS':
                session_type = request.args.get('session_type')
            command_response = ServiceApi.instrument_execute(
                instrument_device_id, agent_command, cap_type, session_type)
    else:
        # NOTE(review): an unrecognised GET command leaves command_response
        # unbound, as in the original — confirm callers never hit that path.
        if agent_command == 'get_capabilities':
            command_response = ServiceApi.instrument_agent_get_capabilities(instrument_device_id)
        elif agent_command == 'get_resource':
            command_response = ServiceApi.get_resource(instrument_device_id)
        elif agent_command == 'get_platform_agent_state':
            command_response = ServiceApi.platform_agent_state(
                instrument_device_id, 'get_agent_state')
    return render_json_response(command_response)
def test_update_message(self):
    """ Ensure we can update a single field for an existing message """
    create_user_response = self.create_user(self.test_user_name, self.test_user_password)
    self.assertEqual(create_user_response.status_code, status.HTTP_201_CREATED)

    # Create one message to update.
    message_text = 'Welcome to the IoT world'
    message_category = 'Information'
    post_response = self.create_message(message_text, 30, message_category)
    self.assertEqual(post_response.status_code, status.HTTP_201_CREATED)
    self.assertEqual(Message.query.count(), 1)
    post_body = json.loads(post_response.get_data(as_text=True))
    message_url = post_body['url']

    # PATCH only the two printed-* fields.
    updated_printed_times = 1
    updated_printed_once = True
    patch_body = {'printed_times': updated_printed_times,
                  'printed_once': updated_printed_once}
    patch_response = self.test_client.patch(
        message_url,
        headers=self.get_authentication_headers(self.test_user_name, self.test_user_password),
        data=json.dumps(patch_body))
    self.assertEqual(patch_response.status_code, status.HTTP_200_OK)

    # The GET view reflects the patched values.
    get_response = self.test_client.get(
        message_url,
        headers=self.get_authentication_headers(self.test_user_name, self.test_user_password))
    fetched = json.loads(get_response.get_data(as_text=True))
    self.assertEqual(get_response.status_code, status.HTTP_200_OK)
    self.assertEqual(fetched['printed_times'], updated_printed_times)
    self.assertEqual(fetched['printed_once'], updated_printed_once)
def test_can_authenticate_user(self):
    """Authentication succeeds with the right password and fails with a wrong one."""
    credentials = {
        'email': '*****@*****.**',
        'password': '******'
    }
    salt = os.urandom(50)
    pass_hash = api.hash_password(
        api.string_to_bytes(credentials['password']), salt)
    db.session.add(domain.User(credentials['email'], pass_hash, salt))
    db.session.commit()

    with self.app.test_client() as client:
        def authenticate():
            return client.post(
                '/user/authenticate',
                data=json.dumps(credentials),
                headers={'content-type': 'application/json'})

        # Right password gives true.
        result = authenticate()
        self.assert200(result)
        self.assertTrue(json.loads(result.data)["authenticated"])

        # Wrong password gives false.
        credentials["password"] = "******"
        result = authenticate()
        self.assert200(result)
        self.assertFalse(json.loads(result.data)["authenticated"])
def browse_orgs():
    """Serve organisation data as raw JSON for API clients, or render the browse page."""
    data = controllers.getAllOrgNamesJSON(db)
    memberData = controllers.getMemberDataJSON(db, current_user.get_id())
    if request.method == 'GET' and is_request_json():
        return Response(response=data, status=200, mimetype='application/json')
    return render_template('browse_orgs.html',
                           data=json.loads(data),
                           memberData=json.loads(memberData))
def test_facebook_login(self):
    """Facebook sign-in creates a user on first login and rejects bad tokens."""
    app_token = api_utils.get_facebook_app_access_token()
    user_info = api_utils.create_facebook_user(app_token, 'User One')

    response = self.app.post('api/v1/users/facebook_sign_in/',
                             data=json.dumps({'access_token': user_info[0]}))
    result = json.loads(response.data)['data']
    self.assertTrue(result['is_new_user'])
    self.assertTrue(result['user']['facebook_id'] != "null")
    self.assertEquals(constants.NUM_TEST_USERS + 1,
                      users_dao.get_number_users())

    # Bad login: must not create another user.
    response = self.app.post('api/v1/users/facebook_sign_in/',
                             data=json.dumps({'access_token': 'bad token'}))
    result = json.loads(response.data)
    self.assertFalse(result['success'])
    self.assertEquals(constants.NUM_TEST_USERS + 1,
                      users_dao.get_number_users())
def tag_list():
    """Render the tag-list page from the site's stored JSON."""
    site = json.loads(konata.read_site(db_name))
    tags = json.loads(konata.tags_list(db_name))
    app.logger.debug(tags)
    return render_template('tag_list.html.ja', list=tags, site=site)
def test_upload_errors(self): """ Check the errors while uploading files. """ response = requests.get(__db_url__) value = response.json() if value: db_length = len(value['databases']) last_db_id = value['databases'][db_length-1]['id'] res = requests.put(__db_url__ + str(last_db_id) + '/deployment/', files={'file': open('test-files/images.png', 'rb')}) assert res.status_code == 200 result = json.loads(res.content) self.assertEqual(result['status'],'failure') self.assertEqual(result['error'],'Invalid file type.') res = requests.put(__db_url__ + str(last_db_id) + '/deployment/', files={'file': open('test-files/Invalid.xml', 'rb')}) assert res.status_code == 200 result = json.loads(res.content) self.assertEqual(result['status'],'failure') self.assertEqual(result['error'],'Invalid file content.') res = requests.put(__db_url__ + str(last_db_id) + '/deployment/', files={'file': open('test-files/Sample.xml', 'rb')}) assert res.status_code == 200 result = json.loads(res.content) self.assertEqual(result['status'],'failure') self.assertEqual(result['error'],'Invalid file content.') res = requests.put(__db_url__ + str(last_db_id) + '/deployment/', files={'file': open('test-files/deployment.xml', 'rb')}) assert res.status_code == 200 result = json.loads(res.content) self.assertEqual(result['status'],'success') else: print "The database list is empty"
def verify_pkcs7(pkcs7_sig, document):
    """Verify a PKCS#7 signature against a JSON document using the bundled AWS certs.

    Returns True when the verified payload equals the document; raises on any
    verification failure. NOTE: Python 2 module (xrange).
    """
    try:
        raw_sig = str(pkcs7_sig).encode('ascii')
        smime = SMIME.SMIME()
        # Public key cert used by the remote client when signing the message.
        signer_cert = X509.load_cert(os.path.join(config.APP_STATIC, 'AWSpubkey'))
        cert_stack = X509.X509_Stack()
        cert_stack.push(signer_cert)
        smime.set_x509_stack(cert_stack)
        # Public cert for the CA which signed the above certificate.
        cert_store = X509.X509_Store()
        cert_store.load_info(os.path.join(config.APP_STATIC, 'AWSpubkey'))
        smime.set_x509_store(cert_store)
        # Re-wrap the signature so it fits base64 line-length standards.
        cooked_sig = '\n'.join(raw_sig[pos:pos + 76]
                               for pos in xrange(0, len(raw_sig), 76))
        # Wrap the signature in a PKCS7 block ...
        wrapped = ("-----BEGIN PKCS7-----\n" + cooked_sig +
                   "\n-----END PKCS7-----").encode('ascii')
        # ... and load it into an SMIME p7 object through a BIO I/O buffer.
        buf = BIO.MemoryBuffer(wrapped)
        p7 = SMIME.load_pkcs7_bio(buf)
        # Finally, compare the verified payload against the document.
        if dict(json.loads(smime.verify(p7))) == dict(json.loads(document)):
            return True
        else:
            return False
    except Exception:
        raise Exception("INVALID CLIENT MESSAGE SIGNATURE")
def entry(client):
    """API exposes entries"""
    def fetch(url, expected_status, check_mime=True):
        # GET the URL, assert status (and mime type when asked), return payload.
        response = client.get(url)
        assert response.status_code == expected_status
        if check_mime:
            assert response.content_type == "application/json"
        return var(json.loads(response.data.obj))

    assert fetch("/api/1/entry/?id=donri", 200) == {
        "id": "donri", "type": "gismu", "affixes": ["dor", "do'i"]}
    assert fetch("/api/1/entry/?id=donri&locale=en", 200, check_mime=False) == {
        "id": "donri",
        "type": "gismu",
        "affixes": ["dor", "do'i"],
        "definition": "x1 is the daytime...",
        "notes": "See also {nicte}...",
    }
    # Missing id, unknown id, and unknown locale are all JSON errors.
    assert "error" in fetch("/api/1/entry/", 400)
    assert "error" in fetch("/api/1/entry/?id=undef", 404)
    assert "error" in fetch("/api/1/entry/?id=donri&locale=zzz", 400)
def on_update(self, updates, original):
    """Normalise incoming dictionary updates before they are stored."""
    if 'content' in updates:
        # Works around Eve behaviour which creates a sub-dict on each "." it
        # finds in keys, cf. SDESK-3083: re-read content from the raw body.
        try:
            updates['content'] = json.loads(request.data.decode('utf-8'))['content']
        except (KeyError, JSONDecodeError, RuntimeError):
            # request.data is not set during tests, so we ignore those errors.
            pass

    # Parse the JSON-encoded list variant if it was provided instead.
    if updates.get('content_list'):
        updates['content'] = json.loads(updates.pop('content_list'))

    if 'type' not in original:
        self.__set_default(updates)
    self._validate_dictionary(updates, original)

    # Apply manual word changes for plain dictionaries: truthy values add
    # the word, falsy values remove it.
    if original.get('type', DictionaryType.DICTIONARY.value) == DictionaryType.DICTIONARY.value:
        words = fetch_dict(original).copy()
        for word, value in updates.get('content', {}).items():
            if value:
                add_words(words, word, value)
            else:
                words.pop(word, None)
        updates['content'] = words

    # Merge in an uploaded dictionary file, if any, and persist.
    if updates.get(DICTIONARY_FILE):
        file_words = read_from_file(updates)
        merge(updates, file_words)
        store_dict(updates, original)
def test_view_topic_likes(self):
    """The likes listing counts correctly and puts the viewing liker first."""
    topic = Topic(title=u'hello', content=u'', user_id=1)
    db.session.add(topic)
    db.session.commit()

    # 90 users (ids 10..99) who all like the topic.
    for uid in range(10, 100):
        db.session.add(TopicLike(user_id=uid, topic_id=topic.id))
        username = 'foo-%d' % uid
        db.session.add(User(id=uid, username=username,
                            email='*****@*****.**' % username))
    db.session.commit()

    url = '/api/topics/%d/likes' % topic.id
    rv = self.client.get(url)
    payload = json.loads(rv.data)
    assert payload['pagination']['total'] == 90

    db.session.add(TopicLike(user_id=1, topic_id=topic.id))
    db.session.commit()

    # A different authenticated viewer does not see user 1 first.
    rv = self.client.get(url, headers=self.get_authorized_header(user_id=2))
    payload = json.loads(rv.data)
    assert payload['data'][0]['id'] != 1

    # Each liker sees themselves first in the listing.
    rv = self.client.get(url, headers=self.get_authorized_header(user_id=1))
    payload = json.loads(rv.data)
    assert payload['data'][0]['id'] == 1

    rv = self.client.get(url, headers=self.get_authorized_header(user_id=12))
    payload = json.loads(rv.data)
    assert payload['data'][0]['id'] == 12
def test_start_record_match(self):
    """Recording a match start succeeds and the match shows up first in the list."""
    start_payload = {
        'mode': 'annihilation',
        'map': 'hlw_woodland',
        'type': '2vs2',
        'start_date': datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"),
        'players': {"9": {"name": "CPU #4", "color": "#ff0000"},
                    "4": {"name": "Sandern", "color": "#c87814",
                          "steamid": "STEAM_0:0:3833324"}},
    }
    response = self.app.post('/matches/record_start',
                             data=json.dumps(start_payload),
                             content_type='application/json',
                             environ_base={'REMOTE_ADDR': '127.0.0.1'})
    self.assertEqual(response.status_code, 200)
    result = json.loads(response.data)
    self.assertIn('success', result)
    self.assertEqual(result['success'], True, msg=result)

    # The newly recorded match is the newest entry in the listing.
    response = self.app.post('/matches/list', data=json.dumps({}))
    self.assertEqual(response.status_code, 200)
    listing = json.loads(response.data)
    self.assertIn('matches', listing)
    self.assertIn('page', listing)
    self.assertIn('per_page', listing)
    self.assertIn('total', listing)
    self.assertEqual(listing['page'], 1)
    newest = listing['matches'][0]
    self.assertEqual(newest['map'], 'hlw_woodland')
    self.assertEqual(newest['mode'], 'annihilation')
    self.assertEqual(newest['type'], '2vs2')
def test(config):
    """Test the given Flask configuration.

    If configured correctly, an error will be tracked by Exceptional for your
    app. Unlike the initialized extension, this test will post data to
    Exceptional, regardless of the configured ``DEBUG`` setting.

    :param config: The Flask application configuration object to test. Accepts
        either :class:`flask.Config` or the object types allowed by
        :meth:`flask.Config.from_object`.
    """
    context = getattr(stack.top, "exceptional_context", None)
    app = Flask(__name__)
    exceptional = Exceptional()

    if isinstance(config, Config):
        app.config = config
    else:
        app.config.from_object(config)
    assert "EXCEPTIONAL_API_KEY" in app.config

    # Force non-debug, non-testing while wiring up the extension so the
    # handler is installed, then flip testing back on for the request.
    app.debug = False
    app.testing = False
    exceptional.init_app(app)
    app.testing = True

    @app.route("/exception")
    def exception():
        setattr(stack.top, "exceptional_context", context)
        message = "Congratulations! Your application is configured for Exceptional error tracking."  # NOQA
        raise Exception(message)

    with app.test_client() as client:
        client.get("/exception")
        json.loads(g.exceptional)
def spu_api(id_=None):
    """REST endpoint for SPU objects: GET fetches, POST creates, other verbs update."""
    if request.method == 'GET':
        spu = get_or_404(SPU, id_)
    elif request.method == 'POST':
        payload = json.loads(request.data)
        spu = wraps(do_commit(SPU(name=payload['name'])))
    else:
        payload = json.loads(request.data)
        name = payload.get('name')
        id_ = payload['id']
        published = payload.get('published')
        spu = get_or_404(SPU, id_)
        if name:
            spu.name = name
        # Tri-state: None leaves publication untouched; truthy publishes,
        # falsy unpublishes.
        if published is not None:
            if published:
                spu.publish()
            else:
                spu.unpublish()
        db.session.commit()
    return jsonify({
        'id': spu.id,
        'name': spu.name,
        'ocspu-id-list': [ocspu.id for ocspu in spu.ocspu_list],
        'published': spu.published,
    })
def main():
    """Render the advanced data editor, requiring time steps to be configured."""
    if not set(['timestep', 'start', 'end']).issubset(set(g.study_settings.keys())):
        flash('Please set your time steps.', category='danger')
        return redirect(url_for('project_settings.main'))

    # Scenario id -> name mapping for the template.
    scenarios = {sc.id: sc.name for sc in g.network.scenarios}

    # Default pivot setup; overridden by the session's saved input setup.
    setup_id = None
    filters = {}
    config = {}
    setups = get_input_setups(session['study_id'])
    if 'input_id' in session and session['input_id'] is not None:
        setup_id = session['input_id']
        pivot = setups[setup_id]
        if pivot:
            filters = json.loads(pivot.filters)
            config = json.loads(pivot.config)
    pivot_params = {'setup_id': setup_id, 'setups': setups,
                    'filters': filters, 'config': config}
    ttypes = get_used_ttypes(g.conn)
    return render_template('data-editor-advanced.html', scenarios=scenarios,
                           ttypes=ttypes, pivot_params=pivot_params)
def createEvent():
    """Create an event from the posted form and e-mail an invitation to each user.

    POST: builds the event document, auto-registering unknown e-mail addresses,
    stores it, and mails every invitee a link. GET: renders the creation form.
    """
    if request.method == "POST":
        users = json.loads(request.form["users"])
        event = {"eventtype": request.form["eventtype"],
                 "name": request.form["name"],
                 "duration": request.form["duration"],
                 "users": [],
                 "owner": session["email"],
                 "comments": [],
                 "finished": False}
        for email in users:
            user = db.users.find_one({"email": email})
            if user is not None:  # identity check instead of `!= None`
                event["users"].append(user["_id"])
            else:
                # Unknown address: create a stub user so they can be invited.
                new_id = db.users.insert({"email": email})
                event["users"].append(new_id)
        dates_raw = json.loads(request.form["dates"])
        event["dates"] = [{"date": datetime.fromtimestamp(stamp), "users": []}
                          for stamp in dates_raw]
        event_id = db.events.insert(event)
        for email in users:
            # Fixed typo in the mail body: "invitet" -> "invited".
            mailer.send_mail(
                email,
                "You've been invited to an event",
                "You have been invited to: " + event["name"] +
                "\nURL: " + url_for("viewEvent", eId=str(event_id), _external=True))
        return "OK"
    else:
        return render_template("event_create.html",
                               registeredUsers=db.users.distinct("email"))
def get_repositories():
    """Summarise the request context's repository data by repo-registry-id.

    Each repo-registry-id maps to a dict of the form:
        {'image_ids': [<image-id>, ...],
         'tags': {<tag-id>: <tag>, ...},
         'protected': true/false}

    :return: dictionary keyed by repo-registry-ids
    :rtype: dict
    """
    summaries = {}
    for repo_registry_id, repo in get_data().get('repos', {}).items():
        summaries[repo_registry_id] = {
            'image_ids': [image['id'] for image in json.loads(repo.images_json)],
            'tags': json.loads(repo.tags_json),
            'protected': repo.protected,
        }
    return summaries
def post_job(self, job_type, object_id):
    """POST a job of the given type for an object and return the decoded JSON body."""
    response = self.client.post('/job/%s/%s' % (job_type, object_id))
    body = response.get_data(as_text=True)
    return json.loads(body)
def get_status(self, job_id):
    """Fetch a job by id and return the 'status' field of its JSON response."""
    response = self.client.get('/job/%s' % job_id)
    payload = json.loads(response.get_data(as_text=True))
    return payload['status']
def test_api_can_post_a_question(self):
    """End-to-end exercise of the questions/answers API endpoints."""
    login = self.tester.post("/api/v1/auth/login", data=self.new_user)
    token = json.loads(login.data.decode())["access_token"]
    headers = {'Authorization': 'Bearer ' + token}

    def post(url, body):
        return self.tester.post(url, headers=headers,
                                content_type='application/json',
                                data=json.dumps(body))

    def get(url):
        return self.tester.get(url, headers=headers,
                               content_type='application/json')

    def delete(url):
        return self.tester.delete(url, headers=headers,
                                  content_type='application/json')

    def put(url, body=None):
        if body is None:
            return self.tester.put(url, headers=headers,
                                   content_type='application/json')
        return self.tester.put(url, headers=headers,
                               content_type='application/json',
                               data=json.dumps(body))

    self.assertEqual(post("/api/v1/questions", self.qst1).status_code, 201)
    self.assertEqual(post("/api/v1/questions", self.qst3).status_code, 201)
    # A question without a title is rejected.
    self.assertEqual(post("/api/v1/questions", self.qst2).status_code, 400)
    # Posting the same question again conflicts.
    self.assertEqual(post("/api/v1/questions", self.qst1).status_code, 409)
    # All available questions can be listed.
    self.assertEqual(get("/api/v1/questions").status_code, 200)
    # Deleting a non-existent id fails; an existing one succeeds.
    self.assertEqual(delete("/api/v1/questions/8").status_code, 404)
    self.assertEqual(delete("/api/v1/questions/2").status_code, 200)
    # Answers: unknown question id, valid answer, empty answer, duplicate.
    self.assertEqual(post("/api/v1/questions/8/answers", self.answer1).status_code, 404)
    self.assertEqual(post("/api/v1/questions/1/answers", self.answer1).status_code, 201)
    self.assertEqual(post("/api/v1/questions/1/answers", self.answer2).status_code, 400)
    self.assertEqual(post("/api/v1/questions/1/answers", self.answer1).status_code, 409)
    # A question and all of its answers can be fetched by id.
    self.assertEqual(get("/api/v1/questions/1").status_code, 200)
    # Questions and answers belonging to the current user.
    self.assertEqual(get("/api/v1/questions/userquestions").status_code, 200)
    self.assertEqual(get("/api/v1/questions/useranswers").status_code, 200)
    # Marking answers preferred / updating answers: wrong owner, missing
    # body, then a valid update.
    self.assertEqual(put("/api/v1/questions/4/answers/4").status_code, 403)
    self.assertEqual(put("/api/v1/questions/1/answers/4").status_code, 400)
    self.assertEqual(
        put("/api/v1/questions/1/answers/4", self.modify_answer1).status_code, 200)
def get(self, *args):
    """Search products matching the query-string filters.

    Query args (all optional): lookup_start_date, lookup_end_date,
    seller_name, product_name, product_id, product_code,
    seller_attribute_type_id (a JSON-encoded list), is_sale,
    is_display, is_discount, page_number, limit.

    Returns:
        200, {"message": "success", "result": result}

    Raises:
        KeyError / service-level validation errors are re-raised to the
        framework error handler after a traceback is printed.
        DatabaseCloseFail: if closing the connection fails.

    History:
        2020-12-31 (Shim Wondoo): created.
        2021-01-03 (Shim Wondoo): product list search; login decorator planned.
    """
    # BUGFIX: predefine `connection` so the `finally` block cannot hit a
    # NameError (which used to be masked as DatabaseCloseFail) when
    # get_connection() itself raises.
    connection = None
    try:
        search_condition = {
            # Sellers (permission_type_id == 2) may only see their own
            # products; other roles search across all sellers.
            'seller_id': g.account_id if g.permission_type_id == 2 else None,
            'lookup_start_date': request.args.get('lookup_start_date', None),
            'lookup_end_date': request.args.get('lookup_end_date', None),
            'seller_name': request.args.get('seller_name', None),
            'product_name': request.args.get('product_name', None),
            'product_id': request.args.get('product_id', None),
            'product_code': request.args.get('product_code', None),
            # The seller types arrive as a JSON list in a single query arg.
            'seller_attribute_type_ids': json.loads(
                request.args.get('seller_attribute_type_id'))
                if request.args.get('seller_attribute_type_id') else None,
            'is_sale': request.args.get('is_sale', None),
            'is_display': request.args.get('is_display', None),
            'is_discount': request.args.get('is_discount', None),
            'page_number': request.args.get('page_number'),
            'limit': request.args.get('limit')
        }
        connection = get_connection(self.database)
        result = self.service.search_product_service(
            connection, search_condition)
        return jsonify({'message': 'success', 'result': result})
    except KeyError as e:
        traceback.print_exc()
        raise e
    except Exception as e:
        traceback.print_exc()
        raise e
    finally:
        try:
            if connection:
                connection.close()
        except Exception:
            raise DatabaseCloseFail('database close fail')
def post(self, *args):
    """Register a new product (base data, option stocks, images).

    Form-Data: seller_id, is_sale, is_display, main_category_id,
    sub_category_id, is_product_notice, manufacturer,
    manufacturing_date, product_origin_type_id, product_name,
    description, detail_information, options (JSON list),
    minimum/maximum_quantity, origin_price, discount_rate,
    discounted_price, discount_start_date, discount_end_date;
    plus up to five files under 'image_files'.

    Returns:
        200, {'message': 'success'} on success.

    Raises:
        400 on key/validation errors, 413 on image policy violations,
        500 on service/database failures (see service layer);
        DatabaseCloseFail if closing the connection fails.

    History:
        2020-12-29 (Shim Wondoo): created.
        2021-01-03 (Shim Wondoo): parameter validation (Enum/NotEmpty).
        2021-01-05 (Shim Wondoo): image upload moved to the last step —
            S3 uploads cannot be rolled back when a later DB insert
            fails; initial bookmark-volume row added.
    """
    # BUGFIX: predefine `connection` — previously, a failure before
    # get_connection() (e.g. malformed 'options' JSON) made the except
    # block's rollback raise NameError, hiding the real error.
    connection = None
    try:
        data = {
            'seller_id': request.form.get('seller_id'),
            'account_id': g.account_id,
            'is_sale': request.form.get('is_sale'),
            'is_display': request.form.get('is_display'),
            'main_category_id': request.form.get('main_category_id'),
            'sub_category_id': request.form.get('sub_category_id'),
            'is_product_notice': request.form.get('is_product_notice'),
            'manufacturer': request.form.get('manufacturer'),
            'manufacturing_date': request.form.get('manufacturing_date'),
            'product_origin_type_id': request.form.get('product_origin_type_id'),
            'product_name': request.form.get('product_name'),
            'description': request.form.get('description'),
            'detail_information': request.form.get('detail_information'),
            'minimum_quantity': request.form.get('minimum_quantity'),
            'maximum_quantity': request.form.get('maximum_quantity'),
            'origin_price': request.form.get('origin_price'),
            'discount_rate': request.form.get('discount_rate'),
            'discounted_price': request.form.get('discounted_price'),
            'discount_start_date': request.form.get('discount_start_date'),
            'discount_end_date': request.form.get('discount_end_date')
        }
        product_images = request.files.getlist("image_files")
        stocks = json.loads(request.form.get('options'))
        connection = get_connection(self.database)
        product_id = self.service.create_product_service(connection, data)
        product_code = self.service.update_product_code_service(
            connection, product_id)
        self.service.create_stock_service(connection, product_id, stocks)
        self.service.create_product_history_service(
            connection, product_id, data)
        self.service.create_product_sales_volumes_service(
            connection, product_id)
        self.service.create_bookmark_volumes_service(
            connection, product_id)
        # Images go to S3 last: uploads cannot be rolled back, so only
        # attempt them after every DB insert has succeeded.
        self.service.create_product_images_service(connection,
                                                   data['seller_id'],
                                                   product_id,
                                                   product_code,
                                                   product_images)
        connection.commit()
        return jsonify({'message': 'success'}), 200
    except Exception as e:
        traceback.print_exc()
        if connection:
            connection.rollback()
        raise e
    finally:
        try:
            if connection:
                connection.close()
        except Exception:
            traceback.print_exc()
            raise DatabaseCloseFail('database close fail')
def test_employee_history(self):
    """Audit trail for an employee after edits, creates and a terminate.

    Arrange: load sample fixtures.  Act: edit five existing detail
    registrations, create five new ones, terminate the employee.
    Assert: the history endpoint lists one entry per change plus the
    original fixture-import entry.
    """
    # Create and edit a bunch of stuff, followed by a terminate
    # Arrange
    self.load_sample_structures()
    userid = "53181ed2-f1de-4c4a-a8fd-ab358c2c454a"

    # Act: move the validity start of each existing detail registration
    # to 2018-04-01 (one edit per detail type).
    self.assertRequest('/service/details/edit', json=[
        {
            "type": "engagement",
            "uuid": 'd000591f-8705-4324-897a-075e3623f37b',
            "data": {
                "person": {"uuid": userid},
                "validity": {"from": "2018-04-01", },
            },
        },
        {
            "type": "association",
            "uuid": 'c2153d5d-4a2b-492d-a18c-c498f7bb6221',
            "data": {
                "person": {"uuid": userid},
                "validity": {"from": "2018-04-01", },
            },
        },
        {
            "type": "role",
            "uuid": '1b20d0b9-96a0-42a6-b196-293bb86e62e8',
            "data": {
                "person": {"uuid": userid},
                "validity": {"from": "2018-04-01", },
            },
        },
        {
            "type": "leave",
            "uuid": 'b807628c-030c-4f5f-a438-de41c1f26ba5',
            "data": {
                "person": {"uuid": userid},
                "validity": {"from": "2018-04-01", },
            },
        },
        {
            "type": "manager",
            "uuid": '05609702-977f-4869-9fb4-50ad74c6999a',
            "data": {
                "person": {"uuid": userid},
                "validity": {"from": "2018-04-01", },
            },
        },
    ])

    # Create one new registration of each detail type, each valid for a
    # single day (2017-12-01).
    self.assertRequest('/service/details/create', json=[
        {
            "type": "engagement",
            "person": {"uuid": userid},
            "org_unit": {'uuid': "9d07123e-47ac-4a9a-88c8-da82e3a4bc9e"},
            "job_function": {'uuid': "3ef81e52-0deb-487d-9d0e-a69bbe0277d8"},
            "engagement_type": {'uuid': "62ec821f-4179-4758-bfdf-134529d186e9"},
            "validity": {"from": "2017-12-01", "to": "2017-12-01", }
        },
        {
            "type": "association",
            "person": {"uuid": userid},
            "org_unit": {'uuid': "04c78fc2-72d2-4d02-b55f-807af19eac48"},
            "job_function": {'uuid': "3ef81e52-0deb-487d-9d0e-a69bbe0277d8"},
            "association_type": {'uuid': "62ec821f-4179-4758-bfdf-134529d186e9"},
            "validity": {"from": "2017-12-01", "to": "2017-12-01", },
        },
        {
            "type": "role",
            "person": {"uuid": userid},
            "org_unit": {'uuid': "9d07123e-47ac-4a9a-88c8-da82e3a4bc9e"},
            "role_type": {'uuid': "62ec821f-4179-4758-bfdf-134529d186e9"},
            "validity": {"from": "2017-12-01", "to": "2017-12-01", },
        },
        {
            "type": "leave",
            "person": {"uuid": userid},
            "leave_type": {'uuid': "62ec821f-4179-4758-bfdf-134529d186e9"},
            "validity": {"from": "2017-12-01", "to": "2017-12-01", },
        },
        {
            "type": "manager",
            "person": {"uuid": userid},
            "org_unit": {'uuid': "9d07123e-47ac-4a9a-88c8-da82e3a4bc9e"},
            "responsibility": [{'uuid': "3ef81e52-0deb-487d-9d0e-a69bbe0277d8"}],
            "manager_type": {'uuid': "62ec821f-4179-4758-bfdf-134529d186e9"},
            "manager_level": {"uuid": "1edc778c-bf9b-4e7e-b287-9adecd6ee293"},
            "validity": {"from": "2017-12-01", "to": "2017-12-01", },
        },
    ])

    # Terminate the employee; the endpoint echoes the user uuid back.
    self.assertRequestResponse('/service/e/{}/terminate'.format(userid),
                               userid,
                               json={"validity": {"to": "2017-12-01"}})

    # One history entry per change above (Danish action labels), ending
    # with the entry created by the original fixture import.
    expected_result = [{
        'action': 'Afsluttet',
        'life_cycle_code': 'Rettet',
        'user_ref': '42c432e8-9c4a-11e6-9f62-873cf34a735f'
    }, {
        'action': 'Opret leder',
        'life_cycle_code': 'Rettet',
        'user_ref': '42c432e8-9c4a-11e6-9f62-873cf34a735f'
    }, {
        'action': 'Opret orlov',
        'life_cycle_code': 'Rettet',
        'user_ref': '42c432e8-9c4a-11e6-9f62-873cf34a735f'
    }, {
        'action': 'Opret rolle',
        'life_cycle_code': 'Rettet',
        'user_ref': '42c432e8-9c4a-11e6-9f62-873cf34a735f'
    }, {
        'action': 'Opret tilknytning',
        'life_cycle_code': 'Rettet',
        'user_ref': '42c432e8-9c4a-11e6-9f62-873cf34a735f'
    }, {
        'action': 'Opret engagement',
        'life_cycle_code': 'Rettet',
        'user_ref': '42c432e8-9c4a-11e6-9f62-873cf34a735f'
    }, {
        'action': 'Rediger leder',
        'life_cycle_code': 'Rettet',
        'user_ref': '42c432e8-9c4a-11e6-9f62-873cf34a735f'
    }, {
        'action': 'Rediger orlov',
        'life_cycle_code': 'Rettet',
        'user_ref': '42c432e8-9c4a-11e6-9f62-873cf34a735f'
    }, {
        'action': 'Rediger rolle',
        'life_cycle_code': 'Rettet',
        'user_ref': '42c432e8-9c4a-11e6-9f62-873cf34a735f'
    }, {
        'action': 'Rediger tilknytning',
        'life_cycle_code': 'Rettet',
        'user_ref': '42c432e8-9c4a-11e6-9f62-873cf34a735f'
    }, {
        'action': 'Rediger engagement',
        'life_cycle_code': 'Rettet',
        'user_ref': '42c432e8-9c4a-11e6-9f62-873cf34a735f'
    }, {
        'action': None,
        'life_cycle_code': 'Importeret',
        'user_ref': '42c432e8-9c4a-11e6-9f62-873cf34a735f'
    }]

    # Assert
    r = self.request('/service/e/{}/history/'.format(userid), )
    self.assert200(r)
    actual_result = json.loads(r.get_data())

    # 'From' and 'to' contain timestamps generated by the database,
    # and as such are unreliable in testing
    for obj in actual_result:
        del obj['from']
        del obj['to']

    self.assertEqual(expected_result, actual_result)
def test_org_unit_history(self):
    """Audit trail for an org unit: create, two renames, a terminate.

    The history endpoint should report one entry per change plus the
    unit's own creation entry ('Oprettet i MO').
    """
    # A create, some edits, followed by a termination
    # Arrange
    self.load_sample_structures()

    # Act: create a fresh unit under a fixture parent.
    r = self.request('/service/ou/create', json={
        "name": "History test",
        "parent": {'uuid': "2874e1dc-85e6-4269-823a-e1125484dfd3"},
        "org_unit_type": {'uuid': "3ef81e52-0deb-487d-9d0e-a69bbe0277d8"},
        "validity": {
            "from": "2016-02-04",
            "to": "2017-10-21",
        }
    })
    self.assert200(r)
    # The create endpoint returns the new unit's uuid as its JSON body.
    unitid = json.loads(r.get_data())

    # Two renames with different validity starts.
    self.assertRequest('/service/details/edit', json={
        "type": "org_unit",
        "data": {
            "name": "History test II",
            "org_unit": {"uuid": unitid},
            "validity": {
                "from": "2016-01-05",
            }
        }
    })
    self.assertRequest('/service/details/edit', json={
        "type": "org_unit",
        "data": {
            "name": "History test III",
            "org_unit": {"uuid": unitid},
            "validity": {
                "from": "2016-01-12",
            }
        }
    })

    # Attach a manager registration to the unit (endpoint echoes uuid).
    self.assertRequestResponse(
        '/service/ou/{}/create'.format(unitid),
        unitid,
        json=[{
            "type": "manager",
            "job_function": {'uuid': "3ef81e52-0deb-487d-9d0e-a69bbe0277d8"},
            "engagement_type": {'uuid': "62ec821f-4179-4758-bfdf-134529d186e9"},
            "validity": {
                "from": "2017-12-01",
                "to": "2017-12-01",
            }
        }],
    )

    # Terminate the unit.
    self.assertRequestResponse('/service/ou/{}/terminate'.format(unitid),
                               unitid,
                               json={"validity": {"to": "2017-12-01"}})

    # Newest change first, creation entry last (Danish action labels).
    expected_result = [{
        'action': 'Afslut enhed',
        'life_cycle_code': 'Rettet',
        'user_ref': '42c432e8-9c4a-11e6-9f62-873cf34a735f'
    }, {
        'action': 'Opret leder',
        'life_cycle_code': 'Rettet',
        'user_ref': '42c432e8-9c4a-11e6-9f62-873cf34a735f'
    }, {
        'action': 'Rediger organisationsenhed',
        'life_cycle_code': 'Rettet',
        'user_ref': '42c432e8-9c4a-11e6-9f62-873cf34a735f'
    }, {
        'action': 'Rediger organisationsenhed',
        'life_cycle_code': 'Rettet',
        'user_ref': '42c432e8-9c4a-11e6-9f62-873cf34a735f'
    }, {
        'action': 'Oprettet i MO',
        'life_cycle_code': 'Opstaaet',
        'user_ref': '42c432e8-9c4a-11e6-9f62-873cf34a735f'
    }]

    # Assert
    r = self.request('/service/ou/{}/history/'.format(unitid), )
    actual_result = json.loads(r.get_data())

    # 'From' and 'to' contain timestamps generated by the database,
    # and as such are unreliable in testing
    for obj in actual_result:
        del obj['from']
        del obj['to']

    self.assertEqual(expected_result, actual_result)
def test_ping(self): response = self.client.get("/ping") data = response.data.decode("utf8") self.assertEqual(response.status_code, 200, data) data = json.loads(data) self.assertEqual(data, {"response": "pong"})
initials = arg print ('username: '******'password: '******'url: '+ url) print ('initials: ' + initials) if __name__ == '__main__': main(sys.argv[1:]) time.sleep(10) tunnels = requests.request("GET", \ "http://ngrok:4040/api/tunnels", \ verify=False) tunnels = json.loads(tunnels.text) tunnels = tunnels["tunnels"] for tunnel in tunnels: if tunnel['proto'] == 'http': msg = tunnel['public_url'] print(msg) get_maps() initialize_client_locations() create_notification(msg) app.run(host="0.0.0.0", port=5004,threaded=True,debug=False)
def deleteAthlete(): athleteJson = json.loads(request.data) removeAthlete(athleteJson.get('id')) return jsonify([item.serialize() for item in athletes]), 200, { 'ContentType': 'application/json' }
def createAthlete(data): athleteJson = json.loads(data) return Athlete(athleteJson.get('id'), athleteJson.get('firstName'), athleteJson.get('secondName'), athleteJson.get('age'), athleteJson.get('disc'), athleteJson.get('club'), athleteJson.get('nationality'))
def loads(*args, **kwargs): with app.app_context(): return flask_json.loads(*args, **kwargs)
def get_food_details(): return json.loads(open('/Users/lixiwei-mac/Documents/IdeaProjects/rhinotech_spider/boohee_spider/boohee/food_detail.json').read())
def send_sms_code(): """发送短信验证码 1.获取参数:手机号,验证码,uuid 2.判断是否缺少参数,并对手机号格式进行校验 3.获取服务器存储的验证码 4.跟客户端传入的验证码进行对比 5.如果对比成功就生成短信验证码 6.调用单例类发送短信 7.如果发送短信成功,就保存短信验证码到redis数据库 8.响应发送短信的结果 """ # 获取参数:手机号,验证码,uuid josn_str = request.data josn_dict = json.loads(josn_str) print ">" * 50, josn_dict mobile = josn_dict.get("mobile") imageCode_client = josn_dict.get("imageCode") uuid = josn_dict.get("uuid") # 判断是否缺少参数,并对手机号格式进行校验 if not all([mobile, imageCode_client, uuid]): return jsonify(errno=RET.PARAMERR, errmsg="缺少参数") if not re.match(r"^1([358][0-9]|4[579]|66|7[0135678]|9[89])[0-9]{8}$", mobile): return jsonify(errno=RET.PARAMERR, errmsg="手机号格式错误") # 获取服务器存储的验证码 try: imageCode_server = redis_store.get('ImageCode:%s' % uuid) except Exception as e: current_app.logger.error(e) return jsonify(errno=RET.DBERR, errmsg="查询验证码错误") # 判断取得的数据是否为空 if not imageCode_server: return jsonify(errno=RET.DBERR, errmsg="查询验证码不存在") # 跟客户端传入的验证码进行对比 if imageCode_client.lower() != imageCode_server.lower(): return jsonify(errno=RET.DBERR, errmsg="验证码输入有误") # 如果对比成功就生成短信验证码 # "%06d"-不够六位补零 sms_code = "%06d" % random.randint(0, 999999) # result = CCP().send_sms_code(mobile, [sms_code, 5], 1) # # # if result != 1: # return jsonify(errno=RET.THIRDERR, errmsg="发送短信验证码失败") print "_" * 30, sms_code try: redis_store.set("SMS:%s" % mobile, sms_code, constants.SMS_CODE_REDIS_EXPIRES) except Exception as e: current_app.logger.error(e) return jsonify(errno=RET.DBERR, errmsg='存储短信验证码失败') # 8.响应发送短信的结果 return jsonify(errno=RET.OK, errmsg='发送短信验证码成功')
def test_delete_message(self): """Ensure the messages are being deleted""" rv = self.app.get('/delete/1') data = json.loads(rv.data) self.assertEqual(data['status'], 1)
def index():
    """URL-shortener landing page: render stats, or create a short URL.

    GET renders the form with aggregate stats.  POST verifies reCAPTCHA,
    rejects blacklisted/invalid URLs, then either registers the
    requested custom alias or generates one from the next row id.
    """
    form = Url_form(request.form)
    tot_urls = Shortto.query.count()
    tot_clicks_obj = db.session.query(Shortto, db.func.sum(Shortto.clicks))
    tot_clicks = tot_clicks_obj[0][1]
    if request.method == 'POST' and request.form['from_url']:
        # Recaptcha Verify
        # NOTE(review): secret key is hardcoded in source — should be
        # rotated and moved to configuration.
        g_captcha_response = request.form['g-recaptcha-response']
        data = {
            'secret': '6LeFWDYUAAAAAAP1FaIZ8Q6NtJxHO9n3Sa1l6RKu',
            'response': g_captcha_response,
            'remoteip': request.remote_addr
        }
        post_obj = requests.post(
            "https://www.google.com/recaptcha/api/siteverify", data=data)
        if post_obj.status_code == 200:
            # All Fine
            json_data = json.loads(post_obj.text)
            if json_data['success'] == True:
                # Passed
                done = True
            else:
                # Surface Google's first error code to the error page.
                return redirect(
                    url_for('index2',
                            error_code=str(json_data['error-codes'][0])))
        else:
            return redirect('/', 320)
        # Recaptcha End

        # Reject source URLs whose host matches the blacklist, in either
        # '://host' or 'www.host' form.
        for url_s in blacklist:
            url_s_1 = '://' + url_s
            url_s_2 = 'www.' + url_s
            if (url_s_1 in (request.form['from_url']).lower()) or (url_s_2 in (
                    request.form['from_url']).lower()):
                return render_template('index.html',
                                       blacklist_url=True,
                                       tot_clicks=tot_clicks,
                                       tot_urls=tot_urls)
        if not validators.url(request.form['from_url']):
            return render_template('index.html',
                                   url_error=True,
                                   tot_clicks=tot_clicks,
                                   tot_urls=tot_urls)
        if request.form['to_url']:
            # Custom alias requested: letters, digits and dashes only.
            if not re.match("^[A-Za-z0-9-]+$", request.form['to_url']):
                return render_template('index.html',
                                       code=320,
                                       error_url_type=True,
                                       tot_clicks=tot_clicks,
                                       tot_urls=tot_urls)
            # Check if unique or not
            if Shortto.query.filter_by(
                    short_url=request.form['to_url']).count() > 0:
                # Already Used Short Url
                error_url = True
                return render_template('index.html',
                                       code=320,
                                       error_url=error_url,
                                       tot_clicks=tot_clicks,
                                       tot_urls=tot_urls)
            short_url = request.form['to_url']
            # Url Not Present
            temp = Shortto(big_url=request.form['from_url'],
                           short_url=request.form['to_url'])
            db.session.add(temp)
            db.session.commit()
            return redirect(url_for('short_done', short_url=short_url))
        else:
            # No alias requested: derive one from the next row number,
            # probing forward until an unused alias is found.
            rows = Shortto.query.count()
            rows = int(rows)
            rows += 1
            short_url = idtoshort_url(int(rows))
            while Shortto.query.filter_by(short_url=short_url).count() > 0:
                rows += 1
                short_url = idtoshort_url(rows)
            temp = Shortto(big_url=request.form['from_url'],
                           short_url=short_url)
            db.session.add(temp)
            db.session.commit()
            # prev_url_data = request.cookies.get(COOKIE_VAR)
            # prev_url_data_split = []
            # if prev_url_data:
            #     prev_url_data_split = prev_url_data.split('#')  # This variable has previous data
            return redirect(url_for('short_done', short_url=short_url))
            # resp.set_cookie(COOKIE_VAR, '#'.join(prev_url_data_split))
    # Just Index Render get analytics data
    return render_template('index.html',
                           form=form,
                           tot_clicks=tot_clicks,
                           tot_urls=tot_urls)
def stats(): t0 = time.clock() filename = os.getcwd() + "/statsapp/static/champions/champion.json" pp = pprint.PrettyPrinter(indent=4) result = request.form['summoner'] lookup_summoner = requests.get(na_summoner + urlify(result, len(result)) + api_key) try_lookup = json.loads(lookup_summoner.text) if (int(len(try_lookup)) != 7): return redirect('/') lookup_matches = requests.get( na_account + json.loads(lookup_summoner.text)['accountId'] + api_key) list = json.loads(lookup_matches.text) matches = list['matches'] matches = matches[:NUM_MATCHES] game_id = [] game_data = [] wins_loss = [] start_time = time.time() - t0 ## timing variable loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) loop.run_until_complete(main(matches, game_id, game_data)) ## Async Looking up matches print(time.time() - t0 - start_time, "elasped time") games_kdas = [] i = 0 for game in game_data: list = json.loads(game)['participants'] #print(list) for player in list: ##print("kills " + str(player['stats']['kills']) + " participantId " + str(player['participantId']) + " deaths " + str(player['stats']['deaths']) + " assists " + str(player['stats']['assists'])) if (player['championId'] == game_id[i][1]): wins_loss.append(player['stats']['win']) kda = { "kills": player['stats']['kills'], "id": player['participantId'], "deaths": player['stats']['deaths'], "assists": player['stats']['assists'] } games_kdas.append(kda) print(player['timeline']['lane']) i += 1 ### making into a win loss function with open(filename, 'r') as f: datastore = json.loads(f.read()) print(time.time() - t0 - start_time, "elasped time") champs = Champions.champions info = zip(matches, replace_boolean(wins_loss), games_kdas) return render_template("matches.html", result=result, value=matches, datastore=datastore, champs=champs, info=info, win=win_count(wins_loss), loss=NUM_MATCHES - win_count(wins_loss), num_matches=NUM_MATCHES)
def get_json(client, url): """Get json from client.""" resp = client.get(url, headers={'Accept': 'application/json'}) assert resp.status_code == 200, \ 'error %d on get to %s:\n%s' % (resp.status_code, url, resp.get_data().decode('utf-8')) return json.loads(resp.get_data())
def modal_details(region): with open('data/model.json') as f: data = json.loads(f.read()) data = json.dumps(data) return data
def run(project): start_time = time.time() try: task = utils.decode_unicode_obj(json.loads(request.form['task'])) except Exception: result = { 'fetch_result': "", 'logs': u'task json error', 'follows': [], 'messages': [], 'result': None, 'time': time.time() - start_time, } return json.dumps(utils.unicode_obj(result)), 200, { 'Content-Type': 'application/json' } project_info = { 'name': project, 'status': 'DEBUG', 'script': request.form['script'], } fetch_result = {} try: fetch_result = app.config['fetch'](task) response = rebuild_response(fetch_result) module = ProjectManager.build_module(project_info, {'debugger': True}) ret = module['instance'].run(module['module'], task, response) except Exception: type, value, tb = sys.exc_info() tb = utils.hide_me(tb, globals()) logs = ''.join(traceback.format_exception(type, value, tb)) result = { 'fetch_result': fetch_result, 'logs': logs, 'follows': [], 'messages': [], 'result': None, 'time': time.time() - start_time, } else: result = { 'fetch_result': fetch_result, 'logs': ret.logstr(), 'follows': ret.follows, 'messages': ret.messages, 'result': ret.result, 'time': time.time() - start_time, } result['fetch_result']['content'] = response.text try: # binary data can't encode to JSON, encode result as unicode obj # before send it to frontend return json.dumps(utils.unicode_obj(result)), 200, { 'Content-Type': 'application/json' } except Exception: type, value, tb = sys.exc_info() tb = utils.hide_me(tb, globals()) logs = ''.join(traceback.format_exception(type, value, tb)) result = { 'fetch_result': "", 'logs': logs, 'follows': [], 'messages': [], 'result': None, 'time': time.time() - start_time, } return json.dumps(utils.unicode_obj(result)), 200, { 'Content-Type': 'application/json' }
def api_maintenance_set_wheels(): response = json.loads(request.data.decode()) new_speed = my_rover.set_new_speed(response["wheels"]) return json.jsonify({"success": True, "wheels": my_rover.speed})
def map_info(): with open('data/mapinfo.json') as f: data = json.loads(f.read()) return data
def api_add_notifications(): response = json.loads(request.data.decode()) my_rover.remark = response["notifications"] return json.jsonify({"success": True, "notifications": my_rover.remark})
def api_maintenance_set_arm(): response = json.loads(request.data.decode()) my_rover.set_new_arm(response["arm"]) return json.jsonify({"success": True, "arm": my_rover.arm})
def test_get_last_ten_by_time_full_list(self): results = self.app.get('/feeds/api/v1.0/events/time/1421606501') self.assertDictEqual(json.loads(results.data), { "events": self.sample_events[1:11] })
def api_update_ips(): response = json.loads(request.data.decode()) my_rover.update_ips(response["ips"]) ips = my_rover.ips return json.jsonify({"success": True, "ips": ips})
from urllib.request import Request, urlopen from flask import Flask, json, request, jsonify, render_template import SecAppManager import jwt import logging import os import ast # Initialize and load CONFIG. CONFIG = configparser.ConfigParser() CONFIG_FILE = os.path.join(os.path.dirname(__file__), 'controller.ini') CONFIG.read(CONFIG_FILE) if not CONFIG["GENERAL"]["port"]: print("Port missing in Config file!") sys.exit(0) GROUP_LIST = json.loads(CONFIG["Controller"]["groups"]) CONTROLLER_URL = CONFIG["GENERAL"]["SDN_CONTROLLER"] APP = Flask(__name__) if not CONFIG["GENERAL"]["SECRET"]: print("Secret missing in Config file!") sys.exit(0) SECRET = CONFIG["GENERAL"]["SECRET"] if not CONFIG["Controller"]["timeout"]: print("Timeout missing in Config file!") sys.exit(0) TIMEOUT_LENGTH = int( CONFIG["Controller"]["timeout"]) * 60 # timeout time of token in minutes global CONTROLLER_READY STANDARD_CONF = json.loads(CONFIG["Controller"]["standard_conf"]) global CURRENT_CONF THRESHHOLD = 100
def json_reload(json_as_a_dict): return json.loads(json.dumps(json_as_a_dict, cls=CustomJSONEncoder))
def setCollection(self, collectionJSON): data = json.loads(collectionJSON) self.reset() for item in data: setattr(self, item, 1)
def test_get_last_ten_full_list(self): results = self.app.get('/feeds/api/v1.0/events') self.assertDictEqual(json.loads(results.data), { "events": self.sample_events[0:10] })