def change(self, node, authenticate=True):
    """Update an existing record in the CitySDK db via HTTP PUT.

    node -- the record to sync; its converted form is PUT to the
    configured CitySDK layer endpoint.
    Returns True on success, False on HTTP or JSON-decode failure.
    """
    session = self.get_session()
    citysdk_record = self.convert_format(node, create_type='update')
    citysdk_api_url = '%snodes/%s' % (self.citysdk_url,
                                      self.config['citysdk_layer'])
    try:
        # citysdk sync
        response = requests.put(
            citysdk_api_url,
            data=json.dumps(citysdk_record),
            verify=self.config.get('verify_SSL', True),
            headers={
                'Content-type': 'application/json',
                'X-Auth': session
            }
        )
    finally:
        # always release the session token, even if the request raises
        self.release_session(session)
    if response.status_code != 200:
        message = 'ERROR while updating record "%s" through CitySDK API\n%s' % (node.name, response.content)
        logger.error(message)
        return False
    try:
        # sanity-check that the API returned well-formed JSON
        json.loads(response.content)
    except ValueError as e:
        # ValueError covers JSONDecodeError in both stdlib json and simplejson
        logger.error(e)
        return False
    message = 'Updated record "%s" through the CitySDK HTTP API' % node.name
    self.verbose(message)
    logger.info(message)
    return True
def test_GET_limit_marker_json(self):
    # Verify that container GET honors ?limit= and ?marker= in JSON listings.
    # Create the account first.
    req = Request.blank('/sda1/p/a', environ={'REQUEST_METHOD': 'PUT', 'HTTP_X_TIMESTAMP': '0'})
    self.controller.PUT(req)
    # Create five containers c0..c4, each reporting 2 objects / 3 bytes used.
    for c in xrange(5):
        req = Request.blank('/sda1/p/a/c%d' % c, environ={'REQUEST_METHOD': 'PUT'},
                            headers={'X-Put-Timestamp': str(c + 1),
                                     'X-Delete-Timestamp': '0',
                                     'X-Object-Count': '2',
                                     'X-Bytes-Used': '3',
                                     'X-Timestamp': normalize_timestamp(0)})
        self.controller.PUT(req)
    # limit=3 returns only the first three containers.
    req = Request.blank('/sda1/p/a?limit=3&format=json', environ={'REQUEST_METHOD': 'GET'})
    resp = self.controller.GET(req)
    self.assertEquals(resp.status_int, 200)
    self.assertEquals(simplejson.loads(resp.body),
                      [{'count': 2, 'bytes': 3, 'name': 'c0'},
                       {'count': 2, 'bytes': 3, 'name': 'c1'},
                       {'count': 2, 'bytes': 3, 'name': 'c2'}])
    # marker=c2 resumes the listing strictly after c2.
    req = Request.blank('/sda1/p/a?limit=3&marker=c2&format=json', environ={'REQUEST_METHOD': 'GET'})
    resp = self.controller.GET(req)
    self.assertEquals(resp.status_int, 200)
    self.assertEquals(simplejson.loads(resp.body),
                      [{'count': 2, 'bytes': 3, 'name': 'c3'},
                       {'count': 2, 'bytes': 3, 'name': 'c4'}])
def get_category_asin_list(cate):
    """Collect the ASINs of every commodity under the given category.

    `cate` is a '>'-separated category path. Data is read from the
    numbered dump directories under D:/trendata.
    """
    cate = cate.split('>')
    print cate
    asin_list = []
    commodity_data = []
    first_commodity_data_name = []
    # Scan each dump directory; the first file in a directory decides
    # whether the whole directory belongs to the requested category.
    for index in range(0,117):
        path = 'D:/trendata/'+str(index)
        list = os.listdir(path)  # NOTE(review): shadows the builtin `list`
        if os.listdir(path):
            list1 = os.listdir(path)[0]
            file_obj = open('D:\\trendata/'+str(index)+'/'+list1,'r')
            first_commodity_data = json.loads(file_obj.read())
            print first_commodity_data
            file_obj.close()
            # NOTE(review): `cate` is a list here, so str(cate) yields
            # "['a', 'b']" — this startswith comparison looks wrong;
            # confirm the intended category matching.
            if (str(first_commodity_data['category'][0]).startswith(str(cate))):
                for each_asin_txt in list:
                    # NOTE(review): this handle is never closed (leak).
                    file_obj = open('D:\\trendata/'+str(index)+'/'+each_asin_txt,'r')
                    commodity_data += json.loads(file_obj.read())
    for each_commodity in commodity_data:
        # NOTE(review): if ASIN is a string, += extends char-by-char;
        # .append() may be what was intended — confirm.
        asin_list += each_commodity['ASIN']
    return asin_list
def get_session(self):
    """Authenticate against the CitySDK Mobility API; return a session token.

    The token is valid for one minute after each request.
    Raises ImproperlyConfigured on connection or authentication failure.
    """
    self.verbose('Authenticating to CitySDK')
    logger.info('== Authenticating to CitySDK ==')
    authentication_url = '%sget_session?e=%s&p=%s' % (
        self.citysdk_url,
        self.config['citysdk_username'],
        self.config['citysdk_password']
    )
    verify_ssl = self.config.get('verify_SSL', True)
    try:
        auth_response = requests.get(authentication_url, verify=verify_ssl)
    except Exception as e:
        message = 'API Authentication Error: "%s"' % e
        logger.error(message)
        raise ImproperlyConfigured(message)
    if auth_response.status_code != 200:
        # Prefer the API's structured error message; fall back to raw body.
        try:
            detail = json.loads(auth_response.content)['message']
        except Exception:
            detail = auth_response.content
        message = 'API Authentication Error: "%s"' % detail
        logger.error(message)
        raise ImproperlyConfigured(message)
    # store session token
    # will be valid for 1 minute after each request
    return json.loads(auth_response.content)['results'][0]
def _list(self, url, query_string=''):
    """Lists all existing remote resources.

    Resources in listings can be filtered using `query_string` formatted
    according to the syntax and fields labeled as filterable in the
    BigML documentation for each resource.

    Suffixes:
        __lt: less than
        __lte: less than or equal to
        __gt: greater than
        __gte: greater than or equal to

    For example:
        'size__gt=1024'

    Resources can also be sorted by including an order_by statement
    within the `query_string`. For example:
        'order_by=size'

    Returns a dict with 'code', 'meta', 'objects' and 'error' keys; on
    failure 'objects' is None and 'error' describes the problem.
    """
    # Pessimistic defaults: only overwritten on a successful round trip.
    code = HTTP_INTERNAL_SERVER_ERROR
    meta = None
    resources = None
    error = {
        "status": {
            "code": code,
            "message": "The resource couldn't be listed"}}
    try:
        response = requests.get(url + self.auth + query_string,
                                headers=ACCEPT_JSON,
                                verify=VERIFY)
        code = response.status_code
        if code == HTTP_OK:
            # second positional arg is the legacy `encoding` parameter
            resource = json.loads(response.content, 'utf-8')
            meta = resource['meta']
            resources = resource['objects']
            error = None
        elif code in [HTTP_BAD_REQUEST, HTTP_UNAUTHORIZED, HTTP_NOT_FOUND]:
            # Client-side errors carry a JSON error document in the body.
            error = json.loads(response.content, 'utf-8')
        else:
            LOGGER.error("Unexpected error (%s)" % code)
            code = HTTP_INTERNAL_SERVER_ERROR
    except ValueError:
        LOGGER.error("Malformed response")
    except requests.ConnectionError:
        LOGGER.error("Connection error")
    except requests.Timeout:
        LOGGER.error("Request timed out")
    except requests.RequestException:
        LOGGER.error("Ambiguous exception occurred")
    return {
        'code': code,
        'meta': meta,
        'objects': resources,
        'error': error}
def inscriptos_actividad(request, idActividad):
    """Render the admin listing of people registered for an activity.

    Builds the table header from the activity's dynamic-form plugin
    data, decodes each registrant's stored JSON payload, and passes
    signed contact/CSV export URLs to the template.
    """
    actividad = get_object_or_404(Actividad, pk=idActividad)
    form_entry = actividad.formDinamico
    form_element_entries = form_entry.formelemententry_set.all()[:]
    # Header labels / JSON field names come from each form element plugin.
    cabecera = []
    jsontitles = []
    for entry in form_element_entries:
        plugin = json.loads(entry.plugin_data)
        cabecera.append(plugin["label"])
        jsontitles.append(plugin["name"])
    lista_inscriptos = InscripcionBase.objects.filter(actividad=actividad).order_by('puesto')
    for inscripto in lista_inscriptos:
        if inscripto.datos is not None:  # idiom: identity check against None
            inscripto.datos = json.loads(inscripto.datos)
    # Signed parameters for the contact / CSV export endpoints.
    m, txt = encode_data(actividad.id)
    url_contacto = request.scheme + '://' + request.META['HTTP_HOST'] + '/inscriptos?m=' + m + '&text=' + txt
    url_csv = request.scheme + '://' + request.META['HTTP_HOST'] + '/csv?m=' + m + '&text=' + txt
    context = {'lista_inscriptos': lista_inscriptos,
               'actividad': actividad,
               'cabecera': cabecera,
               'jsontitles': jsontitles,
               'url_contacto': url_contacto,
               'url_csv': url_csv,
               }
    return render_to_response('admin/inscriptos.html', context, context_instance=RequestContext(request))
def test_json_configs(self):
    """ Currently only "is this well-formed json?" """
    config_files = self.query_config_files()
    json_total = 0
    json_good = 0
    for config_file in config_files:
        if not config_file.endswith(".json"):
            continue  # only .json files are validated
        json_total += 1
        self.info("Testing %s." % config_file)
        contents = self.read_from_file(config_file, verbose=False)
        try:
            json.loads(contents)
        except ValueError:
            self.add_summary("%s is invalid json." % config_file, level="error")
            self.error(pprint.pformat(sys.exc_info()[1]))
        else:
            self.info("Good.")
            json_good += 1
    if json_total:
        self.add_summary("%d of %d json config files were good." % (json_good, json_total))
    else:
        self.add_summary("No json config files to test.")
def post_add_hs(request):
    """The post JSON data about .onion address.

    Accepts either form-encoded fields (body starting with 'url') or a
    raw JSON body, and forwards the resulting dict to add_hs().
    """
    post_data = request.body
    # build post to json form
    if post_data.startswith('url'):
        url = request.POST.get('url', '')
        # Add the / in the end if it lacks
        # http://something.onion => http://something.onion/
        url = url.strip(' \t\n\r')
        if url and url[-1] != "/":
            url = url + "/"
        title = request.POST.get('title', '')
        description = request.POST.get('description', '')
        relation = request.POST.get('relation', '')
        subject = request.POST.get('subject', '')
        type_str = request.POST.get('type', '')
        sign = request.POST.get('sign', '')
        if sign != 'antispammer':
            return HttpResponse("Must have the anti-spam field filled.")
        # Build the payload as a dict instead of concatenating raw JSON:
        # field values containing quotes or backslashes previously
        # produced invalid JSON and allowed structure injection.
        payload = {
            "url": url,
            "title": title,
            "description": description,
            "relation": relation,
            "subject": subject,
            "type": type_str,
        }
    else:
        try:
            payload = simplejson.loads(post_data)
        except ValueError:
            # narrowed from bare except: only parse errors mean bad JSON
            return HttpResponse('Error: Invalid JSON data.')
    return add_hs(payload)
def test_aggregations_datasets_with_two_groups(self):
    # Posting an ungrouped sum and a grouped sum should produce one
    # aggregation entry per group key ('' for the ungrouped case).
    self.dataset_id = self._post_file()
    group = 'food_type'
    self._post_calculations(self.default_formulae + ['sum(amount)'])
    self._post_calculations(['sum(gps_alt)'], group)
    groups = ['', group]
    results = self._test_aggregations(groups)
    # Ungrouped aggregation: a single float-valued 'sum_amount_' column.
    for row in results:
        self.assertEqual(row.keys(), ['sum_amount_'])
        self.assertTrue(isinstance(row.values()[0], float))
    # get second linked dataset
    results = json.loads(self.controller.aggregations(self.dataset_id))
    self.assertEqual(len(results.keys()), len(groups))
    self.assertEqual(results.keys(), groups)
    linked_dataset_id = results[group]
    self.assertTrue(isinstance(linked_dataset_id, basestring))
    # inspect linked dataset
    results = json.loads(self.controller.show(linked_dataset_id))
    # Grouped rows carry the group column plus the aggregate column.
    row_keys = [group, 'sum_gps_alt_']
    for row in results:
        self.assertEqual(row.keys(), row_keys)
def test_get_list(self):
    # The user-list endpoint requires auth, reflects newly created
    # users, and supports narrowing via the ?username= filter.
    self.client.login(username=self.username, password=self.password)
    response = self.client.get(self.list_endpoint)
    self.assertEqual(response.status_code, 200)
    api_users = simplejson.loads(response.content)['objects']
    self.assertEqual(len(api_users), 1)
    self._check_user_data(self.user, api_users[0])
    # Add a second web user with a role in the same domain.
    another_user = WebUser.create(self.domain.name, 'anotherguy', '***')
    another_user.set_role(self.domain.name, 'field-implementer')
    another_user.save()
    response = self.client.get(self.list_endpoint)
    self.assertEqual(response.status_code, 200)
    api_users = simplejson.loads(response.content)['objects']
    self.assertEqual(len(api_users), 2)
    # username filter
    response = self.client.get('%s?username=%s' % (self.list_endpoint, 'anotherguy'))
    self.assertEqual(response.status_code, 200)
    api_users = simplejson.loads(response.content)['objects']
    self.assertEqual(len(api_users), 1)
    self._check_user_data(another_user, api_users[0])
    # A non-matching filter yields an empty result set, not an error.
    response = self.client.get('%s?username=%s' % (self.list_endpoint, 'nomatch'))
    self.assertEqual(response.status_code, 200)
    api_users = simplejson.loads(response.content)['objects']
    self.assertEqual(len(api_users), 0)
def getOsdMapInfos(self, pgid):
    """Query the ceph REST API for a placement group's OSD mapping.

    Returns a dict with the acting/up OSD sets, their primaries and the
    PG state, or an empty list on HTTP failure (preserved legacy
    contract for callers that test truthiness).
    """
    Log.info("___getOsdMapInfos(pgid=" + str(pgid) + ")")
    cephRestApiUrl = self.cephRestApiUrl + 'tell/' + pgid + '/query.json'
    Log.debug("____cephRestApiUrl Request=" + cephRestApiUrl)
    osdmap = []
    data = requests.get(cephRestApiUrl)
    r = data.content
    if data.status_code != 200:
        print('Error ' + str(data.status_code) + ' on the request getting pools')
        return osdmap
    if len(r) > 0:
        osdmap = json.loads(r)
    else:
        # Empty body: log and raise explicitly instead of letting
        # json.loads('') fail with an opaque ValueError as before.
        Log.err('The getOsdMapInfos() method returns empty data')
        raise ValueError('empty response from ceph REST API for pg %s' % pgid)
    output = osdmap["output"]
    stats = output["info"]["stats"]
    osdmap_infos = {"acting": output["acting"],
                    "acting_primary": stats["acting_primary"],
                    "state": output["state"],
                    "up": output["up"],
                    "up_primary": stats["up_primary"]}
    return osdmap_infos
def linkedin_connections():
    """Dump 1st/2nd-degree LinkedIn connections as CSV edge pairs."""
    # Use your credentials to build the oauth client
    consumer = oauth.Consumer(key=CONSUMER_KEY, secret=CONSUMER_SECRET)
    token = oauth.Token(key=OAUTH_TOKEN, secret=OAUTH_TOKEN_SECRET)
    client = oauth.Client(consumer, token)
    # Fetch first degree connections
    resp, content = client.request('http://api.linkedin.com/v1/people/~/connections?format=json')
    results = simplejson.loads(content)
    # File that will store the results
    output = codecs.open(OUTPUT, 'w', 'utf-8')
    # Loop thru the 1st degree connection and see how they connect to each other
    for result in results["values"]:
        # Commas are stripped from names because the output is CSV.
        con = "%s %s" % (result["firstName"].replace(",", " "), result["lastName"].replace(",", " "))
        # type your own name in below
        print >>output, "%s,%s" % ("Your name goes here", con)
        # This is the trick, use the search API to get related connections
        u = "https://api.linkedin.com/v1/people/%s:(relation-to-viewer:(related-connections))?format=json" % result["id"]
        resp, content = client.request(u)
        rels = simplejson.loads(content)
        try:
            for rel in rels['relationToViewer']['relatedConnections']['values']:
                sec = "%s %s" % (rel["firstName"].replace(",", " "), rel["lastName"].replace(",", " "))
                print >>output, "%s,%s" % (con, sec)
        except:
            # NOTE(review): bare except silently skips connections whose
            # payload lacks relatedConnections — consider KeyError only.
            pass
def get_actions(self, coordinator):
    """Aggregate action counts and runtimes for an Oozie coordinator.

    Returns {'total': N, <status>: {'count': c, 'runtime': seconds}, ...}
    or {} on any request/parsing failure.
    """
    accumulator = dict()
    accumulator['total'] = 0
    url = None
    try:
        # First request only to learn how many actions exist.
        url = "http://" + self.host + ":" + str(self.port) + self.api_url['actions_from_coordinator'] % (coordinator, 0, 0)
        response = requests.get(url, auth=self.html_auth)
        if not response.ok:
            return {}
        total_actions = json.loads(response.content)['total']
        # Fetch the last query_size actions; clamp so coordinators with
        # fewer actions than query_size don't send a negative offset.
        offset = max(total_actions - self.query_size, 0)
        url = "http://" + self.host + ":" + str(self.port) + self.api_url['actions_from_coordinator'] % (coordinator, offset, self.query_size)
        response = requests.get(url, auth=self.html_auth)
        if not response.ok:
            return {}
        actions = json.loads(response.content)['actions']
        for action in actions:
            created = time.mktime(self.time_conversion(action['createdTime']))
            modified = time.mktime(self.time_conversion(action['lastModifiedTime']))
            runtime = modified - created
            # Lazily create one bucket of counters per action status.
            if accumulator.get(action['status']) is None:
                accumulator[action['status']] = defaultdict(int)
            accumulator[action['status']]['count'] += 1
            accumulator[action['status']]['runtime'] += runtime
            accumulator['total'] += 1
    except Exception:
        # NOTE(review): also reached on JSON/key errors, not only HTTP
        # failures — the message below is kept for log compatibility.
        logging.error('http request error: "%s"' % url)
        return {}
    return accumulator
def image(title, id):
    """Look up the URL of the first image on a German Wikipedia page.

    title -- page title used in the API query
    id    -- page id, as the string key used in the API response
    Returns the image URL, or None when no usable image is found.
    """
    titleUrl = "http://de.wikipedia.org/w/api.php?action=query&titles=" + title + "&prop=images&format=json"
    imgJson = simplejson.loads(urllib.urlopen(titleUrl).read())
    imageUrl = None
    pages = imgJson['query']['pages']
    # `in` replaces the removed/deprecated dict.has_key().
    if id in pages and 'images' in pages[id] and pages[id]['images'][0]:
        imageTitle = pages[id]['images'][0]['title']
        imageTitle = imageTitle.encode('utf-8')
        imageTitleUrl = "http://de.wikipedia.org/w/api.php?action=query&titles=" + imageTitle + "&prop=imageinfo&iiprop=url&format=json"
        imgTitleJson = simplejson.loads(urllib.urlopen(imageTitleUrl).read())
        infoPages = imgTitleJson['query']['pages']
        if "-1" in infoPages:
            # "-1" is the API's key for a missing page entry.
            if 'imageinfo' in infoPages['-1']:
                imageUrl = infoPages['-1']['imageinfo'][0]['url']
        elif 'id' in infoPages:
            # NOTE(review): the literal string key 'id' looks like it was
            # meant to be a page-id variable — confirm against API output.
            if 'imageinfo' in infoPages['id']:
                imageUrl = infoPages['id']['imageinfo'][0]['url']
    return imageUrl
def read_request_data(self):
    """Extract the JSON payload from the current Flask request.

    Prefers the raw request body, then a 'data' form field, and finally
    falls back to the form itself converted to a plain dict.
    """
    raw_body = request.data
    if raw_body:
        return json.loads(raw_body.decode('utf-8'))
    if request.form.get('data'):
        return json.loads(request.form['data'])
    return dict(request.form)
def report_routes(self, reportname, docids=None, converter=None, **data):
    """Serve a QWeb report rendered as HTML or PDF.

    reportname -- technical name of the report
    docids     -- comma-separated record ids to render
    converter  -- 'html' or 'pdf'; anything else raises HTTPException
    **data may carry JSON-encoded 'options' and 'context' sent by the
    web client.
    """
    report_obj = request.registry['report']
    cr, uid, context = request.cr, request.uid, request.context
    if docids:
        docids = [int(i) for i in docids.split(',')]
    options_data = None
    if data.get('options'):
        options_data = simplejson.loads(data['options'])
    if data.get('context'):
        # Ignore 'lang' here, because the context in data is the one from the webclient *but* if
        # the user explicitely wants to change the lang, this mechanism overwrites it.
        data_context = simplejson.loads(data['context'])
        if data_context.get('lang'):
            del data_context['lang']
        context.update(data_context)
    if converter == 'html':
        html = report_obj.get_html(cr, uid, docids, reportname, data=options_data, context=context)
        return request.make_response(html)
    elif converter == 'pdf':
        pdf = report_obj.get_pdf(cr, uid, docids, reportname, data=options_data, context=context)
        # Content-Length lets the browser display download progress.
        pdfhttpheaders = [('Content-Type', 'application/pdf'), ('Content-Length', len(pdf))]
        return request.make_response(pdf, headers=pdfhttpheaders)
    else:
        raise exceptions.HTTPException(description='Converter %s not implemented.' % converter)
def getJSON(self, url, language=None):
    """Fetch *url* and decode its body as JSON.

    language falls back to the configured default and is passed as a
    query parameter. If the raw bytes fail to parse, the body is
    decoded as UTF-8 and parsed again.
    """
    language = language or config["language"]
    page = requests.get(url, params={"language": language}).content
    try:
        return simplejson.loads(page)
    except ValueError:
        # narrowed from a bare except: only retry on parse errors
        return simplejson.loads(page.decode("utf-8"))
def get_applications(self, user, collection, since, token):
    """Get all applications that have been modified later than 'since'.

    Returns a list of (last_modified, app) tuples. Raises
    CollectionDeletedError when the collection metadata marks the
    collection as deleted.
    """
    s = self._resume_session(token)
    since = round_time(since)
    updates = []
    # Check the collection metadata first.
    # It might be deleted, or last_modified might be too early.
    # In either case, this lets us bail out before doing any hard work.
    try:
        item = self._get_cached_metadata(s, user, collection)
        meta = json.loads(item.value)
    except KeyError:
        # No metadata at all: nothing to report.
        return updates
    if meta.get("deleted", False):
        raise CollectionDeletedError(meta.get("client_id", ""),
                                     meta.get("reason", ""))
    last_modified = round_time(meta.get("last_modified", 0))
    if last_modified < since:
        return updates
    # Read and return all apps with modification time > since.
    apps = meta.get("apps", [])
    for (last_modified, appid) in apps:
        last_modified = round_time(last_modified)
        if last_modified <= since:
            # assumes `apps` is ordered newest-first, so everything past
            # this point is too old — TODO confirm against the writer.
            break
        key = "%s::item::%s" % (collection, appid)
        try:
            app = json.loads(s.get(key))
        except KeyError:
            # It has been deleted; ignore it.
            continue
        updates.append((last_modified, app))
    return updates
def test_limit_agencies(self):
    """Default agency listing caps at 20; an explicit limit is honored."""
    default_resp = self.app.get('/agencies?format=json')
    assert len(json.loads(default_resp.data)) == 20
    limited_resp = self.app.get('/agencies?format=json&limit=15')
    assert len(json.loads(limited_resp.data)) == 15
def get_summary(days=30):
    """Summarize Chicago building permits over the last *days* days and post it."""
    link = 'http://chicagocityscape.com/dashboard.php'
    cost = 0
    offset = 0
    # Retrieve json of permits, parse
    permits = json.loads(get_data(days=days))
    # Page through the API until it returns an empty batch.
    while len(permits) > 0:
        for permit in permits:
            cost += float(permit['_estimated_cost'])
        offset += len(permits)
        print(offset)
        # Get more data, incrementing call by offset = number of records already parsed
        permits = json.loads(get_data(days=days, offset=offset))
    text = "Over the past " + str(days) + " days there have been " + "{:,.0f}".format(offset) + " permits issued in Chicago, totaling $" + "{:,.0f}".format(cost) + " " + link
    print(text)
    post_status(text)
def getTaggedMedia(tag, accessToken):
    '''
    Get recent tagged media.
    Parameters:
        tag: String - The tag used to search the most recent media that's tagged with it.
    Returns:
        data: List - recent media items for the tag, following pagination
        until more than 100 items are gathered or no pages remain.
    '''
    tagUri = 'https://api.instagram.com/v1/tags/'
    taggedMediaUrl = tagUri + tag + '/media/recent?access_token=' + accessToken
    req = requests.get(taggedMediaUrl)
    content = json2.loads(req.content)
    data = content['data']
    while len(data) <= 100:
        # Stop cleanly when the API provides no further page; previously a
        # missing 'pagination'/'next_url' raised KeyError mid-loop.
        pagination = content.get('pagination') or {}
        nextUrl = pagination.get('next_url')
        if not nextUrl:
            break
        req = requests.get(nextUrl)
        content = json2.loads(req.content)
        for i in content['data']:
            data.append(i)
    return data
def from_request(cls, request):
    """Create new TransientShardState from webapp request."""
    mapreduce_spec = MapreduceSpec.from_json_str(request.get("mapreduce_spec"))
    mapper_spec = mapreduce_spec.mapper
    # Rehydrate the input reader from its serialized JSON state.
    input_reader_spec_dict = simplejson.loads(request.get("input_reader_state"))
    input_reader = mapper_spec.input_reader_class().from_json(
        input_reader_spec_dict)
    output_writer = None
    if mapper_spec.output_writer_class():
        # "{}" default: the writer may not have saved any state yet.
        output_writer = mapper_spec.output_writer_class().from_json(
            simplejson.loads(request.get("output_writer_state", "{}")))
        assert isinstance(output_writer, mapper_spec.output_writer_class()), (
            "%s.from_json returned an instance of wrong class: %s" % (
                mapper_spec.output_writer_class(),
                output_writer.__class__))
    request_path = request.path
    # Strip the trailing handler segment to recover the mapreduce base path.
    base_path = request_path[:request_path.rfind("/")]
    return cls(base_path,
               mapreduce_spec,
               str(request.get("shard_id")),
               int(request.get("slice_id")),
               input_reader,
               output_writer=output_writer)
def test_including_replace_image(self):
    ## if we are replacing, the passed-in image url
    ## is added as a fake 1th element.
    import simplejson
    from karl.content.interfaces import IImage
    from zope.interface import directlyProvides
    # Register a dummy image that implements IImage so it is batchable.
    context = testing.DummyModel()
    image = context["boo.jpg"] = testing.DummyModel()
    image.title = "Boo"
    directlyProvides(image, IImage)
    # Batch starting at 0: the replaced image is prepended to the records.
    request = testing.DummyRequest(params={"include_image_url": "/boo.jpg", "source": "myrecent"})
    response = self._call_fut(context, request)
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(
        self.dummy_get_images_batch.called,
        (context, request, {"community": None, "batch_start": 0, "batch_size": 12, "creator": None}),
    )
    data = simplejson.loads(response.body)
    self.assertEqual(data["images_info"], {"totalRecords": 6, "start": 0, "records": ["Boo", "foo", "bar"]})
    # if we don't ask for the 0th index: it's not
    # added, but the sizes and indexes are aligned.
    request = testing.DummyRequest(params={"include_image_url": "/boo.jpg", "source": "myrecent", "start": "1"})
    response = self._call_fut(context, request)
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(
        self.dummy_get_images_batch.called,
        (context, request, {"community": None, "batch_start": 0, "batch_size": 12, "creator": None}),
    )
    data = simplejson.loads(response.body)
    self.assertEqual(data["images_info"], {"totalRecords": 6, "start": 1, "records": ["foo", "bar"]})
def call_prediction(agency_id, route_id, direction_id, stop_id, phone_num):
    """Place a Twilio call that reads arrival predictions for a stop.

    Returns a human-readable status string (success or error detail).
    """
    user = users.get_current_user()
    app.logger.info("%s (%s) called %s" % (user.nickname(), user.user_id(), phone_num))
    # TwiML URL the Twilio call will fetch for its call script.
    url = DOMAIN + url_for('prediction_list', agency_id=agency_id, route_id=route_id, direction_id=direction_id, stop_id=stop_id, format="twiml")
    call_info = {
        'From': PHONE_NUMBER,
        'To': phone_num,
        'Url': url,
        'Method': 'GET',
    }
    try:
        call_json = account.request(
            '/%s/Accounts/%s/Calls.json' % (API_VERSION, ACCOUNT_SID),
            'POST', call_info)
        app.logger.info(call_json)
        call = json.loads(call_json)
        return "Now calling %s with call ID %s" % (call['to'], call['sid'])
    except HTTPErrorAppEngine as e:  # 'as' syntax, consistent with the rest of the file
        app.logger.error(e)
        try:
            err = json.loads(e.msg)
            message = err['Message']
            return "REMOTE ERROR: %s" % (message,)
        except (ValueError, KeyError):
            # Error body wasn't JSON (or lacked 'Message'); show it raw.
            return "Couldn't parse error output:<br>\n%s" % e.msg
def test_it(self):
    # Happy path: a batch of recent images is serialized to JSON with
    # totalRecords / start / records keys.
    import simplejson
    context = testing.DummyModel()
    request = testing.DummyRequest(params={"source": "myrecent"})
    response = self._call_fut(context, request)
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(
        self.dummy_get_images_batch.called,
        (context, request, {"community": None, "batch_start": 0, "batch_size": 12, "creator": None}),
    )
    data = simplejson.loads(response.body)
    self.assertEqual(data["images_info"], {"totalRecords": 5, "start": 0, "records": ["foo", "bar"]})
    # ask a batch from the 1st (or nth) image
    request = testing.DummyRequest(params={"source": "myrecent", "start": "1"})
    response = self._call_fut(context, request)
    self.assertEqual(response.status, "200 OK")
    self.assertEqual(
        self.dummy_get_images_batch.called,
        (context, request, {"community": None, "batch_start": 1, "batch_size": 12, "creator": None}),
    )
    data = simplejson.loads(response.body)
    self.assertEqual(
        data["images_info"],
        {"totalRecords": 5, "start": 1, "records": ["foo", "bar"]},  # no problem... just bad faking
    )
def test_parse_ad(self):
    """Ad.parse() should map raw VK ad fields onto model attributes."""
    response = """
        {"response":[
            {"id":"607256","campaign_id":"123","name":"Ad1","status":0,"approved":0,"all_limit":0,"cost_type":0,"cpm":118},
            {"id":"664868","campaign_id":"123","name":"Ad2","status":1,"approved":1,"all_limit":100,"cost_type":1,"cpc":488}
        ]}
        """
    account = AccountFactory(remote_id=1)
    campaign = CampaignFactory(account=account, remote_id=1)
    # Decode the payload once and reuse both entries.
    ads = json.loads(response)["response"]

    first = Ad(campaign=campaign, fetched=timezone.now())
    first.parse(ads[0])
    first.save(commit_remote=False)
    self.assertTrue(isinstance(first.campaign, Campaign))
    self.assertEqual(first.campaign.remote_id, 1)
    self.assertEqual(first.remote_id, 607256)
    self.assertEqual(first.name, "Ad1")
    self.assertEqual(first.status, False)
    self.assertEqual(first.approved, False)
    self.assertEqual(first.all_limit, 0)
    self.assertEqual(first.cpm, 118)

    second = Ad(campaign=campaign, fetched=timezone.now())
    second.parse(ads[1])
    second.save(commit_remote=False)
    self.assertEqual(second.remote_id, 664868)
    self.assertEqual(second.name, "Ad2")
    self.assertEqual(second.status, True)
    self.assertEqual(second.approved, True)
    self.assertEqual(second.all_limit, 100)
    self.assertEqual(second.cpc, 488)
def after_request(response):
    """Flask post-processing hook.

    On CORS preflight, advertise the X-Client-ID header; on successful
    store.*_annotation responses, publish an AnnotationEvent.
    """
    if flask.request.method == 'OPTIONS':
        # Append X-Client-ID to whatever allow-list is already set.
        existing = response.headers.get('Access-Control-Allow-Headers', '')
        header_names = [h.strip() for h in existing.split(',')]
        header_names.append('X-Client-ID')
        response.headers['Access-Control-Allow-Headers'] = ', '.join(header_names)
        return response
    if not (200 <= response.status_code < 300):
        return response
    match = re.match(r'^store\.(\w+)_annotation$', flask.request.endpoint)
    if match is None:
        return response
    request = get_current_request()
    action = match.group(1)
    # Deletes carry the annotation in the request body; other actions
    # return the (possibly updated) annotation in the response.
    if action == 'delete':
        data = json.loads(flask.request.data)
    else:
        data = json.loads(response.data)
    annotation = wrap_annotation(data)
    request.registry.notify(events.AnnotationEvent(request, annotation, action))
    return response
def _get_more(self, last_key, value_dict):
    """Fetch one page of a paginated listing.

    last_key   -- marker returned by the previous page, or falsy for
                  the first page
    value_dict -- supplies 'url', optional 'params'/'start_marker', and
                  either a per-item or whole-response mapper
    Returns (items, next_marker, exhausted).
    """
    key = None
    params = value_dict.get('params', {})
    if not last_key:
        # First page: start from the caller-supplied marker, if any.
        key = value_dict.get('start_marker')
    else:
        key = last_key
    if key:
        params['marker'] = key
    response = self.connection.request(value_dict['url'], params)
    # newdata, self._last_key, self._exhausted
    if response.status == httplib.NO_CONTENT:
        return [], None, False
    elif response.status == httplib.OK:
        resp = json.loads(response.body)
        l = None
        if 'list_item_mapper' in value_dict:
            # Map each element of 'values' individually ...
            func = value_dict['list_item_mapper']
            l = [func(x, value_dict) for x in resp['values']]
        else:
            # ... or hand the whole decoded response to an object mapper.
            l = value_dict['object_mapper'](resp, value_dict)
        m = resp['metadata'].get('next_marker')
        # A missing next_marker means the listing is exhausted.
        return l, m, (m is None)
    body = json.loads(response.body)
    details = body['details'] if 'details' in body else ''
    raise LibcloudError('Unexpected status code: %s (url=%s, details=%s)' % (response.status, value_dict['url'], details))
def test_parse_campaign(self):
    """Campaign.parse() should map raw VK campaign fields onto the model."""
    response = """
        {"response":[
            {"id":"111","name":"Campaign1","status":0,"day_limit":2000,"all_limit":1000000,"start_time":"0","stop_time":"0"},
            {"id":"222","name":"Campaign2","status":1,"day_limit":6000,"all_limit":9000000,"start_time":"1298365200","stop_time":"1298451600"}
        ]}
        """
    account = AccountFactory(remote_id=1)
    # Decode the payload once and reuse both entries.
    campaigns = json.loads(response)["response"]

    first = Campaign(account=account, fetched=timezone.now())
    first.parse(campaigns[0])
    first.save(commit_remote=False)
    self.assertEqual(first.remote_id, 111)
    self.assertEqual(first.name, "Campaign1")
    self.assertEqual(first.status, False)
    self.assertEqual(first.day_limit, 2000)
    self.assertEqual(first.all_limit, 1000000)
    self.assertEqual(first.start_time, None)
    self.assertEqual(first.stop_time, None)

    second = Campaign(account=account, fetched=timezone.now())
    second.parse(campaigns[1])
    second.save(commit_remote=False)
    self.assertTrue(isinstance(second.account, Account))
    self.assertEqual(second.account.remote_id, 1)
    self.assertEqual(second.remote_id, 222)
    self.assertEqual(second.name, "Campaign2")
    self.assertEqual(second.status, True)
    self.assertEqual(second.day_limit, 6000)
    self.assertEqual(second.all_limit, 9000000)
    self.assertEqual(second.start_time, datetime(2011, 2, 22, 9, 0, 0, tzinfo=timezone.utc))
    self.assertEqual(second.stop_time, datetime(2011, 2, 23, 9, 0, 0, tzinfo=timezone.utc))
def test_load(self):
    # An unpublished URI loads as an empty node with the default (.txt)
    # extension resolved and no metadata.
    response = self.get('api.load', 'i18n://sv-se@page/title')
    self.assertEqual(response.status_code, 200)
    json_content = json.loads(response.content)
    self.assertEqual(json_content['uri'], 'i18n://sv-se@page/title.txt')
    self.assertIsNone(json_content['data'])
    self.assertEqual(len(json_content['meta'].keys()), 0)

    # TODO: Should get 404
    # response = self.get('api.load', 'i18n://sv-se@page/title#1')
    # self.assertEqual(response.status_code, 404)

    # After setting markdown content, load returns raw data plus the
    # rendered HTML, and the uri gains the .md extension and revision #1.
    cio.set('i18n://sv-se@page/title.md', u'# Djedi')
    response = self.get('api.load', 'sv-se@page/title')
    self.assertEqual(response.status_code, 200)
    node = json_node(response, simple=False)
    meta = node.pop('meta', {})
    self.assertDictEqual(node, {'uri': 'i18n://sv-se@page/title.md#1', 'data': u'# Djedi', 'content': u'<h1>Djedi</h1>'})
    self.assertKeys(meta, 'modified_at', 'published_at', 'is_published')

    # Loading the explicit revision (#1) resolves the same node.
    response = self.get('api.load', 'i18n://sv-se@page/title#1')
    json_content = json.loads(response.content)
    self.assertEqual(json_content['uri'], 'i18n://sv-se@page/title.md#1')
    self.assertEqual(len(cio.revisions('i18n://sv-se@page/title')), 1)
def as_dict(self):
    """Return self.json parsed into a dict, caching the result on first use."""
    try:
        return self._as_dict
    except AttributeError:
        # First access: parse once and memoize on the instance.
        self._as_dict = json.loads(self.json)
        return self._as_dict
def parse_body(self):
    """
    Parse the JSON response body, or raise exceptions as appropriate.

    Maps Google API error codes onto the library's exception hierarchy
    (quota, already-exists, in-use, not-found, bad-request, generic).

    :return: JSON dictionary
    :rtype:  ``dict``
    """
    if len(self.body) == 0 and not self.parse_zero_length_body:
        return self.body
    json_error = False
    try:
        body = json.loads(self.body)
    except:
        # If there is both a JSON parsing error and an unsuccessful http
        # response (like a 404), we want to raise the http error and not
        # the JSON one, so don't raise JsonParseError here.
        body = self.body
        json_error = True
    valid_http_codes = [
        httplib.OK,
        httplib.CREATED,
        httplib.ACCEPTED,
        httplib.CONFLICT,
    ]
    if self.status in valid_http_codes:
        if json_error:
            raise JsonParseError(body, self.status, None)
        elif 'error' in body:
            # Successful HTTP status but an error document in the body:
            # translate the API's error code into a typed exception.
            (code, message) = self._get_error(body)
            if code == 'QUOTA_EXCEEDED':
                raise QuotaExceededError(message, self.status, code)
            elif code == 'RESOURCE_ALREADY_EXISTS':
                raise ResourceExistsError(message, self.status, code)
            elif code == 'alreadyExists':
                raise ResourceExistsError(message, self.status, code)
            elif code.startswith('RESOURCE_IN_USE'):
                raise ResourceInUseError(message, self.status, code)
            else:
                raise GoogleBaseError(message, self.status, code)
        else:
            return body
    elif self.status == httplib.NOT_FOUND:
        if (not json_error) and ('error' in body):
            (code, message) = self._get_error(body)
        else:
            # Unparseable body: report it verbatim with no error code.
            message = body
            code = None
        raise ResourceNotFoundError(message, self.status, code)
    elif self.status == httplib.BAD_REQUEST:
        if (not json_error) and ('error' in body):
            (code, message) = self._get_error(body)
        else:
            message = body
            code = None
        raise InvalidRequestError(message, self.status, code)
    else:
        # Any other non-2xx status becomes the generic base error.
        if (not json_error) and ('error' in body):
            (code, message) = self._get_error(body)
        else:
            message = body
            code = None
        raise GoogleBaseError(message, self.status, code)
def test_basic_decode(self):
    """The basic fixture decodes back to its original value."""
    decoded = simplejson.loads(self.basic_dumps, cls=json.JSONDecoder)
    self.assertEqual(decoded, self.basic)
def test_plus_decode(self):
    """The extended ('plus') fixture decodes back to its original value."""
    decoded = simplejson.loads(self.plus_dumps, cls=json.JSONDecoder)
    self.assertEqual(decoded, self.plus)
def save(sid, did):
    """
    Apply the selected privileges to the chosen database objects.

    :param sid: Server ID
    :param did: Database ID
    :return: Flask JSON response (success message or error response)
    """
    server_prop = server_info
    data = request.form if request.form else json.loads(request.data.decode())

    # Form db connection and we use conn to execute sql
    manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(sid)
    conn = manager.connection(did=did)

    # Load the ACL map describing which privilege letters apply to which
    # object type for this server.
    acls = []
    try:
        acls = render_template(
            "/".join([server_prop['template_path'], 'acl.json']),
        )
        acls = json.loads(acls)
    except Exception as e:
        current_app.logger.exception(e)

    try:
        # Parse the submitted ACL into per-object-type privilege lists.
        data['priv'] = {}
        if 'acl' in data:
            data['priv']['function'] = parse_priv_to_db(
                data['acl'], acls['function']['acl'])
            data['priv']['sequence'] = parse_priv_to_db(
                data['acl'], acls['sequence']['acl'])
            data['priv']['table'] = parse_priv_to_db(
                data['acl'], acls['table']['acl'])
            data['priv']['foreign_table'] = parse_priv_to_db(
                data['acl'], acls['foreign_table']['acl'])
            # Packages exist only on EDB Postgres Advanced Server (ppas).
            if server_prop['server_type'] == 'ppas':
                data['priv']['package'] = parse_priv_to_db(
                    data['acl'], acls['package']['acl'])

        def _grant_sql(template, priv_key):
            # Render one grant template; return '' when nothing to grant.
            sql = render_template(
                "/".join([server_prop['template_path'],
                          '/sql/%s.sql' % template]),
                data={'objects': data['objects'],
                      'priv': data['priv'][priv_key]},
                conn=conn)
            return sql if sql and sql.strip('\n') != '' else ''

        # Render the GRANT statements for every object type and collect
        # them into a single script.
        sql_data = ''
        sql_data += _grant_sql('grant_function', 'function')
        sql_data += _grant_sql('grant_sequence', 'sequence')
        sql_data += _grant_sql('grant_table', 'table')
        sql_data += _grant_sql('grant_foreign_table', 'foreign_table')
        # Package grants only for ppas.
        if server_prop['server_type'] == 'ppas':
            sql_data += _grant_sql('grant_package', 'package')

        status, res = conn.execute_dict(sql_data)
        if not status:
            return internal_server_error(errormsg=res)

        return make_json_response(success=1, info="Privileges applied")
    except Exception as e:
        # BUG FIX: Exception.message does not exist on Python 3 (PEP 352);
        # use str(e) instead.
        return internal_server_error(errormsg=str(e))
def msql(sid, did):
    """
    Return the SQL that would be executed to apply the selected
    privileges (modified-SQL preview), without running it.

    :param sid: Server ID
    :param did: Database ID
    :return: Flask response carrying {'data': <sql text>}
    """
    server_prop = server_info
    data = request.form if request.form else json.loads(request.data.decode())

    # Form db connection
    manager = get_driver(PG_DEFAULT_DRIVER).connection_manager(sid)
    conn = manager.connection(did=did)

    # Load the ACL map describing which privilege letters apply to which
    # object type for this server.
    acls = []
    try:
        acls = render_template("/".join(
            [server_prop['template_path'], '/acl.json']))
        acls = json.loads(acls)
    except Exception as e:
        current_app.logger.exception(e)

    try:
        # Parse the submitted ACL into per-object-type privilege lists.
        data['priv'] = {}
        if 'acl' in data:
            data['priv']['function'] = parse_priv_to_db(
                data['acl'], acls['function']['acl'])
            data['priv']['sequence'] = parse_priv_to_db(
                data['acl'], acls['sequence']['acl'])
            data['priv']['table'] = parse_priv_to_db(
                data['acl'], acls['table']['acl'])
            data['priv']['foreign_table'] = parse_priv_to_db(
                data['acl'], acls['foreign_table']['acl'])
            # Packages exist only on EDB Postgres Advanced Server (ppas).
            if server_prop['server_type'] == 'ppas':
                data['priv']['package'] = parse_priv_to_db(
                    data['acl'], acls['package']['acl'])

        def _grant_sql(template, priv_key):
            # Render one grant template; return '' when nothing to grant.
            sql = render_template(
                "/".join([server_prop['template_path'],
                          '/sql/%s.sql' % template]),
                data={'objects': data['objects'],
                      'priv': data['priv'][priv_key]},
                conn=conn)
            return sql if sql and sql.strip('\n') != '' else ''

        # Render the GRANT statements for every object type and collect
        # them into one script for preview.
        sql_data = ''
        sql_data += _grant_sql('grant_function', 'function')
        sql_data += _grant_sql('grant_sequence', 'sequence')
        sql_data += _grant_sql('grant_table', 'table')
        sql_data += _grant_sql('grant_foreign_table', 'foreign_table')
        # Package grants only for ppas.
        if server_prop['server_type'] == 'ppas':
            sql_data += _grant_sql('grant_package', 'package')

        res = {'data': sql_data}

        return ajax_response(
            response=res,
            status=200
        )
    except Exception as e:
        # BUG FIX: Exception.message does not exist on Python 3 (PEP 352);
        # use str(e) instead.
        return make_json_response(
            status=410,
            success=0,
            errormsg=str(e)
        )
def as_dict(self):
    """Decode this object's ``json`` attribute and return the result."""
    payload = self.json
    return json.loads(payload)
def delete(self, gid, sid, did, scid=None):
    """
    Delete one or more existing schema objects.

    Args:
        gid: Server Group ID
        sid: Server ID
        did: Database ID
        scid: Schema ID (when None, the ids come from the request body)
    """
    if scid is None:
        # BUG FIX: json.loads() no longer accepts an ``encoding`` keyword
        # (removed in Python 3.9); bytes input is decoded automatically.
        data = request.form if request.form else json.loads(request.data)
    else:
        data = {'ids': [scid]}

    for scid in data['ids']:
        try:
            # Resolve the schema name from its OID.
            SQL = render_template(
                "/".join([self.template_path, 'sql/get_name.sql']),
                _=gettext,
                scid=scid
            )
            status, name = self.conn.execute_scalar(SQL)
            if not status:
                return internal_server_error(errormsg=name)

            if name is None:
                return make_json_response(
                    status=410,
                    success=0,
                    errormsg=gettext(
                        'Error: Object not found.'
                    ),
                    info=gettext(
                        'The specified schema could not be found.\n'
                    )
                )

            # Drop the schema; cascade only for the explicit 'delete'
            # (drop-cascade) command.
            SQL = render_template(
                "/".join([self.template_path,
                          self._SQL_PREFIX + self._DELETE_SQL]),
                _=gettext, name=name, conn=self.conn,
                cascade=True if self.cmd == 'delete' else False
            )
            status, res = self.conn.execute_scalar(SQL)
            if not status:
                return internal_server_error(errormsg=res)
        except Exception as e:
            current_app.logger.exception(e)
            return internal_server_error(errormsg=str(e))

    return make_json_response(
        success=1,
        info=gettext("Schema dropped")
    )
import simplejson
import urllib2
import datetime

# Change QUERY to your search term of choice.
# Examples: 'newsnight', 'from:bbcnewsnight', 'to:bbcnewsnight'
QUERY = ('from:sanderdekker')
RESULTS_PER_PAGE = '100'
LANGUAGE = 'nl'
NUM_PAGES = 50

# Page through the (v1) Twitter search API and store every tweet found.
for page in range(1, NUM_PAGES + 1):
    base_url = 'http://search.twitter.com/search.json?q=%s&rpp=%s&lang=%s&page=%s' \
        % (urllib2.quote(QUERY), RESULTS_PER_PAGE, LANGUAGE, page)
    try:
        results_json = simplejson.loads(scraperwiki.scrape(base_url))
        for result in results_json['results']:
            data = {}
            data['id'] = result['id']
            # NOTE(review): this replace() call was garbled in this copy of
            # the file; it presumably swaps double quotes for apostrophes
            # before storage — confirm against the original source.
            data['text'] = result['text'].replace('"', "'")
            data['from_user'] = result['from_user']
            data['date'] = datetime.datetime.today()
            print data['from_user'], data['text']
            # Upsert keyed on the tweet id.
            scraperwiki.sqlite.save(["id"], data)
    except:
        # Best-effort scrape: report the failing page and keep going.
        print 'Oh dear, failed to scrape %s' % base_url

###################################################################################
# Twitter scraper - designed to be forked and used for more interesting things
###################################################################################
def f(name):
    """Fetch *name* from formdata and JSON-decode it; falsy values pass through unchanged."""
    raw = formdata.get(name)
    return simplejson.loads(raw) if raw else raw
URL = 'https://apiforsaltmaster.org:8443/'
session = requests.session()
headers = {
    'Accept': 'application/json',
}
# salt-api payload: run test.ping on every minion via the local client.
sdata = [
    ('client', 'local'),
    ('tgt', '*'),
    ('fun', 'test.ping'),
]
# Credentials for PAM-based external auth.
data = [
    ('username', USER),
    ('password', PASSWORD),
    ('eauth', 'pam'),
]

# Authenticate
# NOTE(review): AURL is not defined anywhere in this file — presumably it
# should point at the salt-api login endpoint (e.g. URL + 'login'); confirm.
# NOTE(review): verify=False disables TLS certificate verification.
r = session.post(AURL, data, verify=False)

# Access API
r2 = session.post(URL, headers=headers, data=sdata, verify=False)
hdata = json.loads(r2.content)
print "Current salt minion status"
pprint(hdata)
def create(self, gid, sid, did):
    """
    Create a new schema object.

    Args:
        gid: Server Group ID
        sid: Server ID
        did: Database ID
    """
    # BUG FIX: json.loads() no longer accepts an ``encoding`` keyword
    # (removed in Python 3.9); bytes input is decoded automatically.
    data = request.form if request.form else json.loads(request.data)

    required_args = {
        'name': 'Name'
    }

    # Reject the request when a mandatory argument is missing.
    for arg in required_args:
        if arg not in data:
            return make_json_response(
                status=410,
                success=0,
                errormsg=gettext(
                    "Could not find the required parameter ({})."
                ).format(arg)
            )

    try:
        self.format_request_acls(data)
        SQL = render_template(
            "/".join([self.template_path,
                      self._SQL_PREFIX + self._CREATE_SQL]),
            data=data, conn=self.conn, _=gettext
        )
        status, res = self.conn.execute_scalar(SQL)
        if not status:
            return make_json_response(
                status=410,
                success=0,
                errormsg=res
            )

        # We need the OID to add the object to the browser tree;
        # the SQL below fetches it for the newly created schema.
        SQL = render_template(
            "/".join([self.template_path, 'sql/oid.sql']),
            schema=data['name'], _=gettext
        )
        status, scid = self.conn.execute_scalar(SQL)
        if not status:
            return internal_server_error(errormsg=scid)

        icon = 'icon-{0}'.format(self.node_type)

        return jsonify(
            node=self.blueprint.generate_browser_node(
                scid,
                did,
                data['name'],
                icon=icon
            )
        )
    except Exception as e:
        current_app.logger.exception(e)
        return internal_server_error(errormsg=str(e))
def delete(self, gid, sid, did, fid, fsid, umid=None):
    """
    Delete the selected user mapping node(s).

    Args:
        gid: Server Group ID
        sid: Server ID
        did: Database ID
        fid: foreign data wrapper ID
        fsid: foreign server ID
        umid: User mapping ID (when None, the ids come from the request)
    """
    if umid is None:
        # BUG FIX: json.loads() no longer accepts an ``encoding`` keyword
        # (removed in Python 3.9); bytes input is decoded automatically.
        data = request.form if request.form else json.loads(request.data)
    else:
        data = {'ids': [umid]}

    # The 'delete' command is the cascading variant of the drop.
    cascade = self.cmd == 'delete'

    try:
        for umid in data['ids']:
            # Get name of foreign server from fsid.
            sql = render_template("/".join([self.template_path,
                                            'delete.sql']),
                                  fsid=fsid, conn=self.conn)
            status, name = self.conn.execute_scalar(sql)
            if not status:
                return internal_server_error(errormsg=name)

            if name is None:
                return make_json_response(
                    status=410,
                    success=0,
                    errormsg=gettext(
                        'Error: Object not found.'
                    ),
                    info=gettext(
                        'The specified foreign server '
                        'could not be found.\n'
                    )
                )

            # Fetch the user mapping row to pass to the drop template.
            sql = render_template("/".join([self.template_path,
                                            'properties.sql']),
                                  umid=umid, conn=self.conn)
            status, res = self.conn.execute_dict(sql)
            if not status:
                return internal_server_error(errormsg=res)

            if not res['rows']:
                return make_json_response(
                    status=410,
                    success=0,
                    errormsg=gettext(
                        'The specified user mapping could not be found.\n'
                    )
                )

            # NOTE(review): ``data`` is rebound here, so a multi-id request
            # loses the remaining ids after the first iteration — behavior
            # preserved from the original; confirm whether intended.
            data = res['rows'][0]

            # drop user mapping
            sql = render_template("/".join([self.template_path,
                                            'delete.sql']),
                                  data=data, name=name, cascade=cascade,
                                  conn=self.conn)
            status, res = self.conn.execute_scalar(sql)
            if not status:
                return internal_server_error(errormsg=res)

        return make_json_response(
            success=1,
            info=gettext("User Mapping dropped")
        )
    except Exception as e:
        return internal_server_error(errormsg=str(e))
def _callJsonRPC(self, auth, callrequests, returnreq=False, notimeout=False): """Calls the Exosite One Platform RPC API. If returnreq is False, result is a tuple with this structure: (success (boolean), response) If returnreq is True, result is a list of tuples with this structure: (request, success, response) notimeout, if true, ignores reuseconnection setting, creating a new connection with no timeout. """ # get full auth (auth could be a CIK str) auth = self._getAuth(auth) jsonreq = {"auth": auth, "calls": callrequests} if self.logrequests: self._loggedrequests.append(jsonreq) body = json.dumps(jsonreq) def handle_request_exception(exception): raise JsonRPCRequestException( "Failed to make http request: %s" % str(exception)) body, response = self.onephttp.request('POST', self.url, body, self.headers, exception_fn=handle_request_exception, notimeout=notimeout) def handle_response_exception(exception): raise JsonRPCResponseException( "Failed to get response for request: %s %s" % (type(exception), str(exception))) try: res = json.loads(body) except: ex = sys.exc_info()[1] raise OnePlatformException( "Exception while parsing JSON response: %s\n%s" % (body, ex)) if isinstance(res, dict) and 'error' in res: raise OnePlatformException(str(res['error'])) if isinstance(res, list): ret = [] for r in res: # first, find the matching request so we can return it # along with the response. request = None for call in callrequests: if call['id'] == r['id']: request = call if 'status' in r: if 'ok' == r['status']: if 'result' in r: ret.append((request, True, r['result'])) else: ret.append((request, True, 'ok')) else: ret.append((request, False, r['status'])) elif 'error' in r: raise OnePlatformException(str(r['error'])) if returnreq: return ret else: # backward compatibility: return just True/False and # 'ok'/result/status as before return ret[0][1:] raise OneException("Unknown error")
import scraperwiki
import simplejson
import urllib2
import sys

# Needs to be in lower case
# NOTE(review): the comment above says lower case but the value is
# mixed-case — confirm which the API actually requires.
SCREENNAME = 'miartMilano'

# API help: https://dev.twitter.com/docs/api/1/get/following/ids
url = 'http://api.twitter.com/1/following/ids.json?screen_name=%s' % (urllib2.quote(SCREENNAME))
print url
following_json = simplejson.loads(scraperwiki.scrape(url))
# NOTE(review): if the API returns a dict, len() here counts its keys,
# not the followings — confirm the response shape.
print "Found %d following of %s" % (len(following_json), SCREENNAME)
print following_json
following_json = following_json['ids']
# get earliest following first for batching
following_json.reverse()
print following_json

# Groups a list in chunks of a given size
def group(lst, n):
    # Yields only full n-sized tuples; a short trailing chunk is dropped.
    for i in range(0, len(lst), n):
        val = lst[i:i+n]
        if len(val) == n:
            yield tuple(val)

# Where to start? Overlap one batch to increase hit rate if people unfollow etc.
batchdone = scraperwiki.sqlite.get_var('batchdone', 1)
batchstart = batchdone - 1
if batchstart < 1:
    batchstart = 1
import simplejson as json

# File containing tweets, one JSON document per line.
# BUG FIX: the file handle was never closed; ``with`` guarantees cleanup.
with open('tweets_teste.txt', "r") as tweets_file:
    # Read a single line (one tweet) from the file.
    tweet_json = tweets_file.readline()

# Print the raw line that was read.
print(tweet_json)

# Strip surrounding whitespace/newline.
strippedJson = tweet_json.strip()

# Convert the JSON string into a Python object.
tweet = json.loads(strippedJson)

print(tweet['id'])                   # tweet ID
print(tweet['created_at'])           # post date
print(tweet['text'])                 # tweet text
print(tweet['user']['id'])           # id of the posting user
print(tweet['user']['name'])         # user's display name
print(tweet['user']['screen_name'])  # user's account name
def indent_json(data):
    """Pretty-print a JSON string with 2-space indent and sorted keys.

    Returns the input unchanged when no json module is available.
    """
    if not json:
        return data
    return json.dumps(json.loads(data), indent=2, sort_keys=True)
def create(self, gid, sid, did, fid, fsid):
    """
    Create a new user mapping node.

    Args:
        gid: Server Group ID
        sid: Server ID
        did: Database ID
        fid: Foreign data wrapper ID
        fsid: Foreign server ID
    """
    required_args = [
        'name'
    ]

    # BUG FIX: json.loads() no longer accepts an ``encoding`` keyword
    # (removed in Python 3.9); bytes input is decoded automatically.
    data = request.form if request.form else json.loads(request.data)

    # Reject the request when a mandatory argument is missing.
    for arg in required_args:
        if arg not in data:
            return make_json_response(
                status=410,
                success=0,
                errormsg=gettext(
                    "Could not find the required parameter ({})."
                ).format(arg)
            )

    try:
        # Fetch the foreign server / FDW details needed by the template.
        sql = render_template("/".join([self.template_path,
                                        'properties.sql']),
                              fserid=fsid, conn=self.conn)
        status, res1 = self.conn.execute_dict(sql)
        if not status:
            return internal_server_error(errormsg=res1)
        if len(res1['rows']) == 0:
            return gone(
                gettext("The specified user mappings could not be found."))

        fdw_data = res1['rows'][0]

        # Normalise any user-mapping options into the template format.
        is_valid_options = False
        if 'umoptions' in data:
            is_valid_options, data['umoptions'] = validate_options(
                data['umoptions'], 'umoption', 'umvalue'
            )

        sql = render_template("/".join([self.template_path, 'create.sql']),
                              data=data, fdwdata=fdw_data,
                              is_valid_options=is_valid_options,
                              conn=self.conn)
        status, res = self.conn.execute_scalar(sql)
        if not status:
            return internal_server_error(errormsg=res)

        # Re-read properties to obtain the OID for the browser tree node.
        sql = render_template("/".join([self.template_path,
                                        'properties.sql']),
                              fsid=fsid, data=data, conn=self.conn)
        status, r_set = self.conn.execute_dict(sql)
        if not status:
            return internal_server_error(errormsg=r_set)

        for row in r_set['rows']:
            return jsonify(
                node=self.blueprint.generate_browser_node(
                    row['um_oid'],
                    fsid,
                    row['name'],
                    icon='icon-user_mapping'
                )
            )
    except Exception as e:
        return internal_server_error(errormsg=str(e))
def _get_data_from_request(self):
    """Return the request payload: form data when present, else the parsed JSON body."""
    # BUG FIX: json.loads() no longer accepts an ``encoding`` keyword
    # (removed in Python 3.9); bytes input is decoded automatically
    # (UTF-8/16/32 per the JSON spec).
    return request.form if request.form else json.loads(request.data)
def rxNorm(ndc):
    """Look up RxNorm concept info (RXCUI, TTY, name) for a list of NDC codes.

    :param ndc: list of NDC identifier strings; first element may be None
    :return: dict with keys 'rxcui', 'rxtty', 'rxstring'
             (empty strings when no match is found)
    """
    # ndc value coming from master.py
    # ndc = [array of ndc values]
    if ndc[0] is None:
        return {"rxcui": "", "rxtty": "", "rxstring": ""}
    else:
        # if internet or request throws an error, print out to check
        # connection and exit
        try:
            baseurl = 'http://rxnav.nlm.nih.gov/REST/'

            # Searching RXNorm API, Search by identifier to find RxNorm concepts
            # http://rxnav.nlm.nih.gov/REST/rxcui?idtype=NDC&id=0591-2234-10
            # Set url parameters for searching RXNorm for SETID
            ndcSearch = 'rxcui?idtype=NDC&id='

            # Search RXNorm API, Return all properties for a concept
            rxPropSearch = 'rxcui/'
            rxttySearch = '/property?propName=TTY'
            rxstringSearch = '/property?propName=RxNorm%20Name'

            # Request RXNorm API to return json
            header = {'Accept': 'application/json'}

            def getTTY(rxCUI):
                # Search RXNorm again using RXCUI to return RXTTY & RXSTRING
                getTTY = requests.get(
                    baseurl + rxPropSearch + rxCUI + rxttySearch,
                    headers=header)
                ttyJSON = json.loads(getTTY.text, encoding="utf-8")
                return ttyJSON['propConceptGroup']['propConcept'][0][
                    'propValue']

            def getSTRING(rxCUI):
                # Search RXNorm again using RXCUI to return RXTTY & RXSTRING
                getString = requests.get(
                    baseurl + rxPropSearch + rxCUI + rxstringSearch,
                    headers=header)
                stringJSON = json.loads(getString.text, encoding="utf-8")
                return stringJSON['propConceptGroup']['propConcept'][0][
                    'propValue']

            # Search RXNorm using NDC code, return RXCUI id
            # ndc = [ndc1, ndc2, ... ]
            for item in ndc:
                getRXCUI = requests.get(baseurl + ndcSearch + item,
                                        headers=header)
                if getRXCUI.status_code != requests.codes.ok:
                    # NOTE(review): this message was split across lines in
                    # this copy of the file; reconstructed onto one line.
                    print "RXNorm server response error. Response code: %s" % getRXCUI.status_code
                rxcuiJSON = json.loads(getRXCUI.text, encoding="utf-8")
                # Check if first value in list returns a RXCUI, if not go
                # to next value
                try:
                    if rxcuiJSON['idGroup']['rxnormId']:
                        rxCUI = rxcuiJSON['idGroup']['rxnormId'][0]
                        rxTTY = getTTY(rxCUI)
                        rxSTRING = getSTRING(rxCUI)
                        return {
                            "rxcui": rxCUI,
                            "rxtty": rxTTY,
                            "rxstring": rxSTRING
                        }
                except:
                    # if last item return null values
                    if item == ndc[-1]:
                        return {"rxcui": "", "rxtty": "", "rxstring": ""}
                    pass
        except:
            sys.exit("RXNorm connection")
def post(self):
    """Evaluate a user-supplied Python script (the /evaluate endpoint).

    Expects a JSON body with a 'script' string and optionally a 'data'
    dict whose keys must be named _arg1.._argN; responds with the
    script's JSON-serialised return value.
    """
    self._add_CORS_header()
    try:
        body = simplejson.loads(self.request.body.decode('utf-8'))
        if 'script' not in body:
            self.error_out(400, 'Script is empty.')
            return

        # Transforming user script into a proper function.
        user_code = body['script']
        arguments = None
        arguments_str = ''
        if 'data' in body:
            arguments = body['data']

        if arguments is not None:
            if not isinstance(arguments, dict):
                self.error_out(
                    400, 'Script parameters need to be '
                    'provided as a dictionary.')
                return
            else:
                # Argument names must be exactly _arg1.._argN with no gaps.
                arguments_expected = []
                for i in range(1, len(arguments.keys()) + 1):
                    arguments_expected.append('_arg' + str(i))
                if sorted(arguments_expected) == sorted(arguments.keys()):
                    arguments_str = ', ' + ', '.join(arguments.keys())
                else:
                    self.error_out(
                        400, 'Variables names should follow '
                        'the format _arg1, _arg2, _argN')
                    return

        # NOTE(review): the user script is executed as Python — this
        # endpoint must only be reachable by trusted callers.
        function_to_evaluate = ('def _user_script(tabpy' +
                                arguments_str + '):\n')
        for u in user_code.splitlines():
            function_to_evaluate += ' ' + u + '\n'

        logger.info("function to evaluate=%s" % function_to_evaluate)

        # Run the generated function in a subprocess (coroutine yield).
        result = yield self.call_subprocess(function_to_evaluate,
                                            arguments)
        if result is None:
            self.error_out(400, 'Error running script. No return value')
        else:
            self.write(simplejson.dumps(result))
            self.finish()

    except Exception as e:
        err_msg = "%s : " % e.__class__.__name__
        err_msg += "%s" % str(e)
        # The exact "KeyError : 'response'" message signals a missing
        # downstream endpoint rather than a script failure.
        if err_msg != "KeyError : 'response'":
            err_msg = format_exception(e, 'POST /evaluate')
            self.error_out(500, 'Error processing script', info=err_msg)
        else:
            self.error_out(
                404, 'Error processing script',
                info="The endpoint you're "
                "trying to query did not respond. Please make sure the "
                "endpoint exists and the correct set of arguments are "
                "provided.")
def _media_unique(media_type, dbid):
    """Fetch details for a single Kodi library item via JSON-RPC.

    :param media_type: one of 'tvshow', 'movie', 'musicvideo'
    :param dbid: Kodi database id of the item
    :return: list containing zero or one dict describing the item
    """
    log('Using JSON for retrieving %s info' % media_type)
    Medialist = []
    if media_type == 'tvshow':
        json_query = xbmc.executeJSONRPC(
            '{"jsonrpc": "2.0", "method": "VideoLibrary.GetTVShowDetails", "params": {"properties": ["file", "imdbnumber", "art"], "tvshowid":%s}, "id": 1}' % dbid)
        json_query = unicode(json_query, 'utf-8', errors='ignore')
        jsonobject = simplejson.loads(json_query)
        if jsonobject['result'].has_key('tvshowdetails'):
            item = jsonobject['result']['tvshowdetails']
            # Search for season information
            json_query_season = xbmc.executeJSONRPC(
                '{"jsonrpc": "2.0", "method": "VideoLibrary.GetSeasons", "params": {"properties": ["season", "art"], "sort": { "method": "label" }, "tvshowid":%s }, "id": 1}' % item.get('tvshowid', ''))
            jsonobject_season = simplejson.loads(json_query_season)
            # Get start/end and total seasons
            # NOTE(review): season_limit is only bound when 'limits' is
            # present; the append below would raise NameError otherwise —
            # confirm the RPC always returns 'limits'.
            if jsonobject_season['result'].has_key('limits'):
                season_limit = jsonobject_season['result']['limits']
            # Get the season numbers
            seasons_list = []
            if jsonobject_season['result'].has_key('seasons'):
                seasons = jsonobject_season['result']['seasons']
                for season in seasons:
                    seasons_list.append(season.get('season'))
            Medialist.append({
                'id': item.get('imdbnumber', ''),
                'dbid': item.get('tvshowid', ''),
                'name': item.get('label', ''),
                'path': media_path(item.get('file', '')),
                'seasontotal': season_limit.get('total', ''),
                'seasonstart': season_limit.get('start', ''),
                'seasonend': season_limit.get('end', ''),
                'seasons': seasons_list,
                'art': item.get('art', ''),
                'mediatype': media_type
            })
    elif media_type == 'movie':
        json_query = xbmc.executeJSONRPC(
            '{"jsonrpc": "2.0", "method": "VideoLibrary.GetMovieDetails", "params": {"properties": ["file", "imdbnumber", "year", "trailer", "streamdetails", "art"], "movieid":%s }, "id": 1}' % dbid)
        json_query = unicode(json_query, 'utf-8', errors='ignore')
        jsonobject = simplejson.loads(json_query)
        if jsonobject['result'].has_key('moviedetails'):
            item = jsonobject['result']['moviedetails']
            # Derive the disc type (e.g. bluray/dvd) from path and stream info.
            disctype = media_disctype(
                item.get('file', '').encode('utf-8').lower(),
                item['streamdetails']['video'])
            streamdetails = item['streamdetails']['video']
            Medialist.append({
                'dbid': item.get('movieid', ''),
                'id': item.get('imdbnumber', ''),
                'name': item.get('label', ''),
                'year': item.get('year', ''),
                'file': item.get('file', ''),
                'path': media_path(item.get('file', '')),
                'trailer': item.get('trailer', ''),
                'disctype': disctype,
                'art': item.get('art', ''),
                'mediatype': media_type
            })
    elif media_type == 'musicvideo':
        # NOTE(review): this query passes "movieid" for a musicvideo
        # lookup — presumably it should be "musicvideoid"; confirm.
        json_query = xbmc.executeJSONRPC(
            '{"jsonrpc": "2.0", "method": "VideoLibrary.GetMusicVideoDetails", "params": {"properties": ["file", "artist", "album", "track", "runtime", "year", "genre", "art"], "movieid":%s }, "id": 1}' % dbid)
        json_query = unicode(json_query, 'utf-8', errors='ignore')
        jsonobject = simplejson.loads(json_query)
        if jsonobject['result'].has_key('musicvideodetails'):
            item = jsonobject['result']['musicvideodetails']
            Medialist.append({
                'dbid': item.get('musicvideoid', ''),
                'id': '',
                'name': item.get('label', ''),
                'artist': item.get('artist', ''),
                'album': item.get('album', ''),
                'track': item.get('track', ''),
                'runtime': item.get('runtime', ''),
                'year': item.get('year', ''),
                'path': media_path(item.get('file', '')),
                'art': item.get('art', ''),
                'mediatype': media_type
            })
    else:
        log('No JSON results found')
    return Medialist
def testUsers(self):
    """/users returns the full, order-independent set of known users."""
    response = self._get('/users')
    self.assertEqual(response.status_code, 200)
    payload = json.loads(response.data)
    # Order is not guaranteed, so compare as a set.
    payload['users'] = set(payload['users'])
    expected = dict(users={'bill', 'billy', 'bob', 'ashanti', 'mary'})
    self.assertEqual(payload, expected)
def create(self, gid, sid):
    """Create the database.

    Args:
        gid: Server Group ID
        sid: Server ID
    """
    required_args = [
        'name'
    ]

    # BUG FIX: json.loads() no longer accepts an ``encoding`` keyword
    # (removed in Python 3.9); bytes input is decoded automatically.
    data = request.form if request.form else json.loads(request.data)

    # Reject the request when a mandatory argument is missing.
    for arg in required_args:
        if arg not in data:
            return make_json_response(
                status=410,
                success=0,
                errormsg=_(
                    "Could not find the required parameter ({})."
                ).format(arg)
            )

    # The below SQL will execute CREATE DDL only
    SQL = render_template(
        "/".join([self.template_path, self._CREATE_SQL]),
        data=data, conn=self.conn
    )
    status, msg = self.conn.execute_scalar(SQL)
    if not status:
        return internal_server_error(errormsg=msg)

    if 'datacl' in data:
        data['datacl'] = parse_priv_to_db(data['datacl'], 'DATABASE')

    # CREATE DATABASE cannot be combined with other statements, so the
    # remaining DDL (grants etc.) runs separately.
    SQL = render_template(
        "/".join([self.template_path, self._GRANT_SQL]),
        data=data, conn=self.conn
    )
    SQL = SQL.strip('\n').strip(' ')
    if SQL and SQL != "":
        status, msg = self.conn.execute_scalar(SQL)
        if not status:
            return internal_server_error(errormsg=msg)

    # We need the OID of the newly created database.
    SQL = render_template(
        "/".join([self.template_path, self._PROPERTIES_SQL]),
        name=data['name'], conn=self.conn, last_system_oid=0,
        show_system_objects=self.blueprint.show_system_objects,
    )
    SQL = SQL.strip('\n').strip(' ')
    if SQL and SQL != "":
        status, res = self.conn.execute_dict(SQL)
        if not status:
            return internal_server_error(errormsg=res)

    response = res['rows'][0]

    # Add database entry into database table with schema_restrictions.
    # ROBUSTNESS FIX: 'schema_res' is optional in the payload; default to
    # an empty list instead of raising KeyError.
    database = Database(id=response['did'], server=sid,
                        schema_res=','.join(data.get('schema_res', [])))
    db.session.add(database)
    db.session.commit()

    return jsonify(
        node=self.blueprint.generate_browser_node(
            response['did'],
            sid,
            response['name'],
            icon="icon-database-not-connected",
            connected=False,
            tablespace=response['default_tablespace'],
            allowConn=True,
            canCreate=response['cancreate'],
            canDisconn=True,
            canDrop=True
        )
    )
def testGetRoles(self):
    """Bill holds the qa and releng roles, each at data_version 1."""
    response = self._get("/users/bill/roles")
    self.assertStatusCode(response, 200)
    roles = json.loads(response.data)["roles"]
    expected = [
        {"role": "qa", "data_version": 1},
        {"role": "releng", "data_version": 1},
    ]
    self.assertEquals(roles, expected)
def test_parse(self):
    """Decoding then re-encoding JSON must round-trip to an equal object."""
    decoded = json.loads(JSON)
    reencoded = json.dumps(decoded)
    self.assertEqual(decoded, json.loads(reencoded))
v_top = heapq.heappop(priority_queue) v = v_top[1] path = [] while True: path.append(v.vertex) v = v.previous_vertex if v is None: break path.reverse() return path print('Initializing map...') map_file = open('testmap2.json') map_json = json.loads(map_file.read()) vertex_list = [] for v in map_json['vertices']: vertex_list.append( Vertex(v['id'], (v['coordinates']['x'], v['coordinates']['y']))) test_course = Graph(vertex_list) for v in map_json['vertices']: current_v = test_course.search(v['id']) if 'directed' in v: directed = True directed_index = v['directed']['index'] previous_vertex = test_course.search(v['directed']['previous'])
def testGetAllRoles(self):
    """/users/roles lists exactly releng, qa and relman, in that order."""
    response = self._get("/users/roles")
    self.assertStatusCode(response, 200)
    roles = json.loads(response.data)["roles"]
    self.assertEqual(roles, ['releng', 'qa', 'relman'])
def importHosts(self, content):
    u"""Import a previously exported profile (configs + hosts files) from JSON text."""
    try:
        data = json.loads(content)
    except Exception:
        wx.MessageBox(u"档案解析出错了!", caption=u"导入失败")
        return
    if type(data) != dict:
        wx.MessageBox(u"档案格式有误!", caption=u"导入失败")
        return
    configs = data.get("configs")
    hosts_files = data.get("hosts_files")
    if type(configs) != dict or type(hosts_files) not in (list, tuple):
        wx.MessageBox(u"档案数据有误!", caption=u"导入失败")
        return
    # Delete the existing hosts files.
    current_files = glob.glob(os.path.join(self.hosts_path, "*.hosts"))
    for fn in current_files:
        try:
            os.remove(fn)
        except Exception:
            wx.MessageBox(u"删除 '%s' 时失败!\n\n%s" % (fn, traceback.format_exc()), caption=u"导入失败")
            return
    # Write the new hosts files, skipping malformed entries.
    for hf in hosts_files:
        if type(hf) != dict or "filename" not in hf or "content" not in hf:
            continue
        fn = hf["filename"].strip()
        # Only accept filenames ending in .hosts.
        if not fn or not fn.lower().endswith(".hosts"):
            continue
        try:
            self.writeFile(os.path.join(self.hosts_path, fn), hf["content"].strip().encode("utf-8"))
        except Exception:
            wx.MessageBox(u"写入 '%s' 时失败!\n\n%s" % (fn, traceback.format_exc()), caption=u"导入失败")
            return
    # Update the configs file.
    # self.configs = {}
    try:
        self.writeFile(self.configs_path, json.dumps(configs).encode("utf-8"))
    except Exception:
        wx.MessageBox(u"写入 '%s' 时失败!\n\n%s" % (self.configs_path, traceback.format_exc()), caption=u"导入失败")
        return
    # self.clearTree()
    # self.init2()
    wx.MessageBox(u"导入成功!")
    self.restart()
def testPermissionGet(self):
    """Bill's admin permission exists with no options at data_version 1."""
    response = self._get('/users/bill/permissions/admin')
    self.assertEqual(response.status_code, 200)
    expected = dict(options=None, data_version=1)
    self.assertEqual(json.loads(response.data), expected)