def _convert_from_v1(self):
    """Converts the JSON dict from v1 to the current version

    :raises :class:`recipe.definition.exceptions.InvalidDefinition`: If the given definition is invalid
    """
    v1_json_dict = RecipeDefinitionV1(self._definition).get_dict()

    # A database query here is necessary to retrieve the latest revisions for each job type (the v1 recipe
    # definition did not store job type revision). Keyed by (name, version) tuple -- string concatenation
    # could collide (e.g. 'ab'+'c' == 'a'+'bc').
    revisions = {}
    from job.models import JobType
    for job_type in JobType.objects.all().only('name', 'version', 'revision_num'):
        revisions[(job_type.name, job_type.version)] = job_type.revision_num

    # Convert input parameters: v1 'input_data' list becomes v2 'input' with 'files' and 'json' sections
    files = []
    json_inputs = []  # renamed: a local named `json` shadows the stdlib module
    for input_data_dict in v1_json_dict['input_data']:
        name = input_data_dict['name']
        if input_data_dict['type'] in ['file', 'files']:
            file_input_dict = {'name': name, 'required': input_data_dict['required'],
                               'multiple': input_data_dict['type'] == 'files'}
            if 'media_types' in input_data_dict:
                file_input_dict['media_types'] = input_data_dict['media_types']
            files.append(file_input_dict)
        elif input_data_dict['type'] == 'property':
            # v1 'property' inputs become v2 string JSON inputs
            json_inputs.append({'name': name, 'type': 'string', 'required': input_data_dict['required']})
    del v1_json_dict['input_data']
    v1_json_dict['input'] = {'files': files, 'json': json_inputs}

    # Convert jobs: each v1 job becomes a v2 node with dependencies, input connections and a node_type
    nodes = {}
    for job_dict in v1_json_dict['jobs']:
        dependencies = []
        node_input = {}
        for dependency_dict in job_dict['dependencies']:
            d_name = dependency_dict['name']
            dependencies.append({'name': d_name})
            for conn_dict in dependency_dict['connections']:
                node_input[conn_dict['input']] = {'type': 'dependency', 'node': d_name,
                                                  'output': conn_dict['output']}
        for recipe_input_dict in job_dict['recipe_inputs']:
            node_input[recipe_input_dict['job_input']] = {'type': 'recipe',
                                                          'input': recipe_input_dict['recipe_input']}
        jt_name = job_dict['job_type']['name']
        version = job_dict['job_type']['version']
        revision = revisions[(jt_name, version)]
        node_type_dict = {'node_type': 'job', 'job_type_name': jt_name, 'job_type_version': version,
                          'job_type_revision': revision}
        nodes[job_dict['name']] = {'dependencies': dependencies, 'input': node_input,
                                   'node_type': node_type_dict}
    del v1_json_dict['jobs']
    v1_json_dict['nodes'] = nodes

    # Update version (remove then re-add so the key holds the current schema version)
    if 'version' in v1_json_dict:
        del v1_json_dict['version']
    v1_json_dict['version'] = SCHEMA_VERSION

    self._definition = v1_json_dict
def generate_dict(feature_labels, score_labels, standardized, non_standardized, title=''):
    """Build a result dict combining standardized values and per-feature score rows.

    :param feature_labels: names of the features (rows)
    :param score_labels: names of the score series (columns)
    :param standardized: sequence of array-likes (with .tolist()), one per score label
    :param non_standardized: sequence of indexable series, one per score label
    :param title: optional title stored verbatim in the result
    :return: dict with keys 'scoreLabels', 'featureLabels', 'title', 'values', 'json'
    """
    # Dropped unused local `values_keys` (computed but never referenced in the original).
    values = [[0] * len(score_labels) for _ in feature_labels]
    rows = []  # renamed from `json`, which shadowed the stdlib module
    for k, feature_name in enumerate(feature_labels):
        element = {'name': feature_name}
        for i, std_series in enumerate(standardized):
            values[k][i] = std_series.tolist()[k]
            element[score_labels[i]] = non_standardized[i][k]
        rows.append(element)
    return {
        'scoreLabels': score_labels,
        'featureLabels': feature_labels,
        'title': title,
        'values': values,
        'json': rows,
    }
def CheckUpdate():
    """Return not-yet-installed updates (newest unicode first) as a JSON string."""
    pending = Mongo.coll['Update'].find({'isInstall': 0}).sort('unicode', -1)
    # renamed from `json` (shadowed the stdlib module); loop folded into a comprehension
    updates = [{'unicode': doc['unicode'], 'name': doc['name'], 'info': doc['info'],
                'time': doc['pushtime'], 'author': doc['author']}
               for doc in pending]
    return dumps(updates)
def list_recents(request):
    """Return the six most recently issued datasets (via SPARQL) as {'rs': [...]}."""
    results = utils.query("""
        PREFIX dc: <http://purl.org/dc/elements/1.1/>
        PREFIX dct: <http://purl.org/dc/terms/>
        PREFIX dn: <http://melodi.irit.fr/ontologies/dn/>
        select * where {
            ?dn a dn:Dataset.
            ?dn dct:issued ?date.
            ?dn dct:title ?title.
            ?dn dct:description ?desc.
            ?dn dn:hasSubject ?subj.
            ?subj dn:name ?subj_name.
        } order by DESC(?date) limit 6
    """)
    # renamed from `json` (shadowed the stdlib module)
    recents = [{
        'uri': r["dn"]["value"],
        'title': r["title"]["value"],
        'description': r["desc"]["value"],
        'issued': r["date"]["value"],
        'subject': r["subj_name"]["value"]
    } for r in results["results"]["bindings"]]
    return JsonResponse({'rs': recents}, safe=False)
def get_distributions(request):
    """Return distributions (format, size, download URL) of the dataset in ?uri= as {'rs': [...]}.

    NOTE(review): the ?uri= value is interpolated into the SPARQL query unescaped --
    injection risk if this endpoint faces untrusted clients; sanitize upstream.
    """
    results = utils.query("""PREFIX dn: <http://melodi.irit.fr/ontologies/dn/>
        PREFIX dcat: <http://www.w3.org/ns/dcat#>
        PREFIX dct: <http://purl.org/dc/terms/>
        select ?uri ?format ?download ?size where {{
            <{}> dcat:distribution ?uri.
            ?uri dn:hasFormat ?fm.
            ?uri dcat:byteSize ?size.
            ?uri dcat:downloadURL ?download.
            ?fm rdfs:label ?format.
        }}""".format(request.GET.get('uri')))
    distributions = []  # renamed from `json` (shadowed the stdlib module)
    for r in results["results"]["bindings"]:
        uri = r["uri"]["value"]
        distributions.append({
            'uri': uri,
            # label: "<last path segment>, format <fmt>, size <n> bytes"
            'label': uri[uri.rindex('/') + 1:] + ", format " + r["format"]["value"] +
                     ", size " + r["size"]["value"] + " bytes",
            'download': r["download"]["value"],
            'size': r["size"]["value"],
            'format': r["format"]["value"]
        })
    return JsonResponse({'rs': distributions}, safe=False)
def get_dataset(request):
    """Search datasets by title or keyword regex; return matches as {'rs': [...]}.

    NOTE(review): the ?value= parameter is interpolated into the SPARQL query
    unescaped -- injection risk; sanitize upstream.
    """
    # BUG FIX: the original used a local named `filter` (shadowing the builtin) that
    # was left unbound when ?search= was neither "title" nor "keyword", raising
    # UnboundLocalError. Default to no filter instead.
    search_filter = ''
    if request.GET.get("search") == "title":
        search_filter = 'FILTER regex(str(?title), "' + request.GET.get(
            'value') + '", "i")'
    elif request.GET.get("search") == "keyword":
        search_filter = 'FILTER regex(str(?keyword), "' + request.GET.get(
            'value') + '" , "i")'
    results = utils.query("""PREFIX dc: <http://purl.org/dc/elements/1.1/>
        PREFIX dct: <http://purl.org/dc/terms/>
        PREFIX dcat: <http://www.w3.org/ns/dcat#>
        PREFIX dn: <http://melodi.irit.fr/ontologies/dn/>
        select distinct ?dn ?title ?description ?issued ?subject where {{
            ?dn a dn:Dataset.
            ?dn dct:issued ?issued.
            ?dn dct:title ?title.
            ?dn dct:description ?description.
            ?dn dn:hasSubject ?subj.
            ?subj dn:name ?subject.
            ?dn dcat:keyword ?keyword.
            {}
        }} """.format(search_filter))
    datasets = [{
        'uri': r["dn"]["value"],
        'title': r["title"]["value"],
        'description': r["description"]["value"],
        'issued': r["issued"]["value"],
        'subject': r["subject"]["value"]
    } for r in results["results"]["bindings"]]
    return JsonResponse({'rs': datasets}, safe=False)
def reduceTree(user):
    """Build the one-element tree list for `user`; children_tree fills children in place."""
    sponsor_doc = db.users.find_one({'customer_id': user['p_node']})
    # fall back to "administrator" when the parent node has no user record
    sponsor_name = sponsor_doc['username'] if sponsor_doc is not None else "administrator"
    tree = {
        "id": user['customer_id'],
        # NOTE(review): str(x.encode('utf-8')) is a Python 2 idiom; under Python 3
        # it would yield "b'...'" strings -- confirm interpreter version.
        "text": str(user['username'].encode('utf-8')),
        "username": str(user['username'].encode('utf-8')),
        "email": user['email'],
        "date_added": format_date(user['creation']),
        "level": user['level'],
        "leftPD": format_usd(user['total_pd_left']),
        "rightPD": format_usd(user['total_pd_right']),
        "totalPD": format_usd(user['investment']),
        "sponsor": sponsor_name,
        "empty": False,
        "iconCls": "level2",
        "fl": 1,
        'children': []
    }
    children_tree(tree)  # mutates tree['children']
    return [tree]
def produce_json(response):
    """Translate a raw state response into a list of {'line', 'state', 'details'} dicts."""
    # renamed the `json` accumulator (shadowed the stdlib module); comprehension form
    return [{'line': str_to_enum(line), 'state': str_to_enum(description), 'details': details}
            for (line, description, details) in parse_raw_state(response)]
def processCSV(file, datasetName, encoding="utf-8"):
    """Parse a CSV file and forward its rows as dicts to processJSON.

    On a parse failure with the default encoding, retries once with latin-1;
    if the latin-1 attempt also fails, reports the error via printMessage.
    """
    try:
        dataset = pd.read_csv(file, keep_default_na=False, low_memory=False,
                              encoding=encoding, sep=CSV_DELIMITER,
                              error_bad_lines=False, warn_bad_lines=False, dtype=object)
    except Exception:  # was a bare `except:`; keep best-effort retry but not SystemExit etc.
        fallback = "latin-1"
        if fallback != encoding:
            # try to parse file with another encoding
            processCSV(file, datasetName, encoding=fallback)
        else:
            printMessage("Error parseando el archivo", True)
        return
    headers = list(dataset)
    features = []  # renamed from `json` (shadowed the stdlib module)
    for _, row in dataset.iterrows():
        features.append({header: row[header] for header in headers})
    processJSON(None, datasetName, data=features)
def allcourses(request):
    """Return every course, serialized, in an HttpResponse."""
    # renamed from `json` (shadowed the stdlib module); dropped commented-out debug print
    serialized = [course.serialize() for course in Course.objects.all()]
    return HttpResponse(serialized)
def get_data(spreadsheetId):
    """Fetch a Google Sheet and return its rows as dicts keyed by the header row.

    :param spreadsheetId: id of the spreadsheet to read (range A1:ZZZ)
    :return: list of {header: cell} dicts, empty when the sheet has no data
    """
    credentials = get_credentials()
    http = credentials.authorize(httplib2.Http())
    discoveryUrl = ('https://sheets.googleapis.com/$discovery/rest?'
                    'version=v4')
    service = discovery.build('sheets', 'v4', http=http,
                              discoveryServiceUrl=discoveryUrl, cache_discovery=False)
    print(spreadsheetId)
    rangeName = 'A1:ZZZ'
    result = service.spreadsheets().values().get(
        spreadsheetId=spreadsheetId, range=rangeName,
        dateTimeRenderOption='FORMATTED_STRING', majorDimension='ROWS').execute()
    values = result.get('values', [])
    records = []  # renamed from `json` (shadowed the stdlib module)
    if not values:
        print('No data found.')
    else:
        # BUG FIX: the original read values[0] before the emptiness check,
        # raising IndexError for an empty sheet.
        header_row = values[0]
        for row in values[1:]:
            records.append({header_row[column_id]: cell
                            for column_id, cell in enumerate(row)})
    return records
def createTestMetrics(cls, metric_class):
    """Create and save a year (2010) of random daily test metrics for `metric_class`.

    :param metric_class: a ContainerMetric or DomainMetric subclass
    :raises TypeError: if metric_class is neither
    """
    parameters = {'container': 1}
    if issubclass(metric_class, ContainerMetric):
        pass  # container id alone suffices
    elif issubclass(metric_class, DomainMetric):
        parameters['domain'] = 1
    else:
        raise TypeError('Cannot handle {class_name} class'.format(
            class_name=metric_class.__name__))
    start = date(2010, 1, 1)
    end = date(2010, 12, 31)
    cls.test_metrics = []
    for day in daterange(start, end):
        # five random (x, y) pairs per day; renamed from `json` (shadowed the module)
        points = [[random.randint(1, 100), random.randint(1, 100)] for _ in xrange(5)]
        parameters['day'] = day.day
        parameters['month'] = day.month
        parameters['year'] = day.year
        parameters['json'] = points
        test_metric = metric_class(**parameters)
        test_metric.save()
        cls.test_metrics.append(test_metric)
def copy_design(request, uid):
    """Clone design `uid` for the requesting user and return the copy's metadata."""
    try:
        design_obj = get_feature_by_uid(uid)
    except Feature.DoesNotExist:
        raise Http404
    # check permissions
    viewable, response = design_obj.is_viewable(request.user)
    if not viewable:
        return response
    # clearing the pk and saving inserts a new row owned by the requester
    design_obj.pk = None
    design_obj.user = request.user
    design_obj.save()
    # renamed from `json` (shadowed the stdlib module)
    copy_json = [{
        'id': design_obj.id,
        'uid': design_obj.uid,
        'name': design_obj.name,
        'description': design_obj.description,
        'attributes': design_obj.serialize_attributes()
    }]
    return HttpResponse(dumps(copy_json), status=200)
def get_scenarios(request):
    """Return the user's active scenarios plus active scenarios shared with them."""
    scenarios_json = []  # renamed from `json` (shadowed the stdlib module)
    scenarios = Scenario.objects.filter(user=request.user, active=True).order_by('date_created')
    for scenario in scenarios:
        # Allow for "sharing groups" without an associated MapGroup, for "special" cases
        sharing_groups = [group.mapgroup_set.get().name
                          for group in scenario.sharing_groups.all()
                          if group.mapgroup_set.exists()]
        scenarios_json.append({
            'id': scenario.id,
            'uid': scenario.uid,
            'name': scenario.name,
            'description': scenario.description,
            'attributes': scenario.serialize_attributes(),
            'sharing_groups': sharing_groups
        })
    shared_scenarios = Scenario.objects.shared_with_user(request.user)
    for scenario in shared_scenarios:
        # skip shared scenarios already owned by the user (membership test on the queryset)
        if scenario.active and scenario not in scenarios:
            scenarios_json.append({
                'id': scenario.id,
                'uid': scenario.uid,
                'name': scenario.name,
                'description': scenario.description,
                'attributes': scenario.serialize_attributes(),
                'shared': True,
                'shared_by_username': scenario.user.username,
                'shared_by_name': scenario.user.first_name + ' ' + scenario.user.last_name
            })
    return HttpResponse(dumps(scenarios_json))
def get_selections(request):
    """Return the user's lease-block selections plus selections shared with them."""
    selections_json = []  # renamed from `json` (shadowed the stdlib module)
    selections = LeaseBlockSelection.objects.filter(user=request.user).order_by('date_created')
    for selection in selections:
        # Allow for "sharing groups" without an associated MapGroup
        sharing_groups = [group.mapgroup_set.get().name
                          for group in selection.sharing_groups.all()
                          if group.mapgroup_set.exists()]
        selections_json.append({
            'id': selection.id,
            'uid': selection.uid,
            'name': selection.name,
            'description': selection.description,
            'attributes': selection.serialize_attributes(),
            'sharing_groups': sharing_groups
        })
    shared_selections = LeaseBlockSelection.objects.shared_with_user(request.user)
    for selection in shared_selections:
        # skip shared selections the user already owns
        if selection not in selections:
            selections_json.append({
                'id': selection.id,
                'uid': selection.uid,
                'name': selection.name,
                'description': selection.description,
                'attributes': selection.serialize_attributes(),
                'shared': True,
                'shared_by_username': selection.user.username,
                'shared_by_name': selection.user.first_name + ' ' + selection.user.last_name
            })
    return HttpResponse(dumps(selections_json))
def get_leaseblocks(request):
    """Return per-block stats for lease blocks below sea level that have wind data.

    Commented-out alternative fields from the original were removed; the model
    carries additional density/distance/depth statistics if they are needed.
    """
    leaseblocks = LeaseBlock.objects.filter(avg_depth__lt=0.0, min_wind_speed_rev__isnull=False)
    blocks_json = []  # renamed from `json` (shadowed the stdlib module)
    for ocs_block in leaseblocks:
        blocks_json.append({
            'id': ocs_block.id,
            'ais_mean_density': ocs_block.ais_all_vessels_maj,
            'avg_distance': ocs_block.avg_distance,
            'awc_min_distance': ocs_block.awc_min_distance,
            'substation_min_distance': ocs_block.substation_min_distance,
            # avg_depth is stored negative (below sea level); negate for display
            'avg_depth': -ocs_block.avg_depth,
            'min_wind_speed': ocs_block.min_wind_speed_rev,
            'tsz_min_distance': ocs_block.tsz_min_distance,
            'uxo': ocs_block.uxo
        })
    return HttpResponse(dumps(blocks_json))
def invitations(request, app_id):
    """List invitations sent by the current user (GET) or send a new one (POST)."""
    user_id = long(request.user.id)
    # get all invitations sent by user {{user_id}}
    if request.method == 'GET':
        sent = list(InvitationKeys.objects.filter(inviter_id=user_id))
        invites = [{"invitee": i.invitee, "date": str(i.date.date()), "accepted": i.accepted}
                   for i in sent]
        # BUG FIX: JsonResponse needs safe=False to serialize a list
        return JsonResponse(invites, safe=False)
    # send an invitation from {{user_id}} to a friend
    else:
        user = get_object_or_404(User, pk=user_id)
        app = get_object_or_404(App, pk=long(app_id))
        if user.first_name != "":
            user_name = user.first_name
            # BUG FIX: was `is not ""` (identity comparison); use != for strings
            if user.last_name != "":
                user_name = user_name + " " + user.last_name
        else:
            user_name = user.username
        name = request.POST['name']
        email = request.POST['email']
        subject = "%s has invited you to check out Appcubator!" % user_name
        invitation = InvitationKeys.create_invitation(request.user, email)
        message = ('Dear {name},\n\n'
                   'Check out what I\'ve build using Appcubator:\n\n'
                   '<a href="{url}">{hostname}</a>\n\n'
                   'Appcubator is the only visual web editor that can build rich web applications without hiring a developer or knowing how to code. It is also free to build an app, forever.\n'
                   'You can signup here: <a href="http://appcubator.com/signup?k={invitation_key}">Appcubator Signup</a>\n\n'
                   'Best,\n{user_name}\n\n\n')
        message = message.format(name=name, url=app.url(), hostname=app.hostname(),
                                 user_name=user_name, invitation_key=invitation.api_key)
        template_context = {"text": message}
        send_template_email(request.user.email, email, subject, "",
                            "emails/base_boxed_basic_query.html", template_context)
        return HttpResponse(message)
def paper_json():
    """Return all papers followed by all open problems, formatted, as JSON."""
    # renamed from `json` (shadowed the stdlib module); comprehension form
    entries = [format_paper(v) for v in site_data["papers"]]
    entries.extend(format_open_problem(v) for v in site_data["open_problems"])
    return jsonify(entries)
def downloading():
    """Return app_ids of APKs still pending download, or error=1 when none remain."""
    table_apks = TableApk.query.filter_by(downloaded_flag=False).all()
    # BUG FIX: .all() returns a list and never None, so the original None check
    # could not fire; test for an empty result instead.
    if not table_apks:
        return jsonify(error=1, apks=[])
    pending = [{'app_id': apk.app_id} for apk in table_apks]
    return jsonify(error=0, apks=pending)
def get_locations():
    """Return every Location as a JSON array string."""
    # renamed from `json` (shadowed the stdlib module); comprehension form
    payload = [location.to_dict() for location in Location.select()]
    return flask.json.dumps(payload)
def retail_search_facility(request):
    """Case-insensitive substring search of retail facilities by ?name=."""
    matches = []  # renamed from `json` (shadowed the stdlib module)
    if request.method == 'GET':
        name = request.GET['name']
        matches = [{'name': retailer.name}
                   for retailer in retail_sales.objects.filter(name__icontains=name)]
    # non-GET requests return an empty list, as before
    return JsonResponse(matches, safe=False)
def get(self, *args, **kwargs):
    """Return the current user's friends (the related `friend` objects)."""
    # dropped the debug print of request.user; renamed `json` (shadowed the module)
    friends = Friend.objects.filter(account=self.request.user)
    data = [friend.friend for friend in friends]
    return Response(data=data, status=status.HTTP_200_OK)
def get_json_from_csv_file(file_name, fieldnames=('ts', 'pending_txs_found')):
    """Read a headerless CSV file into a list of dicts keyed by `fieldnames`.

    :param file_name: path to the CSV file
    :param fieldnames: column names applied to every row
    :return: list of dicts, one per CSV row
    """
    import csv
    rows = []  # renamed from `json` (shadowed the stdlib module)
    # BUG FIX: the original never closed the file handle; `with` guarantees closure
    with open(file_name, 'r') as csvfile:
        reader = csv.DictReader(csvfile, fieldnames)
        for row in reader:
            rows.append(dict(row))
    return rows
def articles_all_json():
    """Fetch up to 100 articles from Elasticsearch, each rendered with sorted keys."""
    e = es.search(index="dilemma", doc_type="articles", size=100,
                  body={"query": {"match_all": {}}})
    results = _render_hits(e["hits"]["hits"])
    # renamed from `json` (shadowed the stdlib module); comprehension form
    return [OrderedDict(sorted(result.items(), key=lambda t: t[0])) for result in results]
def _get_movi_cidade(self):
    """Return Movi rows for the city in ?cidade= as a JSON list."""
    cidade = self.request.GET['cidade']
    # dropped the redundant .all() before .filter(); renamed `json` (shadowed module)
    movi_json = [m.to_json() for m in Movi.objects.filter(cidade=cidade)]
    return JsonResponse(movi_json, safe=False)
def generation_json(result_comparation):
    """Extract the best face match (name + similarity) from each comparison result.

    :param result_comparation: iterable of Rekognition-style dicts with a
        'FaceMatches' list; entries without matches are skipped
    :return: list of {'name', 'faceMatch'} dicts (similarity rounded to 2 places)
    """
    profiles = []  # renamed from `json` (shadowed the stdlib module)
    for face in result_comparation:
        # truthiness covers empty list AND a missing/None 'FaceMatches' key
        # (the original's len(...) would raise TypeError on None)
        if face.get('FaceMatches'):
            best = face['FaceMatches'][0]
            profiles.append(dict(
                name=best['Face']['ExternalImageId'],
                faceMatch=round(best['Similarity'], 2)))
    return profiles
def getGasto(ano):
    """Return the total spending (sum of valor) for year `ano` as JSON.

    :param ano: year, numeric or numeric string
    :raises ValueError: if `ano` is not numeric
    """
    # SECURITY FIX: `ano` was concatenated into the SQL unvalidated; coercing
    # through int() rejects any non-numeric payload (SQL injection guard).
    ano = str(int(ano))
    connection = engine.connect()
    try:
        result = connection.execute('select sum(valor) from gastos where anoEmissao =' + ano)
        totals = [row[0] for row in result]  # renamed from `json` (shadowed module)
    finally:
        # BUG FIX: the original leaked the connection if execute() raised
        connection.close()
    return jsonify(totals)
def getTasks(request):
    """Return one page (3 items) of tasks, sortable via ?sortKey=&sortWay=, as JSON."""
    # dropped the two debug prints of the request
    page_number = request.GET.get('page')
    sort_key = request.GET.get('sortKey')
    sort_way = request.GET.get('sortWay')
    if sort_key == 'status':
        sort_key = 'is_cofirmed'  # model field name (sic)
    if sort_way == 'dec':
        sort_key = '-' + sort_key  # descending order
    taskList = Task.objects.order_by(sort_key)
    paginator = Paginator(taskList, 3)
    if page_number is None:  # was `== None`
        page_number = 1
    page_obj = paginator.get_page(page_number)
    next_page = page_obj.next_page_number() if page_obj.has_next() else None
    prev_page = page_obj.previous_page_number() if page_obj.has_previous() else None
    tasks_json = [{  # renamed from `json` (shadowed the stdlib module)
        'taskid': task.id,
        'title': task.title,
        'user': task.user,
        'email': task.email,
        'description': task.description,
        'is_cofirmed': task.is_cofirmed,
    } for task in page_obj.object_list]
    results = {
        'taskList': tasks_json,
        'next': next_page,
        'prev': prev_page,
        'page': page_number,
    }
    # `has_key` is deprecated (removed in Python 3); use the `in` operator
    if 'username' in request.session:
        user = User.objects.get(name=request.session['username'])
        results['user'] = user.name
        results['is_authorized'] = True
    return JsonResponse(results, content_type='application/json')
def get_colleges_for_dashboard(query_lst, headers_dashboard):
    """Build and run a screened SELECT for the named colleges; return sorted JSON rows.

    :param query_lst: flat list of alternating key/value pairs; every key must be
        "college_name"
    :param headers_dashboard: columns to select and sort by
    :return: list of college JSON dicts, or an error string on bad usage
    """
    # BUG FIX: the original evaluated len(query_lst) BEFORE the None check
    # (len(None) -> TypeError) and used `is 0` / `is not -1` identity
    # comparisons on ints, which are implementation-dependent.
    if query_lst is None or len(query_lst) == 0:
        return []
    cols = ','.join(headers_dashboard)
    # initial query formation
    query = "SELECT " + cols + " FROM " + os.environ.get("TABLE_NAME")
    # use IN (...) query structure for efficiency
    query += " WHERE college_name IN ("
    for i in range(0, len(query_lst), 2):
        if query_lst[i + 1].find("'") != -1:
            # NOTE(review): only the FIRST quote is doubled -- values with
            # multiple quotes remain unescaped; prefer parameterized queries.
            pos = query_lst[i + 1].find("'")
            query_lst[i + 1] = query_lst[i + 1][:pos] + "'" + query_lst[i + 1][pos:]
        if query_lst[i] == "college_name":
            # put string in single quotes
            query += "'" + query_lst[i + 1] + "'"
            if i + 3 >= len(query_lst):
                query += ")"  # end of college name list, close parens
            else:
                query += ","  # still more colleges, keep (...,...) structure
        else:
            return "Incorrect Usage -- Not all parameters are college names"
    query += ";"
    # only execute query if it passes the screening
    if query_screen(query_lst):
        results = get_query(query)
    else:
        results = []
    # convert to college objects and sort alphabetically in-place
    toBeSorted = [College(element) for element in results]
    mergeSort_alphabetical(toBeSorted, headers_dashboard)
    # renamed from `json` (shadowed the stdlib module)
    return [college.get_json(headers_dashboard) for college in toBeSorted]
def convertir(ruta):
    """Read a CSV (with header) into dicts, converting *_i columns to int.

    :param ruta: path to the CSV file
    :return: list of row dicts; values of columns whose name ends in "_i" are ints
    :raises ValueError: if an *_i column holds a non-numeric value
    """
    with open(ruta) as f_csv:
        reader = DictReader(f_csv)
        filas = []  # renamed from `json` (shadowed the stdlib module)
        for fila in reader:
            for clave, valor in fila.items():
                if clave.endswith("_i"):
                    fila[clave] = int(valor)
            filas.append(fila)
    return filas
def addJob(json, name):
    """Increment the minute counter for job `name` in `json`, adding it if absent.

    :param json: mutable list of {"name", "minutes"} dicts (mutated in place;
        parameter name kept for caller compatibility, though it shadows the module)
    :param name: job name to credit with one minute
    """
    for job in json:
        if job["name"] == name:
            job["minutes"] += 1
            # early exit: names are unique because we only append when absent
            return
    json.append({"name": name, "minutes": 1})
def object_to_json(model, ignore=None):
    """Serialize a model -- or a QuerySet/list of models -- via single_object_to_json.

    :param model: single model instance, QuerySet, or list of instances
    :param ignore: field names to skip (defaults to none)
    """
    if ignore is None:
        ignore = []
    # isinstance also accepts subclasses; the original `type(...) in [...]` did not
    if isinstance(model, (QuerySet, list)):
        return [single_object_to_json(element, ignore) for element in model]
    return single_object_to_json(model, ignore)
def JSONifyData(data):
    """Map patch records to plain dicts for JSON output.

    :param data: iterable of objects with ptchNm, appNm, upM and expFlag attributes
    :return: list of {'name', 'app', 'updateMech', 'exploitable'} dicts
    """
    # renamed from `json` (shadowed the stdlib module); comprehension form
    return [{'name': ts.ptchNm,
             'app': ts.appNm,
             'updateMech': ts.upM,
             'exploitable': ts.expFlag}
            for ts in data]
def get_from_ceilandia():
    """Return intercampi records originating from Ceilândia, without the Mongo _id."""
    collection = get_intercampi_collection()
    # filter and drop _id server-side instead of fetching the whole collection
    # and post-filtering/deleting in Python; renamed `json` (shadowed the module)
    trips = list(collection.find({'origem': "Ceilândia"}, {'_id': 0}))
    return jsonify(trips)
def get_from_planaltina():
    """Return intercampi records originating from Planaltina, without the Mongo _id."""
    collection = get_intercampi_collection()
    # filter and drop _id server-side instead of fetching the whole collection
    # and post-filtering/deleting in Python; renamed `json` (shadowed the module)
    trips = list(collection.find({'origem': "Planaltina"}, {'_id': 0}))
    return jsonify(trips)
def allCategoriesJson():
    """Return every category's name and id as JSON under the 'data' key."""
    # renamed from `json` (shadowed the stdlib module); fixed `new_tupple`
    # (which was actually a dict) by folding into a comprehension
    entries = [{'category': category.name, 'id': category.id}
               for category in session.query(Category).all()]
    return jsonify(data=entries)
def parse_task_as_list(tasks: list):
    """Translate a list of tasks into a list of task JSON objects.

    :param tasks: tasks to transform
    :return: list of task objects produced by return_task_json
    """
    # renamed from `json` (shadowed the stdlib module); comprehension form
    return [return_task_json(task) for task in tasks]
def get_from_darcy():
    """Return intercampi records originating from Darcy Ribeiro, without the Mongo _id."""
    collection = get_intercampi_collection()
    # filter and drop _id server-side instead of fetching the whole collection
    # and post-filtering/deleting in Python; renamed `json` (shadowed the module)
    trips = list(collection.find({'origem': "Darcy Ribeiro"}, {'_id': 0}))
    return jsonify(trips)
def add_records_to_index(index_name, doc_type, table, prop_str): query = "SELECT " + prop_str + " FROM " + table rows = session.execute(query) count = 0 json = [] for row in rows: count += 1 json.append(get_record_json_from_row(row, table, index_name, doc_type)) helpers.bulk(es, json) print count
def get(self):
    """Render the 10 most recent blog posts as JSON."""
    blogs = db.GqlQuery("SELECT * from Blog order by created desc limit 10")
    time_fmt = '%c'  # hoisted: constant across iterations
    # renamed from `json` (shadowed the stdlib module); comprehension form
    posts = [{'subject': p.subject,
              'content': p.content,
              'created': p.created.strftime(time_fmt),
              'last_modified': p.last_modified.strftime(time_fmt)}
             for p in blogs]
    self.render_json(posts)
def show_result(request):
    """Render the current user's saved SQL-check results.

    NOTE(review): the template context is built with locals(), so every local
    name here (notably `json` and `data`) may be referenced by the template --
    do not rename locals (e.g. the module-shadowing `json`) without checking
    'sqlcheck/check_result.html'.
    """
    json= []
    # rows saved for the logged-in user
    data = models.Save_sql_check.objects.filter(username_id=request.user.id)
    for i in range(len(data)):
        json_data = {}
        json_data['url'] = data[i].sql_url
        json_data['msg1'] = data[i].msg1
        json_data['msg2'] = data[i].msg2
        json.append(json_data)
    # locals() passes json/data (and loop leftovers) into the template context
    return render(request, 'sqlcheck/check_result.html', locals())
def get_location_reviews(id):
    """Return the reviews of location `id` as a JSON string, or HTTP 404 if absent."""
    try:
        location = Location.get(Location.id == id)
        # renamed from `json` (shadowed the stdlib module); comprehension form
        reviews = [review.to_dict() for review in location.reviews]
        return flask.json.dumps(reviews)
    except DoesNotExist:
        return flask.Response(status=404)
def get_location_listings(id):
    """Return the listings of location `id` as a JSON string, or HTTP 404 if absent."""
    try:
        location = Location.get(Location.id == id)
        # renamed from `json` (shadowed the stdlib module); comprehension form
        listings = [listing.to_dict() for listing in location.listings]
        return flask.json.dumps(listings)
    except DoesNotExist:
        return flask.Response(status=404)
def output_state(state, path):
    """Write each message type in `state` to <path>/tcga.<type>.json, one JSON message per line."""
    for msg_type in state['types']:  # renamed loop var: `type` shadowed the builtin
        # renamed from `json` (shadowed the stdlib module)
        lines = [message_to_json(state[msg_type][key]) for key in state[msg_type]]
        outpath = 'tcga.' + msg_type + '.json'
        # BUG FIX: `with` guarantees the handle is closed even if write() raises
        with open(os.path.join(path, outpath), 'w') as outhandle:
            outhandle.write('\n'.join(lines))
def output_state(state, prefix):
    """Write each message type in `state` to <prefix>.<type>.json, one JSON message per line."""
    for msg_type in state['types']:  # renamed loop var: `type` shadowed the builtin
        # renamed from `json` (shadowed the stdlib module)
        lines = [message_to_json(state[msg_type][key]) for key in state[msg_type]]
        # BUG FIX: `with` guarantees the handle is closed even if write() raises
        with open(prefix + '.' + msg_type + '.json', 'w') as outhandle:
            outhandle.write('\n'.join(lines))
def object_to_json(model, ignore=None):
    """ Returns a JSON representation of an object.

    :param model: single model instance, QuerySet, or list of instances
    :param ignore: field names to skip (defaults to none)
    """
    if ignore is None:
        ignore = []
    # isinstance also accepts subclasses; the original `type(...) in [...]` did not
    if isinstance(model, (QuerySet, list)):
        return [_django_single_object_to_json(element, ignore) for element in model]
    return _django_single_object_to_json(model, ignore)
def categoryItemsJson(category_id):
    """Return the items of one category (name, id, category info) as JSON under 'data'."""
    items = session.query(Item).filter_by(category_id=category_id).all()
    # renamed from `json` (shadowed the stdlib module); `new_tupple` was a dict
    entries = [{'item': item.name,
                'id': item.id,
                'category': item.category.name,
                'category_id': item.category_id}
               for item in items]
    return jsonify(data=entries)
def all_actuators():
    """Return every actuator record (parsed) as JSON under the 'all' key."""
    # BUG FIX: removed the unused plain cursor the original opened and never
    # used or closed; renamed `json` (shadowed the stdlib module)
    dict_cursor = mysql.connection.cursor(cursorclass=DictCursor)
    dict_cursor.execute('''SELECT * FROM `t_actuator_info`''')
    actuators = [actuator_parser(at_item, dict_cursor) for at_item in dict_cursor.fetchall()]
    dict_cursor.close()
    return jsonify(all=actuators)
def all():
    """Return every sensor record (parsed) as JSON under the 'all' key."""
    # BUG FIX: removed the unused plain cursor (opened, never used or closed)
    # and the dead `values_si` list; renamed `json` (shadowed the stdlib module)
    dict_cursor = mysql.connection.cursor(cursorclass=DictCursor)
    dict_cursor.execute('''SELECT * FROM `t_sensor_info`''')
    sensors = [sensor_parser(s_item, dict_cursor) for s_item in dict_cursor.fetchall()]
    dict_cursor.close()
    return jsonify(all=sensors)
def get_file_data(limit=50, offset=0):
    """Return a page of files as (json_records, handler_instances).

    Anonymous users only see files with access='public'.

    :param limit: page size
    :param offset: page offset
    :return: tuple (list of file JSON dicts, list of handler instances)
    """
    # dead initial `files = []` removed -- it was always overwritten below
    if current_user.is_authenticated:
        query = db_session.query(File)
    else:
        query = db_session.query(File).filter_by(access='public')
    files = query.order_by(File.id.desc()).limit(limit).offset(offset).all()
    records = []  # renamed from `json` (shadowed the stdlib module)
    handlers = []
    for record in files:
        records.append(file_json(record))
        handlers.append(get_handler_instance(record))
    return (records, handlers)
def get_round_json(filename):
    """Parse a round's PDF into a list of question JSON objects, skipping malformed ones."""
    path = get_pdf_path(filename)
    pdf_content = cleanse(get_pdf_as_string(path))
    questions = get_questions(pdf_content)
    round_json = []  # renamed from `json` (shadowed the stdlib module)
    for question in questions:
        try:
            round_json.append(question_to_json(question))
        except Exception:  # was a bare except
            # BUG FIX: the original had a bare string expression here (a no-op);
            # actually report the skipped question
            print("Poorly Formated Question... Skipping...")
    return round_json
def get_json_from_feed(user_feed):
    """Constructs and returns a JSON string from the given feed object.

    Args:
        user_feed: A gdata.apps.UserFeed object

    Returns:
        A JSON string with given/family name, username and admin flag per user
    """
    # renamed from `json` (shadowed the stdlib module); comprehension form
    entries = [{'given_name': entry.name.given_name,
                'family_name': entry.name.family_name,
                'username': entry.login.user_name,
                'admin': entry.login.admin}
               for entry in user_feed.entry]
    return simplejson.dumps(entries)
def add(): global json addjson = {} if request.method == 'POST': # print request.form addjson['Pulsar'] = request.form['Pulsar'] addjson['TOAs'] = request.form['TOAs'] addjson['Raw Profiles'] = request.form['Raw Profiles'] addjson['Period'] = request.form['Period'] addjson['Period Derivative'] = request.form['Period Derivative'] addjson['DM'] = request.form['DM'] addjson['RMS'] = request.form['RMS'] addjson['Binary'] = request.form['Binary'] json.append(addjson) print json return redirect('/') else: return 'error'
def getRecentEvent(old_cfp):
    """Fetch the latest CfP event for each tracked conference and return them sorted.

    :param old_cfp: positionally aligned with the conference list below; each
        entry's first element is passed to getWikiCfp as the previous state
    """
    conferences = [
        ["ISCA", "http://www.wikicfp.com/cfp/program?id=1683&s=ISCA&f=International%20Symposium%20on%20Computer%20Architecture"],
        ["ASPLOS", "http://www.wikicfp.com/cfp/program?id=242&s=ASPLOS&f=Architectural%20Support%20for%20Programming%20Languages%20and%20Operating%20Systems"],
        ["MICRO", "http://www.wikicfp.com/cfp/program?id=2052&s=MICRO&f=International%20Symposium%20on%20Microarchitecture"],
        ["HPCA", "http://www.wikicfp.com/cfp/program?id=1220&s=HPCA&f=High-Performance%20Computer%20Architecture"],
        ["CGO", "http://www.wikicfp.com/cfp/program?id=429&s=CGO&f=Symposium%20on%20Code%20Generation%20and%20Optimization"],
        ["PLDI", "http://www.wikicfp.com/cfp/program?id=2369&s=PLDI&f=Programming%20Language%20Design%20and%20Implementation"],
        ["LCTES", "http://www.wikicfp.com/cfp/program?id=1950&s=LCTES&f=Languages,%20Compilers,%20and%20Tools%20for%20Embedded%20Systems"],
        ["PACT", "http://www.wikicfp.com/cfp/program?id=2291&s=PACT&f=International%20Conference%20on%20Parallel%20Architectures%20and%20Compilation%20Techniques"],
        ["ISMM", "http://www.wikicfp.com/cfp/program?id=1730&s=ISMM&f=International%20Symposium%20on%20Memory%20Management"],
    ]
    # renamed from `json` (shadowed the stdlib module); comprehension form
    events = [getWikiCfp(conf[1], old_cfp[i][0]) for i, conf in enumerate(conferences)]
    events.sort(compare)  # Python 2 cmp-style sort
    return events
def action_all(): start = time.time() actions = action.all() print "Parsing {} actions".format(len(actions)) json = [] for a in actions: j = { "id": a[0], "timestamp": a[1], "action": a[2], "category": a[3], "content": a[4] } json.append(j) return jsonify({ "count":len(json), "results":json, "time": int(time.time()- start) })
def readings_last():
    """Return the latest reading of each known device, plus elapsed time, as JSON."""
    start = time.time()
    results = []  # renamed from `json` (shadowed the stdlib module)
    for device in readings.devices:
        read = readings.last(device)
        # dropped the original's unused `timestamp`/`value` locals
        results.append({
            "id": read[0],
            "time": read[1],
            "value": read[2],
            "device": device
        })
    return jsonify({
        "results": results,
        "time": int(time.time() - start)
    })
def readings_all(): limit = request.args.get('l') start = time.time() read = readings.all(limit) print "Parsing {} actions".format(len(read)) json = [] for a in read: j = { "id": a[0], "timestamp": a[1], "value": a[2], "device": a[3] } json.append(j) return jsonify({ "count":len(json), "results":json, "time": int(time.time()- start) })