Example #1
def sample(request, sample_id):

	api = MetPet("*****@*****.**","24809ab2c593b544a491748094ed10d3cbffc699")
	sampleobj = api.getSample(sample_id).data
	# subsampleobj = api.getSubSample(sample_id).data
	sampleuser = api.getUserByURI(sampleobj['user']).data
	# The location is a WKT point string, e.g. "POINT (lon lat)";
	# split it and strip the parentheses to get the two coordinates.
	location = sampleobj['location']
	location = location.split(" ")
	longitude = location[1].replace("(", "")
	latitude = location[2].replace(")", "")
	loc = [longitude, latitude]
	subsamplelist = []
	filters = {"sample__sample_id": sampleobj["sample_id"], "limit": "0"}
	data = api.searchSubsamples(filters)
	for subsample in data.data['objects']:
		# print sample['sample_id']
		subsamplelist.append([subsample['subsample_id'],subsample['name'],
							  subsample['public_data']])

	print latitude
	print longitude
	if sampleobj:
		print sampleobj
		# return render(request, 'sampleview.html',{'sample':sampleobj, 'subsamples':subsamples.attributes['*'],})
		return render(request, 'sampleview.html',{'sample':sampleobj, 
			'subsamples': subsamplelist,'user':sampleuser, 
			'location': loc})
	else:
		return HttpResponse("Sample does not exist")
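
The view above pulls the coordinates out of the sample's location by splitting the raw WKT string by hand. Below is a minimal, more defensive sketch of the same parsing step, assuming the location always has the form "POINT (lon lat)"; the helper name parse_wkt_point is hypothetical and not part of the MetPet client.

def parse_wkt_point(location):
	# Expects a WKT point such as "POINT (12.34 56.78)" and returns
	# (longitude, latitude) as floats, or None if the string does not match.
	prefix = "POINT ("
	if not location or not location.startswith(prefix) or not location.endswith(")"):
		return None
	try:
		longitude, latitude = location[len(prefix):-1].split()
		return float(longitude), float(latitude)
	except ValueError:
		return None

In the view this would replace the split/replace calls with a single call such as coords = parse_wkt_point(sampleobj['location']).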
Example #2
def prevsamplelist(request, pagenum=1):
	api = MetPet("*****@*****.**",
		 		 "24809ab2c593b544a491748094ed10d3cbffc699")
	user = "******"
	api_key = "24809ab2c593b544a491748094ed10d3cbffc699"
	# pagenum arrives from the URL as a string, so convert it before comparing
	pagenum = int(pagenum)
	if pagenum > 1:
		pagenum -= 20
		data = api.getAllSamples(pagenum, user, api_key)
	else:
		data = api.getAllSamples()
	nextlist = data.data['meta']['next']
	samplelist = []
	offsets = []
	total_count = data.data['meta']['total_count']
	# print offset
	for sample in data.data['objects']:
		print sample['sample_id']
		samplelist.append([sample['sample_id'],sample['number']] )
	#CREATE PAGINATION
	if total_count > 20:
		pages = total_count / 20
	else:
		# everything fits on one page; without this branch `pages` would be
		# undefined in the loop below
		pages = 1
	for x in range(0,pages):
		offsets.append(x*20)
	# the API's `next` link is None on the last page
	if nextlist:
		pagenum = int(nextlist.split('=')[-1])
	pageprev = pagenum - 20

	return render(request,'samplelist.html', {'samples':samplelist,
	 			  'nextURL': nextlist, 'total': total_count,
	 			  'offsets': offsets, 'pagenum':pagenum, 'pageprev': pageprev})
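
The pagination block above slices the result set into pages of 20 and precomputes the offset of each page for the template. The same idea as a standalone helper, a minimal sketch with the hypothetical name page_offsets; note that it rounds up, so a partial last page still gets an offset, which the plain total_count / 20 division in the view would drop.

def page_offsets(total_count, page_size=20):
	# Returns the list of offsets the API expects for each page,
	# e.g. total_count=45 -> [0, 20, 40].
	if total_count <= 0:
		return [0]
	pages = (total_count + page_size - 1) // page_size  # round up
	return [page * page_size for page in range(pages)]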
Example #3
def subsamples(request):
    # API credentials are read from the request cookies
    email = request.COOKIES.get('email', None)
    api_key = request.COOKIES.get('api_key', None)
    api = MetPet(email, api_key).api
    data = api.getAllSubSamples()
    subsamplelist = []
    for subsample in data.data['objects']:
        subsamplelist.append([subsample['subsample_id'], subsample['name']])
    return render(request,'subsamples.html', {'subsamples':subsamplelist})
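
Example #3 reads the API credentials from cookies, while Example #7 below reads them from the query string. A small sketch of a helper that accepts either source; get_api_credentials is a hypothetical name, and the fallback order is an assumption.

def get_api_credentials(request):
    # Prefer credentials passed in the query string, fall back to cookies.
    email = request.GET.get('email') or request.COOKIES.get('email')
    api_key = request.GET.get('api_key') or request.COOKIES.get('api_key')
    return email, api_key

A view could then build the client with api = MetPet(*get_api_credentials(request)).api.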
Example #4
def subsamplelist(request):
	api = MetPet("*****@*****.**",
				 "24809ab2c593b544a491748094ed10d3cbffc699")
	data = api.getAllSubSamples()
	subsamplelist = []
	# print dir(samplelist)
	for subsample in data.data['objects']:
		# print sample['sample_id']
		subsamplelist.append([subsample['subsample_id'], subsample['name']])

	return render(request,'subsamplelist.html', {'subsamples':subsamplelist})
Example #5
def chemical_analysislist(request):
	api = MetPet("*****@*****.**",
				 "24809ab2c593b544a491748094ed10d3cbffc699")
	data = api.getAllChemicalAnalysis()
	chemicallist = []
	for chemical in data.data['objects']:
		print chemical['chemical_analysis_id']
		chemicallist.append([chemical['chemical_analysis_id'],
							 chemical['where_done']])

	return render(request,'chemicalanalysislist.html', {'chemicals':chemicallist})
Example #6
def previous(request, pagenum=1, optional=''):
	# cursor=con.cursor()
	# cursor.execute("SELECT DISTINCT sample_id, number FROM samples ORDER BY sample_id, number")
	# samplelist=cursor.fetchall()
	
	# for sample in samplelist:
	# 	print sample[1]
	pagenum = int(pagenum) - 40
	api = MetPet("*****@*****.**",
				 "24809ab2c593b544a491748094ed10d3cbffc699")
	user = "******"
	api_key = "24809ab2c593b544a491748094ed10d3cbffc699"
	if pagenum > 1:
		data = api.getAllSamples(pagenum, user, api_key)
	else:
		data = api.getAllSamples()
	nextlist = data.data['meta']['next']
	samplelist = []
	offsets = []
	total_count = data.data['meta']['total_count']
	# print offset
	for sample in data.data['objects']:
		print sample['sample_id']
		samplelist.append([sample['sample_id'],sample['number']] )
	#CREATE PAGINATION
	if total_count > 20:
		pages = total_count / 20
	else:
		# everything fits on one page; without this branch `pages` would be
		# undefined in the loop below
		pages = 1
	for x in range(0,pages):
		offsets.append(x*20)
	# the API's `next` link is None on the last page
	if nextlist:
		pagenum = int(nextlist.split('=')[-1])
	pageprev = pagenum - 20

	return render(request,'samplelist.html', {'samples':samplelist,
	 			  'nextURL': nextlist, 'total': total_count,
	 			  'offsets': offsets, 'pagenum':pagenum, 'pageprev': pageprev})
Example #7
def chemical_analysis(request, chemical_analysis_id):
    email = request.GET.get('email', None)
    api_key = request.GET.get('api_key', None)
    api = MetPet(email, api_key).api

    chem_analysis = ChemicalAnalysisObject(chemical_analysis_id)
    chem_analysis_obj = api.chemical_analysis.get(chemical_analysis_id).data

    subsample = api.subsample.get_by_uri(chem_analysis_obj['subsample']).data

    if chem_analysis:
        del chem_analysis.attributes['analysis_date']
        data = {
            'chemical_analysis': chem_analysis.attributes,
            'subsample_id': subsample['subsample_id'],
            'sample_id': subsample['sample'].split('/')[-2]
        }
        return HttpResponse(json.dumps(data), content_type='application/json')
    else:
        return HttpResponse("Chemical Analysis does not exist")
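
The view above recovers the parent sample_id from the subsample's resource URI with split('/')[-2]. The same step on its own, a minimal sketch assuming resource URIs of the form /api/v1/sample/<id>/ (the exact prefix is an assumption); resource_id_from_uri is a hypothetical helper name.

def resource_id_from_uri(uri):
    # "/api/v1/sample/42/" -> "42"; returns None for an empty or malformed URI.
    if not uri:
        return None
    parts = [part for part in uri.split('/') if part]
    return parts[-1] if parts else None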
Example #8
def search(request):
	#Lists for filtering in search
	region_list = []
	collector_list = []
	reference_list = []
	metamorphic_region_list = []
	all_regions = Region.objects.all().order_by("name")
	all_samples = Sample.objects.all().order_by("collector")
	all_references = Reference.objects.all().order_by("name")
	all_metamorphic_regions = MetamorphicRegion.objects.all().order_by("name")
	#Populate lists
	for region in all_regions:
		region_list.append(region.name)
	for sample in all_samples:
		if sample.collector and sample.collector not in collector_list:
			print sample.collector
			collector_list.append(sample.collector)
	for ref in all_references:
		reference_list.append(ref.name)
	for mmr in all_metamorphic_regions:
		metamorphic_region_list.append(mmr.name)

	search_terms = {}
	error = False
	# Build the dictionary of filters for the API request from the search
	# form's GET parameters, skipping empty values and the 'resource' selector
	print request.GET
	for search_term in request.GET:
		print search_term
		print request.GET[search_term]
		if request.GET[search_term]:
			if search_term != 'resource':
				search_terms[search_term] = request.GET[search_term]
	#Temporary credentials for api
	username = "******"
	api_key = "24809ab2c593b544a491748094ed10d3cbffc699"
	api = MetPet(username, api_key)
	# determine which resource to search for; use .get() since 'resource'
	# may be missing from the query string
	resource = request.GET.get('resource')
	if search_terms:
		if resource == 'sample':
			#search for samples

			data = api.searchSamples(filters=search_terms)
			#parse results
			search_results = []
			for key in data.data['objects']:
				search_results.append(key['sample_id'])
			return render(request, 'search_results.html',
				{'samples': search_results, 'query': ''})
		if resource == 'chemicalanalysis':
			#search for chemical analyses
			data = api.searchChemicals(filters=search_terms)
			search_results = []
			for key in data.data['objects']:
				search_results.append(key['chemical_analysis_id'])
			return render(request, 'search_results.html',
				{'samples': search_results, 'query': ''})
	else:
		# no search terms were submitted: render the empty search form with
		# the filter lists built above
		return render(request, 'search_form.html',
			{'samples': [], 'query': '', 'regions':region_list,
			 'provenenances': collector_list, "references": reference_list,
			  "mmrs": metamorphic_region_list})
	return render(request, 'search_form.html', {'error': error})
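
The filter dictionary built in the loop near the top of this view is simply the set of non-empty GET parameters minus the 'resource' selector. The same step as a short helper, a minimal sketch assuming the form field names already match the API's filter names; build_filters is a hypothetical name.

def build_filters(get_params):
	# get_params is the request's GET QueryDict; keep every non-empty value
	# except the 'resource' selector used to pick the endpoint.
	return {key: value for key, value in get_params.items()
			if value and key != 'resource'}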