Example 1
def sql_create_table(user_table):
	model_class = opengis.create_model(user_table)
	
	style = no_style()
	
	tables = connection.introspection.table_names()
	seen_models = connection.introspection.installed_models(tables)
	pending_references = {}
	
	sql, references = connection.creation.sql_create_model(model_class, style)
	
	for refto, refs in references.items():
		pending_references.setdefault(refto, []).extend(refs)
		if refto in seen_models:
			sql.extend(connection.creation.sql_for_pending_references(refto, style, pending_references))
	
	sql.extend(connection.creation.sql_for_pending_references(model_class, style, pending_references))
	
	cursor = connection.cursor()
	
	for statement in sql:
		cursor.execute(statement)
	
	transaction.commit_unless_managed()
	
	custom_sql = custom_sql_for_model(model_class, style)
	
	if custom_sql:
		try:
			for statement in custom_sql:
				cursor.execute(statement)
		except Exception:
			transaction.rollback_unless_managed()
		else:
			transaction.commit_unless_managed()
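
Note that connection.creation.sql_create_model, sql_for_pending_references and transaction.commit_unless_managed only exist in older Django releases. As a hedged alternative, here is a minimal sketch of the same table-creation step using the schema editor available from Django 1.7 onwards; the function name is hypothetical and this is not part of the original code:

# Sketch only, not part of the original code: on Django 1.7+ the
# connection.creation.sql_create_model / transaction.commit_unless_managed
# APIs used above were removed; the schema editor covers the same step.
from django.db import connection

import opengis

def sql_create_table_schema_editor(user_table):
	model_class = opengis.create_model(user_table)
	# schema_editor() builds and runs the CREATE TABLE (and index) statements
	# for the dynamically created model inside a single transaction.
	with connection.schema_editor() as editor:
		editor.create_model(model_class)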
Example 2
def view_user_table(request, username, table_name):
	account = Account.objects.get(user=request.user)
	
	user_table = get_object_or_404(UserTable, account=account, table_name=table_name)
	user_table.columns = UserTableColumn.objects.filter(table=user_table).order_by('created')
	user_table.tags = UserTableTag.objects.filter(table=user_table)
	
	table_model = opengis.create_model(user_table)
	table_data = table_model.objects.all()

	return render_to_response(
		settings.OPENGIS_TEMPLATE_PREFIX + "table_view.html",
		{'account': account, 'user_table': user_table, 'table_data': table_data},
		context_instance=RequestContext(request)
	)
Example 3
def _extract_query_result(starter_table, query_result, result_columns):
	result = list()
	
	for datum in query_result:
		result_row = list()
		
		for result_column in result_columns:
			if result_column['column_hierarchy']:
				hierarchy_list = result_column['column_hierarchy'].split(".")
				
				if not isinstance(datum, dict):
					try:
						attr = datum
						for hierarchy in hierarchy_list: attr = getattr(attr, hierarchy)
						result_row.append(getattr(attr, result_column['physical_name']))
					except:
						result_row.append("")
					
				else: # query_result is a list of dicts when group by is used
					
					growing_hierarchy = ""
					hierarchy_user_table = starter_table
					hierarchy_data = None
					
					for index, hierarchy in enumerate(hierarchy_list):
						
						try:
							value = hierarchy_data.id
						except:
							value = datum[hierarchy]
						
						table_column = utilities.list_find(lambda table_column: table_column.physical_column_name==hierarchy, UserTableColumn.objects.filter(table=hierarchy_user_table))
						related_table = table_column.related_table
						
						hierarchy_user_table = UserTable.objects.get(pk=related_table)
						hierarchy_model_object = opengis.create_model(hierarchy_user_table)
						
						hierarchy_data = hierarchy_model_object.objects.get(pk=value)

					result_row.append(getattr(hierarchy_data, result_column['physical_name']))
					
			else:
				try:
					result_row.append(getattr(datum, result_column['physical_name']))
				except:
					try:
						result_row.append(datum[result_column['physical_name']])
					except:
						result_row.append("")
		
		if result_row: result.append(result_row)

	return result
Example 4
def api_table_empty(request):
	if request.method == 'POST':
		user_table = UserTable.objects.get(pk=request.POST.get('table_id'))
		user_table_columns = UserTableColumn.objects.filter(table=user_table)
		
		table_model = opengis.create_model(user_table)
		table_model.objects.all().delete()
		
		return api.APIResponse(api.API_RESPONSE_SUCCESS)
		
	else:
		return api.APIResponse(api.API_RESPONSE_POSTONLY)
Example 5
def api_table_save_row(request):
	# TODO: Check permissions.
	if request.method == 'POST':
		account = Account.objects.get(user=request.user)

		table_id = int(request.POST.get('table_id'))
		row = request.POST.getlist('row[]')
		row_id = int(request.POST.get('row_id', 0))
		op = request.POST.get('op')
		
		if not table_id: return api.APIResponse(api.API_RESPONSE_ERROR, response_meta={'error':'required_table_id'})
		
		user_table = UserTable.objects.filter(account=account, pk=table_id)
		if not user_table.count():
			return api.APIResponse(api.API_RESPONSE_ERROR, response_meta={'error':'missing_table_id'})
		
		user_table = user_table[0]
		table_columns = UserTableColumn.objects.filter(table=user_table)
		target_model = opengis.create_model(user_table)
		
		if row_id:
			model_obj = target_model.objects.get(pk=row_id)
		else:
			model_obj = target_model()
		
		print model_obj.id
		if op == 'delete': 
			model_obj.delete()
			return api.APIResponse(api.API_RESPONSE_SUCCESS)
		
		error = []
		for index, column in enumerate(row):
			column_name, column_data = column.split('=')
			table_column = utilities.list_find(lambda table_column: table_column.column_name == column_name, table_columns)
			physical_column_name = table_column.physical_column_name
			print column_data
			
			column_data = utilities.convert_string_to_data_with_format(column_data, table_column)
			if not column_data['error']:
				setattr(model_obj, physical_column_name, column_data['value'])
			else:
				error += [str(index + 1) + '. ' + column_data['error']]
				
		if not len(error):
			model_obj.save()
			return api.APIResponse(api.API_RESPONSE_SUCCESS, result=model_obj.id)
		else:
			return api.APIResponse(api.API_RESPONSE_ERROR, result='\n'.join(error))
	else:
		return api.APIResponse(api.API_RESPONSE_POSTONLY)
Example 6
def get_user_table_json(request, table_name):
	account = Account.objects.get(user=request.user)
	
	user_table = get_object_or_404(UserTable, table_name=table_name)
	user_table.columns = UserTableColumn.objects.filter(table=user_table)
	
	model_class = opengis.create_model(user_table)
	data = model_class.objects.all()
	
	result = list()
	for datum in data:
		row_dict = dict()
		
		for column in user_table.columns:
			row_dict[column.column_name] = getattr(datum, column.column_name)
		
		result.append(row_dict)
	
	return HttpResponse(simplejson.dumps(result), content_type='text/plain; charset=UTF-8')
Example 7
def generate_relate_table(column_info):
	html = ''
	if column_info.data_type != sql.TYPE_USER_TABLE:
		return ''
		
	relate_table = UserTable.objects.get(pk=column_info.related_table)
	display_column = relate_table.display_column
	relate_table = opengis.create_model(relate_table)
	
	html += '<select id="user-table-' + column_info.column_name + '">'
	html += '<option value="">- none -</option>'
	for row in relate_table.objects.all():
		row_id = str(getattr(row, 'id'))
		row_val = str(getattr(row, display_column))
		html += '<option value="' + row_val + ' [id:' + row_id + ']">'+ row_val + ' [id:' + row_id + ']</option>'
	html += '</select>'
	return html
	
		
Example 8
def convert_string_to_data_with_format(column_data, table_column):
	data = {'value': column_data, 'error': ''}
	if column_data.strip():
		data_type = table_column.data_type
		try:
			if data_type == sql.TYPE_CHARACTER: data['value'] = str(column_data)
			elif data_type == sql.TYPE_NUMBER: data['value'] = float(column_data)
			elif data_type == sql.TYPE_DATETIME: data['value'] = datetime.strptime(column_data, '%Y-%m-%d %H:%M:%S')
			elif data_type == sql.TYPE_DATE: data['value'] = datetime.strptime(column_data, '%Y-%m-%d').date()
			elif data_type == sql.TYPE_TIME: data['value'] = datetime.strptime(column_data, '%H:%M:%S').time()
			elif data_type == sql.TYPE_REGION: data['value'] = GEOSGeometry(column_data)
			elif data_type == sql.TYPE_LOCATION: data['value'] = GEOSGeometry(column_data)
			elif data_type == sql.TYPE_USER_TABLE or data_type == sql.TYPE_BUILT_IN_TABLE: 
				related_user_table = UserTable.objects.get(pk=table_column.related_table)
				related_model = opengis.create_model(related_user_table)
				related_model_row_id = re.search(r'\[id\:(\d+)\]', column_data)
				column_data = related_model_row_id.group(1)
				data['value'] = related_model.objects.get(pk=column_data)
		except ValueError as error:
			data['error'] = table_column.column_name + ': ' + error.message
	else:
		data['value'] = None

	return data
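
This {'value', 'error'} contract is what api_table_save_row in Example 5 relies on. A minimal usage sketch follows; the column object, the literal input and model_obj below are hypothetical:

# Hypothetical usage: table_column is a UserTableColumn whose data_type is
# sql.TYPE_DATETIME; the input string and model_obj are made up for the sketch.
parsed = convert_string_to_data_with_format('2011-05-01 12:30:00', table_column)
if parsed['error']:
	print parsed['error']  # e.g. "created_at: ..." when the string does not match the expected format
else:
	setattr(model_obj, table_column.physical_column_name, parsed['value'])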
Example 9
def execute_query(user_query, parameters, result_limit=None):
	# column_manager = opengis.TableColumnManager(user_query.starter_table)
	
	display_columns = UserQueryDisplayColumn.objects.filter(query=user_query)

	# Generate 'columns' JSON
	result_columns = list()
	
	for display_column in display_columns:
		if display_column.is_aggregate:
			column_info = {
				'id': display_column.column_id.id,
				'name': display_column.column_id.id,
				'type': sql.TYPE_NUMBER,
				'physical_name': display_column.column_id.id,
				'related_table': '',
			}
		else:
			column_info = _to_column_dict(display_column.column)
		
		if display_column.display_name: column_info['name'] = display_column.display_name
		column_info['column_hierarchy'] = display_column.column_hierarchy
		
		result_columns.append(column_info)
	
	# Create Starter Model
	starter_model = opengis.create_model(user_query.starter_table)
	data_objects = starter_model.objects.all()

	# Virtual Columns -- WILL DO
	# Figure out how to store virtual column logic in the database
	# Entry.objects.extra(select={'is_recent': "pub_date > '2006-01-01'"})

	# Group By
	group_by_columns = UserQueryGroupByColumn.objects.filter(query=user_query)
	for group_by in group_by_columns:
		data_objects = data_objects.values(group_by.column.physical_column_name)

	# Aggregate Columns
	aggregate_columns = UserQueryAggregateColumn.objects.filter(query=user_query)
	if group_by_columns: # If using 'values', we must use annotate, instead of aggregate
		for aggregate_column in aggregate_columns:
			column_info = _to_column_dict(aggregate_column.column)
			data_objects = data_objects.annotate(query.sql_aggregate(aggregate_column, column_info))
	else:
		for aggregate_column in aggregate_columns:
			column_info = _to_column_dict(aggregate_column.column)
			data_objects = data_objects.aggregate(query.sql_aggregate(aggregate_column, column_info))

	# Filter
	for query_filter in UserQueryFilter.objects.filter(query=user_query):
		if query_filter.is_variable:
			query_filter.filter_value = parameters.get(query_filter.filter_value)
			if not query_filter.filter_value: continue

		column_info = _to_column_dict(query_filter.column)
		data_objects = query.sql_filter(query_filter, column_info, data_objects)

	# Order by
	order_by_columns = UserQueryOrderByColumn.objects.filter(query=user_query).order_by('order_priority') # Lower values have higher priority

	order_fields = list()
	for order_by_column in order_by_columns:
		if order_by_column.column_hierarchy:
			column_hierarchy = order_by_column.column_hierarchy.replace(".", "__") + "__"
		else:
			column_hierarchy = ""

		order_fields.append(('-' if order_by_column.is_desc else '') + column_hierarchy + order_by_column.column_name)

	if order_fields: data_objects = data_objects.order_by(*order_fields)
	
	# Distinct
	if user_query.is_distinct:
		args = list()
		for result_column in result_columns:
			if result_column['column_hierarchy']:
				column_hierarchy = result_column['column_hierarchy'].replace(".", "__") + "__"
			else:
				column_hierarchy = ""
			
			args.append(column_hierarchy + result_column['physical_name'])
		
		data_objects = data_objects.values(*args)
		data_objects = data_objects.distinct()
	
	# Dump result in a list of list
	result = list()
	
	print data_objects

	if aggregate_columns and not group_by_columns: # aggregate() without group by returns a single dict
		result_row = list()
		
		for result_column in result_columns:
			try:
				result_row.append(data_objects[result_column['physical_name']])
			except KeyError:
				result_row.append("")

		result.append(result_row)

	elif user_query.is_distinct: # With distinct, the result is a list of dicts with keys like 'link1__link2__column1'
		for datum in data_objects:
			result_row = list()

			for result_column in result_columns:
				if result_column['column_hierarchy']:
					column_hierarchy = result_column['column_hierarchy'].replace(".", "__") + "__"
				else:
					column_hierarchy = ""

				result_row.append(datum[column_hierarchy + result_column['physical_name']])

			if result_row: result.append(result_row)

	else:
		result = _extract_query_result(user_query.starter_table, data_objects, result_columns)

	# Result limitation -- either define it in user query table or 'limit' parameter in request URL
	if user_query.result_limit or result_limit:
		limit = user_query.result_limit
		if result_limit: limit = int(result_limit)
		result = result[0:limit]

	return {'columns':result_columns,'values':result}
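
The comment above about values() and annotate() reflects standard Django ORM behaviour: values() plus annotate() yields one dict per group, while aggregate() alone collapses the whole queryset into a single dict. A short sketch with a hypothetical Sale model, not part of the opengis code:

# Sale is a hypothetical model with 'region' and 'amount' fields, used only to
# illustrate why the code above switches between annotate() and aggregate().
from django.db.models import Sum

# Grouped: one dict per distinct region, e.g. {'region': 'north', 'total': 42}.
per_group = Sale.objects.values('region').annotate(total=Sum('amount'))

# Ungrouped: aggregate() returns a single dict, e.g. {'total': 123}, which is
# why execute_query indexes data_objects by physical_name in that branch.
overall = Sale.objects.aggregate(total=Sum('amount'))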
Example 10
def import_table(user_table, account, request):
	temp_csv_file = settings.TEMP_CSV_PATH + '/temp_' + str(account.user.id) + "_" + str(long(round(time.time()))) + '.csv'
	
	destination = open(temp_csv_file, 'wb')
	for chunk in request.FILES['file'].chunks(): destination.write(chunk)
	destination.close()
	
	destination = open(temp_csv_file, 'rb')
	csv_reader = csv.reader(destination)
	
	table_columns = UserTableColumn.objects.filter(table=user_table)
	
	target_model = opengis.create_model(user_table)
	target_model.objects.all().delete()
	
	column_mapping = list()
	
	for row in csv_reader:
		if not column_mapping: # csv.reader objects are not subscriptable, so detect the header row this way
			
			# Map logical column name used in CSV to physical database column name
			for index, column_name in enumerate(row):
				(parent_column, separator, child_column) = column_name.partition("---")
				column_info = dict()
				
				table_column = utilities.list_find(lambda table_column: table_column.column_name == parent_column, table_columns)
				
				if child_column:
					column_info['physical_column_name'] = table_column.physical_column_name
					column_info['related_table'] = table_column.related_table
					
					related_user_table = UserTable.objects.get(pk=table_column.related_table)
					related_table_columns = UserTableColumn.objects.filter(table=related_user_table)
					
					related_column = utilities.list_find(lambda related_column: related_column.column_name == child_column, related_table_columns)
					column_info['related_column'] = related_column.physical_column_name
					
				else:
					column_info['physical_column_name'] = table_column.physical_column_name
				
				column_mapping.append(column_info)
				
		else:
			model_obj = target_model()
			
			for index, column_data in enumerate(row):
				column_info = column_mapping[index]
				
				if column_info.get('related_table'):
					related_user_table = UserTable.objects.get(pk=column_info['related_table'])
					related_model = opengis.create_model(related_user_table)
					
					related_model_object = related_model.objects.get(**{str(column_info['related_column']):column_data})
					setattr(model_obj, column_info['physical_column_name'], related_model_object)
					
				else:
					setattr(model_obj, column_info['physical_column_name'], column_data)
			
			model_obj.save()
	
	destination.close()
	
	import os
	os.remove(temp_csv_file)
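
The header row that import_table splits with partition("---") uses logical column names; a "parent---child" pair means the cell value is resolved through the 'child' column of the related table and the matching row is stored as the foreign key. A sketch of a CSV this importer could consume follows; all file, column and cell values are hypothetical:

# Hypothetical input file for import_table: 'shop_name' is a plain column,
# 'province---name' is resolved by looking up the related table's 'name' column.
import csv

with open('shops.csv', 'wb') as csv_file:  # 'wb' matches the Python 2 csv module used above
	writer = csv.writer(csv_file)
	writer.writerow(['shop_name', 'province---name'])  # header row drives column_mapping
	writer.writerow(['Central World', 'Bangkok'])
	writer.writerow(['Tesco Lotus', 'Chiang Mai'])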
Example 11
from django.contrib.gis import admin

import opengis
from opengis.models import *

admin.site.register(Account)
admin.site.register(UserTable)
admin.site.register(UserTableColumn)
admin.site.register(UserQuery)
admin.site.register(UserQueryDisplayColumn)
admin.site.register(UserQueryFilter)
admin.site.register(UserQueryAggregateColumn)
admin.site.register(UserQueryGroupByColumn)

# Register user tables

user_tables = UserTable.objects.all()

for user_table in user_tables:
	model_class = opengis.create_model(user_table)
	admin.site.register(model_class, admin.GeoModelAdmin)