def list_connector(request, project_id=None):
    """List connectors linked to a skeleton as a DataTables JSON response.

    POST parameters:
      stack_id      -- optional; stack used to convert z coordinates to section numbers
      skeleton_id   -- required; skeleton whose connectors are listed
      relation_type -- 0: list postsynaptic partners, 1: presynaptic partners
      iDisplayStart, iDisplayLength, iSortCol_0, sSortDir_0 -- DataTables paging/sorting

    Returns an HttpResponse wrapping the DataTables structure
    (iTotalRecords / iTotalDisplayRecords / aaData).
    Raises Exception with a stage-specific message on any failure.
    """
    stack_id = request.POST.get('stack_id', None)
    skeleton_id = request.POST.get('skeleton_id', None)

    def empty_result():
        # DataTables-shaped empty payload.
        return HttpResponse(json.dumps({
            'iTotalRecords': 0,
            'iTotalDisplayRecords': 0,
            'aaData': []}))

    if not skeleton_id:
        return empty_result()
    else:
        skeleton_id = int(skeleton_id)

    relation_type = int(request.POST.get('relation_type', 0))  # 0: Presyn, 1 Postsyn
    display_start = int(request.POST.get('iDisplayStart', 0))
    display_length = int(request.POST.get('iDisplayLength', 0))
    sorting_column = int(request.POST.get('iSortCol_0', 0))
    # FIX: use the str method; the Python 2 string.upper() function is gone in Py3.
    sort_descending = request.POST.get('sSortDir_0', 'DESC').upper() != 'ASC'

    response_on_error = ''
    try:
        response_on_error = 'Could not fetch relations.'
        relation_map = get_relation_to_id_map(project_id)
        for rel in ['presynaptic_to', 'postsynaptic_to', 'element_of', 'labeled_as']:
            if rel not in relation_map:
                raise Exception('Failed to find the required relation %s' % rel)

        # relation_type selects which side of the synapse the *partner* is on.
        if relation_type == 1:
            relation_type_id = relation_map['presynaptic_to']
            inverse_relation_type_id = relation_map['postsynaptic_to']
        else:
            relation_type_id = relation_map['postsynaptic_to']
            inverse_relation_type_id = relation_map['presynaptic_to']

        response_on_error = 'Failed to select connectors.'
        cursor = connection.cursor()
        cursor.execute(
            '''
            SELECT
            connector.id AS connector_id,
            tn_other.user_id AS connector_user_id,
            treenode_user.username AS connector_username,
            connector.location_x AS connector_x,
            connector.location_y AS connector_y,
            connector.location_z AS connector_z,
            tn_other.id AS other_treenode_id,
            tn_other.location_x AS other_treenode_x,
            tn_other.location_y AS other_treenode_y,
            tn_other.location_z AS other_treenode_z,
            tn_other.skeleton_id AS other_skeleton_id,
            tn_this.location_x AS this_treenode_x,
            tn_this.location_y AS this_treenode_y,
            tn_this.location_z AS this_treenode_z,
            tn_this.id AS this_treenode_id,
            tc_this.relation_id AS this_to_connector_relation_id,
            tc_other.relation_id AS connector_to_other_relation_id,
            tc_other.confidence AS confidence,
            to_char(connector.edition_time, 'DD-MM-YYYY HH24:MI') AS last_modified
            FROM
            treenode tn_other,
            treenode_connector tc_other,
            connector,
            "auth_user" treenode_user,
            treenode_connector tc_this,
            treenode tn_this
            WHERE
            treenode_user.id = tn_other.user_id AND
            tn_other.id = tc_other.treenode_id AND
            tc_other.connector_id = connector.id AND
            tc_other.relation_id = %s AND
            tc_this.connector_id = connector.id AND
            tn_this.id = tc_this.treenode_id AND
            tn_this.skeleton_id = %s AND
            tc_this.relation_id = %s
            ORDER BY
            connector_id, other_treenode_id, this_treenode_id
            ''', [inverse_relation_type_id, skeleton_id, relation_type_id])
        connectors = cursor_fetch_dictionary(cursor)
        # FIX: materialize as lists. On Python 3 map() returns a lazy iterator,
        # which cannot be sliced, appended to, or repeatedly membership-tested
        # as the code below requires.
        connected_skeletons = [con['other_skeleton_id'] for con in connectors]
        connector_ids = [con['connector_id'] for con in connectors]

        response_on_error = 'Failed to find counts of treenodes in skeletons.'
        skel_tn_count = Treenode.objects.filter(skeleton__in=connected_skeletons)\
            .values('skeleton').annotate(treenode_count=Count('skeleton'))
        # .values to group by skeleton_id. See http://tinyurl.com/dj-values-annotate

        skeleton_to_treenode_count = {}
        for s in skel_tn_count:
            skeleton_to_treenode_count[s['skeleton']] = s['treenode_count']

        # Rather than do a LEFT OUTER JOIN to also include the connectors
        # with no partners, just do another query to find the connectors
        # without the conditions:
        response_on_error = 'Failed to select all connectors.'
        cursor.execute(
            '''
            SELECT
            connector.id AS connector_id,
            connector.user_id AS connector_user_id,
            connector_user.username AS connector_username,
            connector.location_x AS connector_x,
            connector.location_y AS connector_y,
            connector.location_z AS connector_z,
            tn_this.id AS this_treenode_id,
            tc_this.relation_id AS this_to_connector_relation_id,
            tc_this.confidence AS confidence,
            to_char(connector.edition_time, 'DD-MM-YYYY HH24:MI') AS last_modified
            FROM
            connector,
            "auth_user" connector_user,
            treenode_connector tc_this,
            treenode tn_this
            WHERE
            connector_user.id = connector.user_id AND
            tc_this.connector_id = connector.id AND
            tn_this.id = tc_this.treenode_id AND
            tn_this.skeleton_id = %s AND
            tc_this.relation_id = %s
            ORDER BY
            connector_id, this_treenode_id
            ''', [skeleton_id, relation_type_id])
        for row in cursor_fetch_dictionary(cursor):
            connector_id = row['connector_id']
            if connector_id not in connector_ids:
                connectors.append(row)
                connector_ids.append(connector_id)

        # For each of the connectors, find all of its labels:
        response_on_error = 'Failed to find the labels for connectors'
        # FIX: bind unconditionally so the lookup in the formatting loop below
        # cannot raise NameError when there are no connectors.
        labels_by_connector = {}  # Key: Connector ID, Value: List of labels.
        # FIX: the original tested 'connector_ids > 0', comparing a list with
        # an int (TypeError on Python 3); test the list's truthiness instead.
        if connector_ids:
            connector_labels = ConnectorClassInstance.objects.filter(
                project=project_id,
                connector__in=connector_ids,
                relation=relation_map['labeled_as']).values(
                    'connector',
                    'class_instance__name')
            for label in connector_labels:
                if label['connector'] not in labels_by_connector:
                    labels_by_connector[label['connector']] = [label['class_instance__name']]
                else:
                    labels_by_connector[label['connector']].append(label['class_instance__name'])
            # Sort labels by name, case-insensitively.
            for labels in labels_by_connector.values():
                labels.sort(key=str.upper)

        total_result_count = len(connectors)
        if 0 == total_result_count:
            return empty_result()

        # Paging
        if display_length == 0:
            connectors = connectors[display_start:]
            connector_ids = connector_ids[display_start:]
        else:
            connectors = connectors[display_start:display_start + display_length]
            connector_ids = connector_ids[display_start:display_start + display_length]

        response_on_error = 'Could not retrieve resolution and translation parameters for project.'
        if stack_id:
            resolution = get_object_or_404(Stack, id=int(stack_id)).resolution
            translation = get_object_or_404(ProjectStack,
                                            stack=int(stack_id),
                                            project=project_id).translation
        else:
            # Without a stack, use identity transforms for the section column.
            resolution = Double3D(1.0, 1.0, 1.0)
            translation = Double3D(0.0, 0.0, 0.0)

        # Format output
        aaData_output = []
        for c in connectors:
            response_on_error = 'Failed to format output for connector with ID %s.' % c['connector_id']
            if 'other_skeleton_id' in c:
                connected_skeleton_treenode_count = skeleton_to_treenode_count[c['other_skeleton_id']]
            else:
                # Partner-less connector (from the second query): fall back to
                # the connector's own location.
                c['other_skeleton_id'] = ''
                c['other_treenode_id'] = ''
                c['other_treenode_x'] = c['connector_x']
                c['other_treenode_y'] = c['connector_y']
                c['other_treenode_z'] = c['connector_z']
                connected_skeleton_treenode_count = 0
            if c['connector_id'] in labels_by_connector:
                labels = ', '.join(map(str, labels_by_connector[c['connector_id']]))
            else:
                labels = ''
            row = []
            row.append(c['connector_id'])
            row.append(c['other_skeleton_id'])
            row.append(c['other_treenode_x'])
            row.append(c['other_treenode_y'])
            z = c['other_treenode_z']
            row.append(z)
            # FIXME: This is the only place we need a stack and this can be
            # done in the client as well. So do we really want to keep this and
            # have a more complicated API?
            row.append(int((z - translation.z) / resolution.z))
            row.append(c['confidence'])
            row.append(labels)
            row.append(connected_skeleton_treenode_count)
            row.append(c['connector_username'])
            row.append(c['other_treenode_id'])
            row.append(c['last_modified'])
            aaData_output.append(row)

        # Sort output
        def fetch_value_for_sorting(row):
            value = row[sorting_column]
            # FIX: 'unicode' does not exist on Python 3; str covers all text.
            if isinstance(value, str):
                return value.upper()
            return value
        aaData_output.sort(key=fetch_value_for_sorting)

        # Fix excessive decimal precision in coordinates
        for row in aaData_output:
            row[2] = float('%.2f' % row[2])
            row[3] = float('%.2f' % row[3])
            row[4] = float('%.2f' % row[4])

        if sort_descending:
            aaData_output.reverse()

        return HttpResponse(json.dumps({
            'iTotalRecords': total_result_count,
            'iTotalDisplayRecords': total_result_count,
            'aaData': aaData_output}))
    except Exception as e:
        raise Exception(response_on_error + ':' + str(e))
def textlabels(request: HttpRequest, project_id=None) -> JsonResponse:
    """Return the textlabels visible in a given stack region and section.

    POST parameters: sid (stack), z, top, left, width, height, scale,
    resolution. For each label, the location nearest to the requested z
    within half a section is returned. Scaled labels whose on-screen font
    size would be below 3 pixels are filtered out.

    Returns a JsonResponse list of label dicts with nested 'colour' and
    'location' objects. Raises Exception with a stage message on failure.
    """
    params = {'pid': project_id, 'uid': request.user.id}
    parameter_names = [
        'sid', 'z', 'top', 'left', 'width', 'height', 'scale', 'resolution'
    ]
    for p in parameter_names:
        # NOTE(review): 'pid' never occurs in parameter_names; only 'sid'
        # takes the int branch here.
        if p in ['pid', 'sid']:
            params[p] = int(request.POST.get(p, 0))
        elif p in ['scale', 'resolution']:
            params[p] = float(request.POST.get(p, 1))
        else:
            params[p] = float(request.POST.get(p, 0))
    params['right'] = params['left'] + params['width']
    params['bottom'] = params['top'] + params['height']
    params['scale_div_res'] = params['scale'] / params['resolution']

    response_on_error = ''
    try:
        response_on_error = 'Could not retrieve textlabels.'
        c = connection.cursor()
        # FIX: z_diff previously subtracted the location's z from itself and
        # was therefore always 0, so DISTINCT ON picked an arbitrary location
        # per label. Compare against the requested section's z instead.
        c.execute(
            '''
            SELECT DISTINCT ON ( "tid" )
            "textlabel"."id" AS "tid",
            "textlabel"."type" AS "type",
            "textlabel"."text" AS "text",
            "textlabel"."font_name" AS "font_name",
            "textlabel"."font_style" AS "font_style",
            "textlabel"."font_size" AS "font_size",
            "textlabel"."scaling" AS "scaling",
            floor(255*("textlabel"."colour")."r") AS "r",
            floor(255*("textlabel"."colour")."g") AS "g",
            floor(255*("textlabel"."colour")."b") AS "b",
            ("textlabel"."colour")."a" AS "a",
            ("textlabel_location"."location")."x" AS "x",
            ("textlabel_location"."location")."y" AS "y",
            ("textlabel_location"."location")."z" AS "z",
            abs( ("textlabel_location"."location")."z" - %(z)s ) AS "z_diff"
            FROM "textlabel"
            INNER JOIN "textlabel_location"
            ON "textlabel"."id" = "textlabel_location"."textlabel_id"
            INNER JOIN "project"
            ON "project"."id" = "textlabel"."project_id"
            INNER JOIN "project_stack"
            ON "project"."id" = "project_stack"."project_id"
            INNER JOIN "stack"
            ON "stack"."id" = "project_stack"."stack_id"
            WHERE "project"."id" = %(pid)s AND
            "stack"."id" = %(sid)s AND
            NOT "textlabel"."deleted" AND
            NOT "textlabel_location"."deleted" AND
            ("textlabel_location"."location")."x" >= %(left)s AND
            ("textlabel_location"."location")."x" <= %(right)s AND
            ("textlabel_location"."location")."y" >= %(top)s AND
            ("textlabel_location"."location")."y" <= %(bottom)s AND
            ("textlabel_location"."location")."z" >= %(z)s - 0.5 * ("stack"."resolution")."z" AND
            ("textlabel_location"."location")."z" <= %(z)s + 0.5 * ("stack"."resolution")."z" AND
            ( ( "textlabel"."scaling" AND "textlabel"."font_size" * %(scale_div_res)s >= 3 ) OR
            NOT "textlabel"."scaling" )
            ORDER BY "tid", "z_diff"
            ''', params)
        textlabels = cursor_fetch_dictionary(c)

        response_on_error = 'Failed to format output'
        for tl in textlabels:
            # Collapse the flat r/g/b/a and x/y/z columns into nested objects.
            tl['colour'] = {
                'r': tl['r'],
                'g': tl['g'],
                'b': tl['b'],
                'a': tl['a']
            }
            del tl['r']
            del tl['g']
            del tl['b']
            del tl['a']
            tl['location'] = {'x': tl['x'], 'y': tl['y'], 'z': tl['z']}
            del tl['x']
            del tl['y']
            del tl['z']
            # Normalize the boolean scaling flag to a legacy 0/1 int.
            tl['scaling'] = 1 if tl['scaling'] else 0

        return JsonResponse(makeJSON_legacy_list(textlabels), safe=False)
    except Exception as e:
        raise Exception(response_on_error + ':' + str(e))
def list_connector(request, project_id=None):
    """List connectors linked to a skeleton as a DataTables JSON response.

    POST parameters:
      stack_id      -- optional; stack used to convert z coordinates to section numbers
      skeleton_id   -- required; skeleton whose connectors are listed
      relation_type -- 0: postsynaptic partners, 1: presynaptic, 2: gap junctions
      iDisplayStart, iDisplayLength, iSortCol_0, sSortDir_0 -- DataTables paging/sorting

    Returns an HttpResponse wrapping the DataTables structure
    (iTotalRecords / iTotalDisplayRecords / aaData).
    Raises Exception with a stage-specific message on any failure.
    """
    stack_id = request.POST.get('stack_id', None)
    skeleton_id = request.POST.get('skeleton_id', None)

    def empty_result():
        # DataTables-shaped empty payload.
        return HttpResponse(
            json.dumps({
                'iTotalRecords': 0,
                'iTotalDisplayRecords': 0,
                'aaData': []
            }))

    if not skeleton_id:
        return empty_result()
    else:
        skeleton_id = int(skeleton_id)

    relation_type = int(request.POST.get('relation_type', 0))  # 0: Presyn, 1 Postsyn, 2 Gj
    display_start = int(request.POST.get('iDisplayStart', 0))
    display_length = int(request.POST.get('iDisplayLength', 0))
    sorting_column = int(request.POST.get('iSortCol_0', 0))
    # FIX: use the str method; the Python 2 string.upper() function is gone in Py3.
    sort_descending = request.POST.get('sSortDir_0', 'DESC').upper() != 'ASC'

    response_on_error = ''
    try:
        response_on_error = 'Could not fetch relations.'
        relation_map = get_relation_to_id_map(project_id)
        for rel in [
                'presynaptic_to', 'postsynaptic_to', 'gapjunction_with',
                'element_of', 'labeled_as'
        ]:
            if rel not in relation_map:
                raise Exception('Failed to find the required relation %s' % rel)

        # relation_type selects which relation the *partner* treenode has to
        # the connector. Gap junctions are symmetric, so both sides match.
        if relation_type == 1:
            relation_type_id = relation_map['presynaptic_to']
            inverse_relation_type_id = relation_map['postsynaptic_to']
        elif relation_type == 2:
            relation_type_id = relation_map['gapjunction_with']
            inverse_relation_type_id = relation_map['gapjunction_with']
        else:
            relation_type_id = relation_map['postsynaptic_to']
            inverse_relation_type_id = relation_map['presynaptic_to']

        response_on_error = 'Failed to select connectors.'
        cursor = connection.cursor()
        cursor.execute(
            '''
            SELECT
            connector.id AS connector_id,
            tn_other.user_id AS connector_user_id,
            treenode_user.username AS connector_username,
            connector.location_x AS connector_x,
            connector.location_y AS connector_y,
            connector.location_z AS connector_z,
            tn_other.id AS other_treenode_id,
            tn_other.location_x AS other_treenode_x,
            tn_other.location_y AS other_treenode_y,
            tn_other.location_z AS other_treenode_z,
            tn_other.skeleton_id AS other_skeleton_id,
            tn_this.location_x AS this_treenode_x,
            tn_this.location_y AS this_treenode_y,
            tn_this.location_z AS this_treenode_z,
            tn_this.id AS this_treenode_id,
            tc_this.relation_id AS this_to_connector_relation_id,
            tc_other.relation_id AS connector_to_other_relation_id,
            tc_other.confidence AS confidence,
            to_char(connector.edition_time, 'DD-MM-YYYY HH24:MI') AS last_modified
            FROM
            treenode tn_other,
            treenode_connector tc_other,
            connector,
            "auth_user" treenode_user,
            treenode_connector tc_this,
            treenode tn_this
            WHERE
            treenode_user.id = tn_other.user_id AND
            tn_other.id = tc_other.treenode_id AND
            tc_other.connector_id = connector.id AND
            tc_other.relation_id = %s AND
            tc_this.connector_id = connector.id AND
            tn_this.id = tc_this.treenode_id AND
            tn_this.skeleton_id = %s AND
            tc_this.relation_id = %s
            ORDER BY
            connector_id, other_treenode_id, this_treenode_id
            ''', [inverse_relation_type_id, skeleton_id, relation_type_id])
        connectors = cursor_fetch_dictionary(cursor)
        # FIX: materialize as lists. On Python 3 map() returns a lazy iterator,
        # which cannot be sliced, appended to, or repeatedly membership-tested
        # as the code below requires.
        connected_skeletons = [con['other_skeleton_id'] for con in connectors]
        connector_ids = [con['connector_id'] for con in connectors]

        response_on_error = 'Failed to find counts of treenodes in skeletons.'
        skel_tn_count = Treenode.objects.filter(skeleton__in=connected_skeletons)\
            .values('skeleton').annotate(treenode_count=Count('skeleton'))
        # .values to group by skeleton_id. See http://tinyurl.com/dj-values-annotate

        skeleton_to_treenode_count = {}
        for s in skel_tn_count:
            skeleton_to_treenode_count[s['skeleton']] = s['treenode_count']

        # Rather than do a LEFT OUTER JOIN to also include the connectors
        # with no partners, just do another query to find the connectors
        # without the conditions:
        response_on_error = 'Failed to select all connectors.'
        cursor.execute(
            '''
            SELECT
            connector.id AS connector_id,
            connector.user_id AS connector_user_id,
            connector_user.username AS connector_username,
            connector.location_x AS connector_x,
            connector.location_y AS connector_y,
            connector.location_z AS connector_z,
            tn_this.id AS this_treenode_id,
            tc_this.relation_id AS this_to_connector_relation_id,
            tc_this.confidence AS confidence,
            to_char(connector.edition_time, 'DD-MM-YYYY HH24:MI') AS last_modified
            FROM
            connector,
            "auth_user" connector_user,
            treenode_connector tc_this,
            treenode tn_this
            WHERE
            connector_user.id = connector.user_id AND
            tc_this.connector_id = connector.id AND
            tn_this.id = tc_this.treenode_id AND
            tn_this.skeleton_id = %s AND
            tc_this.relation_id = %s
            ORDER BY
            connector_id, this_treenode_id
            ''', [skeleton_id, relation_type_id])
        for row in cursor_fetch_dictionary(cursor):
            connector_id = row['connector_id']
            if connector_id not in connector_ids:
                connectors.append(row)
                connector_ids.append(connector_id)

        # For each of the connectors, find all of its labels:
        response_on_error = 'Failed to find the labels for connectors'
        # FIX: bind unconditionally so the lookup in the formatting loop below
        # cannot raise NameError when there are no connectors.
        labels_by_connector = {}  # Key: Connector ID, Value: List of labels.
        # FIX: the original tested 'connector_ids > 0', comparing a list with
        # an int (TypeError on Python 3); test the list's truthiness instead.
        if connector_ids:
            connector_labels = ConnectorClassInstance.objects.filter(
                project=project_id,
                connector__in=connector_ids,
                relation=relation_map['labeled_as']).values(
                    'connector', 'class_instance__name')
            for label in connector_labels:
                if label['connector'] not in labels_by_connector:
                    labels_by_connector[label['connector']] = [
                        label['class_instance__name']
                    ]
                else:
                    labels_by_connector[label['connector']].append(
                        label['class_instance__name'])
            # Sort labels by name, case-insensitively.
            for labels in labels_by_connector.values():
                labels.sort(key=str.upper)

        total_result_count = len(connectors)
        if 0 == total_result_count:
            return empty_result()

        # Paging
        if display_length == 0:
            connectors = connectors[display_start:]
            connector_ids = connector_ids[display_start:]
        else:
            connectors = connectors[display_start:display_start + display_length]
            connector_ids = connector_ids[display_start:display_start + display_length]

        response_on_error = 'Could not retrieve resolution and translation parameters for project.'
        if stack_id:
            resolution = get_object_or_404(Stack, id=int(stack_id)).resolution
            translation = get_object_or_404(ProjectStack,
                                            stack=int(stack_id),
                                            project=project_id).translation
        else:
            # Without a stack, use identity transforms for the section column.
            resolution = Double3D(1.0, 1.0, 1.0)
            translation = Double3D(0.0, 0.0, 0.0)

        # Format output
        aaData_output = []
        for c in connectors:
            response_on_error = 'Failed to format output for connector with ID %s.' % c['connector_id']
            if 'other_skeleton_id' in c:
                connected_skeleton_treenode_count = skeleton_to_treenode_count[
                    c['other_skeleton_id']]
            else:
                # Partner-less connector (from the second query): fall back to
                # the connector's own location.
                c['other_skeleton_id'] = ''
                c['other_treenode_id'] = ''
                c['other_treenode_x'] = c['connector_x']
                c['other_treenode_y'] = c['connector_y']
                c['other_treenode_z'] = c['connector_z']
                connected_skeleton_treenode_count = 0
            if c['connector_id'] in labels_by_connector:
                labels = ', '.join(
                    map(str, labels_by_connector[c['connector_id']]))
            else:
                labels = ''
            row = []
            row.append(c['connector_id'])
            row.append(c['other_skeleton_id'])
            row.append(c['other_treenode_x'])
            row.append(c['other_treenode_y'])
            z = c['other_treenode_z']
            row.append(z)
            # FIXME: This is the only place we need a stack and this can be
            # done in the client as well. So do we really want to keep this and
            # have a more complicated API?
            row.append(int((z - translation.z) / resolution.z))
            row.append(c['confidence'])
            row.append(labels)
            row.append(connected_skeleton_treenode_count)
            row.append(c['connector_username'])
            row.append(c['other_treenode_id'])
            row.append(c['last_modified'])
            aaData_output.append(row)

        # Sort output
        def fetch_value_for_sorting(row):
            value = row[sorting_column]
            # FIX: 'unicode' does not exist on Python 3; str covers all text.
            if isinstance(value, str):
                return value.upper()
            return value
        aaData_output.sort(key=fetch_value_for_sorting)

        # Fix excessive decimal precision in coordinates
        for row in aaData_output:
            row[2] = float('%.2f' % row[2])
            row[3] = float('%.2f' % row[3])
            row[4] = float('%.2f' % row[4])

        if sort_descending:
            aaData_output.reverse()

        return HttpResponse(
            json.dumps({
                'iTotalRecords': total_result_count,
                'iTotalDisplayRecords': total_result_count,
                'aaData': aaData_output
            }))
    except Exception as e:
        raise Exception(response_on_error + ':' + str(e))
def textlabels(request, project_id=None):
    """Return the textlabels visible in a given stack region and section.

    POST parameters: sid (stack), z, top, left, width, height, scale,
    resolution. For each label, the location nearest to the requested z
    within half a section is returned. Scaled labels whose on-screen font
    size would be below 3 pixels are filtered out.

    Returns a JsonResponse list of label dicts with nested 'colour' and
    'location' objects. Raises Exception with a stage message on failure.
    """
    params = {'pid': project_id, 'uid': request.user.id}
    parameter_names = ['sid', 'z', 'top', 'left', 'width', 'height', 'scale',
                       'resolution']
    for p in parameter_names:
        # NOTE(review): 'pid' never occurs in parameter_names; only 'sid'
        # takes the int branch here.
        if p in ['pid', 'sid']:
            params[p] = int(request.POST.get(p, 0))
        elif p in ['scale', 'resolution']:
            params[p] = float(request.POST.get(p, 1))
        else:
            params[p] = float(request.POST.get(p, 0))
    params['right'] = params['left'] + params['width']
    params['bottom'] = params['top'] + params['height']
    params['scale_div_res'] = params['scale'] / params['resolution']

    response_on_error = ''
    try:
        response_on_error = 'Could not retrieve textlabels.'
        c = connection.cursor()
        # FIX: z_diff previously subtracted the location's z from itself and
        # was therefore always 0, so DISTINCT ON picked an arbitrary location
        # per label. Compare against the requested section's z instead.
        c.execute('''
            SELECT DISTINCT ON ( "tid" )
            "textlabel"."id" AS "tid",
            "textlabel"."type" AS "type",
            "textlabel"."text" AS "text",
            "textlabel"."font_name" AS "font_name",
            "textlabel"."font_style" AS "font_style",
            "textlabel"."font_size" AS "font_size",
            "textlabel"."scaling" AS "scaling",
            floor(255*("textlabel"."colour")."r") AS "r",
            floor(255*("textlabel"."colour")."g") AS "g",
            floor(255*("textlabel"."colour")."b") AS "b",
            ("textlabel"."colour")."a" AS "a",
            ("textlabel_location"."location")."x" AS "x",
            ("textlabel_location"."location")."y" AS "y",
            ("textlabel_location"."location")."z" AS "z",
            abs( ("textlabel_location"."location")."z" - %(z)s ) AS "z_diff"
            FROM "textlabel"
            INNER JOIN "textlabel_location"
            ON "textlabel"."id" = "textlabel_location"."textlabel_id"
            INNER JOIN "project"
            ON "project"."id" = "textlabel"."project_id"
            INNER JOIN "project_stack"
            ON "project"."id" = "project_stack"."project_id"
            INNER JOIN "stack"
            ON "stack"."id" = "project_stack"."stack_id"
            WHERE "project"."id" = %(pid)s AND
            "stack"."id" = %(sid)s AND
            NOT "textlabel"."deleted" AND
            NOT "textlabel_location"."deleted" AND
            ("textlabel_location"."location")."x" >= %(left)s AND
            ("textlabel_location"."location")."x" <= %(right)s AND
            ("textlabel_location"."location")."y" >= %(top)s AND
            ("textlabel_location"."location")."y" <= %(bottom)s AND
            ("textlabel_location"."location")."z" >= %(z)s - 0.5 * ("stack"."resolution")."z" AND
            ("textlabel_location"."location")."z" <= %(z)s + 0.5 * ("stack"."resolution")."z" AND
            ( ( "textlabel"."scaling" AND "textlabel"."font_size" * %(scale_div_res)s >= 3 ) OR
            NOT "textlabel"."scaling" )
            ORDER BY "tid", "z_diff"
            ''', params)
        textlabels = cursor_fetch_dictionary(c)

        response_on_error = 'Failed to format output'
        for tl in textlabels:
            # Collapse the flat r/g/b/a and x/y/z columns into nested objects.
            tl['colour'] = {'r': tl['r'], 'g': tl['g'], 'b': tl['b'],
                            'a': tl['a']}
            del tl['r']
            del tl['g']
            del tl['b']
            del tl['a']
            tl['location'] = {'x': tl['x'], 'y': tl['y'], 'z': tl['z']}
            del tl['x']
            del tl['y']
            del tl['z']
            # Normalize the boolean scaling flag to a legacy 0/1 int.
            tl['scaling'] = 1 if tl['scaling'] else 0

        return JsonResponse(makeJSON_legacy_list(textlabels), safe=False)
    except Exception as e:
        raise Exception(response_on_error + ':' + str(e))