def _get_data(self, request, node, *args, **kwargs):
    """Apply a graph-edit operation to *node* from the POSTed request.

    Supported POST["op"] values:
      * "move"              - update the node's canvas x/y position.
      * "update_connection" - add or remove an edge to params["parent_id"]
                              (params["remove"] truthy => remove).

    Raises ValueError for any other op. Returns an empty dict.
    """
    op = request.POST["op"]
    params = json.loads(request.POST['params'])
    # Load subclass, need to use OO to handle adding/deleting and save
    logging.debug("Loaded node %s version %d", node.pk, node.version)
    if op == "move":
        node.x = params['x']
        node.y = params['y']
        node.save()
    elif op == "update_connection":
        parent_id = params["parent_id"]
        # write=True: connecting/disconnecting modifies the parent too.
        parent = get_node_subclass_or_non_fatal_exception(request.user, parent_id, write=True)
        if params.get("remove"):
            node.remove_parent(parent)
        else:
            # FIX: previously this rebound the method's **kwargs parameter;
            # use a dedicated dict so the incoming kwargs aren't shadowed.
            add_parent_kwargs = {}
            if isinstance(node, VennNode):
                side = params.get("side")  # Optional for venn
                add_parent_kwargs["side"] = side
            node.add_parent(parent, **add_parent_kwargs)
        node.save()
        update_analysis(node.analysis_id)
    else:
        raise ValueError(f"Unknown operation '{op}'")
    return {}
def handle(self, *args, **options):
    """Mark all loading/dirty nodes for reload, throttled by celery queue depth.

    Assumes the celery queues start empty; blocks between analyses while the
    number of queued/loading nodes exceeds --max_queue.
    """
    max_queue = options["max_queue"]
    print("Assuming celery queues are EMPTY!")
    print("Marking loading nodes as dirty....")
    loading_nodes = AnalysisNode.objects.filter(status__in=NodeStatus.LOADING_STATUSES)
    loading_nodes.update(status=NodeStatus.DIRTY)
    dirty_nodes = AnalysisNode.objects.filter(status=NodeStatus.DIRTY)
    qs = Analysis.objects.filter(pk__in=dirty_nodes.values_list("analysis_id"))
    print(f"Reloading {dirty_nodes.count()} dirty nodes in analyses: {qs.count()} (max queue: {max_queue})")
    QUEUED_STATUS = [NodeStatus.QUEUED, NodeStatus.LOADING, NodeStatus.LOADING_CACHE]
    for analysis in qs:
        # Wait until the in-flight node count drops below the cap before
        # queueing the next analysis.
        while (num_queued := AnalysisNode.objects.filter(status__in=QUEUED_STATUS).count()) > max_queue:
            print(f"waiting for queue {num_queued} to shrink below {max_queue}")
            sleep(5)
        print(f"Updating nodes for {analysis}")
        update_analysis(analysis.pk)
def post(self, request, *args, **kwargs):
    """Replace this FilterNode's filter items from the JqGrid filters JSON in POST.

    Rebuilds the filternodeitem_set from POST["filters"] (if present), marks the
    node dirty and triggers async analysis updates. Requires write access.
    """
    self.object = self.get_object()
    self.object.analysis.check_can_write(request.user)
    # Delete all old filters (probably not best way to do it)
    self.object.filternodeitem_set.all().delete()
    filters_data = request.POST.get("filters")
    if filters_data:
        filters = json.loads(filters_data)
        filternodeitem_set = set()
        opts = self.object.model._meta
        for i, rule in enumerate(filters['rules']):
            op, field, data = rule['op'], rule['field'], rule['data']
            if op == "eq":
                # To be able to search JqGrid for isnull, field must have required=False (from blank=True)
                # But it can thus send through '' for no value. Some fields can't deal with that - so in those cases
                # we convert "equals blank" to "is null"
                django_field = JqGrid.lookup_foreign_key_field(opts, field)
                if data == '' and not django_field.empty_strings_allowed:
                    if django_field.null:
                        op = 'nu'
                    else:
                        # FIX: "django_field" was outside the braces, so the message
                        # printed the literal text "(django_field)" instead of the field.
                        raise ValueError(f"Field {field} ({django_field}) received '' but is not-nullable")
            fni = FilterNodeItem.objects.create(filter_node=self.object, sort_order=i,
                                                operation=op, field=field, data=data)
            filternodeitem_set.add(fni)
        self.object.group_operation = filters['groupOp']
        self.object.filternodeitem_set.set(filternodeitem_set)
    self.object.appearance_dirty = True
    self.object.queryset_dirty = True
    self.object.save()
    update_analysis(self.object.analysis_id)  # Trigger update_node tasks
    return HttpResponse()
def analysis_tag_created_task(variant_tag_id):
    """Async follow-up for a newly created VariantTag.

    Runs asynchronously to save a few milliseconds when adding/removing tags.
    """
    try:
        tag = VariantTag.objects.get(pk=variant_tag_id)
    except VariantTag.DoesNotExist:
        # Deleted before this got run, doesn't matter...
        return
    update_analysis(tag.analysis.pk)
    _liftover_variant_tag(tag)
def form_valid(self, form):
    """Save the node from the form, flag it dirty and kick off async updates."""
    node = form.save(commit=False)
    node.queryset_dirty = True
    node.appearance_dirty = True
    node.save()
    self.object = node
    update_analysis(node.analysis_id)  # Trigger update_node tasks
    return JsonResponse({})
def node_create(request, analysis_id, node_type):
    """Create an empty node of class *node_type* in the analysis and render it."""
    global NODE_TYPES_HASH
    if NODE_TYPES_HASH is None:
        # Built lazily on first use, then cached at module level.
        NODE_TYPES_HASH = get_node_types_hash_by_class_name()
    analysis = get_analysis_or_404(request.user, analysis_id, write=True)
    node = NODE_TYPES_HASH[node_type].objects.create(analysis=analysis)
    update_analysis(node.analysis_id)
    return JsonResponse(get_rendering_dict(node))
def create_filter_child_node(node, column_name, column_filter):
    """Create and attach a child node filtering *node* on the given column.

    A gene_symbol filter (other than 'null') becomes a gene list node;
    everything else becomes a plain filter node.
    """
    if column_name == 'gene_symbol' and column_filter != 'null':
        child = create_gene_list_node(node.analysis, column_filter)
    else:
        child = create_filter_node(node.analysis, column_name, column_filter)
    # Place slightly below the parent with jitter so stacked children
    # don't land exactly on top of each other.
    child.x = node.x + 50 + random.randrange(-10, 10)
    child.y = node.y + 100 + random.randrange(-10, 10)
    child.add_parent(node)
    child.ready = False
    child.save()
    update_analysis(child.analysis_id)
    return child
def create_selected_child(request, node_id):
    """Create a SelectedInParentNode beneath *node_id* and return its rendering data."""
    # FIX: require write access - creating a child modifies the analysis graph
    # (consistent with create_extra_filter_child, which passes write=True).
    node = get_node_subclass_or_404(request.user, node_id, write=True)
    # Jittered placement just below the parent.
    x = node.x + 50 + random.randrange(-10, 10)
    y = node.y + 100 + random.randrange(-10, 10)
    selected_node = SelectedInParentNode.objects.create(analysis=node.analysis, x=x, y=y, ready=False)
    selected_node.add_parent(node)
    selected_node.save()
    update_analysis(node.analysis.pk)
    data = get_rendering_dict(selected_node)
    data["node_id"] = node.get_css_id()
    return JsonResponse(data)
def create_extra_filter_child(request, node_id, extra_filters):
    """Attach a BuiltInFilterNode of type *extra_filters* beneath the given node."""
    parent = get_node_subclass_or_404(request.user, node_id, write=True)
    # Jittered placement just below the parent node.
    new_x = parent.x + 50 + random.randrange(-10, 10)
    new_y = parent.y + 100 + random.randrange(-10, 10)
    child = BuiltInFilterNode.objects.create(analysis=parent.analysis,
                                             built_in_filter=extra_filters,
                                             x=new_x, y=new_y, ready=False)
    child.add_parent(parent)
    child.save()
    update_analysis(parent.analysis.pk)
    data = get_rendering_dict(child)
    data["node_id"] = parent.get_css_id()
    return JsonResponse(data)
def nodes_copy(request, analysis_id):
    """Clone the POSTed set of nodes (and the edges among them) in an analysis.

    Expects POST["nodes"] = JSON list of node IDs. Clones in topological order
    so each clone can be wired to the already-cloned copies of its parents.
    Returns JSON {"nodes": [...], "edges": [...]} with rendering data.
    """
    node_ids = json.loads(request.POST["nodes"])
    node_ids = set([int(i) for i in node_ids])
    nodes = []
    edges = []
    analysis = get_analysis_or_404(request.user, analysis_id, write=True)
    nodes_qs = analysis.analysisnode_set.filter(id__in=node_ids).select_subclasses()
    topo_sorted = get_toposorted_nodes(nodes_qs)
    old_new_map = {}  # original node id -> cloned node instance
    for group in topo_sorted:
        for node in group:
            if analysis_id is None:
                analysis_id = node.analysis_id
            template_node = get_node_subclass_or_404(request.user, node.id)
            # Only re-link parents that are themselves part of the copied set.
            # NOTE(review): this queryset is lazy - it executes in the loop
            # below, AFTER old_new_map gains this node's entry; confirm that
            # ordering is intended before restructuring.
            parents = template_node.analysisnode_ptr.parents().filter(
                id__in=old_new_map).values_list('id', flat=True)
            clone_node = template_node.save_clone()
            # Offset slightly so the copy doesn't sit exactly on the original.
            clone_node.x += 10
            clone_node.y += 10
            clone_node.status = NodeStatus.DIRTY
            clone_node.save()
            old_new_map[node.id] = clone_node
            clone_node.adjust_cloned_parents(old_new_map)
            for parent_id in parents:
                new_parent = old_new_map[parent_id]
                new_parent.add_child(clone_node)
                edge = clone_node.get_connection_data(new_parent)
                edges.append(edge)
            if not clone_node.is_valid():
                clone_node.count = None
                clone_node.save()
            nodes.append(get_rendering_dict(clone_node))
    update_analysis(analysis.pk)
    return JsonResponse({"nodes": nodes, "edges": edges})
def set_variant_selected(request, node_id):
    """Toggle a variant's 'selected' state on a node.

    Creates or deletes the NodeVariant row per POST["checked"], marks any
    SelectedInParentNode children as queryset-dirty, then triggers async
    analysis updates.
    """
    node = get_node_subclass_or_404(request.user, node_id, write=True)
    variant_id = request.POST['variant_id']
    checked = json.loads(request.POST['checked'])
    kwargs = {"variant_id": variant_id, "node_id": node.pk}
    if checked:
        NodeVariant.objects.get_or_create(**kwargs)
    else:
        NodeVariant.objects.filter(**kwargs).delete()
    kids_qs = AnalysisEdge.objects.filter(parent=node).values_list("child_id", flat=True)  # @UndefinedVariable
    # FIX: the loop previously rebound "node", so update_analysis() below ran on
    # whichever child iterated last (same analysis in practice, but fragile).
    for kid in SelectedInParentNode.objects.filter(pk__in=kids_qs):
        kid.queryset_dirty = True
        kid.save()
    update_analysis(node.analysis.pk)
    return JsonResponse({})
def nodes_delete(request, analysis_id):
    """Delete the POSTed node IDs from an analysis, detaching children first.

    Expects POST["nodes"] = JSON list of node IDs. Requires write access.
    """
    requested_ids = {int(i) for i in json.loads(request.POST["nodes"])}
    analysis = get_analysis_or_404(request.user, analysis_id, write=True)
    doomed_qs = analysis.analysisnode_set.filter(id__in=requested_ids).select_subclasses()
    # Work leaf-most first so children are always detached before their parents go.
    for group in reversed(get_toposorted_nodes(doomed_qs)):
        for doomed in group:
            if doomed.id not in requested_ids:
                continue  # parent we don't care about
            # Detach surviving children before deleting the node itself.
            for kid in doomed.analysisnode_ptr.children.select_subclasses():
                kid.remove_parent(doomed)
                kid.parents_changed = True
                kid.save()
            doomed.delete()
    update_analysis(analysis.pk)
    return JsonResponse({})
def analysis_tag_deleted_task(analysis_id, _tag_id):
    """Async follow-up after a tag is deleted from an analysis.

    Runs asynchronously to save a few milliseconds when adding/removing tags.
    """
    try:
        analysis = Analysis.objects.get(pk=analysis_id)
    except Analysis.DoesNotExist:
        # FIX: analysis deleted before the task ran - ignore, consistent with
        # analysis_tag_created_task's handling of a vanished VariantTag.
        return
    update_analysis(analysis.pk)