def tear_down(self):
    """Finish the import: run the parent tear-down, then schedule one
    asynchronous task that reindexes every imported object."""
    super(PLMObjectsImporter, self).tear_down()
    # Index all instances with a single task to avoid repeated
    # index lock/unlock cycles.
    to_index = [
        (inst._meta.app_label, inst._meta.module_name, inst._get_pk_val())
        for inst in (obj.object for obj in self.objects)
    ]
    update_indexes.delay(to_index)
def _build(self, tree, native_files, step_files):
    """Build the part/document structure described by *tree*, attaching
    step and native files, and schedule a single reindexing task.

    Returns the native file added to the root controller (may be None).
    """
    # Tasks (handle_step_files, update_indexes) are executed once the
    # transaction is committed, so mails and per-object indexing do not
    # need to be blocked for correctness; all instances are indexed by
    # one task to avoid a lot of lock/unlock calls.
    self._native_files = dict((f.name.lower(), f) for f in native_files)
    self._step_files = dict((f.name.lower(), f) for f in step_files)
    self.controller.object.no_index = True
    if not self._send_mails:
        self.controller.block_mails()

    part_name = self._get_part_name(tree)
    self.root = self._add_part(self.controller, part_name)
    if tree["children"]:
        self._add_decomposed_step(self.controller, part_name)
        self._add_children(self.root, tree)
    else:
        self._add_step_file(self.controller, tree)

    # Native files are added after step files and PCL so that it is
    # easier to test whether a whole assembly can be checked out using
    # native files.
    native = self._add_native_file(self.controller, tree)

    # Reindex all created parts, documents and files in one task.
    to_index = []
    for created in itertools.chain(self.created_parts, self.created_docs,
                                   self.added_files):
        to_index.append((created._meta.app_label,
                         created._meta.module_name, created.pk))
    update_indexes.delay(to_index)
    return native
def _fast_reindex_files(self):
    """
    Reindexes associated document files.

    Called after a promote or a demote to update the state_class
    field of each file.
    """
    pks = list(self.files.values_list("pk", flat=True))
    if not pks:
        # nothing to reindex
        return
    meta = models.DocumentFile._meta
    payload = [(meta.app_label, meta.module_name, pk) for pk in pks]
    update_indexes.delay(payload, fast_reindex=True)
def display_decompose(request, obj_type, obj_ref, obj_revi, stp_id):
    """
    View that decomposes a STEP file into a BOM of parts/documents.

    :param obj_type: Type of the :class:`.Part` from which we want to realize
        the decomposition
    :param obj_ref: Reference of the :class:`.Part` from which we want to
        realize the decomposition
    :param obj_revi: Revision of the :class:`.Part` from which we want to
        realize the decomposition
    :param stp_id: Id that identifies the :class:`.DocumentFile` contained in
        a :class:`.Document3D` attached to the :class:`.Part` (identified by
        **obj_type**, **obj_ref**, **obj_revi**) that we will decompose

    When the decomposition is requested through the web form, the following
    tasks are realized:

    - We check that the :class:`.Document3D` that contains the
      :class:`.DocumentFile` (**stp_id**) that will be decomposed has not been
      modified since the generation of the form
    - We check the validity of the information got in the form
    - If a native :class:`.DocumentFile` related to the :class:`.DocumentFile`
      (**stp_id**) exists, it is deprecated (afterwards will be promoted)
    - The :class:`.DocumentFile` (**stp_id**) is locked (afterwards will be
      promoted)
    - We call the function :meth:`.generate_part_doc_links_AUX` (with the
      property **transaction.commit_on_success**):

      - We generate the arborescense (:class:`.product`) of the
        :class:`.DocumentFile` (**stp_id**)
      - The bom-child of Parts (in relation to the arborescense of the
        :class:`.DocumentFile` (**stp_id**)) is generated
      - For every :class:`.ParentChildLink` generated in the previous step we
        attach all the relative :class:`.Location_link`
      - To every generated :class:`.Part` a :class:`.Document3D` is attached
        and this document is set as the attribute PartDecompose of the
        :class:`.Part`
      - The attribute doc_id of every node of the arborescense
        (:class:`.Product`) becomes the id of the :class:`.Document3D`
        generated in the previous step
      - To every generated :class:`.Document3D` a new empty (locked)
        :class:`.DocumentFile` STP is added
      - The attribute doc_path of every node of the arborescense
        (:class:`.Product`) becomes the path of the :class:`.DocumentFile` STP
        generated in the previous step

    - We update the indexes for the generated objects
    - We call the process decomposer_all (with celeryd)
    """
    obj, ctx = get_generic_data(request, obj_type, obj_ref, obj_revi)
    stp_file=pmodels.DocumentFile.objects.get(id=stp_id)
    assemblies=[]
    # A locked STEP file is already being processed/checked out elsewhere.
    if stp_file.locked:
        raise ValueError("Not allowed operation.This DocumentFile is locked")
    if not obj.get_attached_documents().filter(document=stp_file.document).exists():
        raise ValueError("Not allowed operation.The Document and the Part are not linked")
    if (models.Document3D.objects.filter(PartDecompose=obj.object).exists()
        and not models.Document3D.objects.get(PartDecompose=obj.object).id==stp_file.document.id):
        # the same document can be re-decomposed for the same part
        raise ValueError("Not allowed operation.This Part already forms part of another split BOM")
    try:
        doc3D=models.Document3D.objects.get(id=stp_file.document_id)
    except models.Document3D.DoesNotExist:
        raise ValueError("Not allowed operation.The document is not a subtype of document3D")
    if doc3D.PartDecompose and not doc3D.PartDecompose.id==obj.object.id:
        raise ValueError("Not allowed operation.This Document already forms part of another split BOM")
    # The decomposition is performed on behalf of the company user.
    document_controller = models.Document3DController(doc3D,
        pmodels.User.objects.get(username=settings.COMPANY))
    if request.method == 'POST':
        extra_errors = ""
        product = document_controller.get_product(stp_file, False)
        last_mtime = forms.Form_save_time_last_modification(request.POST)
        obj.block_mails()
        if last_mtime.is_valid() and product:
            old_time = last_mtime.cleaned_data['last_modif_time']
            old_microseconds = last_mtime.cleaned_data['last_modif_microseconds']
            index=[1]
            if clean_form(request,assemblies,product,index,obj_type, {}):
                # Proceed only if the document was not modified since the
                # form was generated and the file is still checkout-able.
                if (same_time(old_time, old_microseconds, document_controller.mtime)
                    and stp_file.checkout_valid and not stp_file.locked):
                    # Lock the STEP file for the company user while the
                    # decomposition runs.
                    stp_file.locked=True
                    stp_file.locker=pmodels.User.objects.get(username=settings.COMPANY)
                    stp_file.save()
                    # Deprecate any related native file (restored on failure).
                    native_related=stp_file.native_related
                    if native_related:
                        native_related.deprecated=True
                        native_related.save()
                        native_related_pk=native_related.pk
                    else:
                        native_related_pk=None
                    try:
                        instances = []
                        # Save the product BEFORE its nodes are updated with
                        # the new doc_id and doc_path generated during the
                        # bom-child creation.
                        old_product = json.dumps(product.to_list())
                        generate_part_doc_links_AUX(request, product, obj,instances,doc3D)
                        update_indexes.delay(instances)
                    except Exception as excep:
                        # Roll back: delete generated files, unlock the STEP
                        # file and restore the native file.
                        if isinstance(excep, models.Document_Generate_Bom_Error):
                            models.delete_files(excep.to_delete)
                        extra_errors = unicode(excep)
                        stp_file.locked = False
                        stp_file.locker = None
                        stp_file.save()
                        if native_related:
                            native_related.deprecated=False
                            native_related.save()
                    else:
                        # Success: hand the heavy decomposition off to celery.
                        models.decomposer_all.delay(stp_file.pk,
                            json.dumps(product.to_list()), obj.object.pk, native_related_pk,
                            obj._user.pk, old_product)
                        return HttpResponseRedirect(obj.plmobject_url+"BOM-child/")
                else:
                    extra_errors="The Document3D associated with the file STEP to analyze has been modified by another user while the forms were refilled:Please restart the process"
            else:
                extra_errors="Error refilling the form, please check it"
        else:
            extra_errors = INVALID_TIME_ERROR
    else:
        # GET: build the initial forms from the current document state.
        last_mtime=forms.Form_save_time_last_modification()
        last_mtime.fields["last_modif_time"].initial = stp_file.document.mtime
        last_mtime.fields["last_modif_microseconds"].initial= stp_file.document.mtime.microsecond
        product= document_controller.get_product(stp_file, False)
        if not product or not product.links:
            return HttpResponseRedirect(obj.plmobject_url+"BOM-child/")
        group = obj.group
        index=[1,0] # index[1] to evade generate holes in part_revision_default generation
        inbulk_cache = {}
        initialize_assemblies(assemblies,product,group,request.user,index,obj_type, inbulk_cache)
        extra_errors = ""
    deep_assemblies=sort_assemblies_by_depth(assemblies)
    ctx.update({'current_page':'decomposer',
                'deep_assemblies' : deep_assemblies,
                'extra_errors' : extra_errors,
                'last_mtime' : last_mtime
                })
    return r2r('DisplayDecompose.htm', ctx, request)