def test_bulk_atomic_blobs_with_non_blob_docs(self):
    """A document without a ``blobs`` attribute passes through the context
    unchanged and is not saved."""
    plain_doc = self.make_doc(BaseFakeDocument)
    self.assertFalse(hasattr(plain_doc, "blobs"))
    with mod.bulk_atomic_blobs([plain_doc]):
        pass
    self.assertFalse(plain_doc.saved)
def save_processed_models(cls, processed_forms, cases=None, stock_result=None):
    """Bulk-save processed forms and cases to Couch, then commit stock.

    :param processed_forms: iterable of form documents to save
    :param cases: optional list of case documents saved alongside the forms
    :param stock_result: optional stock transaction result; committed after
        the documents are saved
    """
    # BUG FIX: the old code used ``filter(None, docs)``, which on Python 3
    # returns a one-shot iterator; ``bulk_atomic_blobs(docs)`` would consume
    # it, leaving ``bulk_save(docs)`` with nothing to save. Materialize the
    # filtered docs into a list instead.
    docs = [doc for doc in list(processed_forms) + (cases or []) if doc]
    # Forms and cases must live in the same Couch DB for a single bulk_save.
    assert XFormInstance.get_db().uri == CommCareCase.get_db().uri
    with bulk_atomic_blobs(docs):
        XFormInstance.get_db().bulk_save(docs)
    if stock_result:
        stock_result.commit()
def test_bulk_atomic_blobs(self):
    """An attachment put inside the context is persisted without saving
    the document itself."""
    self.assertNotIn("name", self.obj.blobs)
    with mod.bulk_atomic_blobs([self.obj]):
        self.obj.put_attachment("data", "name")
        self.assertIn("name", self.obj.blobs)
    self.assertFalse(self.obj.saved)
    self.assertEqual(self.obj.fetch_attachment("name"), b"data")
def test_bulk_atomic_blobs(self):
    """Putting an attachment inside the context stores it while leaving the
    document unsaved."""
    self.assertNotIn("name", self.obj.blobs)
    with mod.bulk_atomic_blobs([self.obj]):
        self.obj.put_attachment("data", "name")
        self.assertIn("name", self.obj.blobs)
    self.assertFalse(self.obj.saved)
    self.assertEqual(self.obj.fetch_attachment("name"), "data")
def save_processed_models(cls, processed_forms, cases=None, stock_result=None):
    """Stamp each form's server modification time, bulk-save forms and cases
    to Couch under atomic blob handling, then commit any stock result.

    :param processed_forms: iterable of form documents to save
    :param cases: optional list of case documents saved alongside the forms
    :param stock_result: optional stock transaction result to commit
    """
    forms = list(processed_forms)
    for form in forms:
        if form:
            form.server_modified_on = datetime.datetime.utcnow()
    docs = [doc for doc in forms + (cases or []) if doc]
    # Forms and cases share a Couch DB, so a single bulk_save covers both.
    assert XFormInstance.get_db().uri == CommCareCase.get_db().uri
    with bulk_atomic_blobs(docs):
        XFormInstance.get_db().bulk_save(docs)
    if stock_result:
        stock_result.commit()
def test_bulk_atomic_blobs_with_deferred_deleted_blobs(self):
    """A deferred attachment delete is staged inside the context and only
    applied to the blob db once the context exits."""
    doc = self.make_doc(DeferredPutBlobDocument)
    self.assertNotIn("will_delete", doc.blobs)
    doc.put_attachment("data", "will_delete")
    doc.deferred_delete_attachment("will_delete")
    meta = doc.external_blobs["will_delete"]
    with mod.bulk_atomic_blobs([doc]):
        # metadata already dropped, but the blob itself still exists
        self.assertNotIn("will_delete", doc.external_blobs)
        self.assertTrue(self.db.exists(key=meta.key))
    self.assertFalse(doc._deferred_blobs)
    self.assertFalse(self.db.exists(key=meta.key))
    self.assertNotIn("will_delete", doc.external_blobs)
def test_bulk_atomic_blobs_with_deferred_blobs(self):
    """A deferred put is written to the blob db by the context without
    saving the document."""
    doc = self.make_doc(DeferredPutBlobDocument)
    self.assertNotIn("name", doc.blobs)
    doc.deferred_put_attachment("data", "name")
    with mod.bulk_atomic_blobs([doc]):
        doc.put_attachment("data", "name")
        self.assertIn("name", doc.external_blobs)
    blob_key = doc.blobs["name"].key
    self.assertTrue(blob_key)
    self.assertFalse(doc.saved)
    with self.get_blob(blob_key).open() as fh:
        self.assertEqual(fh.read(), b"data")
def test_bulk_atomic_blobs_with_deferred_blobs(self):
    """A deferred put is flushed to the blob bucket by the context while
    the document itself stays unsaved."""
    doc = self.make_doc(DeferredPutBlobDocument)
    self.assertNotIn("name", doc.blobs)
    doc.deferred_put_attachment("data", "name")
    with mod.bulk_atomic_blobs([doc]):
        doc.put_attachment("data", "name")
        self.assertIn("name", doc.external_blobs)
    blob_id = doc.blobs["name"].id
    self.assertTrue(blob_id)
    self.assertFalse(doc.saved)
    with self.get_blob(blob_id, doc._blobdb_bucket()).open() as fh:
        self.assertEqual(fh.read(), "data")
def test_bulk_atomic_blobs_with_mixed_docs(self):
    """Blob docs, non-blob docs, and deferred-blob docs can share one
    bulk_atomic_blobs context; nothing is saved, blobs are written."""
    plain_doc = self.make_doc(BaseFakeDocument)
    deferred_doc = self.make_doc(DeferredPutBlobDocument)
    deferred_doc.deferred_put_attachment("deferred", "att")
    self.assertFalse(hasattr(plain_doc, "blobs"))
    batch = [self.obj, plain_doc, deferred_doc]
    self.assertNotIn("name", self.obj.blobs)
    with mod.bulk_atomic_blobs(batch):
        self.obj.put_attachment("data", "name")
        self.assertIn("name", self.obj.blobs)
        self.assertIn("att", deferred_doc.external_blobs)
    self.assertFalse(any(doc.saved for doc in batch))
    self.assertEqual(self.obj.fetch_attachment("name"), b"data")
    deferred_key = deferred_doc.blobs["att"].key
    with self.get_blob(deferred_key).open() as fh:
        self.assertEqual(fh.read(), b"deferred")
def test_bulk_atomic_blobs_with_mixed_docs(self):
    """A mixed batch (blob doc, non-blob doc, deferred-blob doc) runs
    through one context: blobs land in the bucket, no doc is saved."""
    plain_doc = self.make_doc(BaseFakeDocument)
    deferred_doc = self.make_doc(DeferredPutBlobDocument)
    deferred_doc.deferred_put_attachment("deferred", "att")
    self.assertFalse(hasattr(plain_doc, "blobs"))
    batch = [self.obj, plain_doc, deferred_doc]
    self.assertNotIn("name", self.obj.blobs)
    with mod.bulk_atomic_blobs(batch):
        self.obj.put_attachment("data", "name")
        self.assertIn("name", self.obj.blobs)
        self.assertIn("att", deferred_doc.external_blobs)
    self.assertFalse(any(doc.saved for doc in batch))
    self.assertEqual(self.obj.fetch_attachment("name"), "data")
    deferred_id = deferred_doc.blobs["att"].id
    with self.get_blob(deferred_id, deferred_doc._blobdb_bucket()).open() as fh:
        self.assertEqual(fh.read(), "deferred")
def reprocess_xform_error(form):
    """
    Attempt to re-process an error form.

    This was created specifically to address the issue of out of order forms
    and child cases (form creates child case before parent case has been
    created). See http://manage.dimagi.com/default.asp?250459

    :param form: the error form document to process
    :return: the re-processed form
    :raises Exception: if ``form`` is falsy or is not an error form
    """
    from corehq.form_processor.interfaces.processor import (
        FormProcessorInterface,
        ProcessedForms,
    )
    from corehq.form_processor.submission_post import SubmissionPost
    from corehq.form_processor.utils import should_use_sql_backend
    from corehq.form_processor.backends.sql.dbaccessors import (
        CaseAccessorSQL,
        FormAccessorSQL,
        LedgerAccessorSQL,
    )
    from corehq.form_processor.backends.sql.processor import FormProcessorSQL
    from corehq.blobs.mixin import bulk_atomic_blobs
    from couchforms.models import XFormInstance
    from casexml.apps.case.signals import case_post_save

    if not form:
        # BUG FIX: the old message formatted ``form.form_id``, which raised
        # AttributeError on a falsy/None form instead of the intended error.
        raise Exception('Form not found')
    if not form.is_error:
        raise Exception('Form was not an error form: {}={}'.format(
            form.form_id, form.doc_type))

    # reset form state prior to processing
    if should_use_sql_backend(form.domain):
        form.state = XFormInstanceSQL.NORMAL
    else:
        form.doc_type = 'XFormInstance'
    form.initial_processing_complete = True
    form.problem = None

    cache = FormProcessorInterface(form.domain).casedb_cache(
        domain=form.domain, lock=True, deleted_ok=True, xforms=[form]
    )
    with cache as casedb:
        case_stock_result = SubmissionPost.process_xforms_for_cases([form], casedb)
        if case_stock_result:
            stock_result = case_stock_result.stock_result
            if stock_result:
                assert stock_result.populated
            cases = case_stock_result.case_models
            if should_use_sql_backend(form.domain):
                # SQL backend: save each model via its accessor, then publish
                # the processed forms/cases for downstream change feeds.
                for case in cases:
                    CaseAccessorSQL.save_case(case)
                if stock_result:
                    LedgerAccessorSQL.save_ledger_values(stock_result.models_to_save)
                FormAccessorSQL.update_form_problem_and_state(form)
                FormProcessorSQL._publish_changes(
                    ProcessedForms(form, None),
                    cases,
                    stock_result,
                )
            else:
                # Couch backend: save under atomic blob handling. Use
                # XFormInstance.save so that we don't overwrite the doc_type.
                with bulk_atomic_blobs([form] + cases):
                    XFormInstance.save(form)
                    XFormInstance.get_db().bulk_save(cases)
                if stock_result:
                    stock_result.commit()
            case_stock_result.stock_result.finalize()
            case_stock_result.case_result.commit_dirtiness_flags()
            for case in cases:
                case_post_save.send(case.__class__, case=case)
    return form
detail, lock=False) save and case_post_save.send(case.__class__, case=case) for ledger in ledgers: if ledger.ledger_reference in ledgers_updated: logger.info('Rebuilding ledger: %s', ledger.ledger_reference) if save: # only rebuild upated ledgers interface.ledger_processor.hard_rebuild_ledgers( **ledger.ledger_reference._asdict()) else: if save: with bulk_atomic_blobs([form] + cases): XFormInstance.save( form ) # use this save to that we don't overwrite the doc_type XFormInstance.get_db().bulk_save(cases) stock_result.commit() save and case_stock_result.stock_result.finalize() save and case_stock_result.case_result.commit_dirtiness_flags() return ReprocessingResult(form, cases, ledgers) def _log_changes(slug, cases, stock_updates, stock_deletes): if logger.isEnabledFor(logging.INFO): case_ids = [case.case_id for case in cases]