Example #1
    def wizard_complete(self, *, added=None, updated=None):
        notify = RedisBroker(self.user)
        ws = WsBroker(self.user)
        study = self.load.study
        request_uuid = self.load.request
        status = str(self.load.status)
        if added and updated:
            changed = _("Added {added} values and updated {updated}.")
        elif updated:
            changed = _("Updated {updated} values.")
        elif added:
            changed = _("Added {added} values.")
        else:
            changed = _("No values were modified.")
        changed = changed.format(added=added, updated=updated)
        message = _("Finished loading data into {study}. {changed}")
        # this one goes to the bit bucket if the user has navigated away
        ws.notify(
            message.format(study=study.name, changed=changed),
            tags=["import-status-update"],
            payload={"uuid": request_uuid, "status": status},
        )
        # this one sticks around if the user leaves + comes back
        notify.notify(
            message.format(study=study.name, changed=changed),
            tags=["import-status-update"],
            payload={"uuid": request_uuid, "status": status},
        )
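
The method fans one message out over two channels: the WebSocket broker for a live page, and the Redis-backed broker for notifications that persist across page loads. A minimal sketch of that fan-out with stand-in brokers (for illustration only; EDD's WsBroker and RedisBroker do real delivery):

# stand-in broker that records notifications instead of delivering them
class StubBroker:
    def __init__(self, user):
        self.user = user
        self.sent = []

    def notify(self, message, tags=(), payload=None, uuid=None):
        self.sent.append({"message": message, "tags": list(tags),
                          "payload": payload, "uuid": uuid})


ws = StubBroker("alice")      # transient: lost if the user navigates away
notify = StubBroker("alice")  # durable: survives leaving and coming back
for broker in (ws, notify):
    broker.notify("Finished loading data into Study A. Added 10 values.",
                  tags=["import-status-update"],
                  payload={"uuid": "some-request-uuid", "status": "Completed"})
assert ws.sent == notify.sent  # identical message on both channels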
Example #2
def update_import_status(status, import_uuid, user_pk, notify=None):
    """
        A simple task whose job is to update an import's status and send a related user
        notification
    """
    logger.info(f"Updating import status to {status} for {import_uuid}")
    User = get_user_model()
    user = User.objects.get(pk=user_pk)
    logger.debug(
        f"Marking {user.username}'s import {import_uuid} as {status}")
    import_ = Import.objects.filter(
        uuid=import_uuid).select_related("file").get()
    import_.status = status
    import_.save()

    # send an async notification of the status update
    if not notify:
        notify = RedisBroker(user)
    file_name = import_.file.file.name
    msg = 'Your import for file "{file_name}" is {status}'.format(
        file_name=file_name, status=status.lower())
    notify.notify(
        msg,
        tags=["import-status-update"],
        payload={
            "status": status,
            "uuid": import_uuid,
            "pk": import_.pk
        },
    )
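
The optional notify parameter doubles as an injection point: callers (or tests) can supply their own broker instead of letting the task construct a RedisBroker. A hedged sketch of exercising the task synchronously with a stub, assuming import_uuid and user_pk refer to existing fixture records:

# stub broker injected through `notify`, so no Redis connection is needed;
# import_uuid and user_pk are assumed test fixtures, not real identifiers
class StubBroker:
    def __init__(self):
        self.messages = []

    def notify(self, msg, tags=(), payload=None):
        self.messages.append((msg, tags, payload))


stub = StubBroker()
update_import_status("Completed", import_uuid, user_pk, notify=stub)
msg, tags, payload = stub.messages[0]
assert payload["status"] == "Completed"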
Example #3
def build_ui_payload_from_cache(import_pk, user_pk):
    """
    Loads existing import records from the Redis cache and parses them in lieu of
    re-parsing the uploaded file and re-resolving its string-based
    line/assay/MeasurementType identifiers. This supports the Step 3 -> Step 4
    transition of the import, and lets us reuse most of the same code that already
    handles the Step 2 -> 4 transition.

    :return: the UI JSON for Step 4 "Inspect"
    """
    import_ = Import.objects.filter(pk=import_pk).select_related("file").get()
    User = get_user_model()
    user = User.objects.get(pk=user_pk)

    logger.info(f"Building import {import_.pk}'s UI payload from cache.")
    parser = SeriesCacheParser(master_units=import_.y_units)
    import_records = parser.parse(import_.uuid)
    aggregator = ErrorAggregator()

    # look up MeasurementTypes referenced in the import so we can build JSON containing them.
    # if we got this far, they'll be in EDD's database unless recently removed, which should
    # be unlikely
    category = import_.category
    MTypeClass = MTYPE_GROUP_TO_CLASS[category.mtype_group]
    unique_mtypes = MTypeClass.objects.filter(pk__in=parser.mtype_pks)

    # get other context from the database
    hour_units = MeasurementUnit.objects.get(unit_name="hours")
    assay_time_meta_pk = (
        MetadataType.objects.filter(uuid=SYSTEM_META_TYPES["Time"])
        .values_list("pk", flat=True)
        .get()
    )
    found_count = len(unique_mtypes)

    if found_count != len(parser.mtype_pks):
        missing_pks = parser.mtype_pks - {mtype.pk for mtype in unique_mtypes}
        aggregator.raise_errors(err_codes.MEASUREMENT_TYPE_NOT_FOUND,
                                occurrences=missing_pks)

    # TODO: fold assay times into UI payload to give user helpful feedback as in UI mockup
    assay_pk_to_time = None
    if parser.matched_assays:
        assay_pks = parser.loa_pks
        assay_pk_to_time = verify_assay_times(aggregator, assay_pks, parser,
                                              assay_time_meta_pk)
    required_inputs = compute_required_context(category, import_.compartment,
                                               parser, assay_pk_to_time)
    payload = build_summary_json(import_, required_inputs, import_records,
                                 unique_mtypes, hour_units.pk)
    notify = RedisBroker(user)
    file_name = import_.file.filename
    msg = _('Your file "{file_name}" is ready to import').format(
        file_name=file_name)
    notify.notify(msg, tags=["import-status-update"], payload=payload)
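
The missing-type check in the middle of the function is plain set arithmetic between the pks the parser referenced and the pks actually found; isolated, it looks like this:

# self-contained illustration of the missing-pk check above
referenced_pks = {101, 102, 103}          # pks named in the cached import records
found_pks = {101, 103}                    # pks still present in the database
missing_pks = referenced_pks - found_pks  # types removed since the upload
assert missing_pks == {102}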
Example #4
    def submit_export(self, request, context):
        if self._export_ok:
            broker = ExportBroker(request.user.id)
            notifications = RedisBroker(request.user)
            path = broker.save_params(request.POST)
            result = tasks.export_worklist_task.delay(request.user.id, path)
            # use task ID as notification ID; may replace message when export is complete
            notifications.notify(
                _(
                    "Your worklist request is submitted. Another message with a "
                    "download link will appear when the worklist processing is complete."
                ),
                uuid=result.id,
            )
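
Passing uuid=result.id keys the notification to the Celery task id, so a later message sent with the same uuid can supersede this placeholder (Example #9 instead calls mark_read on that id). A hypothetical completion-side handler, assuming that notify() with a repeated uuid replaces the earlier message:

# hypothetical completion handler; assumes a repeated uuid replaces the
# placeholder notification rather than adding a second one
def on_worklist_complete(notifications, task_id, download_url):
    notifications.notify(
        f'Your worklist is ready. <a href="{download_url}">Download</a>',
        tags=("download",),
        payload={"url": download_url},
        uuid=task_id,  # same uuid as the "submitted" placeholder
    )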
Example #5
    def submit_export(self, request, context):
        # TODO: uncovered code
        if self._export_ok:
            broker = ExportBroker(request.user.id)
            notifications = RedisBroker(request.user)
            path = broker.save_params(request.POST)
            logger.debug(f"Saved export params to path {path}")
            result = tasks.export_table_task.delay(request.user.id, path)
            # use task ID as notification ID; may replace message when export is complete
            notifications.notify(
                _(
                    "Your export request is submitted. Another message with a "
                    "download link will appear when the export processing is complete."
                ),
                uuid=result.id,
            )
Example #6
    def post(self, request, *args, **kwargs):
        study = self.object = self.get_object()
        try:
            import_id, done = self._parse_payload(request)
            if done:
                # once all pages are parsed, submit task and send notification
                logger.debug(f"Submitting Celery task for import {import_id}")
                result = tasks.import_table_task.delay(study.pk,
                                                       request.user.pk,
                                                       import_id)
                RedisBroker(request.user).notify(
                    _("Data is submitted for import. You may continue to use EDD, "
                      "another message will appear once the import is complete."
                      ),
                    uuid=result.id,
                )
            return JsonResponse(data={}, status=codes.accepted)
        # TODO: uncovered code
        except table.ImportTooLargeException as e:
            return HttpResponse(str(e), status=codes.request_entity_too_large)
        except table.ImportBoundsException as e:
            return HttpResponse(str(e), status=codes.bad_request)
        except table.ImportException as e:
            return HttpResponse(str(e), status=codes.server_error)
        except RuntimeError as e:
            logger.exception(f"Data import failed: {e}")

            # return error synchronously so it can be displayed right away in context.
            # no need for a separate notification here
            messages.error(request, e)
            return HttpResponse(f"Data import failed: {e}",
                                status=codes.server_error)
Example #7
def import_table_task(study_id, user_id, import_id):
    """
    Task runs the code for importing a table of data.

    :param study_id: the primary key of the target study
    :param user_id: the primary key of the user running the import
    :param import_id: the UUID of this import
    :returns: a message to display via the TaskNotification middleware
    :throws RuntimeError: on any errors occurring while running the import
    """
    try:
        study = models.Study.objects.get(pk=study_id)
        user = User.objects.get(pk=user_id)
        processor = TableProcessor(study, user, import_id)
        notifications = RedisBroker(user)
        try:
            processor.run()
            processor.send_notifications(notifications)
        except Exception as e:
            logger.exception("Failure in import_table_task", e)
            processor.send_errors(notifications, e)
            raise
    except Exception as e:
        logger.exception(f"Failure in import_table_task: {e}")
        raise exceptions.ImportTaskError(
            _("Failed import to study {study_id}, EDD encountered this problem: "
              "{e}").format(study_id=study_id, e=e)) from e
Example #8
    def post(self, request, *args, **kwargs):
        study = self.object = self.get_object()
        if not study.user_can_write(request.user):
            return HttpResponse(status=codes.forbidden)
        try:
            import_id, done = self._parse_payload(request)
            if done:
                # once all pages are parsed, submit task and send notification
                logger.debug(f"Submitting Celery task for import {import_id}")
                result = tasks.import_table_task.delay(
                    study.pk, request.user.pk, import_id
                )
                RedisBroker(request.user).notify(
                    _(
                        "Data is submitted for import. You may continue to use EDD, "
                        "another message will appear once the import is complete."
                    ),
                    uuid=result.id,
                )
            return JsonResponse(data={}, status=codes.accepted)
        except exceptions.ImportBoundsError as e:
            return HttpResponse(str(e), status=codes.bad_request)
        except exceptions.LoadError as e:
            return HttpResponse(str(e), status=codes.server_error)
        except Exception as e:
            logger.exception(f"Table import failed: {e}")
            messages.error(request, e)
            return HttpResponse(
                f"Table import failed: {e}", status=codes.server_error
            )
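
The chain of except clauses amounts to a mapping from failure class to HTTP status. The same policy expressed as data (a sketch only, reusing the exception and status names from the snippet):

# the except-clause policy above as a lookup table; a sketch, not EDD's code
ERROR_STATUS = {
    exceptions.ImportBoundsError: codes.bad_request,  # malformed or oversized payload
    exceptions.LoadError: codes.server_error,         # failure inside the import pipeline
}

def status_for(exc):
    # anything unanticipated falls back to a generic 500
    return ERROR_STATUS.get(type(exc), codes.server_error)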
Example #9
def export_worklist_task(self, user_id, param_path):
    """
    Task runs the code for creating a worklist export, from form data validated by a view.

    :param user_id: the primary key of the user running the worklist
    :param param_path: the key returned from main.redis.ScratchStorage.save()
        used to access saved worklist parameters
    :returns: the key used to access worklist data from main.redis.ScratchStorage.load()
    :throws RuntimeError: on any errors occurring while running the export
    """
    try:
        # load info needed to build worklist
        User = get_user_model()
        user = User.objects.get(id=user_id)
        notifications = RedisBroker(user)
        broker = ExportBroker(user_id)
        export_id = self.request.id[:8]
        try:
            export_name = execute_export_worklist(broker, user, export_id,
                                                  param_path)
            url = f'{reverse("export:worklist")}?download={export_id}'
            message = _(
                'Your worklist for "{name}" is ready. '
                '<a href="{url}" class="download">Download the file here</a>.'
            ).format(name=export_name, url=url)
            notifications.notify(message,
                                 tags=("download", ),
                                 payload={"url": url})
        except Exception as e:
            logger.exception(f"Failure in export_worklist_task: {e}")
            message = _("Export failed. EDD encountered this problem: {ex}"
                        ).format(ex=e)
            notifications.notify(message)
        notifications.mark_read(self.request.id)
    except Exception as e:
        logger.exception("Failure in export_worklist_task: %s", e)
        raise RuntimeError(
            _("Failed export, EDD encountered this problem: {e}").format(e=e))
Example #10
def import_table_task(study_id, user_id, import_id):
    """
    Task runs the code for importing a table of data.

    :param study_id: the primary key of the target study
    :param user_id: the primary key of the user running the import
    :param import_id: the UUID of this import
    :returns: a message to display via the TaskNotification middleware
    :throws RuntimeError: on any errors occurring while running the import
    """
    try:
        study = models.Study.objects.get(pk=study_id)
        user = User.objects.get(pk=user_id)
        processor = TableProcessor(study, user, import_id)
        notifications = RedisBroker(user)
        try:
            processor.run()
            processor.send_notifications(notifications)
            # legacy import technically supports importing using multiple protocols
            # if any number other than one is used, just leave protocol blank/empty
            study_imported.send(
                sender=TableProcessor,
                study=study,
                user=user,
                protocol=None
                if len(processor.protocols) != 1 else processor.protocols[0],
                count=len(processor.lines),
            )
        except Exception as e:
            logger.exception("Failure in import_table_task", e)
            processor.send_errors(notifications, e)
            raise
    except Exception as e:
        logger.exception(f"Failure in import_table_task: {e}")
        raise exceptions.ImportTaskError(
            _("Failed import to study {study_id}, EDD encountered this problem: "
              "{e}").format(study_id=study_id, e=e)) from e
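
This variant also fires a study_imported signal after a successful run. A hedged sketch of a receiver, assuming study_imported is a standard django.dispatch.Signal sent with the keyword arguments shown above:

from django.dispatch import receiver

# hypothetical receiver; assumes study_imported is a django.dispatch.Signal
# importable from the module that defines it
@receiver(study_imported)
def on_study_imported(sender, study, user, protocol, count, **kwargs):
    # audit logging or cache invalidation could hook in here
    logger.info(f"{user} imported {count} lines into {study} "
                f"(protocol={protocol})")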
Example #11
def import_table_task(self, study_id, user_id, import_id):
    """
    Task runs the code for importing a table of data.

    :param study_id: the primary key of the target study
    :param user_id: the primary key of the user running the import
    :param import_id: the UUID of this import
    :returns: a message to display via the TaskNotification middleware
    :throws RuntimeError: on any errors occurring while running the import
    """
    start = arrow.utcnow()
    study = None
    user = None
    import_params = None
    try:
        # load all the import data into memory from DB/from cache, leaving it in cache for
        # potential later reuse
        study = models.Study.objects.get(pk=study_id)
        user = User.objects.get(pk=user_id)
        notifications = RedisBroker(user)

        # set a fake request object with update info
        fake_request = HttpRequest()

        try:
            # load global context for the import
            broker = ImportBroker()
            import_params = json.loads(broker.load_context(import_id))
            if "update_id" in import_params:
                update_id = import_params.get("update_id")
                fake_request.update_obj = models.Update.objects.get(pk=update_id)
            else:
                fake_request.update_obj = models.Update.load_update(user=user)
            set_thread_variable("request", fake_request)

            # load paged series data
            pages = broker.load_pages(import_id)

            # do the import
            total_added = 0
            total_updated = 0
            importer = TableImport(study, user)
            importer.parse_context(import_params)

            with transaction.atomic(savepoint=False):
                for page in pages:
                    parsed_page = json.loads(page)
                    added, updated = importer.import_series_data(parsed_page)
                    total_added += added
                    total_updated += updated
                importer.finish_import()

            # if requested, notify user of completion (e.g. for a large import)
            send_import_completion_email(
                study, user, import_params, start, total_added, total_updated
            )
            message = _(
                "Finished import to {study}: {total_added} added and {total_updated} "
                "updated measurements."
            ).format(
                study=study.name,
                total_added=total_added,
                total_updated=total_updated,
            )
            notifications.notify(message, tags=("legacy-import-message",))
            notifications.mark_read(self.request.id)

        except Exception as e:
            logger.exception("Failure in import_table_task", e)

            # send configured error notifications
            send_import_failure_email(study, user, import_id, import_params)
            message = _(
                "Failed import to {study}, EDD encountered this problem: {e}"
            ).format(study=study.name, e=e)
            notifications.notify(message, tags=("legacy-import-message",))
            notifications.mark_read(self.request.id)
            raise RuntimeError(
                _(
                    "Failed import to {study}, EDD encountered this problem: {e}"
                ).format(study=study.name, e=e)
            )
        finally:
            set_thread_variable("request", None)
    except Exception as e:
        logger.exception(f"Failure in import_table_task: {e}")
        raise RuntimeError(
            _("Failed import to study {study_id}, EDD encountered this problem: "
              "{e}").format(study_id=study_id, e=e)
        )
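
The heart of this task is the page loop inside transaction.atomic(): each cached page is deserialized and handed to the importer, totals are accumulated for the final notification, and the whole set commits or rolls back together. The accumulation pattern, isolated with stand-ins:

import json

# stand-ins for the broker's cached pages and the importer, for illustration
pages = [json.dumps([{"x": 0, "y": 1.5}]), json.dumps([{"x": 1, "y": 2.5}])]

total_added = total_updated = 0
for page in pages:
    parsed_page = json.loads(page)
    # a real TableImport would persist rows here and report (added, updated)
    added, updated = len(parsed_page), 0
    total_added += added
    total_updated += updated

print(f"{total_added} added and {total_updated} updated measurements.")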
Example #12
def process_import_file(import_pk, user_pk, requested_status, initial_upload):
    """
    The back end Celery task supporting import Step 2, "Upload", and also single-request
    imports made via the REST API.  Parses and verifies the file format and content,
    then proceeds to additional phases if requested / allowed.
    This includes verifying identifiers with external databases (e.g. PubChem, UniProt).
    """
    import_ = None
    notify = None
    handler = None
    try:
        fetch_fields = (
            "category",
            "file",
            "file_format",
            "protocol",
            "study",
            "x_units",
            "y_units",
        )
        import_ = (Import.objects.filter(pk=import_pk).select_related(
            *fetch_fields).get())
        User = get_user_model()
        user = User.objects.get(pk=user_pk)
        notify = RedisBroker(user)

        # process the file, sending notifications along the way. Raises EDDImportError.
        handler = ImportFileHandler(notify, import_, user)
        handler.process_file(initial_upload)

        # if client requested a status transition, likely to SUBMITTED, verify
        # that import state is consistent with attempting it. Raises EDDImportError.
        attempt_status_transition(
            import_,
            requested_status,
            user,
            notify=notify,
            run_async=False,
            aggregator=handler,
        )

    except (EDDImportError, ObjectDoesNotExist, RuntimeError) as e:
        file_name = import_.file.filename if import_ else ""
        study_url = (reverse("main:overview",
                             kwargs={"slug": import_.study.slug})
                     if import_ else "")
        logger.exception(
            f'Exception processing import upload for file "{file_name}".  '
            f"Study is {study_url}")
        if import_:
            import_.status = Import.Status.FAILED
            import_.save()

            # add this error to the list if it's not one detected by the import code
            if not isinstance(e, EDDImportError):
                handler.add_error(err_codes.UNEXPECTED_ERROR,
                                  occurrence=str(e))

            # build a payload including any earlier errors
            payload = build_err_payload(handler, import_) if handler else {}

            if notify:
                msg = 'Processing for your import file "{file_name}" has failed'.format(
                    file_name=file_name)
                notify.notify(msg,
                              tags=["import-status-update"],
                              payload=payload)

        # if this was a predicted error encountered during normal processing, the task
        # has succeeded; also, Celery would have trouble serializing the exception
        if isinstance(e, EDDImportError):
            logger.info("Predicted error during import processing")
            return

        raise e
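
Note the ordering on the failure path: the FAILED status is saved before any notification goes out, so the database stays authoritative even if the message is lost. That ordering, reduced to a stand-in:

# stand-in showing the persist-then-notify ordering of the failure path;
# `import_` and `notify` here are illustrative objects, not EDD's classes
def fail_import(import_, notify, payload):
    import_.status = "Failed"  # Import.Status.FAILED in the real code
    import_.save()             # persist first: the DB is the source of truth
    notify.notify(
        f'Processing for your import file "{import_.file.filename}" has failed',
        tags=["import-status-update"],
        payload=payload,
    )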
Example #13
    def post(self, request, *args, **kwargs):
        study = self.object = self.get_object()
        try:
            #######################################################################################
            # Extract & verify data from the request
            #######################################################################################
            body = json.loads(request.body)  # TODO: add JSON validation
            page_index = body["page"] - 1
            total_pages = body["totalPages"]
            import_id = body["importId"]
            series = body["series"]

            broker = table.ImportBroker()
            notifications = RedisBroker(request.user)

            # test result limits to prevent erroneous or malicious clients from abusing resources
            broker.check_bounds(import_id, series, total_pages)

            #######################################################################################
            # Cache this page of data
            #######################################################################################
            logger.debug(
                f"Caching import page {page_index+1} of {total_pages}: ({import_id})"
            )

            cached_pages = broker.add_page(import_id, json.dumps(series))
            # if this is the initial page of data, store the context
            if page_index == 0:
                # include an update record for the original request
                update = edd_models.Update.load_request_update(request)
                body["update_id"] = update.id
                # cache the context for the whole import (only sent with this page)
                del body["series"]
                broker.set_context(import_id, json.dumps(body))

            # Test whether all result pages are received.
            all_received = cached_pages == total_pages

            #######################################################################################
            # If all the data are received, schedule a background task to process them
            #######################################################################################
            if all_received:
                logger.debug(f"Submitting Celery task for import {import_id}")
                result = tasks.import_table_task.delay(study.pk,
                                                       request.user.pk,
                                                       import_id)

                # notify that import is processing
                notifications.notify(
                    _("Data is submitted for import. You may continue to use EDD, "
                      "another message will appear once the import is complete."
                      ),
                    uuid=result.id,
                )

            return JsonResponse(data={}, status=codes.accepted)

        # TODO: uncovered code
        except table.ImportTooLargeException as e:
            return HttpResponse(str(e), status=codes.request_entity_too_large)
        except table.ImportBoundsException as e:
            return HttpResponse(str(e), status=codes.bad_request)
        except table.ImportException as e:
            return HttpResponse(str(e), status=codes.server_error)
        except RuntimeError as e:
            logger.exception(f"Data import failed: {e}")

            # return error synchronously so it can be displayed right away in context.
            # no need for a separate notification here
            messages.error(request, e)
            return HttpResponse(f"Data import failed: {e}",
                                status=codes.server_error)
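
The completeness check hinges on add_page returning the running count of cached pages, which the view compares against totalPages. An in-memory stand-in for that contract:

# in-memory stand-in for ImportBroker's paging contract, for illustration only
class MemoryBroker:
    def __init__(self):
        self.pages = {}

    def add_page(self, import_id, page):
        self.pages.setdefault(import_id, []).append(page)
        return len(self.pages[import_id])  # pages cached so far


broker = MemoryBroker()
total_pages = 3
for page in ("page-1", "page-2", "page-3"):
    cached_pages = broker.add_page("import-1", page)
print(cached_pages == total_pages)  # True: all pages received, submit the task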