Example #1
def sync_user_profile(sender, instance, created, **kwargs):  # pylint: disable=unused-argument
    """
    Signal handler that creates/updates a DiscussionUser every time a profile is created/updated
    """
    if not settings.FEATURES.get('OPEN_DISCUSSIONS_USER_SYNC', False):
        return
    transaction.on_commit(lambda: tasks.sync_discussion_user.delay(instance.user_id))
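A handler like this only takes effect once it is connected to the post_save signal. A minimal sketch of the wiring, assuming a hypothetical Profile model (the sender is not shown in the original):

from django.db.models.signals import post_save

# Connect the handler so it fires on every Profile save.
post_save.connect(sync_user_profile, sender=Profile)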
Example #2
    def save(self, **kwargs):
        new = self.pk is None
        if not new and (self.was_processed or not self.processed):
            raise TypeError

        from c3nav.mapdata.utils.cache.changes import changed_geometries

        if self.geometries_changed is None:
            self.geometries_changed = not changed_geometries.is_empty

        super().save(**kwargs)

        with suppress(FileExistsError):
            os.mkdir(os.path.dirname(self._changed_geometries_filename()))

        if self.geometries_changed:
            pickle.dump(changed_geometries, open(self._changed_geometries_filename(), 'wb'))

        if new:
            transaction.on_commit(
                lambda: cache.set('mapdata:last_update', self.to_tuple, None)
            )
            if settings.HAS_CELERY and settings.AUTO_PROCESS_UPDATES:
                transaction.on_commit(
                    lambda: process_map_updates.delay()
                )
Example #3
def update_attr_in_bulk_for_ids(values, attr, model):
    """Update a table using a list of ids.

    :param values: Dict of new values where the key is the pk of the element to update.
    :param attr: Attribute to update.
    :param model: Model of the ids.
    """
    if not values:
        return

    values = [str((id, order)) for id, order in values.items()]
    sql = """
        UPDATE "{tbl}"
        SET "{attr}"=update_values.column2
        FROM (
          VALUES
            {values}
        ) AS update_values
        WHERE "{tbl}"."id"=update_values.column1;
    """.format(tbl=model._meta.db_table,
               values=', '.join(values),
               attr=attr)

    cursor = connection.cursor()

    # We can have deadlocks with multiple updates over the same object
    # In that situation we just retry
    def _run_sql(retries=0, max_retries=3):
        try:
            cursor.execute(sql)
        except DatabaseError:
            if retries < max_retries:
                _run_sql(retries + 1)

    transaction.on_commit(_run_sql)
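For reference, a hedged usage sketch: given a hypothetical Task model with an integer order column, the call below rewrites order for several rows in a single UPDATE once the surrounding transaction commits.

# Maps each primary key to the new value for the 'order' attribute.
update_attr_in_bulk_for_ids({1: 10, 2: 20, 3: 30}, 'order', Task)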
Example #4
def manager_post_save_handler(sender, instance, created, **kwargs):
    """Run newly created (spawned) processes."""
    if instance.status == Data.STATUS_DONE or instance.status == Data.STATUS_ERROR or created:
        # Run manager at the end of the potential transaction. Otherwise
        # tasks are sent to workers before the transaction ends and therefore
        # workers cannot access objects created inside the transaction.
        transaction.on_commit(lambda: commit_signal(instance.id))
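The comment spells out the race this pattern avoids: workers read from their own database connections, so a task enqueued before COMMIT can look up the new row and find nothing. A minimal sketch of the two orderings, assuming a hypothetical Celery task process_data:

# Racy: the worker may run before the surrounding transaction commits.
process_data.delay(instance.id)

# Safe: enqueue only once the row is visible to other connections.
transaction.on_commit(lambda: process_data.delay(instance.id))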
Example #5
def m2m_changed_group_permissions(sender, instance, action, pk_set, *args, **kwargs):
    logger.debug("Received m2m_changed from group %s permissions with action %s" % (instance, action))
    if instance.pk and (action == "post_remove" or action == "post_clear"):
        logger.debug("Checking if service permission changed for group {}".format(instance))
        # As validating an entire groups service could lead to many thousands of permission checks
        # first we check that one of the permissions changed is, in fact, a service permission.
        perms = Permission.objects.filter(pk__in=pk_set)
        got_change = False
        service_perms = [svc.access_perm for svc in ServicesHook.get_services()]
        for perm in perms:
            natural_key = perm.natural_key()
            path_perm = "{}.{}".format(natural_key[1], natural_key[0])
            if path_perm not in service_perms:
                # Not a service permission, keep searching
                continue
            for svc in ServicesHook.get_services():
                if svc.access_perm == path_perm:
                    logger.debug("Permissions changed for group {} on "
                                 "service {}, re-validating services for groups users".format(instance, svc))

                    # Bind svc as a default argument: the loop rebinds it, and
                    # on_commit callbacks run only after iteration finishes.
                    def validate_all_groups_users_for_service(svc=svc):
                        logger.debug("Performing validation for service {}".format(svc))
                        for user in instance.user_set.all():
                            svc.validate_user(user)

                    transaction.on_commit(validate_all_groups_users_for_service)
                    got_change = True
                    break  # Found service, break out of services iteration and go back to permission iteration
        if not got_change:
            logger.debug("Permission change for group {} was not service permission, ignoring".format(instance))
Example #6
    def save(self, *args, **kwargs):
        """Insert the cluster into the database or update it if already
        present, spawning the cluster if it's not already spawned.
        """
        # actually start the cluster
        if self.jobflow_id is None:
            self.jobflow_id = self.provisioner.start(
                user_username=self.created_by.username,
                user_email=self.created_by.email,
                identifier=self.identifier,
                emr_release=self.emr_release.version,
                size=self.size,
                public_key=self.ssh_key.key,
            )
            # once we've stored the jobflow id we can fetch the status for the first time
            transaction.on_commit(self.sync)

            Metric.record('cluster-emr-version',
                          data={'version': self.emr_release.version})

        # set the dates
        if not self.expires_at:
            # clusters should expire after the lifetime it's set to
            self.expires_at = timezone.now() + timedelta(hours=self.lifetime)

        super().save(*args, **kwargs)
Example #7
    def run(self):
        """Actually run the scheduled Spark job."""
        # if the job ran before and is still running, don't start it again
        if not self.is_runnable:
            return
        jobflow_id = self.provisioner.run(
            user_username=self.created_by.username,
            user_email=self.created_by.email,
            identifier=self.identifier,
            emr_release=self.emr_release.version,
            size=self.size,
            notebook_key=self.notebook_s3_key,
            is_public=self.is_public,
            job_timeout=self.job_timeout,
        )
        # Create new job history record.
        run = self.runs.create(
            spark_job=self,
            jobflow_id=jobflow_id,
            scheduled_at=timezone.now(),
            emr_release_version=self.emr_release.version,
            size=self.size,
        )
        # Remove the cached latest run so this object will requery it.
        try:
            delattr(self, 'latest_run')
        except AttributeError:  # pragma: no cover
            pass  # It didn't have a `latest_run` and that's ok.

        Metric.record('sparkjob-emr-version',
                      data={'version': self.emr_release.version})

        # sync with EMR API
        transaction.on_commit(run.sync)
Example #8
 def visibility_update(self):
     """
     Perform updates that may be needed by a change in visibility at
     the next commit, or immediately if we are not in a transaction.
     """
     transaction.on_commit(self._visibility_update)
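The docstring describes documented on_commit semantics: inside an atomic block the callback is deferred until COMMIT, while in autocommit mode it runs immediately. A minimal standalone sketch of that behavior:

from django.db import transaction

def demo():
    # In autocommit mode (no atomic block), the callback runs right away.
    transaction.on_commit(lambda: print('runs immediately'))
    with transaction.atomic():
        # Inside atomic(), the callback is deferred until the block commits.
        transaction.on_commit(lambda: print('runs after COMMIT'))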
Example #9
 def delete(self, *args, **kwargs):
     ret = super(BaseModel, self).delete(*args, **kwargs)
     if hasattr(transaction, 'on_commit'):
         transaction.on_commit(self.flush)
     else:
         self.flush()
     return ret
Example #10
    def sync(self, info=None):
        """
        Updates latest status and life cycle datetimes.
        """
        if info is None:
            info = self.info
        # a mapping between what the provisioner returns and what the data model uses
        model_field_map = (
            ('state', 'status'),
            ('creation_datetime', 'started_at'),
            ('ready_datetime', 'ready_at'),
            ('end_datetime', 'finished_at'),
        )
        # set the various model fields to the value the API returned
        for api_field, model_field in model_field_map:
            field_value = info.get(api_field)
            if field_value is None:
                continue
            setattr(self, model_field, field_value)

        # if the job cluster terminated with error raise the alarm
        if self.status == Cluster.STATUS_TERMINATED_WITH_ERRORS:
            transaction.on_commit(lambda: self.alert(info))
        self.save()
        return self.status
Example #11
def enqueue_task(action, instance, **kwargs):
    """
    Common utility for enqueuing a task for the given action and
    model instance.
    """
    identifier = get_identifier(instance)
    options = {}
    if settings.CELERY_HAYSTACK_QUEUE:
        options['queue'] = settings.CELERY_HAYSTACK_QUEUE
    if settings.CELERY_HAYSTACK_COUNTDOWN:
        options['countdown'] = settings.CELERY_HAYSTACK_COUNTDOWN

    task = get_update_task()

    def task_func():
        return task.apply_async((action, identifier), kwargs, **options)

    if hasattr(transaction, 'on_commit'):
        # Django 1.9 on_commit hook
        transaction.on_commit(
            task_func
        )
    elif hasattr(connection, 'on_commit'):
        # Django-transaction-hooks
        connection.on_commit(
            task_func
        )
    else:
        task_func()
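Both fallbacks are historical: transaction.on_commit has existed since Django 1.9, and connection.on_commit came from the django-transaction-hooks backport that preceded it. On any modern Django the dispatch collapses to a single call:

# Equivalent on Django >= 1.9, where transaction.on_commit always exists.
transaction.on_commit(task_func)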
Example #12
    def post(self, request, pk):
        account = self.get_object()

        tx_json = json.loads(request.body.decode('utf8'))
        on_commit(lambda: receive_transaction_hook(account, tx_json))

        return HttpResponse()
Example #13
    def __add_events(self, events):
        """Add events to the commit hook.

        The events added will be sent when the transaction manager
        successfully commits the changes.
        """
        per_transaction = getattr(self.per_thread,
                                  'notifications_by_transaction', None)
        if per_transaction is None:
            per_transaction = {}
            self.per_thread.notifications_by_transaction = per_transaction

        # get a thread ID
        thread_id = threading.current_thread().ident
        notifications = per_transaction.get(thread_id, None)
        if notifications is None:
            # create an object to collect pending notifications and
            # register it with the transaction
            notifications = PendingNotifications(self)
            per_transaction[thread_id] = notifications

            def on_commit():
                """Send all pending notifications from this thread."""
                per_transaction.pop(thread_id).send()

            def _on_rollback():
                """Abort all pending notifications from this thread."""
                per_transaction.pop(thread_id, None)

            transaction.on_commit(on_commit)
            on_rollback(_on_rollback)

        notifications.add_events(events)
Example #14
File: admin.py Project: hdknr/djuploader
 def save(self, *args, **kwargs):
     transaction.on_commit(self.on_commit)
     instance = super(UploadFileAdminForm, self).save(*args, **kwargs)
     if self.cleaned_data.get('signal_event', False):
         self.uploaded = instance
         instance.save()
     return instance
Example #15
 def handle(self, *args, **options):
     full_import = True
     startdate = options["startdate"]
     enddate = options.get("enddate")
     full_import = False
     response = self.get_api_results(startdate, enddate=enddate)
     metadata = {"request_time": datetime.strftime(datetime.now(), "%Y-%m-%dT%H:%M:%SZ"),
                 "startdate": datetime.strftime(startdate, "%Y-%m-%dT%H:%M:%SZ"),
                 "status": response.status_code}
     import_log = ImportLog(full=full_import)
     if response.status_code == 200:
         json_data = response.json()
         metadata["size"] = json_data["count"]
         import_log.metadata = metadata
         import_log.save()
         to_create = []
         for idx, record in enumerate(json_data["results"]):
             defaults = {"row_index": idx, "import_id": import_log}
             to_create.append(InvestmentLoad(data=record, **defaults))
         with transaction.atomic():
             transaction.on_commit(lambda: print("transaction complete"))
             print(f"creating {len(to_create)} rows")
             InvestmentLoad.objects.bulk_create(to_create, batch_size=500)
     else:
         print(f"API response {response.status_code}")
Example #16
 def perform_create(self, serializer):
     instance = serializer.save()
     instance.user = self.request.user
     instance.save()
     transaction.on_commit(lambda: run_command_execution.apply_async(
         args=(instance.id,), task_id=str(instance.id)
     ))
Example #17
File: views.py Project: pkimber/mail
 def form_valid(self, form):
     selection = form.cleaned_data['send_email']
     use_template = selection == 'template'
     with transaction.atomic():
         result = super().form_valid(form)
         if use_template:
             context = {
                 self.object.email: {
                     "SUBJECT": "Re: " + self.object.subject,
                     "BODY": self.object.description,
                     "DATE": self.object.created.strftime("%d-%b-%Y %H:%M:%S"),
                 },
             }
             queue_mail_template(
                 self.object,
                 'enquiry_acknowledgement',
                 context,
             )
         else:
             attachments = []
             if self.object.document:
                 attachments.append(self.object.document.file.name)
             queue_mail_message(
                 self.object,
                 [self.object.email],
                 self.object.subject,
                 self.object.description,
                 attachments=attachments,
             )
         transaction.on_commit(lambda: process_mail.delay())
         return result
Example #18
def umount(sender,instance,**kwargs):
    # to avoid repeated deletion
    for mount in sender.objects.select_for_update().filter(pk=instance.pk):
        @transaction.atomic
        def destroy(mount=mount):
            volume=Volume.objects.select_for_update().get(pk=mount.volume.pk)
            if not mount.ready:
                print('WARNING: delete mount under building')
            else:
                try:
                    mount.volume.cloud.driver.volume_unmount(
                        mount.volume.cloud.platform_credential,
                        str(mount.volume.uuid),
                        str(mount.instance.uuid)
                    )
                except Exception as e:
                    mount.pk=None
                    mount.save()
                    traceback.print_exc()
                    return
                volume.status=VOLUME_STATUS.available.value
                volume.save()
                mount.instance.update_remedy_script(utils.remedy_script_mount_remove(mount))
            destroyed.send(sender=sender, instance=mount, name='destroyed')
        transaction.on_commit(Thread(target=destroy).start)
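Note that Thread(target=destroy) is built eagerly while only its bound start method is handed to on_commit, so the thread begins after COMMIT. An equivalent, more explicit spelling:

thread = Thread(target=destroy)
transaction.on_commit(thread.start)  # start the worker thread only after COMMIT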
Example #19
def materialize_instance(sender, instance, **kwargs):
    if not kwargs['created'] or instance.ready: return
    instance.built_time=now()
    instance.save()
    instance.update_remedy_script(instance.template.remedy_script+'\n'+instance.image.remedy_script,heading=True)
    @transaction.atomic
    def materialize(instance=instance):
        instance=sender.objects.select_for_update().get(pk=instance.pk)
        remark = settings.PACKONE_LABEL+'.'+instance.cloud.name+';'
        if instance.remark: remark+=instance.remark
        info=instance.cloud.driver.vm_create(
            instance.cloud.platform_credential,
            instance.image.access_id,
            instance.template.vcpu,
            instance.template.mem,
            instance.template.access_id,
            remark
        )
        instance.uuid=UUID(info["uuid"].replace('-', ''), version=4)
        instance.vcpu=info["vcpu"]
        instance.mem=info["mem"]
        instance.built_time=info["create_time"]
        instance.ipv4=info["ipv4"]
        instance.save()
        hosts='###instance###\n'+instance.hosts_record
        if instance.cloud.hosts: hosts=hosts+'\n###cloud###\n'+instance.cloud.hosts
        instance.update_remedy_script(utils.remedy_script_hosts_add(hosts, overwrite=True),heading=True)
        materialized.send(sender=sender, instance=instance, name='materialized')
    transaction.on_commit(Thread(target=materialize).start)
Example #20
def inform_deleted_data(*args, information=None):
    """
    Informs the autoupdate system and the caching system about the deletion of
    elements.

    The function has to be called with the attributes collection_string and id.
    Multiple elements can be used. For example:

    inform_deleted_data('motions/motion', 1, 'assignments/assignment', 5)

    The argument information is added to each collection element.
    """
    if len(args) % 2 or not args:
        raise ValueError(
            "inform_deleted_data has to be called with the same number of "
            "collection strings and ids. It has to be at least one collection "
            "string and one id.")

    # Go through each pair of collection_string and id and generate a collection
    # element from it.
    collection_elements = CollectionElementList()
    for index in range(0, len(args), 2):
        collection_elements.append(CollectionElement.from_values(
            collection_string=args[index],
            id=args[index + 1],
            deleted=True,
            information=information))
    # If currently there is an open database transaction, then the
    # send_autoupdate function is only called, when the transaction is
    # commited. If there is currently no transaction, then the function
    # is called immediately.
    transaction.on_commit(lambda: send_autoupdate(collection_elements))
Example #21
def inform_changed_data(instances, information=None):
    """
    Informs the autoupdate system and the caching system about the creation or
    update of an element.

    The argument instances can be one instance or an iterable over instances.
    """
    root_instances = set()
    if not isinstance(instances, Iterable):
        # Make sure instances is an iterable
        instances = (instances, )
    for instance in instances:
        try:
            root_instances.add(instance.get_root_rest_element())
        except AttributeError:
            # Instance has no method get_root_rest_element. Just ignore it.
            pass

    # Generates a collection element list for the root_instances.
    collection_elements = CollectionElementList()
    for root_instance in root_instances:
        collection_elements.append(
            CollectionElement.from_instance(
                root_instance,
                information=information))
    # If there is currently an open database transaction, then the
    # send_autoupdate function is only called when the transaction is
    # committed. If there is currently no transaction, then the function
    # is called immediately.
    transaction.on_commit(lambda: send_autoupdate(collection_elements))
Example #22
    def process_updates(cls):
        logger = logging.getLogger('c3nav')

        with cls.get_updates_to_process() as new_updates:
            if not new_updates:
                return ()

            if any(update.geometries_changed for update in new_updates):
                from c3nav.mapdata.utils.cache.changes import changed_geometries
                changed_geometries.reset()

                logger.info('Recalculating altitude areas...')

                from c3nav.mapdata.models import AltitudeArea
                AltitudeArea.recalculate()

                logger.info('%.3f m² of altitude areas affected.' % changed_geometries.area)

                last_processed_update = cls.last_processed_update(force=True)

                for new_update in new_updates:
                    logger.info('Applying changed geometries from MapUpdate #%(id)s (%(type)s)...' %
                                {'id': new_update.pk, 'type': new_update.type})
                    try:
                        new_changes = pickle.load(open(new_update._changed_geometries_filename(), 'rb'))
                    except FileNotFoundError:
                        logger.warning('changed_geometries pickle file not found.')
                    else:
                        logger.info('%.3f m² affected by this update.' % new_changes.area)
                        changed_geometries.combine(new_changes)

                logger.info('%.3f m² of geometries affected in total.' % changed_geometries.area)

                changed_geometries.save(last_processed_update, new_updates[-1].to_tuple)

                logger.info('Rebuilding level render data...')

                from c3nav.mapdata.render.renderdata import LevelRenderData
                LevelRenderData.rebuild()
            else:
                logger.info('No geometries affected.')

            logger.info('Rebuilding router...')
            from c3nav.routing.router import Router
            Router.rebuild(new_updates[-1].to_tuple)

            logger.info('Rebuilding locator...')
            from c3nav.routing.locator import Locator
            Locator.rebuild(new_updates[-1].to_tuple)

            for new_update in reversed(new_updates):
                new_update.processed = True
                new_update.save()

            transaction.on_commit(
                lambda: cache.set('mapdata:last_processed_update', new_updates[-1].to_tuple, None)
            )

            return new_updates
Example #23
 def apply_async(self, *args, **kwargs):
     """
     Unlike the default task in celery, this task does not return an async
     result
     """
     transaction.on_commit(
         lambda: super(TransactionAwareTask, self).apply_async(*args, **kwargs)
     )
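A task class like this is typically installed via the base argument of the task decorator. A hedged sketch, assuming a Celery application object named app (not part of the original):

@app.task(base=TransactionAwareTask)
def index_document(pk):
    # apply_async()/delay() on this task now waits for the surrounding
    # database transaction to commit before enqueueing.
    ...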
Example #24
def handle_create_coursecertificate(sender, instance, created, **kwargs):  # pylint: disable=unused-argument
    """
    When a MicromastersCourseCertificate model is created, generate the program certificate
    """
    if created:
        user = instance.user
        program = instance.course.program
        transaction.on_commit(lambda: generate_program_certificate(user, program))
Example #25
    def save(self):
        session = self.session

        session.generating_pdf = True
        session.save(update_fields=('generating_pdf',))
        transaction.on_commit(
            lambda: generate_pdf_and_previews.delay(session.slug)
        )
Example #26
File: tests.py Project: LouisAmon/django
    def test_db_query_in_hook(self):
        with transaction.atomic():
            Thing.objects.create(num=1)
            transaction.on_commit(
                lambda: [self.notify(t.num) for t in Thing.objects.all()]
            )

        self.assertDone([1])
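Since Django 3.2, TestCase.captureOnCommitCallbacks offers a built-in way to test such hooks without the custom assertDone helper used in Django's own suite. A minimal sketch of a similar test using that API:

def test_capture_hook(self):
    with self.captureOnCommitCallbacks(execute=True) as callbacks:
        with transaction.atomic():
            Thing.objects.create(num=1)
            transaction.on_commit(lambda: self.notify(1))
    self.assertEqual(len(callbacks), 1)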
Example #27
File: views.py Project: lazywhite/python
def download(request):
    transaction.on_commit(commit_callback)  
    file_name = request.GET.get("filename")
    filename = "/Users/white/local/" + file_name
    response = StreamingHttpResponse(file_iterator(filename))
    response["Content-Type"] = "application/octet-stream"
    response["Content-Disposition"] = "attachment;filename={0}".format(file_name)
    return response
Example #28
 def save(self, **kwargs):
     new = self.pk is None
     with transaction.atomic():
         super().save(**kwargs)
         if new:
             transaction.on_commit(
                 lambda: cache.set('site:last_site_update', self.pk, None)
             )
Example #29
 def save(self, *args, **kwargs):
     self.modified = timezone.now()
     ret = super(BaseModel, self).save(*args, **kwargs)
     if hasattr(transaction, 'on_commit'):
         transaction.on_commit(self.flush)
     else:
         self.flush()
     return ret
Example #30
File: forms.py Project: mozilla/kuma
    def save(self, document, **kwargs):
        """
        Persists the revision and returns it.
        Takes the view request and document of the revision.
        Does some specific things when the revision is fully saved.
        """
        # have to check for first edit before we save
        is_first_edit = not self.request.user.wiki_revisions().exists()

        # Making sure we don't commit the saving right away since we
        # want to do other things here.
        kwargs['commit'] = False

        if self.section_id and self.instance and self.instance.document:
            # The logic to save a section is slightly different and may
            # need to evolve over time; a section edit doesn't submit
            # all the fields, and we need to account for that when we
            # construct the new Revision.
            doc = Document.objects.get(pk=self.instance.document.id)
            old_rev = doc.current_revision
            new_rev = super(RevisionForm, self).save(**kwargs)
            new_rev.document = document
            new_rev.creator = self.request.user
            new_rev.toc_depth = old_rev.toc_depth
            new_rev.save()
            new_rev.review_tags.set(*list(old_rev.review_tags.names()))

        else:
            new_rev = super(RevisionForm, self).save(**kwargs)
            new_rev.document = document
            new_rev.creator = self.request.user
            new_rev.toc_depth = self.cleaned_data['toc_depth']
            new_rev.save()
            new_rev.review_tags.set(*self.cleaned_data['review_tags'])
            new_rev.localization_tags.set(*self.cleaned_data['localization_tags'])

            # when enabled store the user's IP address
            if waffle.switch_is_active('store_revision_ips'):
                RevisionIP.objects.log(
                    revision=new_rev,
                    headers=self.request.META,
                    data=json.dumps(self.akismet_parameters(),
                                    indent=2, sort_keys=True)
                )

            # send first edit emails
            if is_first_edit:
                transaction.on_commit(
                    lambda: send_first_edit_email.delay(new_rev.pk)
                )

            # schedule a document rendering
            document.schedule_rendering('max-age=0')

            # schedule event notifications
            EditDocumentEvent(new_rev).fire(exclude=new_rev.creator)

        return new_rev
Example #31
def upload_acknowledged(
    subtask_id: str,
    source_file_size: str,
    source_package_hash: str,
    result_file_size: str,
    result_package_hash: str,
) -> None:
    log(logger,
        f'Upload acknowledgment starts.',
        f'Source_file_size {source_file_size}',
        f'Source_package_hash: {source_package_hash}',
        f'Result_file_size: {result_file_size}',
        f'Result_package_hash: {result_package_hash}',
        subtask_id=subtask_id)
    assert isinstance(subtask_id, str)

    try:
        verification_request = VerificationRequest.objects.select_for_update(
        ).get(subtask_id=subtask_id)
    except VerificationRequest.DoesNotExist:
        log(
            logger,
            f'Task `upload_acknowledged` tried to get VerificationRequest object with ID {subtask_id} but it does not exist.',
            subtask_id=subtask_id,
            logging_level=LoggingLevel.ERROR,
        )
        return

    if verification_request.upload_acknowledged is True:
        log(
            logger,
            f'Task `upload_acknowledged` scheduled but VerificationRequest with ID {subtask_id} is already acknowledged.',
            subtask_id=subtask_id,
        )
        raise VerificationRequestAlreadyAcknowledgedError(
            f'Task `upload_acknowledged` scheduled but VerificationRequest with ID {subtask_id} is already acknowledged.',
            ErrorCode.CONDUCTOR_VERIFICATION_REQUEST_ALREADY_ACKNOWLEDGED)
    else:
        verification_request.upload_acknowledged = True
        verification_request.full_clean()
        verification_request.save()

    frames = filter_frames_by_blender_subtask_definition(
        verification_request.blender_subtask_definition)

    def call_blender_verification_order() -> None:
        blender_crop_script_parameters = verification_request.blender_subtask_definition.blender_crop_script_parameters
        blender_verification_order.delay(
            subtask_id=verification_request.subtask_id,
            source_package_path=verification_request.source_package_path,
            source_size=source_file_size,
            source_package_hash=source_package_hash,
            result_package_path=verification_request.result_package_path,
            result_size=result_file_size,
            result_package_hash=result_package_hash,
            output_format=verification_request.blender_subtask_definition.
            output_format,
            scene_file=verification_request.blender_subtask_definition.
            scene_file,
            verification_deadline=parse_datetime_to_timestamp(
                verification_request.verification_deadline),
            frames=frames,
            blender_crop_script_parameters=
            parse_blender_crop_script_parameters_to_dict_from_query(
                blender_crop_script_parameters),
        )

    transaction.on_commit(
        call_blender_verification_order,
        using='storage',
    )

    log(logger,
        f'Upload acknowledgment finished.',
        f'Source_file_size {source_file_size}',
        f'Source_package_hash: {source_package_hash}',
        f'Result_file_size: {result_file_size}',
        f'Result_package_hash: {result_package_hash}',
        subtask_id=subtask_id)
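Note the using='storage' argument above: on_commit accepts a using keyword that names the database alias whose transaction the callback should wait for. A minimal sketch of the same idea with a hypothetical callback:

def notify():
    print('storage transaction committed')

# Defer until the transaction on the 'storage' connection commits.
transaction.on_commit(notify, using='storage')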
Example #32
def blender_verification_request(
    subtask_id: str,
    source_package_path: str,
    result_package_path: str,
    output_format: str,
    scene_file: str,
    verification_deadline: int,
    frames: List[int],
    blender_crop_script_parameters: Dict[str, Any],
) -> None:
    log(logger,
        f'Blender verification request starts.',
        f'Source_package_path {source_package_path}',
        f'Result_package_path: {result_package_path}',
        f'Output_format: {output_format}',
        f'Scene_file: {scene_file}',
        f'Frames: {frames}',
        f'Verification_deadline: {verification_deadline}',
        f'With blender_crop_script_parameters: {blender_crop_script_parameters}',
        subtask_id=subtask_id)
    assert isinstance(output_format, str)
    assert isinstance(verification_deadline, int)

    assert output_format in BlenderSubtaskDefinition.OutputFormat.__members__.keys(
    )

    # The app creates a new instance of VerificationRequest in the database
    # and a BlenderSubtaskDefinition instance associated with it.
    (verification_request, blender_subtask_definition
     ) = store_verification_request_and_blender_subtask_definition(
         subtask_id=subtask_id,
         source_package_path=source_package_path,
         result_package_path=result_package_path,
         verification_deadline=verification_deadline,
         output_format=output_format,
         scene_file=scene_file,
         blender_parameters=blender_crop_script_parameters,
     )

    store_frames(
        blender_subtask_definition=blender_subtask_definition,
        frame_list=frames,
    )

    # If there are already UploadReports corresponding to some files, the app links them with the VerificationRequest
    # by setting the value of the foreign key in UploadReport.
    for path in [source_package_path, result_package_path]:
        UploadReport.objects.select_for_update().filter(
            path=path,
            verification_request=None,
        ).update(verification_request=verification_request)

    # The app checks if files indicated by source_package_path
    # and result_package_path in the VerificationRequest have reports.
    if (verification_request.upload_reports.filter(
            path=verification_request.source_package_path).exists()
            and verification_request.upload_reports.filter(
                path=verification_request.result_package_path).exists()):
        log(logger,
            'All expected files have been uploaded',
            f'Result package path: {verification_request.result_package_path}'
            f'Source package path: {verification_request.source_package_path}',
            subtask_id=subtask_id)

        verification_request.upload_finished = True
        verification_request.full_clean()
        verification_request.save()

        # If all expected files have been uploaded, the app sends upload_finished task to the work queue.
        def call_upload_finished() -> None:
            tasks.upload_finished.delay(verification_request.subtask_id)

        transaction.on_commit(
            call_upload_finished,
            using='storage',
        )
Example #33
def field_annotation_deleted(sender, instance: FieldAnnotation, **kwargs):
    from apps.document.async_notifications import notify_field_annotation_deleted
    from django.db import transaction
    transaction.on_commit(lambda: notify_field_annotation_deleted(instance))
Example #34
File: signals.py Project: n86cc/demozoo
def on_save(sender, **kwargs):
    if not hasattr(sender, 'index_components'):
        return
    transaction.on_commit(make_updater(kwargs['instance']))
Example #35
 def perform_create(self, serializer):
     instance = serializer.save()
     transaction.on_commit(lambda: start_deploy_execution.apply_async(
         args=(instance.id, ), task_id=str(instance.id)))
     return instance
Example #36
 def perform_create(self, serializer):
     instance = serializer.save()
     transaction.on_commit(lambda: instance.gather_info())
Example #37
 def _rescore(self, contest_key):
     from judge.tasks import rescore_contest
     transaction.on_commit(rescore_contest.s(contest_key).delay)
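Here rescore_contest.s(contest_key).delay is already a zero-argument callable, so no wrapper is needed; calling it is equivalent to rescore_contest.delay(contest_key). The lambda form used elsewhere in these examples does the same thing:

# Equivalent spelling with an explicit lambda:
transaction.on_commit(lambda: rescore_contest.delay(contest_key))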
Example #38
def _create_order(*,
                  checkout_info: "CheckoutInfo",
                  order_data: dict,
                  user: User,
                  manager: "PluginsManager",
                  site_settings=None) -> Order:
    """Create an order from the checkout.

    Each order will get a private copy of both the billing and the shipping
    address (if shipping).

    If any of the addresses is new and the user is logged in the address
    will also get saved to that user's address book.

    Current user's language is saved in the order so we can later determine
    which language to use when sending email.
    """
    from ..order.utils import add_gift_card_to_order

    checkout = checkout_info.checkout
    order = Order.objects.filter(checkout_token=checkout.token).first()
    if order is not None:
        return order

    total_price_left = order_data.pop("total_price_left")
    order_lines_info = order_data.pop("lines")

    if site_settings is None:
        site_settings = Site.objects.get_current().settings

    status = (OrderStatus.UNFULFILLED
              if site_settings.automatically_confirm_all_new_orders else
              OrderStatus.UNCONFIRMED)
    order = Order.objects.create(
        **order_data,
        checkout_token=checkout.token,
        status=status,
        channel=checkout_info.channel,
    )
    if checkout.discount:
        # store voucher as a fixed value as it is the simplest solution for now.
        # This will be solved when we refactor the voucher logic to use .discounts
        # relations
        order.discounts.create(
            type=OrderDiscountType.VOUCHER,
            value_type=DiscountValueType.FIXED,
            value=checkout.discount.amount,
            name=checkout.discount_name,
            translated_name=checkout.translated_discount_name,
            currency=checkout.currency,
            amount_value=checkout.discount_amount,
        )

    order_lines = []
    for line_info in order_lines_info:
        line = line_info.line
        line.order_id = order.pk
        order_lines.append(line)

    OrderLine.objects.bulk_create(order_lines)

    country_code = checkout_info.get_country()
    allocate_stocks(order_lines_info, country_code)

    # Add gift cards to the order
    for gift_card in checkout.gift_cards.select_for_update():
        total_price_left = add_gift_card_to_order(order, gift_card,
                                                  total_price_left)

    # assign checkout payments to the order
    checkout.payments.update(order=order)

    # copy metadata from the checkout into the new order
    order.metadata = checkout.metadata
    order.redirect_url = checkout.redirect_url
    order.private_metadata = checkout.private_metadata
    order.save()

    transaction.on_commit(
        lambda: order_created(order=order, user=user, manager=manager))

    # Send the order confirmation email
    transaction.on_commit(lambda: send_order_confirmation.delay(
        order.pk, checkout.redirect_url, user.pk))
    transaction.on_commit(lambda: send_staff_order_confirmation.delay(
        order.pk, checkout.redirect_url))

    return order
Example #39
def sign_file(file_obj):
    """Sign a File if necessary.

    If it's not necessary (file exists but it's a mozilla signed one, or it's
    a search plugin) then return the file directly.

    If there's no endpoint (signing is not enabled), the file isn't
    reviewed yet, or there was an error while signing, raise an exception -
    it shouldn't happen.

    Otherwise proceed with signing and return the signed file.
    """
    from olympia.git.utils import create_git_extraction_entry

    if (file_obj.version.addon.type == amo.ADDON_SEARCH
            and file_obj.version.is_webextension is False):
        # Those aren't meant to be signed, we shouldn't be here.
        return file_obj

    if not settings.ENABLE_ADDON_SIGNING:
        raise SigningError(u'Not signing file {0}: no active endpoint'.format(
            file_obj.pk))

    # No file? No signature.
    if not os.path.exists(file_obj.current_file_path):
        raise SigningError(u'File {0} doesn\'t exist on disk'.format(
            file_obj.current_file_path))

    # Don't sign Mozilla signed extensions (they're already signed).
    if file_obj.is_mozilla_signed_extension:
        # Don't raise an exception here, just log and return file_obj even
        # though we didn't sign, it's not an error - we just don't need to do
        # anything in this case.
        log.info(u'Not signing file {0}: mozilla signed extension is already '
                 u'signed'.format(file_obj.pk))
        return file_obj

    # We only sign files that are compatible with Firefox.
    if not supports_firefox(file_obj):
        raise SigningError(
            u'Not signing version {0}: not for a Firefox version we support'.
            format(file_obj.version.pk))

    # Sign the file. If there's any exception, we skip the rest.
    cert_serial_num = str(call_signing(file_obj))

    size = storage.size(file_obj.current_file_path)

    # Save the certificate serial number for revocation if needed, and re-hash
    # the file now that it's been signed.
    file_obj.update(cert_serial_num=cert_serial_num,
                    hash=file_obj.generate_hash(),
                    is_signed=True,
                    size=size)
    log.info(u'Signing complete for file {0}'.format(file_obj.pk))

    if waffle.switch_is_active('enable-uploads-commit-to-git-storage'):
        # Schedule this version for git extraction.
        transaction.on_commit(
            lambda: create_git_extraction_entry(version=file_obj.version))

    return file_obj
Example #40
 def form_valid(self, form):
     self.configuration_profile = form.save()
     transaction.on_commit(lambda: send_mbu_device_notifications(self.meta_business_unit))
     return super().form_valid(form)
Example #41
def greet_user(sender, instance: User, created, **kwargs):
    if created and instance.email is not None:
        transaction.on_commit(lambda: send_greeting_email(instance.pk))
Example #42
File: signals.py Project: ez-einsatz/ez
def nachricht_send_sichtung_hooks(sender, instance, created, update_fields, **kwargs):
    if created:
        transaction.on_commit(lambda: nachricht_send_sichtung_hooks_post_commit(instance))
Example #43
    def from_upload(cls,
                    upload,
                    addon,
                    selected_apps,
                    channel,
                    parsed_data=None):
        """
        Create a Version instance and corresponding File(s) from a
        FileUpload, an Addon, a list of compatible app ids, a channel id and
        the parsed_data generated by parse_addon().

        Note that it's the caller's responsibility to ensure the file is valid.
        We can't check for that here because an admin may have overridden the
        validation results.
        """
        assert parsed_data is not None

        if addon.status == amo.STATUS_DISABLED:
            raise VersionCreateError(
                'Addon is Mozilla Disabled; no new versions are allowed.')

        license_id = None
        if channel == amo.RELEASE_CHANNEL_LISTED:
            previous_version = addon.find_latest_version(channel=channel,
                                                         exclude=())
            if previous_version and previous_version.license_id:
                license_id = previous_version.license_id
        approval_notes = None
        if parsed_data.get('is_mozilla_signed_extension'):
            approval_notes = (u'This version has been signed with '
                              u'Mozilla internal certificate.')
        version = cls.objects.create(
            addon=addon,
            approval_notes=approval_notes,
            version=parsed_data['version'],
            license_id=license_id,
            channel=channel,
        )
        log.info('New version: %r (%s) from %r' %
                 (version, version.id, upload))
        activity.log_create(amo.LOG.ADD_VERSION, version, addon)

        if addon.type == amo.ADDON_STATICTHEME:
            # We don't let developers select apps for static themes
            selected_apps = [app.id for app in amo.APP_USAGE]

        compatible_apps = {}
        for app in parsed_data.get('apps', []):
            if app.id not in selected_apps:
                # If the user chose to explicitly deselect Firefox for Android
                # we don't create the respective `ApplicationsVersions`,
                # so the add-on will then be listed only for
                # Firefox specifically.
                continue

            compatible_apps[app.appdata] = ApplicationsVersions(
                version=version, min=app.min, max=app.max, application=app.id)
            compatible_apps[app.appdata].save()

        # See #2828: sometimes when we generate the filename(s) below, in
        # File.from_upload(), cache-machine is confused and has trouble
        # fetching the ApplicationsVersions that were just created. To work
        # around this we pre-generate version.compatible_apps and avoid the
        # queries completely.
        version._compatible_apps = compatible_apps

        # For backwards compatibility. We removed specific platform
        # support during submission but we don't handle it any different
        # beyond that yet. That means, we're going to simply set it
        # to `PLATFORM_ALL` and also have the backend create separate
        # files for each platform. Cleaning that up is another step.
        # Given the timing on this, we don't care about updates to legacy
        # add-ons as well.
        # Create relevant file and update the all_files cached property on the
        # Version, because we might need it afterwards.
        version.all_files = [
            File.from_upload(upload=upload,
                             version=version,
                             platform=amo.PLATFORM_ALL.id,
                             parsed_data=parsed_data)
        ]

        version.inherit_nomination(from_statuses=[amo.STATUS_AWAITING_REVIEW])
        version.disable_old_files()
        # After the upload has been copied to all platforms, remove the upload.
        storage.delete(upload.path)
        version_uploaded.send(sender=version)

        # Extract this version into git repository
        transaction.on_commit(
            lambda: extract_version_to_git_repository(version, upload))

        # Generate a preview and icon for listed static themes
        if (addon.type == amo.ADDON_STATICTHEME
                and channel == amo.RELEASE_CHANNEL_LISTED):
            theme_data = parsed_data.get('theme', {})
            generate_static_theme_preview(theme_data, version.pk)

        # Track the time it took from first upload through validation
        # (and whatever else) until a version was created.
        upload_start = utc_millesecs_from_epoch(upload.created)
        now = datetime.datetime.now()
        now_ts = utc_millesecs_from_epoch(now)
        upload_time = now_ts - upload_start

        log.info('Time for version {version} creation from upload: {delta}; '
                 'created={created}; now={now}'.format(delta=upload_time,
                                                       version=version,
                                                       created=upload.created,
                                                       now=now))
        statsd.timing('devhub.version_created_from_upload', upload_time)

        return version
Example #44
    def update_multiple(self, data, signal):  # noqa: C901
        """
        Perform one atomic update on multiple properties of `Signal` object.

        Note, this updates:
        - CategoryAssignment, Location, Priority, Note, Status
        :param data: deserialized data dict
        :param signal: Signal object
        :returns: Updated Signal object
        """
        from signals.apps.signals.models import Signal

        with transaction.atomic():
            locked_signal = Signal.objects.select_for_update(nowait=True).get(
                pk=signal.pk)  # Lock the Signal

            to_send = []
            sender = self.__class__

            if 'location' in data:
                location, prev_location = self._update_location_no_transaction(
                    data['location'], locked_signal)  # noqa: E501
                to_send.append((update_location, {
                    'sender': sender,
                    'signal_obj': locked_signal,
                    'location': location,
                    'prev_location': prev_location
                }))

            if 'status' in data:
                status, prev_status = self._update_status_no_transaction(
                    data['status'], locked_signal)
                to_send.append((update_status, {
                    'sender': sender,
                    'signal_obj': locked_signal,
                    'status': status,
                    'prev_status': prev_status
                }))

            if 'category_assignment' in data:
                # Only update if category actually changes (TODO: remove when we
                # add consistency checks to API -- i.e. when we check that only
                # the latest version of a Signal can be mutated.)
                if 'category' not in data['category_assignment']:
                    raise ValidationError('Category not found in data')
                elif locked_signal.category_assignment.category.id != data[
                        'category_assignment']['category'].id:  # noqa: E501
                    category_assignment, prev_category_assignment = \
                        self._update_category_assignment_no_transaction(
                            data['category_assignment'], locked_signal)

                    to_send.append((update_category_assignment, {
                        'sender': sender,
                        'signal_obj': locked_signal,
                        'category_assignment': category_assignment,
                        'prev_category_assignment': prev_category_assignment
                    }))

            if 'priority' in data:
                priority, prev_priority = \
                    self._update_priority_no_transaction(data['priority'], locked_signal)
                to_send.append((update_priority, {
                    'sender': sender,
                    'signal_obj': locked_signal,
                    'priority': priority,
                    'prev_priority': prev_priority
                }))

            if 'notes' in data:
                # The 0 index is there because we only allow one note to be
                # added per PATCH.
                note = self._create_note_no_transaction(
                    data['notes'][0], locked_signal)
                to_send.append((create_note, {
                    'sender': sender,
                    'signal_obj': locked_signal,
                    'note': note
                }))

            if 'type' in data:
                previous_type = locked_signal.type_assignment
                signal_type = self._update_type_no_transaction(
                    data['type'], locked_signal)
                to_send.append((update_type, {
                    'sender': sender,
                    'signal_obj': locked_signal,
                    'type': signal_type,
                    'prev_type': previous_type
                }))

            if 'directing_departments_assignment' in data:
                previous_directing_departments = locked_signal.directing_departments_assignment
                directing_departments = self._update_directing_departments_no_transaction(
                    data['directing_departments_assignment'], locked_signal)
                to_send.append((update_type, {
                    'sender': sender,
                    'signal_obj': locked_signal,
                    'directing_departments': directing_departments,
                    'prev_directing_departments': previous_directing_departments
                }))

            # Send out all Django signals:
            transaction.on_commit(lambda: send_signals(to_send))

        locked_signal.refresh_from_db()
        return locked_signal
Example #45
 def save(self, *args, **kwargs):
     if self._state.adding:
         from monitoring.tasks import run_manual_service_monitoring
         transaction.on_commit(lambda: run_manual_service_monitoring.apply_async(args=(self.pk, ), countdown=settings.CELERY_DEFAULT_COUNTDOWN))
     super().save(*args, **kwargs)
Example #46
 def complete(self):
     with transaction.atomic():
         self.students.set([])
         self.save()
         transaction.on_commit(self.mark_as_finished)
Example #47
def send_transcode_task(sender: Any, *, instance: models.Video, created: bool,
                        **kw: Any) -> None:
    if not created:
        return
    transaction.on_commit(lambda: helpers.send_transcode_task(instance))
Example #48
def contact_sync_es(sender, instance, **kwargs):
    """Sync contact to the Elasticsearch."""
    transaction.on_commit(
        lambda: sync_object_async(ESContact, DBContact, str(instance.pk)),
    )
Example #49
    def split(self, split_data, signal, user=None):  # noqa: C901
        """ Split the original signal into 2 or more (see settings SIGNAL_MAX_NUMBER_OF_CHILDREN)
            new signals

        :param split_data: deserialized data dict containing data for new signals
        :param signal: Signal object, the original Signal
        :return: Signal object, the original Signal
        """
        # See: https://docs.djangoproject.com/en/2.1/topics/db/queries/#copying-model-instances
        from .models import (Attachment, CategoryAssignment, Location,
                             Priority, Reporter, Signal, Status, Type)
        from signals.apps.signals import workflow

        loop_counter = 0
        with transaction.atomic():
            parent_signal = Signal.objects.select_for_update(nowait=True).get(
                pk=signal.pk)
            for validated_data in split_data:
                loop_counter += 1

                # Create a new Signal, save it to get an ID in DB.
                child_signal = Signal.objects.create(
                    **{
                        'text': validated_data['text'],
                        'incident_date_start':
                        parent_signal.incident_date_start,
                        'parent': parent_signal,
                    })

                # Set the relevant properties: location, status, reporter, priority, cate
                # Deal with reverse foreign keys to child signal (for history tracking):
                status = Status.objects.create(
                    **{
                        '_signal': child_signal,
                        'state': workflow.GEMELD,
                        'text': None,
                        'user': None,  # i.e. SIA system
                    })

                location_data = {'_signal': child_signal}
                location_data.update({
                    k: getattr(parent_signal.location, k)
                    for k in [
                        'geometrie', 'stadsdeel', 'buurt_code', 'address',
                        'created_by', 'extra_properties', 'bag_validated'
                    ]
                })
                location = Location.objects.create(**location_data)

                reporter_data = {'_signal': child_signal}
                reporter_data.update({
                    k: getattr(parent_signal.reporter, k)
                    for k in [
                        'email', 'phone', 'email_anonymized',
                        'phone_anonymized', 'sharing_allowed'
                    ]  # noqa
                })
                reporter = Reporter.objects.create(**reporter_data)

                priority = None
                if parent_signal.priority:
                    priority_data = {'_signal': child_signal}
                    priority_data.update({
                        k: getattr(parent_signal.priority, k)
                        for k in ['priority', 'created_by']
                    })
                    priority = Priority.objects.create(**priority_data)

                if 'category_url' in validated_data['category']:
                    category = validated_data['category']['category_url']
                elif 'sub_category' in validated_data['category']:
                    # Only for backwards compatibility
                    category = validated_data['category']['sub_category']

                category_assignment_data = {
                    '_signal': child_signal,
                    'category': category,
                }

                category_assignment = CategoryAssignment.objects.create(
                    **category_assignment_data)

                if 'type' in validated_data:
                    type_data = validated_data[
                        'type']  # Will create a type with the given name
                    type_data[
                        'created_by'] = None  # noqa We also set the other fields to None. Shouldn't this be "user if user else None"?
                elif parent_signal.type_assignment:
                    type_data = {
                        'name': parent_signal.type_assignment.
                        name,  # noqa Will copy the type with name from the parent signal
                        'created_by':
                        None  # noqa We also set the other fields to None. Shouldn't this be "user if user else None"?
                    }
                else:
                    type_data = {
                    }  # Will create a default type with name "SIGNAL"

                # Creates the Type for the child signal
                Type.objects.create(**type_data, _signal_id=child_signal.pk)

                # Deal with forward foreign keys from child signal
                child_signal.location = location
                child_signal.status = status
                child_signal.reporter = reporter
                child_signal.priority = priority
                child_signal.category_assignment = category_assignment
                child_signal.save()

                # Ensure each child signal creation sends a DjangoSignal.
                # Bind child_signal as a default argument: the loop rebinds the
                # name, and on_commit callbacks only run after the loop ends.
                transaction.on_commit(lambda child_signal=child_signal: create_child.send_robust(
                    sender=self.__class__, signal_obj=child_signal))

                # Check if we need to copy the images of the parent
                if 'reuse_parent_image' in validated_data and validated_data[
                        'reuse_parent_image']:
                    parent_image_qs = parent_signal.attachments.filter(
                        is_image=True)
                    if parent_image_qs.exists():
                        for parent_image in parent_image_qs.all():
                            # Copy the original file and rename it by pre-pending the name with
                            # split_{loop_counter}_{original_name}
                            child_image_name = 'split_{}_{}'.format(
                                loop_counter,
                                parent_image.file.name.split('/').pop())

                            attachment = Attachment()
                            attachment._signal = child_signal
                            try:
                                attachment.file.save(name=child_image_name,
                                                     content=parent_image.file)
                            except FileNotFoundError:
                                pass
                            else:
                                attachment.save()

            # Let's update the parent signal status to GESPLITST
            status, prev_status = self._update_status_no_transaction(
                {
                    'state': workflow.GESPLITST,
                    'text': 'Deze melding is opgesplitst.',
                    'created_by': user.email if user else None,
                },
                signal=parent_signal)

            transaction.on_commit(
                lambda: update_status.send_robust(sender=self.__class__,
                                                  signal_obj=parent_signal,
                                                  status=status,
                                                  prev_status=prev_status))

        return signal
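
A quick note on the pattern above: the create_child callbacks only fire after the surrounding transaction commits, long after the loop has finished, so any loop variable they reference has to be bound at definition time, as done with the default argument above. A minimal sketch of the pitfall, using hypothetical names (children, notify_created):

from django.db import transaction

# Without the default argument, every callback would see the *last*
# child created in the loop once the transaction commits.
for child in children:
    transaction.on_commit(lambda child=child: notify_created(child.pk))
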
Example #50
def send_mail(recipient_list, template, context=None, from_email=None, send_mail=True, on_commit=False, *args, **kwargs):
    """
    Wrapper around ``django.core.mail.send_mail`` that generates the subject
    and message body from a template.

    Usage::

        >>> from email_from_template import send_mail
        >>> send_mail([user.email], 'path/to/my_email.email', {
            'a': 1,
            'user': user,
        })

    path/to/my_email.email::

        {% extends email_from_template %}

        {% block subject %}
        Hi {{ user.username }}
        {% endblock %}

        {% block body %}
        Hi {{ user.username }}.
        Did you know that a = {{ a }} ?
        {% endblock %}
    """

    # Explicitly check that we have been installed as an app, otherwise we get
    # a confusing traceback that `template` does not exist, rather than
    # `email_from_template/component.email`.
    if 'email_from_template' not in settings.INSTALLED_APPS:
        raise ImproperlyConfigured(
            "'email_from_template' missing from INSTALLED_APPS"
        )

    context = Context(context)
    for fn in [import_string(x) for x in app_settings.CONTEXT_PROCESSORS]:
        context.update(fn())

    render_fn = import_string(app_settings.RENDER_METHOD)

    def render(component, fail_silently=False):
        context.push({
            'email_from_template': 'email_from_template/%s.email' % component,
        })

        txt = render_fn(template, context.flatten()).strip()

        if not fail_silently:
            assert txt, "Refusing to send mail with empty %s - did you forget to" \
                " add a {%% block %s %%} to %s?" % (component, component, template)

        context.pop()

        return txt

    kwargs.setdefault('connection', get_connection(
        username=kwargs.get('auth_user', None),
        password=kwargs.get('auth_password', None),
        fail_silently=kwargs.pop('fail_silently', False),
    ))

    mail = EmailMultiAlternatives(
        render('subject').split('\n')[0],
        render('body'),
        from_email,
        recipient_list,
        *args,
        **kwargs
    )

    html_message = render('html', fail_silently=True)
    if html_message:
        mail.attach_alternative(html_message, 'text/html')

    if not send_mail:
        return mail

    if on_commit:
        transaction.on_commit(mail.send)
        return None

    return mail.send()
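
Because the wrapper exposes an on_commit flag, delivery can be deferred until the surrounding transaction commits. A hedged usage sketch (the user model and template path are assumptions):

from django.contrib.auth import get_user_model
from django.db import transaction

with transaction.atomic():
    user = get_user_model().objects.create_user('alice', 'alice@example.com')
    # queued via transaction.on_commit; nothing is sent if the block rolls back
    send_mail([user.email], 'emails/welcome.email', {'user': user}, on_commit=True)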
Example #51
File: views.py Project: LDO-CERT/orochi
    def import_local(self, request):
        local_path = Path(request.data["filepath"])
        media_path = "{}/{}".format(settings.MEDIA_ROOT, "uploads")

        uploaded_name = "{}/{}".format(media_path, local_path.name)

        if not local_path.exists():
            return Response(
                {"Error": "Filepath does not exists!"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        if Path(settings.MEDIA_ROOT) not in local_path.parents:
            return Response(
                {"Error": "Filepath must be under MEDIA PATH!"},
                status=status.HTTP_400_BAD_REQUEST,
            )

        # If the file is already under the uploads folder keep it, else move it
        if local_path.parent.absolute() == Path(media_path).absolute():
            uploaded_name = local_path
        else:
            local_path.rename(uploaded_name)

        operating_system = request.data["operating_system"]
        operating_system = operating_system.capitalize()
        if operating_system not in ["Linux", "Windows", "Mac"]:
            return Response(
                {
                    "Error":
                    "Option selected for OS is not valid [Linux, Windows, Mac]."
                },
                status=status.HTTP_400_BAD_REQUEST,
            )

        name = request.data["name"]

        with transaction.atomic():
            dump = Dump(
                author=request.user,
                index=str(uuid.uuid1()),
                name=name,
                operating_system=operating_system,
            )
            dump.upload.name = str(uploaded_name)
            dump.save()
            Result.objects.bulk_create([
                Result(
                    plugin=up.plugin,
                    dump=dump,
                    result=5 if not up.automatic else 0,
                ) for up in UserPlugin.objects.filter(
                    plugin__operating_system__in=[
                        operating_system,
                        "Other",
                    ],
                    user=request.user,
                    plugin__disabled=False,
                )
            ])
            transaction.on_commit(
                lambda: index_f_and_f(dump.pk, request.user.pk))

        return Response(
            status=status.HTTP_200_OK,
            data=ShortDumpSerializer(dump, context={
                "request": request
            }).data,
        )
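
Note that index_f_and_f is scheduled with transaction.on_commit from inside the atomic() block: the task only runs once the Dump and its Result rows are committed, so a worker that picks it up immediately cannot miss them. And since dump is bound exactly once (not in a loop), capturing it in a plain lambda is safe here.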
Example #52
def company_sync_es(instance):
    """Sync company to the Elasticsearch."""
    transaction.on_commit(
        lambda: sync_object_async(CompanySearchApp, instance.pk))
Example #53
 def inner(*args, **kwargs):
     transaction.on_commit(lambda: func(*args, **kwargs))
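
Example #53 shows only the inner closure of a decorator; the enclosing function is not part of the snippet. A minimal reconstruction under that assumption (the decorator name is hypothetical):

from functools import wraps
from django.db import transaction

def on_transaction_commit(func):
    """Run the decorated function only after the current transaction commits."""
    @wraps(func)
    def inner(*args, **kwargs):
        transaction.on_commit(lambda: func(*args, **kwargs))
    return inner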
Example #54
 def save(self, *args, **kwargs):
     super().save(*args, **kwargs)
     if self.should_notify():
         email = self.user.email
         transaction.on_commit(lambda: notify_auditlog.delay(self.pk, email))
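
Reading self.user.email into a local variable before building the lambda is deliberate: the address is captured at save time, so the on_commit callback never has to touch the ORM (or a possibly deleted related object) when it eventually runs.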
Example #55
def notify_create_user(sender, instance, password, created=False, **kwargs):
    transaction.on_commit(lambda: tasks.notify_create_user.delay(
        instance.id, password, instance.settings.backend_url))
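
As in the other handlers, only serializable primitives (instance.id, the password, a backend URL) are handed to the Celery task, rather than a model instance that could be stale by the time a worker deserializes it.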
Example #56
def initiate_messaging_rule_run(domain, rule_id):
    MessagingRuleProgressHelper(rule_id).set_initial_progress()
    AutomaticUpdateRule.objects.filter(pk=rule_id).update(locked_for_editing=True)
    transaction.on_commit(lambda: run_messaging_rule.delay(domain, rule_id))
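
One caveat with this pattern: if initiate_messaging_rule_run is called while no transaction is active, Django executes the on_commit callback immediately, so run_messaging_rule is enqueued the moment the rule is locked rather than at the end of some larger atomic block.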
Example #57
    def clone_product(self, shop_product: ShopProduct):
        # clone product
        product = shop_product.product
        new_product = copy_model_instance(product)
        new_product.sku = "{}-{}".format(product.sku, Product.objects.count())
        new_product.name = ("{name} - Copy").format(name=product.name)
        new_product.save()

        for trans in product.translations.all():
            trans_product_data = get_data_dict(trans)
            trans_product_data["master"] = new_product
            new_trans = Product._parler_meta.get_model_by_related_name(
                "translations").objects.get_or_create(
                    language_code=trans.language_code, master=new_product)[0]
            for (key, value) in trans_product_data.items():
                setattr(new_trans, key, value)

            new_trans.save()

        # clone shop product
        new_shop_product = copy_model_instance(shop_product)
        new_shop_product.product = new_product
        new_shop_product.save()

        for trans in shop_product.translations.all():
            trans_shop_product_data = get_data_dict(trans)
            trans_shop_product_data["master"] = new_shop_product
            ShopProduct._parler_meta.get_model_by_related_name(
                "translations").objects.get_or_create(
                    **trans_shop_product_data)

        # clone suppliers
        if self.current_supplier:
            new_shop_product.suppliers.add(self.current_supplier)
        else:
            new_shop_product.suppliers.set(shop_product.suppliers.all())

        new_shop_product.categories.set(shop_product.categories.all())

        # clone attributes
        for original_product_attribute in product.attributes.all():
            product_attribute = ProductAttribute.objects.create(
                product=new_product,
                attribute=original_product_attribute.attribute,
            )
            product_attribute.value = original_product_attribute.value
            product_attribute.save()

        # clone media
        for media in product.media.all():
            media_copy = copy_model_instance(media)
            media_copy.product = new_product
            media_copy.file = media.file
            # save first so the copy has a pk before its translations reference it
            media_copy.save()
            # note: this adds the current shop to the *original* media object
            media.shops.add(shop_product.shop)
            if product.primary_image == media:
                new_product.primary_image = media_copy

            for trans in media.translations.all():
                trans_product_media_data = get_data_dict(trans)
                # the translation's master is the copied media, not the shop product
                trans_product_media_data["master"] = media_copy
                ProductMedia._parler_meta.get_model_by_related_name(
                    "translations").objects.create(**trans_product_media_data)

        # persist the primary_image assignment made in the loop above
        new_product.save()

        product_copied.send(sender=type(self),
                            shop=shop_product.shop,
                            suppliers=self.current_supplier,
                            copied=product,
                            copy=new_product)

        transaction.on_commit(
            # reindex after commit, passing the ShopProduct pk that
            # shop_product_id expects
            lambda: run_task("shuup.core.catalog.tasks.index_shop_product",
                             shop_product_id=new_shop_product.pk))

        return new_shop_product
Example #58
    def post(self, request, *args, **kwargs):
        if "file" not in request.FILES or "json" not in request.POST:
            raise BadRequestError(
                ERROR_CODES_DETAILS["ftl_missing_file_or_json_in_body"],
                "ftl_missing_file_or_json_in_body",
            )

        file_obj = request.FILES["file"]

        if file_obj.size == 0:
            raise BadRequestError(
                ERROR_CODES_DETAILS["ftl_file_empty"],
                "ftl_file_empty",
            )

        mime = guess_mimetype(file_obj, filename=file_obj.name)
        extension = mimetype_to_ext(mime)
        if not extension:
            raise UnsupportedMediaType(
                mime,
                ERROR_CODES_DETAILS["ftl_document_type_unsupported"],
                "ftl_document_type_unsupported",
            )

        payload = json.loads(request.POST["json"])

        if "ftl_folder" in payload and payload["ftl_folder"]:
            try:
                ftl_folder = get_object_or_404(
                    FTLFolder.objects.filter(org=self.request.user.org),
                    id=payload["ftl_folder"],
                )
            except Http404:
                raise BadRequestError(
                    ERROR_CODES_DETAILS["ftl_folder_not_found"],
                    "ftl_folder_not_found",
                )
        else:
            ftl_folder = None

        ftl_doc = FTLDocument()
        ftl_doc.ftl_folder = ftl_folder
        ftl_doc.ftl_user = self.request.user
        ftl_doc.binary = file_obj
        ftl_doc.size = file_obj.size
        ftl_doc.type = mime

        md5 = hashlib.md5()
        for data in ftl_doc.binary.chunks():
            md5.update(data)
        ftl_doc.md5 = md5.hexdigest()

        if "md5" in payload and payload["md5"]:
            if payload["md5"] != ftl_doc.md5:
                raise BadRequestError(
                    ERROR_CODES_DETAILS["ftl_document_md5_mismatch"],
                    "ftl_document_md5_mismatch",
                )

        ftl_doc.org = self.request.user.org

        if "title" in payload and payload["title"]:
            ftl_doc.title = payload["title"]
        else:
            if file_obj.name.lower().endswith(extension):
                ftl_doc.title = file_obj.name[:-(len(extension))]
            else:
                ftl_doc.title = file_obj.name

        # The actual name of the file doesn't matter because we use a random UUID. On the contrary, the extension
        # is important.
        ftl_doc.binary.name = f"document{extension}"

        if "created" in payload and payload["created"]:
            ftl_doc.created = payload["created"]

        if "note" in payload and payload["note"]:
            ftl_doc.note = payload["note"]

        if "thumbnail" in request.POST and request.POST["thumbnail"]:
            try:
                ftl_doc.thumbnail_binary = ContentFile(
                    _extract_binary_from_data_uri(request.POST["thumbnail"]),
                    "thumb.png",
                )
            except ValueError:
                # Swallow the error and save the document without a thumbnail,
                # unless the caller explicitly set ignore_thumbnail_generation_error
                # to a falsy value.
                if ("ignore_thumbnail_generation_error" in payload
                        and not payload["ignore_thumbnail_generation_error"]):
                    raise BadRequestError(
                        ERROR_CODES_DETAILS["ftl_thumbnail_generation_error"],
                        "ftl_thumbnail_generation_error",
                    )

        ftl_doc.save()

        transaction.on_commit(lambda: apply_ftl_processing.delay(
            ftl_doc.pid,
            ftl_doc.org.pk,
            ftl_doc.ftl_user.pk,
            force=[FTLPlugins.LANG_DETECTOR_LANGID],
        ))

        return Response(self.serializer_class(ftl_doc).data, status=201)
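
_extract_binary_from_data_uri is not shown in this snippet; a minimal sketch of what such a helper could look like (an assumption, not the project's actual implementation):

import base64

def _extract_binary_from_data_uri(data_uri):
    # e.g. "data:image/png;base64,iVBORw0KGgo..."
    header, _, encoded = data_uri.partition(',')
    if not header.startswith('data:') or not header.endswith(';base64'):
        raise ValueError('expected a base64-encoded data URI')
    # binascii.Error is a subclass of ValueError, matching the except clause above
    return base64.b64decode(encoded, validate=True)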
Example #59
 def delete(self, request, *args, **kwargs):
     self.object = self.get_object()
     self.object.trashed_at = timezone.now()
     self.object.save()
     transaction.on_commit(lambda: send_mbu_device_notifications(self.object.meta_business_unit))
     return HttpResponseRedirect(reverse("mdm:mbu", args=(self.object.meta_business_unit.pk,)))
Example #60
    def expire(self, *fields):
        def func():
            super(OrgRelatedCache, self).expire(*fields)

        on_commit(func)
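
The explicit super(OrgRelatedCache, self) is required here: zero-argument super() infers the instance from the first argument of the enclosing function, and func takes no arguments, so the two-argument form is the only one that works inside the nested function.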