Example #1
 def perform_create(self, serializer):
     letter = serializer.save()
     django_rq.enqueue(mail.send_imidiate_notifications, letter)
     till_sending = 24 * 3 * settings.SECONDS_IN_HOUR
     sending_datetime = datetime.now() + timedelta(seconds=till_sending)
     scheduler = django_rq.get_scheduler()
     scheduler.enqueue_at(sending_datetime, mail.send_letter, letter)
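The scheduler used above requires the optional rq-scheduler package, and settings.SECONDS_IN_HOUR is project-specific. A minimal configuration sketch under those assumptions, with placeholder values:

# settings.py -- sketch only; host/port/db are placeholders
RQ_QUEUES = {
    'default': {
        'HOST': 'localhost',
        'PORT': 6379,
        'DB': 0,
    },
}
SECONDS_IN_HOUR = 3600  # referenced above as settings.SECONDS_IN_HOUR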
def enqueue(request):
	if request.method == "POST":
		# TODO: Correct method.
		django_rq.enqueue(dummy_method, request.body)
		return HttpResponse(status=204)
	else:
		return HttpResponseForbidden()
Example #3
def shuffle_members(members):
	members_id = []
	for member in members:
		if member.id not in members_id:
			members_id.append(member.id)

	pairings = []
	random.shuffle(members_id)
	mail_subject = 'Kris Kringle'
	message_body = 'You picked {0}!\n\n{0} wishes to have at least one of the following:\n{1}'
	for index in range(len(members_id)):
		try:
			pair = members.get(pk=members_id[index])
			riap = members.get(pk=members_id[index+1])
		except IndexError:
			pair = members.get(pk=members_id[index])
			riap = members.get(pk=members_id[0])
		finally:
			django_rq.enqueue(
				send_mail,
				subject=mail_subject,
				message=message_body.format(riap.code_name, riap.wish_list), 
				from_email=SENDER,
				recipient_list=[pair.email,],
				fail_silently=False
			)
Example #4
def send_communication(communication_type="EMail",
                       message_key="", *args, **kwargs):
    this_module = modules[__name__]
    api_full_name = _get_param('communication_dispatcher')[communication_type]
    api_name = api_full_name.split('.')[-1]
    message_api = getattr(this_module, api_name, None)

    if not message_api:
        raise ValidationError("No API has been defined for communication type '%s'"
                              % communication_type)

    message_container = message_api(message_key=message_key)

    if settings.ASYNC:
        django_rq.enqueue(message_container.send_message)
        message_status = "Complete"
    else:
        message_status = message_container.send_message()


    if message_status != comm_settings.COMMUNICATION_STATUS[2][0]:
        raise ValidationError("Message send for key '%s' does not report success. "
                              "Review communication status inside payload "
                              "for error details" % message_key)

    return message_status
Example #5
def done(request):
    secret = request.POST.get('secret', '')
    sessionid = request.POST.get('sessionid', '')

    # TODO done does not check if the live is within a reasonable time ago
    try:
        s = StandSession.objects.get(secret=secret, id=sessionid)

        if 'duration' in request.POST:
            duration = int(request.POST.get('duration', '0'))

            s.duration = duration
            s.datefinished = s.datecreated + datetime.timedelta(seconds=duration)

        s.save()

        # Post the done session to slack so we can watch along
        import django_rq
        django_rq.enqueue(s.post_to_slack)

        code = 1
    except StandSession.DoesNotExist:
        code = 0

    response = {
        'status': code
    }

    return HttpResponse(json.dumps(response), content_type='application/json')
Example #6
def daily_work(test=False):
	if not test:
		result = django_rq.enqueue(profile_builder_alert_email)
		result = django_rq.enqueue(check_block_updates)
	return True
Example #7
    def post(self, request):
        """
        Builds a new property and sets the scraper on it.  Needs the upstream id and yelp_url of the property to get started.
        """
        print "Recieved scrape request for upstream property {0}".format(request.POST["upstream_id"])

        # Grab the property from the local db, create it if it doesn't exist
        prop_l = Property.objects.filter(upstream_id=request.POST["upstream_id"])
        if len(prop_l):
            prop = prop_l[0]
        else:
            prop = Property(upstream_id=request.POST["upstream_id"], yelp_url=request.POST["yelp_url"])
            prop.save()

        print "Scraped: {0} Processing: {1}".format(prop.yelp_scraped, prop.yelp_processing)
        # If scraping hasn't been run yet (initial GET) grab 'em
        if not prop.yelp_scraped:
            if not prop.yelp_processing:
                prop.yelp_processing = True
                prop.reviews.all().delete()
                prop.save()
                django_rq.enqueue(scrape_yelp_for_reviews, prop.id)
        else:
            print "NOTHING TO SCRAPE"

        return HttpResponse(json.dumps(prop.get_property_scrape_dict()), content_type="application/json")
Example #8
def process_entity_comment(entity, profile, receiving_profile=None):
    """Process an entity of type Comment."""
    fid = safe_text(entity.id)
    if not validate_against_old_content(fid, entity, profile):
        return
    try:
        parent = Content.objects.fed(entity.target_id).get()
    except Content.DoesNotExist:
        logger.warning("No target found for comment: %s", entity)
        return
    values = {
        "text": safe_text_for_markdown(entity.raw_content),
        "author": profile,
        "visibility": parent.visibility,
        "remote_created": safe_make_aware(entity.created_at, "UTC"),
        "parent": parent,
    }
    values["text"] = _embed_entity_images_to_post(entity._children, values["text"])
    if getattr(entity, "guid", None):
        values["guid"] = safe_text(entity.guid)
    content, created = Content.objects.fed_update_or_create(fid, values)
    _process_mentions(content, entity)
    if created:
        logger.info("Saved Content from comment entity: %s", content)
    else:
        logger.info("Updated Content from comment entity: %s", content)
    if parent.visibility != Visibility.PUBLIC and receiving_profile:
        content.limited_visibilities.add(receiving_profile)
        logger.info("Added visibility to Comment %s to %s", content.uuid, receiving_profile.uuid)
    if parent.local:
        # We should relay this to participants we know of
        from socialhome.federate.tasks import forward_entity
        django_rq.enqueue(forward_entity, entity, parent.id)
Example #9
def update_index(request):
    func = 'bicycle.searchextensions.jobs.update_index'
    django_rq.enqueue(func)
    try:
        return redirect(request.META['HTTP_REFERER'])
    except KeyError:
        return redirect('/')
Example #10
    def post(self, request, *args, **kwargs):
        print "post received"
        data = request.DATA
        print "data received"

        try:
            print "trying to get data"
            fb_id = data['fb_id']
            oauth = data['oauth']
            email = data['email']
        except KeyError:
            error = {'error': "Invalid request"}
            return Response(error,
                            status=status.HTTP_400_BAD_REQUEST)

        print "trying to get User"
        try:
            user = User(fb_id=fb_id, email=email)
            user.save()
        except Exception:
            content = {'content': 'account already exists'}
            return Response(content, status=status.HTTP_200_OK)
        print("starting redis queue")
        django_rq.enqueue(run_queue, fb_id, oauth, email, user)
        print("redis queue started")
        return Response(status=status.HTTP_201_CREATED)
Example #11
def grab_user_location(request):
    import django_rq

    print "SESSION",request.session
    #non rendering function that serves to log the location to the current session
    #will also be the basis to trigger the 'nearest me' job
    if request.method == "POST":
        import datetime
        from locationmanager import calculate_restaurants_by_distance
        latlong = request.POST['position'].split(',')
        lat = latlong[0]
        long = latlong[1]
        request.session['user_location'] = {
            "coordinates": {"lat": lat, "lng": long},
            "timestamp": str(datetime.datetime.now()),
        }
        request.session.modified = True
        print(request.session['user_location']['coordinates'])
        print(request.session._session_key)
        response = HttpResponse()
        response.status_code = 200
        # stick a job on the queue to make sure that session['restaurantlocations'] is always up to date.
        django_rq.enqueue(calculate_restaurants_by_distance, request.session)
        return response
    else:
        response = HttpResponse()
        response.status_code = 405
        return response
Example #12
def federate_content_retraction(instance, **kwargs):
    """Send out local content retractions to the federation layer."""
    if instance.local:
        logger.debug('federate_content_retraction: Sending out Content retraction: %s', instance)
        try:
            django_rq.enqueue(send_content_retraction, instance, instance.author_id)
        except Exception as ex:
            logger.exception("Failed to federate_content_retraction %s: %s", instance, ex)
Example #13
    def run(cls):
        response = urlopen(cls.url)
        html = response.read()

        cities = SiteParser().run(html)

        for city_data in cities:
            city = City.create_or_update(city_data)
            django_rq.enqueue(CityImporter.run, city)
Example #14
    def run(cls, city):
        response = urlopen(urljoin(city.url, cls.path, allow_fragments=True))
        html = response.read()

        posts = CityParser().run(city, html)

        for post_data in posts:
            post = Post.create_or_update(urljoin(city.url, post_data))
            django_rq.enqueue(PostImporter.run, post)
Example #15
 def handle(self, *args, **options):
     for refresh_request in DatabaseRefresh.objects.filter(refresh_status='ST'):
         print('processing ', refresh_request.id)
         if rq_present():
             print('queueing into rq')
             enqueue(refresh_database_backup, refresh_request.id)
         else:
             print('executing backup')
             refresh_database_backup(refresh_request.id)
 def handle(self, *args, **options):
     _dryrun = False

     for _notify in Notifier.objects.filter(notify_status='Scheduled'):
         print(_notify.notify_title)
         if rq_present():
             enqueue(send_email, notify_id=_notify.id, dryrun=_dryrun)
         else:
             send_email(notify_id=_notify.id, dryrun=_dryrun)
Example #17
def enqueue_send_mail(subject, message, from_email, recipient_list,
                      fail_silently=False, auth_user=None, auth_password=None, connection=None, html_message=None):
    """
    `send_mail` function automatically enqueued

    Usage: enqueue_send_mail(args)
    """
    enqueue(send_mail, subject, message, from_email, recipient_list,
            fail_silently, auth_user, auth_password, connection, html_message)
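A brief usage sketch for the wrapper above; the subject and addresses are invented for illustration:

# Hypothetical call site -- queues Django's send_mail instead of sending it inline
enqueue_send_mail(
    subject='Welcome',
    message='Thanks for signing up.',
    from_email='noreply@example.com',
    recipient_list=['user@example.com'],
)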
Example #18
def import_image(request):
    form = ImportImageForm(request.POST)
    hosts = form.data.getlist('hosts')
    for i in hosts:
        host = Host.objects.get(id=i)
        django_rq.enqueue(host.import_image, form.data.get('repository'))
    messages.add_message(request, messages.INFO, _('Importing') + ' {0}'.format(
        form.data.get('repository')) + '. ' + _('This may take a few minutes.'))
    return redirect('dashboard.views.index')
Example #19
def on_commit_profile_following_change(action, pks, instance):
    for id in pks:
        # Send out on the federation layer if local follower, remote followed/unfollowed
        if Profile.objects.filter(id=id, user__isnull=True).exists() and instance.user:
            django_rq.enqueue(
                send_follow_change, instance.id, id, True if action == "post_add" else False
            )
        # Send out notification if local followed
        if action == "post_add" and Profile.objects.filter(id=id, user__isnull=False):
            django_rq.enqueue(send_follow_notification, instance.id, id)
Example #20
    def obj_create(self, bundle, **kwargs):
        bundle = super(CreateUserResource, self).obj_create(
            bundle, **kwargs)

        bundle.obj.set_password(bundle.obj.password)
        bundle.obj.save()

        enqueue(generate_activation_code, bundle.obj.pk)

        return bundle
Example #21
	def process(f):
		# Save the uploaded APK to disk, then queue it for background processing.
		location = ROOT + '/apk/' + f.name
		with open(location, 'wb+') as destination:
			for chunk in f.chunks():
				destination.write(chunk)
		django_rq.enqueue(main.processApk, location)

@job
def long_running_func():
    pass

long_running_func.delay()
    def perform_create(self, serializer):
        pk = self.kwargs.get(self.lookup_url_kwargs)
        team = get_object_or_404(Team, pk=pk)
        self.check_object_permissions(self.request, team)
        if len(team.members.all()) >= settings.NUMBER_OF_MEMBERS[team.event][1]:
            raise APIException("Your team already has max number of members")

        member = serializer.save(team=team)

        #send_mail('member-create', member_changed=member)
        django_rq.enqueue(send_mail, 'member-create', member_changed=member)
Example #23
    def deploy(self):
        deploys_in_progress = Deploy.objects.filter(
            in_progress=True, env=self.env
        ).count()

        if deploys_in_progress:
            raise DeployAlreadyInProgress

        self.in_progress = True
        self.save()
        enqueue(captain_deploy, self)
 def _create_task(self, bundle, project):
     """Create node task for project"""
     task = NodeTask.objects.create(
         project=project,
         revision=bundle.data['after'],
         branch=self._get_branch(bundle.data['ref']),
     )
     enqueue(
         run_node_task, args=(task.id,),
         timeout=settings.NODE_MAX_WAIT_TIME,
     )
def create_product(request):
    params = {
        'ringRadius': float(request.POST.get('ringsize')) / (math.pi * 2),
        'initials1': request.POST.get('initials1'),
        'initials2': request.POST.get('initials2'),
        'pattern': create_pattern(request.POST.get('digits'))
    }

    order = Order.objects.create(
        params=params,
        digits=request.POST.get('digits', ''),
        material=request.POST.get('material', 0),
        email=request.POST.get('email', ''),
    )

    source_file = open(os.path.join(settings.BASE_DIR, 'scad', 'CipheRing.scad'))
    scad = compile_scad(source_file, params)

    bucket = s3bucket()
    key = bucket.new_key('scad:{0}'.format(order.uuid))
    key.set_contents_from_string(scad)

    materials = {}
    for material_id in settings.SHAPEWAYS_MATERIALS:
        materials[material_id] = dict(
            materialId=material_id,
            isActive=1,
            markup=float(settings.SHAPEWAYS_MARKUP),
        )

    compile_job = django_rq.enqueue(compile_scad_to_stl, kwargs={
        'order_id': order.id,
    })

    upload_job = django_rq.enqueue(upload_stl_to_shapeways, kwargs={
        'compile_job_id': compile_job.id,
        'order_id': order.id,
        'materials': materials,
        'default_material': order.material,
        'title': 'CipheRing[{0}{1}{2}]'.format(request.POST.get('digits'), params['initials1'], params['initials2']),
    }, depends_on=compile_job)

    email_job = django_rq.enqueue(send_email, kwargs={
        'upload_job_id': upload_job.id,
        'order_id': order.id,
        'email': request.POST.get('email') or settings.ADMINS[0][1],
    }, depends_on=upload_job)

    order.compile_job_id = compile_job.id
    order.upload_job_id = upload_job.id
    order.email_job_id = email_job.id
    order.save()

    return HttpResponseRedirect('{0}/order/?id={1}'.format(settings.FRONTEND_BASE_URL, order.uuid))
    def perform_destroy(self, instance):
        team = get_object_or_404(Team, pk=self.kwargs.get(self.lookup_field))
        if team.leader == instance:
            raise APIException("Leader should not be deleted. You can delete leader only when deleting the team.")
        
        if len(team.members.all()) <= settings.NUMBER_OF_MEMBERS[team.event][0]:
            raise APIException("You cannot delete member because You must have at least " + str(settings.NUMBER_OF_MEMBERS[team.event][0]) + " members")

        #send_mail('member-delete', member_changed=instance)
        django_rq.enqueue(send_mail, 'member-delete', member_changed=instance)
        instance.delete()
Example #27
def task(request, pk):
    """Create task and assign job to employees."""
    schedule = get_object_or_404(Schedule, pk=pk)
    if settings.DEV:
        set_task(schedule)
        messages.success(request, 'Tasks set')
    else:
        # Place in queue to run in the background
        django_rq.enqueue(set_task, schedule)
        messages.success(request, 'Setting tasks, please wait')
    return HttpResponseRedirect(reverse('schedules.views.detail', 
        args=[schedule.pk]))
Example #28
def process_entity_share(entity, profile):
    """Process an entity of type Share."""
    if not entity.entity_type == "Post":
        # TODO: enable shares of replies too
        logger.warning("Ignoring share entity type that is not of type Post")
        return
    try:
        target_content = Content.objects.fed(entity.target_id, share_of__isnull=True).get()
    except Content.DoesNotExist:
        # Try fetching. If found, process and then try again
        remote_target = retrieve_remote_content(
            entity.target_id,
            guid=entity.target_guid,
            handle=entity.target_handle,
            entity_type=entity.entity_type,
            sender_key_fetcher=sender_key_fetcher,
        )
        if remote_target:
            process_entities([remote_target])
            try:
                target_content = Content.objects.fed(entity.target_id, share_of__isnull=True).get()
            except Content.DoesNotExist:
                logger.warning("Share target was fetched from remote, but it is still missing locally! Share: %s",
                               entity)
                return
        else:
            logger.warning("No target found for share even after fetching from remote: %s", entity)
            return
    values = {
        "text": safe_text_for_markdown(entity.raw_content),
        "author": profile,
        # TODO: ensure visibility constraints depending on shared content?
        "visibility": Visibility.PUBLIC if entity.public else Visibility.LIMITED,
        "remote_created": safe_make_aware(entity.created_at, "UTC"),
        "service_label": safe_text(entity.provider_display_name) or "",
    }
    values["text"] = _embed_entity_images_to_post(entity._children, values["text"])
    fid = safe_text(entity.id)
    if getattr(entity, "guid", None):
        values["guid"] = safe_text(entity.guid)
    content, created = Content.objects.fed_update_or_create(fid, values, extra_lookups={'share_of': target_content})
    _process_mentions(content, entity)
    if created:
        logger.info("Saved share: %s", content)
    else:
        logger.info("Updated share: %s", content)
    # TODO: send participation to the share from the author, if local
    # We probably want that to happen even though our shares are not separate in the stream?
    if target_content.local:
        # We should relay this share entity to participants we know of
        from socialhome.federate.tasks import forward_entity
        django_rq.enqueue(forward_entity, entity, target_content.id)
Example #29
    def send_email(self, request, queryset):
        types = queryset.values_list('type', flat=True)
        if not types:
            messages.error(request, _("Choose policy document types to send emails about first!"))
            return

        if len(types) == 2:
            docs = 'both'
        else:
            docs = types[0].value

        django_rq.enqueue(send_policy_document_update_notifications, docs)
        messages.info(request, _("Policy document update emails queued for sending."))
Example #30
    def enqueue(self, action, instance, sender, **kwargs):
        if sender._meta.label in ('admin.LogEntry', 'sessions.Session',
                                  'reversion.Revision', 'reversion.Version'):
            return

        django_rq.enqueue(
            auto_invalidate,
            args=(action,
                  instance._meta.app_label, instance._meta.model_name,
                  instance.pk),
            result_ttl=0,  # Doesn't store result
            timeout=3600,  # Avoids never-ending jobs
        )
Example #31
def post_prediction(request):
    """ Post request to have a single static prediction

        :param request: json
        :return: Response
    """
    jobs = []
    data = request.data
    job_id = int(data['jobId'])
    split_id = int(data['splitId'])
    split = Split.objects.get(pk=split_id)

    try:
        job = Job.objects.get(pk=job_id)
        # new_job = duplicate_orm_row(job)  #todo: replace with simple CREATE
        new_job = Job.objects.create(
            created_date=job.created_date,
            modified_date=job.modified_date,
            error=job.error,
            status=job.status,
            type=job.type,
            create_models=job.create_models,
            case_id=job.case_id,
            event_number=job.event_number,
            gold_value=job.gold_value,
            results=job.results,
            parent_job=job.parent_job,
            split=job.split,
            encoding=job.encoding,
            labelling=job.labelling,
            clustering=job.clustering,
            predictive_model=job.predictive_model,
            evaluation=job.evaluation,
            hyperparameter_optimizer=job.hyperparameter_optimizer,
            incremental_train=job.incremental_train)
        new_job.type = JobTypes.RUNTIME.value
        new_job.status = JobStatuses.CREATED.value
        new_job.split = split
        new_job.save()
    except Job.DoesNotExist:
        return Response({'error': 'Job ' + str(job_id) + ' not in database'},
                        status=status.HTTP_404_NOT_FOUND)

    django_rq.enqueue(runtime_task, new_job)
    serializer = JobSerializer(jobs, many=True)
    return Response(serializer.data, status=status.HTTP_201_CREATED)
Example #32
def on_commit_profile_following_change(action, pks, instance):
    for _id in pks:
        if instance.user:
            # Create an activity
            # UNDO is a bit silly, but that is the activity that is done in AP
            # Maybe we should use local activity verbs instead of the federation library?
            activity_type = ActivityType.FOLLOW if action == "post_add" else ActivityType.UNDO
            instance.create_activity(activity_type, object_id=_id)
        # Send out on the federation layer if local follower, remote followed/unfollowed
        if Profile.objects.filter(
                id=_id, user__isnull=True).exists() and instance.user:
            django_rq.enqueue(send_follow_change, instance.id, _id,
                              True if action == "post_add" else False)
        # Send out notification if local followed
        if action == "post_add" and Profile.objects.filter(id=_id,
                                                           user__isnull=False):
            django_rq.enqueue(send_follow_notification, instance.id, _id)
    def send_custom_email(self, teams, subject, mail_text_message,
                          mail_html_message):
        for team in teams.all():
            addresses = []
            for member in team.team_members.all():
                if (member.user):
                    addresses.append(member.user.email)
                else:
                    addresses.append(member.email)

            mail = EmailMultiAlternatives(
                subject=subject,
                body=mail_text_message,
                to=addresses,
            )
            mail.attach_alternative(mail_html_message, 'text/html')
            django_rq.enqueue(mail.send)
Example #34
def enqueue_moss_check(task_id: int, notify=False):
    cache = caches['default']
    moss_delete_result_from_cache(task_id)
    job = django_rq.enqueue(moss_check_task,
                            task_id,
                            notify,
                            job_timeout=60 * 60)
    cache.set(moss_job_cache_key(task_id), job.id, timeout=60 * 60 * 8)
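The cached job id above can be used to look the job up again later; a minimal sketch of such a lookup, assuming the same moss_job_cache_key helper (get_moss_job itself is not part of the original):

def get_moss_job(task_id: int):
    # Fetch the queued MOSS job via the cached id; returns None if the id has expired.
    job_id = caches['default'].get(moss_job_cache_key(task_id))
    if not job_id:
        return None
    return django_rq.get_queue().fetch_job(job_id)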
Example #35
def process_entity_relationship(entity, profile):
    """Process entity of type Relationship."""
    from socialhome.notifications.tasks import send_follow_notification
    if not entity.relationship == "following":
        logger.debug("Ignoring relationship of type %s", entity.relationship)
        return
    try:
        user = User.objects.get(profile__handle=entity.target_handle,
                                is_active=True)
    except User.DoesNotExist:
        logging.warning(
            "Could not find local user %s for relationship entity %s",
            entity.target_handle, entity)
        return
    profile.following.add(user.profile)
    django_rq.enqueue(send_follow_notification, profile.id, user.profile.id)
    logger.info("Profile %s now follows user %s", profile, user)
Example #36
def update_clusters_info():
    """
    Update the cluster metadata from AWS for the pending
    clusters.

    - To be used periodically.
    - Won't update state if not needed.
    - Will queue updating the Cluster's public IP address if needed.
    """
    # only update the cluster info for clusters that are pending
    active_clusters = Cluster.objects.active()

    # Short-circuit for no active clusters (e.g. on weekends)
    if not active_clusters.exists():
        return

    # get the start dates of the active clusters, set to the start of the day
    # to counteract time differences between atmo and AWS and use the oldest
    # start date to limit the ListCluster API call to AWS
    oldest_start_date = active_clusters.datetimes('start_date', 'day')

    # build a mapping between jobflow ID and cluster info
    cluster_mapping = {}
    for cluster_info in provisioning.cluster_list(oldest_start_date[0]):
        cluster_mapping[cluster_info['jobflow_id']] = cluster_info

    # go through pending clusters and update the state if needed
    for cluster in active_clusters:
        info = cluster_mapping.get(cluster.jobflow_id)
        # ignore if no info was found for some reason,
        # the cluster was deleted in AWS but it wasn't deleted here yet
        if info is None:
            continue

        # don't update the state if it's equal to the already stored state
        if info['state'] == cluster.most_recent_status:
            continue

        # run an UPDATE query for the cluster
        cluster.most_recent_status = info['state']
        cluster.save(update_fields=['most_recent_status'])

        # if not given enqueue a job to update the public IP address
        if not cluster.master_address:
            django_rq.enqueue(update_master_address, cluster.id)
Example #37
def generate_share(request, cid):
    """generate a temporary share link for a container

       Parameters
       ==========
       cid: the container to generate a share link for
    """
    container = get_container(cid)
    edit_permission = container.has_edit_permission(request)

    if edit_permission:
        days = request.POST.get("days", None)
        if days is not None:
            days = int(days)
            try:
                expire_date = calculate_expiration_date(days)
                share, _ = Share.objects.get_or_create(container=container,
                                                       expire_date=expire_date)
                share.save()

                # Generate an expiration task
                django_rq.enqueue(expire_share, sid=share.id, eta=expire_date)

                link = reverse(
                    "download_share",
                    kwargs={
                        "cid": container.id,
                        "secret": share.secret
                    },
                )

                expire_date = datetime.strftime(expire_date, "%b %m, %Y")
                response = {
                    "status": "success",
                    "days": days,
                    "expire": expire_date,
                    "link": link,
                }
            except:
                response = {"status": "error", "days": days}

        return JsonResponse(response)

    return JsonResponse(
        {"error": "You are not allowed to perform this action."})
    def post(self, request, format=None):
        serializer = AreaCoordinatorArchiveOperationSerializer(
            data=request.data, context={'request': request})
        serializer.is_valid(raise_exception=True)
        object = serializer.save()

        # Run the following functions in the background so our API performance
        # would not be impacted by non-essential computations.
        django_rq.enqueue(geoip2_area_coordinator_audit_func, request.tenant,
                          object)

        read_serializer = AreaCoordinatorRetrieveSerializer(object,
                                                            many=False,
                                                            context={
                                                                'request':
                                                                request,
                                                            })
        return Response(read_serializer.data, status=status.HTTP_200_OK)
Example #39
def vote(request, question_id):
    question = get_object_or_404(Question, pk=question_id)
    try:
        selected_choice = question.choice_set.get(pk=request.POST['choice'])
    except (KeyError, Choice.DoesNotExist):
        # Redisplay the question voting form.
        return render(request, 'polls/detail.html', {
            'question': question,
            'error_message': "You didn't select a choice.",
        })
    else:
        django_rq.enqueue('polls.worker.vote', selected_choice.id)

        # Always return an HttpResponseRedirect after successfully dealing
        # with POST data. This prevents data from being posted twice if a
        # user hits the Back button.
        return HttpResponseRedirect(
            reverse('polls:voting', args=(question.id, )))
Example #40
def process_entity_follow(entity, profile):
    """Process entity of type Follow."""
    from socialhome.notifications.tasks import send_follow_notification
    try:
        user = User.objects.get(profile__handle=entity.target_handle,
                                is_active=True)
    except User.DoesNotExist:
        logging.warning("Could not find local user %s for follow entity %s",
                        entity.target_handle, entity)
        return
    if entity.following:
        profile.following.add(user.profile)
        django_rq.enqueue(send_follow_notification, profile.id,
                          user.profile.id)
        logger.info("Profile %s now follows user %s", profile, user)
    else:
        profile.following.remove(user.profile)
        logger.info("Profile %s has unfollowed user %s", profile, user)
Example #41
def refresh(name):
    response = {}
    try:
        player = Player.objects.get(name__iexact=name)
    except:
        response["text"] = "Player Not Found. Try add"
        response["response_type"] = "ephemeral"
        return response

    player = rapi.getRanks(player)

    django_rq.enqueue(rapi.populate_solo, player)
    django_rq.enqueue(rapi.populate_flex, player)

    response["text"] = "Player Refreshed. Adding Games"
    response["response_type"] = "ephemeral"

    return response
Example #42
def enqueue_moving_average(request):
    """
    Async calculate the moving average from last 3 months.
    This will return a url where you can poll for the result.
    """
    job = django_rq.enqueue(get_txn_average)
    polling_url = settings.BACKEND_URL + '/transactions/service2/poll/' + str(
        job.id)
    return JsonResponse({'url': polling_url}, status=202)
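The poll endpoint behind that URL is not shown; a minimal sketch, assuming the job id from the URL is resolved against the default queue (the view name is invented):

def poll_moving_average(request, job_id):
    # Look up the queued job and report its state or result as JSON.
    job = django_rq.get_queue().fetch_job(job_id)
    if job is None:
        return JsonResponse({'error': 'unknown job'}, status=404)
    if job.is_finished:
        return JsonResponse({'average': job.result}, status=200)
    return JsonResponse({'status': job.get_status()}, status=202)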
Example #43
def user_force_update_site(request):
    """
    Force-refresh a feed source, triggered manually by the user
    """
    site_id = request.POST.get('site_id', '')

    site = Site.objects.get(pk=site_id, status='active')

    if site:
        # refresh asynchronously
        logger.info(f"强制刷新源:`{site_id}")
        django_rq.enqueue(update_sites_async, [
            site.pk,
        ], True)

        return JsonResponse({})

    return HttpResponseNotFound("Param Error")
def test_azure_sync_resource_group_delete(session_get_func,
                                          get_subscription_and_session_func,
                                          mock_response_class, json_file,
                                          subscription,
                                          mce_app_resource_type_azure_group,
                                          django_rq_worker):
    """Check sync Azure ResourceGroup - Delete"""

    assert ResourceEventChange.objects.count() == 0
    assert ResourceGroupAzure.objects.count() == 0
    assert ResourceAzure.objects.count() == 0

    data = json_file("resource_group_list.json")
    count_groups = len(data['value'])

    get_subscription_and_session_func.return_value = (subscription,
                                                      requests.Session())

    # --- create
    session_get_func.return_value = mock_response_class(200, data)
    job = django_rq.enqueue('mce_tasks_rq.azure.sync_resource_group',
                            args=[subscription.subscription_id])
    django_rq_worker.work()

    # --- Delete
    session_get_func.return_value = mock_response_class(200, {"value": []})

    job = django_rq.enqueue('mce_tasks_rq.azure.sync_resource_group',
                            args=[subscription.subscription_id])
    django_rq_worker.work()

    assert job.get_status() == JobStatus.FINISHED

    # FIXME: bug in predicting the number of deletes
    # it reports 10 even though I have 4 objects (Tag, Event, Group and Resource)
    assert job.result == dict(errors=0,
                              created=0,
                              updated=0,
                              deleted=count_groups)

    assert ResourceEventChange.objects.filter(
        action=constants.EventChangeType.DELETE).count() == count_groups

    assert ResourceGroupAzure.objects.count() == 0
Example #45
    def post(self, request, format=None):
        ''' Takes a request of the following form:
        'app_facebook_id': Facebook ID of the app
        'oauth_token': OAuth Token for the particular user
        'facebook_id': Facebook ID of the user
        '''
        data = request.DATA
        try:
            app_facebook_id = data['app_facebook_id']
            oauth_token = data['oauth_token']
            facebook_id = data['facebook_id']
        except KeyError:
            error = {'error': "Invalid request"}
            return Response(error, status=status.HTTP_400_BAD_REQUEST)

        django_rq.enqueue(process_request, app_facebook_id, oauth_token,
                          facebook_id)
        # django_rq.enqueue_call(func=process_request, args=(app_facebook_id, oauth_token, facebook_id), timeout=1500)
        return Response(status=status.HTTP_201_CREATED)
Example #46
def update_streams_with_content(content):
    """Handle content adding to streams.

    First adds to the author streams, then queues the rest of the user streams to a background job.
    """
    if content.content_type == ContentType.REPLY:
        # No need to do these just now
        return
    # Do author immediately
    if content.author.is_local:
        user = content.author.user
        keys = []
        for stream_cls in CACHED_STREAM_CLASSES:
            keys = check_and_add_to_keys(stream_cls, user, content, keys)
        add_to_redis(content, keys)
    # Queue rest to RQ
    for stream_cls in CACHED_STREAM_CLASSES:
        django_rq.enqueue(add_to_stream_for_users, content.id,
                          stream_cls.__name__)
Example #47
 def handle(self, *args, **kwargs):
     eregs_args = argparse_to_click(**kwargs)
     if not eregs_args:
         show_stats(self.stdout.write)
     else:
         job = django_rq.enqueue(run_eregs_command, eregs_args,
                                 # Run for at most half an hour
                                 # Don't delete successes
                                 timeout=60*30, result_ttl=-1)
         self.stdout.write("OK: {}".format(job))
Example #48
def addsearch(request):
    newkw = request.POST.get('newkw')

    # add db entry
    query = Searches(keyword=newkw)
    query.save()
    sfn = "{}/pickles/{}.pkl".format(basedir, query.id)
    # create search results file
    s = search(sfn, newkw)

    # get list of data files
    datafiles = os.listdir("{}/data/".format(basedir))

    # enqueue all data file additions
    for d in datafiles:
        django_rq.enqueue(do_add_file, sfn, "{}/data/{}".format(basedir, d))

    return inner(request,
                 {'message': "Added kw %s with id %d" % (newkw, query.id)})
    def post(self, request, format=None):
        """
        Create
        """
        write_serializer = StaffCommentCreateSerializer(
            data=request.data, context={'request': request})
        write_serializer.is_valid(raise_exception=True)
        obj = write_serializer.save()

        # Run the following functions in the background so our API performance
        # would not be impacted by non-essential computations.
        django_rq.enqueue(geoip2_staff_comment_audit_func, request.tenant, obj)

        read_serializer = StaffCommentListSerializer(
            obj,
            many=False,
            context={'request': request},
        )
        return Response(read_serializer.data, status=status.HTTP_201_CREATED)
Example #50
def contacts_view(request):
    form = ContactForm(request.POST or None)
    if form.is_valid():
        subject = form.cleaned_data['subject']
        message = form.cleaned_data['message']
        sender = form.cleaned_data['sender']

        recipients = ['*****@*****.**']

        django_rq.enqueue(send_mail, subject, message, sender, recipients)

        new_subject = 'Your mail to mycourse'
        new_message = 'Your mail to mycourse has been sent'
        new_sender = '*****@*****.**'
        new_recipients = [sender]

        django_rq.enqueue(send_mail, new_subject, new_message, new_sender, new_recipients)

    return render(request, 'contacts.html', context={"form": form})
Example #51
    def notification_register(self):
        # get appearance settings
        from academy.apps.offices import utils
        sett = utils.get_settings(serializer=True)

        data = {
            'token': default_token_generator.make_token(self),
            'uid': int_to_base36(self.id),
            'host': settings.HOST,
            'user': self,
            'email_title': 'Aktivasi Akun'
        }
        data.update(sett)
        kwargs = construct_email_args(
            recipients=[self.email],
            subject='Aktivasi Akun',
            content=render_to_string('emails/register.html', context=data),
            priority=PRIORITY.now)
        django_rq.enqueue(mail.send, **kwargs)
Example #52
def content_post_save(instance, **kwargs):
    fetch_preview(instance)
    render_content(instance)
    if kwargs.get("created"):
        notify_listeners(instance)
        if instance.parent:
            transaction.on_commit(lambda: django_rq.enqueue(
                send_reply_notifications, instance.id))
    if instance.is_local:
        transaction.on_commit(lambda: federate_content(instance))
def prediction_task(job_id):
    logger.info("Start prediction task ID {}".format(job_id))
    job = Job.objects.get(id=job_id)

    try:
        if (job.status == JobStatuses.CREATED.value and job.type != JobTypes.UPDATE.value) or \
           (job.status == JobStatuses.CREATED.value and job.type == JobTypes.UPDATE.value and
            job.incremental_train.status == JobStatuses.COMPLETED.value):

            job.status = JobStatuses.RUNNING.value
            job.save()
            job_start_time = time.time()
            if job.hyperparameter_optimizer is not None and \
                job.hyperparameter_optimizer.optimization_method != HyperparameterOptimizationMethods.NONE.value:
                result, model_split = hyperopt_task(job)
            else:
                result, model_split = calculate(job)
            job_elapsed_time = time.time() - job_start_time
            if job.hyperparameter_optimizer is not None and \
                job.hyperparameter_optimizer.optimization_method != HyperparameterOptimizationMethods.NONE.value:
                job.hyperparameter_optimizer.elapsed_time = timedelta(
                    seconds=job_elapsed_time)
                job.hyperparameter_optimizer.save()
                job.save()
            logger.info('\tJob took: {} in HH:MM:ss'.format(
                time.strftime("%H:%M:%S", time.gmtime(job_elapsed_time))))
            if job.create_models:
                save_models(model_split, job)
            job.result = result
            job.status = JobStatuses.COMPLETED.value
        elif job.status in [
                JobStatuses.COMPLETED.value, JobStatuses.ERROR.value,
                JobStatuses.RUNNING.value
        ]:
            django_rq.enqueue(prediction_task, job.id)
    except Exception as e:
        logger.error(e)
        job.status = JobStatuses.ERROR.value
        job.error = str(e.__repr__())
        raise e
    finally:
        job.save()
        publish(job)
def test_azure_sync_resource_group_create(
        session_get_func,
        get_subscription_and_session_func,
        mock_response_class,
        json_file,
        subscription,
        mce_app_resource_type_azure_group,  # keep it, it is required
        django_rq_worker):
    """Check sync Azure ResourceGroup - Create"""

    data = json_file("resource_group_list.json")
    count_groups = len(data['value'])

    get_subscription_and_session_func.return_value = (subscription,
                                                      requests.Session())
    session_get_func.return_value = mock_response_class(200, data)

    job = django_rq.enqueue('mce_tasks_rq.azure.sync_resource_group',
                            args=[subscription.subscription_id])
    django_rq_worker.work()

    assert job.get_status() == JobStatus.FINISHED

    assert job.result == dict(errors=0,
                              created=count_groups,
                              updated=0,
                              deleted=0)

    assert ResourceGroupAzure.objects.count() == count_groups

    # 2 tags in the first ResourceGroup of resource_group_list.json
    assert Tag.objects.count() == 2

    assert ResourceEventChange.objects.filter(
        action=constants.EventChangeType.CREATE).count() == count_groups

    # Restart - No changes
    job = django_rq.enqueue('mce_tasks_rq.azure.sync_resource_group',
                            args=[subscription.subscription_id])

    django_rq_worker.work()

    assert job.result == dict(errors=0, created=0, updated=0, deleted=0)
Example #55
 def post(self, request):
     # Since it is primitive APIView. I have to make my own context
     context = {'request': PushContext(request.user)}
     serializer = PushNotificationSerializer(data=request.data,
                                             context=context)
     if serializer.is_valid():
         """Enqueue job to rqworker and let it send the message with FCM"""
         devices = GCMDevice.objects.filter(
             user=serializer.validated_data.get('customer'))
         django_rq.enqueue(
             worker_wrapper,
             devices,
             message=serializer.validated_data.get('message', ""),
             extra=serializer.validated_data.get('extra',
                                                 {'message': 'Blank'}))
         logger.info(f"data: {serializer.data}")
         return Response(data=serializer.data, status=status.HTTP_200_OK)
     return Response(data=serializer.errors,
                     status=status.HTTP_400_BAD_REQUEST)
 def handle(self, *args, **kwargs):
     errored_jobs = Job.objects.filter(status=JobStatuses.ERROR.value)
     for j in errored_jobs:
         j.status = JobStatuses.CREATED.value
         j.error = ''
         j.save()
     jobs_to_requeue = [j.id for j in errored_jobs]
     print('Requeue of', jobs_to_requeue)
     for job_id in jobs_to_requeue:
         django_rq.enqueue(tasks.prediction_task, job_id)
     print('done')
Example #57
def create_checks(request):  # create receipts for an order
    if request.method == 'POST':
        body_unicode = request.body.decode('UTF-8')
        try:
            body_data = json.loads(body_unicode)
        except json.JSONDecodeError:
            return JsonResponse({"error": "Invalid JSON"}, status=400)

        # if receipts already exist for this order id
        if Check.objects.filter(order__id=body_data['id']).exists():
            return JsonResponse(
                {"error": "Для данного заказа уже созданы чеки"}, status=400)

        # if no printer is configured for this point
        if not Printer.objects.filter(point_id=body_data["point_id"]).exists():
            return JsonResponse(
                {"error": "Для данной точки не настроено ни одного принтера"},
                status=400)

        kitchen_printer = Printer.objects.get(point_id=body_data['point_id'],
                                              check_type='kitchen')
        kitchen_path = f'media/pdf/{body_data["id"]}_{"kitchen"}.pdf'
        kitchen_check = Check.objects.create(printer_id=kitchen_printer,
                                             type='kitchen',
                                             order=body_data,
                                             status='new',
                                             pdf_file=kitchen_path)
        enqueue(generate_pdf, 'kitchen', kitchen_check.id, kitchen_path,
                body_data)  # add the task to the queue

        client_printer = Printer.objects.get(point_id=body_data['point_id'],
                                             check_type='client')
        client_path = f'media/pdf/{body_data["id"]}_{"client"}.pdf'
        client_check = Check.objects.create(printer_id=client_printer,
                                            type='client',
                                            order=body_data,
                                            status='new',
                                            pdf_file=client_path)
        enqueue(generate_pdf, 'client', client_check.id, client_path,
                body_data)  # add the task to the queue

        return JsonResponse({"ok": "Чеки успешно созданы"}, status=200)
    return JsonResponse({"error": "Method Not Allowed"}, status=405)
Example #58
    def get(self, request, name):

        # The request specifies ?arch=amd64 but that's all we got
        print("GET GetImageView")

        names = parse_image_name(name)

        # The user can specify an arch, currently only support amd64
        arch = request.query_params.get('arch', 'amd64')
        container = get_container(names)

        # If an arch is defined, ensure it matches the request
        if arch:
            if container.metadata.get('arch', 'amd64') != "amd64":
                return Response(status=404)

        # If there is no container, regardless of permissions, 404
        if container is None:
            return Response(status=404)

        # Private containers we check the token
        if container.collection.private:
            token = get_token(request)

            # Only owners and contributors can pull
            collection = container.collection
            if token.user not in collection.owners.all() and token.user not in collection.contributors.all():
                return Response(status=404)

        # Generate log for downloads (async with worker)
        django_rq.enqueue(generate_log,
                          view_name = 'shub.apps.api.urls.containers.ContainerDetailByName',
                          ipaddr = request.META.get("HTTP_X_FORWARDED_FOR", None),
                          method = request.method,
                          params = request.query_params.dict(),
                          request_path = request.path,
                          remote_addr = request.META.get("REMOTE_ADDR", ""),
                          host = request.get_host(),
                          request_data = request.data,
                          auth_header = request.META.get("HTTP_AUTHORIZATION"))

        data = generate_container_metadata(container)
        return Response(data={"data": data}, status=200)
Example #59
def verify_payload(request, collection):
    """verify payload will verify a payload"""

    from shub.plugins.google_build.tasks import parse_hook
    from shub.plugins.google_build.actions import is_over_limit

    payload = load_body(request)

    # Validate the payload with the collection secret
    signature = request.META.get("HTTP_X_HUB_SIGNATURE")
    if not signature:
        return JsonResponseMessage(message="Missing credentials.")

    status = validate_payload(
        collection=collection, payload=request.body, request_signature=signature
    )
    if not status:
        return JsonResponseMessage(message="Invalid credentials.")

    # If a branch is provided, this is the version  "ref": "refs/heads/master",
    try:
        branch = payload.get("ref", "refs/heads/master").replace("refs/heads/", "")
    except:
        branch = "master"

    # Some newer webhooks have commits
    commits = payload.get("commits")

    # Ensure we aren't over limit
    if is_over_limit():
        message = (
            "Registry concurrent build limit is "
            + "%s" % SREGISTRY_GOOGLE_BUILD_LIMIT
            + ". Please try again later."
        )

        return JsonResponseMessage(message=message, status_message="Permission Denied")

    django_rq.enqueue(parse_hook, cid=collection.id, branch=branch, commits=commits)

    return JsonResponseMessage(
        message="Hook received and parsing.", status=200, status_message="Received"
    )
Example #60
def _delete_collection(request, cid):
    '''The underlying function to delete a collection; returns True/False
       to the calling view to indicate whether the deletion was queued.

       Parameters
       ==========
       cid: the collection id to delete
    '''
    collection = get_collection(cid)
    
    # Only an owner can delete
    if not collection.has_edit_permission(request):
        return False
    
    # Queue the job to delete the collection
    django_rq.enqueue(delete_container_collection, 
                      cid=collection.id,
                      uid=request.user.id)
    return True