Beispiel #1
0
def scan(tags=None, random_selection=False, sample_size=10):
    """Create a scan document in MongoDB and enqueue it for execution.

    Parameters:
        tags: optional list of tags to attach to the scan.
        random_selection: when True, scan only a random sample of sites.
        sample_size: how many sites to keep when random_selection is True
            (new parameter; defaults to the previously hard-coded 10).
    """
    client = MongoClient()
    db = client.stooge

    # Renamed from `scan`, which shadowed the function itself.
    scan_doc = {"tags": tags if tags is not None else [],
                "created": datetime.datetime.utcnow(),
                "state": "CREATED",
                "started": None,
                "finished": None,
                "sites": []}

    sites = list(db.sites.find())
    if random_selection:
        random.shuffle(sites)
        sites = sites[:sample_size]

    # One result bucket per site; the worker task fills these in later.
    for site in sites:
        scan_doc["sites"].append({"_id": site["_id"],
                                  "responses": {"http": [], "https": []},
                                  "results": {},
                                  "ssllabs": None,
                                  "error": None,
                                  "url": site["url"],
                                  "bugs": None})

    scan_id = db.scans.insert(scan_doc)

    send_task("stooge.scanner.tasks.execute_scan", [str(scan_id)])
Beispiel #2
0
 def notify_progress(self, workflow_task, progress):
     """Enqueue the Celery task that records *progress* for a workflow task."""
     payload = {'pk': workflow_task, 'progress': progress}
     send_task('ws.celery.bpm.task_progress', kwargs=payload)
Beispiel #3
0
def run():
    """
    Inspect the MySQL process list, kill queries running too long and
    report slow ones to the devs.

    Queries running >= SECONDS_KILL_QUERY are killed (after notifying the
    devs); queries running > SECONDS_SLOW_QUERY are only reported.
    """

    #get timesheets
    sql = text("""
        SHOW FULL PROCESSLIST
        """)

    conn = engine.connect()
    try:
        data = conn.execute(sql).fetchall()

        for d in data:
            # Only client queries matter; skip Sleep/Connect/etc. rows.
            if d['Command'] != 'Query':
                continue

            if d['Time'] >= SECONDS_KILL_QUERY:
                report = """Sorry guys, I need to kill this query.\n\n%s
            """ % (pformat(d.items(), 4))
                send_task("notify_devs.send", ("KILL QUERY REPORT", report))
                # Separate name so the outer `sql` query is not clobbered.
                kill_sql = text("""
                KILL :process_id
                """)
                conn.execute(kill_sql, process_id=d['Id'])
                continue

            if d['Time'] > SECONDS_SLOW_QUERY:
                report = """Please optimize this query.\n\n%s
            """ % (pformat(d.items(), 4))
                send_task("notify_devs.send", ("SLOW QUERY REPORT", report))
    finally:
        # Release the connection even if a notification task raises.
        conn.close()
Beispiel #4
0
def send_join_request_notification(sender, instance, created, raw, using, **kwargs):
    """Notify the team captains when a pending join request is first created."""
    is_new_pending = created and instance.status == 'W'
    if is_new_pending and notification:
        payload = [unicode(instance.team),
                   instance.profile.user.username,
                   instance.id,
                   instance.team.captain_ids]
        send_task("profiles.tasks.notify_member_join_request", payload)
    return instance
Beispiel #5
0
def youtubedl_default(options):
    print options
    task_id = youtubedl_default.request.id
    youtubedl_default.update_state(state="STARTING", meta={"progress": 0})
    is_succ, msg = downloadvid(options["ytid"], options["task_id"])
    if not is_succ:
        send_task("tasks.youtubedl_alt1", [options])
Beispiel #6
0
def refetchResearcher(request, pk):
    """Queue a full paper refetch for a researcher, then redirect back.

    Only the researcher's own user or staff may trigger the refetch.
    """
    researcher = get_object_or_404(Researcher, pk=pk)
    allowed = request.user.is_staff or researcher.user == request.user
    if not allowed:
        return HttpResponseForbidden(
            "Not authorized to update papers for this researcher.")
    send_task('fetch_everything_for_researcher', [], {'pk': pk})
    return redirect(request.META['HTTP_REFERER'])
 def run_task(self, request, queryset):
     """Admin action: fire off every selected task via Celery (admins only)."""
     if not request.user.is_admin:
         self.message = 'You must be an admin to perform this action.'
         return
     for entry in queryset.all():
         send_task(entry.task, args=json.loads(entry.args), kwargs=json.loads(entry.kwargs))
     self.message = 'Tasks are running'
Beispiel #8
0
def check_leads_id_for_zero_hourly_rate(subcontractors_id):
    """Inspect the subcontractor's CouchDB document when an hourly rate of
    zero was computed, and notify the devs unless the lead is exempted.

    leads_id 11 (chris) and 7935 (rica) are exempted from the warning.
    Missing documents are also reported to the devs.
    """
    s = couchdb.Server(settings.COUCH_DSN)
    db = s['rssc']
    doc_id = 'subcon-%s' % subcontractors_id
    doc = db.get(doc_id)
    if doc is None:  # identity test, not `== None`
        logging.info(
            'check_leads_id_for_zero_hourly_rate cannot find document %s' %
            doc_id)
        send_task('notify_devs.send', [
            'celery error subcontractors.check_leads_id_for_zero_hourly_rate',
            'Cannot find document %s' % doc_id
        ])
        return
    leads_id = doc.get('leads_id')
    if leads_id not in (11, 7935):  # chris and rica are exempted
        logging.info(
            'check_leads_id_for_zero_hourly_rate hourly rate is 0 for %s' %
            doc_id)
        send_task('notify_devs.send', [
            'Hourly Rate is zero for sid:%s' % subcontractors_id,
            'Please check rates!'
        ])
        return
def callback(ch, method, properties, body):
    """AMQP consumer callback for salary-update scheduling messages.

    ``body`` is JSON of the form::

        {
            "subcon_id": ...,
            "scheduled_date": "YYYY-mm-dd HH:MM:SS",
            "timezone": "Asia/Manila",
            "admin": "<pk> <fname> <lname>"
        }
    """
    logging.info(" [x] salary scheduling %r" % (body, ))
    send_task('skype_messaging.notify_devs',
              ['Schedule Salary Update : %r' % body])
    payload = json.loads(body)
    subcon_id = payload['subcon_id']
    # Interpret the scheduled date in the sender's timezone, then normalize
    # to Asia/Manila before handing it to Celery as the task ETA.
    naive_eta = datetime.strptime(payload['scheduled_date'],
                                  '%Y-%m-%d %H:%M:%S')
    local_eta = timezone(payload['timezone']).localize(naive_eta)
    eta = local_eta.astimezone(timezone('Asia/Manila'))
    logging.info(' [x] sending task %s @ %s' % (subcon_id, eta))
    send_task('ScheduleActivation.StaffSalaryUpdate', args=[subcon_id],
              eta=eta)
Beispiel #10
0
    def run(self, task_queue, uid, document_type, callback_url, callback_verb, expire, cloudooo_settings, sam_settings):
        # Pull the document from SAM, convert it to PDF via cloudooo when
        # needed, extract metadata with the extractor matching document_type,
        # store the metadata key back in SAM and enqueue the callback task.
        print 'Metadata extraction started...'
        self._sam = Restfulie.at(sam_settings['url']).auth(*sam_settings['auth']).as_('application/json')
        cloudooo = Server("%s/RPC2" % cloudooo_settings['url'])

        response = loads(self._sam.get(key=uid).body)
        doc = response['data']['file']
        filename = response['data']['filename']
        if not filename.endswith('pdf'):
            print "Converting document to pdf..."
            # Source format is taken from the last three filename characters.
            doc = cloudooo.convertFile(doc, filename[-3:], 'pdf')

        # NOTE(review): assumes TemporaryFile here accepts suffix=/delete=
        # (NamedTemporaryFile semantics) — confirm which helper is imported.
        temp_doc = TemporaryFile(suffix=filename[:-3] + "pdf", delete=False)
        print temp_doc.name, filename
        temp_doc.write(decodestring(doc))
        temp_doc.close()

        # Dispatch table mapping document_type to its extractor class;
        # an unknown type raises KeyError here.
        extractor = {'tcc': TccExtractor, 'event': EventExtractor}
        print "Extracting the metadata..."
        metadata = extractor[document_type](temp_doc.name).all_metadata()
        metadata_key = self._sam.post(value=metadata, expire=expire).resource().key
        response['data']['metadata_key'] = metadata_key

        self._sam.put(key=uid, value=response['data']).resource()
        print "Metadata extraction complete. Sending callback task..."

        send_task('nsicloudooomanager.tasks.MetadataCallback',
                  args=(callback_url, callback_verb, uid, metadata_key),
                  queue=task_queue, routing_key=task_queue)
        print "Metadata callback task sent."
Beispiel #11
0
def db_delete(request, slug):
    "Drop a database"
    # Only queue the drop for simple alphanumeric/dash names; in every
    # case the user ends up back on the database list.
    if re.match(r"[a-zA-Z0-9-]+$", slug):
        send_task("mysql.delete_db",
                  kwargs={"user": request.user.username, "name": slug},
                  routing_key="limeade.mysql")
    return redirect("limeade_mysql_db_list")
 def run_task(self, request, queryset):
     """Admin action: enqueue every selected task (superusers only)."""
     if not request.user.is_superuser:
         self.message = 'You must be a superuser to perform this action.'
         return
     for entry in queryset.all():
         send_task(entry.task)
     self.message = 'Tasks are running'
 def run_task(self, request, queryset):
     """Send every selected task to Celery; requires superuser rights."""
     if request.user.is_superuser:
         for item in queryset.all():
             send_task(item.task)
         self.message = 'Tasks are running'
     else:
         self.message = 'You must be a superuser to perform this action.'
Beispiel #14
0
def send_team_registration_notification(sender, instance, created, raw, using,
                                        **kwargs):
    """On first save of a team, notify the captain via the contact address."""
    if notification and created:
        send_task("profiles.tasks.notify_team_registration",
                  [instance.captain_id, settings.CONTACT_EMAIL])
    return instance
Beispiel #15
0
def add_feed(request):
    """Handle the "add feed" form: deduplicate feeds by a hash of the URL,
    subscribe the current user and queue a preview e-mail.

    Always redirects back to the feed list, flashing a success or error
    message depending on form validity.
    """
    if request.method == 'POST':
        form = FeedForm(request.POST)
        if form.is_valid():
            # Feed identity is an MD5 over host + path (+ username when
            # present), so the same feed URL maps to a single Feed row.
            url = urlparse(form.clean_url())
            feed_hash = md5.new(url.netloc)  # pylint: disable-msg=E1101
            feed_hash.update(url.path)  # pylint: disable-msg=E1101
            if url.username:
                feed_hash.update(url.username)
            feed_hash = feed_hash.hexdigest()

            try:
                feed = Feed.objects.get(hash=feed_hash)
            except Feed.DoesNotExist:
                # First subscriber for this feed: create the row.
                feed = form.save(commit=False)
                feed.hash = feed_hash
                feed.save()

            Subscription.objects.create(feed=feed, user=request.user,
                                        subscribed_at=tz_utcnow())

            # work around bs4.element.NavigableString not being serializable
            feed.title = unicode(feed.title)
            send_task("feedmanager.prepare_preview_email",
                      (request.user, feed))

            messages.success(request, 'New feed added successfully. Check '
                                      'your inbox for a preview of this feed!')
        else:
            messages.error(request, 'Only RSS and ATOM URLs/feeds are '
                                    'currently supported.')

    return HttpResponseRedirect(reverse('feedmanager:feeds'))
Beispiel #16
0
def get_hourly_rate(subcontractors_id, datetime_req):
    """given the subcontractors_id and datetime_req in Asia/Manila timezone
    return decimal hourly rate
    tables involved subcontractors_client_rate, subcontractors

    Recent requests (last 30 minutes, plus 5 minutes of clock skew) are
    served from MySQL; older ones from CouchDB with a MySQL fallback.
    Any exception notifies the devs and retries the Celery task.

    NOTE(review): the docstring promises a return value but no `return`
    statement is visible in this block — confirm whether the function was
    truncated or the callers rely only on the side effects.
    """
    try:
        #force to retrieve rates from mysql if datetime_req is very recent
        ph_time = get_ph_time()
        ph_time_past = ph_time - timedelta(minutes=30)
        ph_time += timedelta(minutes=5)
        if datetime_req >= ph_time_past and datetime_req <= ph_time:
            hourly_rate = get_rates_from_mysql(subcontractors_id, datetime_req)

            if hourly_rate == Decimal(0):
                # A zero rate is suspicious; see if this lead is exempted.
                check_leads_id_for_zero_hourly_rate(subcontractors_id)
            else:
                compare_hourly_rates(subcontractors_id, hourly_rate)

        else:
            hourly_rate = get_rates_from_couch(subcontractors_id, datetime_req)
            if hourly_rate == None:
                # CouchDB has no record; fall back to the MySQL source.
                hourly_rate = get_rates_from_mysql(subcontractors_id,
                                                   datetime_req)
    except Exception, exc:
        exc_type, exc_value, exc_traceback = sys.exc_info()
        logging.info(exc_type)
        logging.info(exc_value)
        logging.info('%r' % (traceback.extract_stack()))
        send_task('notify_devs.send', [
            'celery error subcontractors.get_hourly_rate',
            '%s\n%s\ntraceback:\n%s' %
            (exc_type, exc_value, repr(traceback.extract_stack()))
        ])
        raise get_hourly_rate.retry(exc=exc)
Beispiel #17
0
    def run(self, uid, callback_url, video_link, sam_settings):
        # Convert a video unless SAM already marks it as converted; the
        # source is either downloaded from video_link or fetched from SAM,
        # and a callback task is enqueued when callback_url was given.
        self.callback_url = callback_url
        self.sam = Restfulie.at(sam_settings['url']).auth(*sam_settings['auth']).as_('application/json')
        self.destination_uid = uid
        self.tmp_path = "/tmp/original-%s" % uuid4()
        video_is_converted = False

        if video_link:
            # Remote source: fetch the original into tmp_path.
            self._download_video(video_link)
        else:
            # Local source: load the original video (and its converted
            # flag, when present) from SAM.
            response = self._get_from_sam(uid)
            self._original_video = response.data.video
            if not hasattr(response, 'converted'):
                video_is_converted = False
            else:
                video_is_converted = response.data.converted

        if not video_is_converted:
            print "Conversion started."
            self._process_video()
            print "Conversion finished."
            if not self.callback_url == None:
                print "Callback task sent."
                send_task('nsivideoconvert.tasks.Callback', args=(callback_url, self.destination_uid), queue='convert',
                          routing_key='convert')
            else:
                print "No callback."
            return self.destination_uid
        else:
            raise VideoException("Video already converted.")
Beispiel #18
0
def NAB_surcharge(doc_credit_accounting, doc_nab_transaction):
    """create surcharges for NAB payment

    Derives the card type from the NAB transaction, applies the merchant
    facility percentage (1% Visa/MasterCard, 2% American Express), posts a
    'credit accounting' surcharge document against the client's running
    balance in CouchDB and notifies the devs.

    Raises ClientDocumentError for unknown card types.
    """
    logging.info('doc_credit_accounting %s' % doc_credit_accounting)
    logging.info('doc_nab_transaction %s' % doc_nab_transaction)

    #get card type
    card_number = doc_nab_transaction.get('GET').get('pan')
    card_type = GetCardType(card_number)
    if card_type in ('Visa', 'MasterCard'):
        merchant_facility_percentage = 1
    elif card_type == 'American Express':
        merchant_facility_percentage = 2
    else:
        raise ClientDocumentError(
            'CreateNABSurcharges : unknown card type for doc_nab_transaction %s , doc_credit_accounting %s'
            % (doc_nab_transaction, doc_credit_accounting))

    currency = doc_credit_accounting.get('currency')
    leads_id = doc_credit_accounting.get('client_id')
    now = GetPhTime(as_array=True)

    # Surcharge is a percentage of the credited amount.
    amount = Decimal(doc_credit_accounting.get('credit'))
    charge = amount * Decimal(merchant_facility_percentage) / Decimal(100)
    particular = '%s%% Merchant Facility Surcharge for the %s %0.2f %s Card Payment' % (
        merchant_facility_percentage, currency, amount, card_type)

    couch_server = couchdb.Server(settings.COUCH_DSN)
    db_client_docs = couch_server['client_docs']

    # Current running balance for the client; defaults to 0 when the view
    # has no row for this client yet.
    r = db_client_docs.view('client/running_balance', key=leads_id)

    if len(r.rows) == 0:
        running_balance = Decimal('0')
    else:
        running_balance = Decimal('%s' % r.rows[0].value)

    running_balance -= charge

    doc_transaction = dict(
        added_by=
        'automatic charge on payment (celery_surcharge:CreditCardSurcharges.py)',
        added_on=now,
        charge='%0.2f' % charge,
        client_id=leads_id,
        credit='0.00',
        credit_type='CARD_SURCHARGE',
        currency=currency,
        remarks='Credit Card Surcharge',
        type='credit accounting',
        running_balance='%0.2f' % running_balance,
        particular=particular)

    db_client_docs.save(doc_transaction)
    send_task('notify_devs.send', [
        'NAB Credit Card Surcharge FEE EXTRACTED',
        'Please check NAB payment doc %s, fee:%s' %
        (doc_nab_transaction, charge)
    ])  #TODO delete this notification once stable
Beispiel #19
0
def event_create(request, oauth2_context):
    """Creates an event and returns its id.
    
    Request::
    
        POST /0.1/events/create
    
    Parameters:
        event_name (mandatory)
            Name of the event
        account_id (mandatory for INTERNAL scope)
            The event will be linked to this account. This parameter is 
            only used by Tickee.
        venue_id (optional)
            Identifier of the venue where the event will be held
            
    Returns::
    
        {
            "dates": [
                null
            ],
            "account": 1,
            "name": "The Event Name",
            "id": 11
        }
        
    """
    # Mandatory parameter check
    if not set(['event_name']).issubset(set(request.params)):
        raise HTTPBadRequest()
    # Mandatory parameter check for INTERNAL
    if oauth_scopes.INTERNAL in oauth2_context.scopes:
        if not 'account_id' in request.params:
            raise HTTPBadRequest()

    # Parameters
    try:
        event_name = request.params.get('event_name')
        venue_id = int(request.params.get('venue_id', 0))
        account_id = int(request.params.get('account_id', 0))
    except ValueError:
        raise HTTPBadRequest()

    # create event linked to account_id
    if oauth_scopes.INTERNAL in oauth2_context.scopes:
        result = send_task("tickee.events.entrypoints.event_create",
                           kwargs=dict(client_id=None,
                                       event_name=event_name,
                                       venue_id=venue_id,
                                       account_id=account_id))
    # create event linked to own account
    elif oauth_scopes.ACCOUNT_MGMT in oauth2_context.scopes:
        result = send_task("tickee.events.entrypoints.event_create",
                           kwargs=dict(client_id=oauth2_context.client_id,
                                       event_name=event_name,
                                       venue_id=venue_id,
                                       account_id=None))
    # NOTE(review): when the token has neither INTERNAL nor ACCOUNT_MGMT
    # scope, `result` is never bound and the next line raises NameError —
    # confirm whether an explicit authorization error is intended here.
    return result.get()
Beispiel #20
0
def callback(ch, method, properties, body):
    """AMQP callback: schedule a job-order sync for the query in *body*."""
    logging.info(" [x] scheduling %r" % (body, ))
    response = body
    parsed = json.loads(body)
    query = parsed["query"]
    # ETA far in the past, so the task is eligible to run immediately.
    eta = datetime(2012, 1, 1, 0, 0, 0)
    logging.info(' [x] sending task %s @ %s' % (query, eta))
    send_task("job_order_sync.job_order_sync", args=[query], eta=eta)
Beispiel #21
0
def send_join_request_notification(sender, instance, created, raw, using,
                                   **kwargs):
    """Tell the team captains about a freshly created pending join request."""
    if created and instance.status == 'W' and notification:
        task_args = [unicode(instance.team),
                     instance.profile.user.username,
                     instance.id,
                     instance.team.captain_ids]
        send_task("profiles.tasks.notify_member_join_request", task_args)
    return instance
Beispiel #22
0
 def _enqueue_uid_to_convert(self, uid, callback_url, video_link):
     """Enqueue the video-conversion task on the configured queue.

     Raises cyclone HTTP 503 when the queue service cannot be reached.
     """
     try:
         send_task('nsivideoconvert.tasks.VideoConversion',
                   args=(uid, callback_url, video_link, self.sam_settings),
                   queue=self._task_queue, routing_key=self._task_queue)
     except Exception:  # was a bare except; don't swallow SystemExit et al.
         log.msg("POST failed.")
         log.msg("Couldn't put the job in the queue.")
         raise cyclone.web.HTTPError(503, "Queue service unavailable.")
Beispiel #23
0
def get_rates_from_mysql(subcontractors_id, datetime_req):
    """Fallback function querying mysql database

    Looks up the client rate effective at datetime_req from
    subcontractors_client_rate, falling back to the subcontractors table
    when no (or an incomplete) rate row exists, then derives the hourly
    rate via compute_hourly_rate.
##~    >>> get_rates_from_mysql(3159, datetime(2012,11,7,0,0,0))
##~    Decimal('4.32')
    >>> get_rates_from_mysql(3000, datetime.now())
    Decimal('0.00')
    >>> get_rates_from_mysql(3159, datetime.now())
    Decimal('8.64')
    >>> get_rates_from_mysql(2714, datetime(2012,9,2,0,0,0))
    Decimal('4.11')
    >>> get_rates_from_mysql(3205, datetime(2012,11,21,0,0,0))
    Decimal('6.82')
    """

    # Rate row whose [start_date, end_date) interval covers datetime_req.
    sql = text("""
        SELECT rate AS client_price, work_status 
        FROM subcontractors_client_rate
        WHERE subcontractors_id = :subcontractors_id
        AND start_date <= :datetime_req
        AND (end_date > :datetime_req OR end_date IS NULL)
        """)
    conn = engine.connect()
    data = conn.execute(sql,
                        subcontractors_id=subcontractors_id,
                        datetime_req=datetime_req).fetchall()

    if len(data) == 1:  #retrieve from subcontractors table
        client_price, work_status = data[0]
        if work_status == None:  #old data, no associated work_status, get it from subcontractors table
            sql = text("""
                SELECT work_status
                FROM subcontractors
                WHERE id = :subcontractors_id
                """)
            data = conn.execute(
                sql, subcontractors_id=subcontractors_id).fetchone()
            work_status = data.work_status

    elif len(data) == 0:  #retrieve from subcontractors table
        sql = text("""
            SELECT client_price, work_status
            FROM subcontractors
            WHERE id = :subcontractors_id
            """)
        data = conn.execute(sql,
                            subcontractors_id=subcontractors_id).fetchone()
        client_price, work_status = data
    else:
        # More than one covering rate row is a data problem: tell the devs
        # and proceed with the first row.
        send_task('notify_devs.send', [
            'multiple records found for subcontractors_client_rate',
            'Please check subcontractors_id %s. Multiple records from subcontractors_client_rate found for date %s!'
            % (subcontractors_id, datetime_req)
        ])
        client_price, work_status = data[0]

    hourly_rate = compute_hourly_rate(client_price, work_status)
    conn.close()
    return hourly_rate
Beispiel #24
0
def process():
    """Kick off the leave-request-summary sync: directly when DEBUG,
    otherwise through a dedicated Celery app configured from
    sc_celeryconfig."""
    task_name = "sync_leave_request_summary.process"
    if settings.DEBUG:
        send_task(task_name, [])
        return
    celery = Celery()
    celery.config_from_object(sc_celeryconfig)
    celery.send_task(task_name, [])
Beispiel #25
0
    def queue(self):
        """Mark this index as queued and schedule the update task.

        A no-op unless the index is currently idle.
        """
        if self.status == 'idle':
            self.status = 'queued'
            self.last_queued = datetime.datetime.now()
            self.save()
            send_task('humfrey.elasticsearch.update_index',
                      kwargs={'index': self.slug})
Beispiel #26
0
    def try_fire(self, my_task, force=False):
        """Returns False when successfully fired, True otherwise

        Fires the configured Celery call for *my_task* if it has not been
        made yet, then reports whether the async result has completed.
        Completed results are copied into task attributes; an Exception
        result stores an ``error`` attribute and returns False.
        """

        # Deserialize async call if necessary
        if not hasattr(my_task, 'async_call') and \
                my_task._get_internal_attribute('task_id') is not None:
            task_id = my_task._get_internal_attribute('task_id')
            my_task.async_call = app_or_default().AsyncResult(task_id)
            my_task.deserialized = True
            LOG.debug("Reanimate AsyncCall %s" % task_id)

        # Make the call if not already done; pass args/kwargs only when
        # they were configured, evaluating them against my_task.
        if not hasattr(my_task, 'async_call'):
            if self.args:
                args = eval_args(self.args, my_task)
                if self.kwargs:
                    async_call = send_task(self.call,
                                           args=args,
                                           kwargs=eval_kwargs(
                                               self.kwargs, my_task))
                else:
                    async_call = send_task(self.call, args=args)
            else:
                if self.kwargs:
                    async_call = send_task(self.call,
                                           kwargs=eval_kwargs(
                                               self.kwargs, my_task))
                else:
                    async_call = send_task(self.call)
            my_task._set_internal_attribute(task_id=str(async_call))
            my_task.async_call = async_call
            LOG.debug("'%s' called: %s" % (self.call, my_task.async_call))

        # Get call status (and manually refresh if deserialized)
        if getattr(my_task, "deserialized", False):
            my_task.async_call.state  # must manually refresh if deserialized
        if my_task.async_call.ready():
            result = my_task.async_call.result
            if isinstance(result, Exception):
                LOG.warn("Celery call %s failed: %s" % (self.call, result))
                my_task.set_attribute(error=str(result))
                return False
            LOG.debug("Completed celery call %s with result=%s" %
                      (self.call, result))
            # Store the result either under result_key, or spread a dict
            # result over attributes, or under the generic 'result' key.
            if self.result_key:
                my_task.set_attribute(**{self.result_key: result})
            else:
                if isinstance(result, dict):
                    my_task.set_attribute(**result)
                else:
                    my_task.set_attribute(**{'result': result})
            return True
        else:
            LOG.debug("async_call.ready()=%s. TryFire for '%s' "
                      "returning False" %
                      (my_task.async_call.ready(), my_task.get_name()))
            return False
Beispiel #27
0
    def queue(self):
        """Transition idle -> queued and fire the index-update task."""
        if self.status != 'idle':
            # An update is already pending or running; nothing to do.
            return
        self.status = 'queued'
        self.last_queued = datetime.datetime.now()
        self.save()
        task_kwargs = {'index': self.slug}
        send_task('humfrey.elasticsearch.update_index', kwargs=task_kwargs)
Beispiel #28
0
    def try_fire(self, my_task, force=False):
        """Returns False when successfully fired, True otherwise

        Makes the configured Celery call for *my_task* (once), then checks
        whether its async result is ready and, if so, copies the result
        into task attributes. An Exception result sets ``error`` and
        returns False.
        """

        # Deserialize async call if necessary
        if not hasattr(my_task, 'async_call') and \
                my_task._get_internal_attribute('task_id') is not None:
            task_id = my_task._get_internal_attribute('task_id')
            my_task.async_call = app_or_default().AsyncResult(task_id)
            my_task.deserialized = True
            LOG.debug("Reanimate AsyncCall %s" % task_id)

        # Make the call if not already done; only pass args/kwargs that
        # were actually configured, evaluated against my_task.
        if not hasattr(my_task, 'async_call'):
            if self.args:
                args = eval_args(self.args, my_task)
                if self.kwargs:
                    async_call = send_task(self.call, args=args,
                            kwargs=eval_kwargs(self.kwargs, my_task))
                else:
                    async_call = send_task(self.call, args=args)
            else:
                if self.kwargs:
                    async_call = send_task(self.call,
                            kwargs=eval_kwargs(self.kwargs, my_task))
                else:
                    async_call = send_task(self.call)
            my_task._set_internal_attribute(task_id=str(async_call))
            my_task.async_call = async_call
            LOG.debug("'%s' called: %s" % (self.call, my_task.async_call))

        # Get call status (and manually refresh if deserialized)
        if getattr(my_task, "deserialized", False):
            my_task.async_call.state  # must manually refresh if deserialized
        if my_task.async_call.ready():
            result = my_task.async_call.result
            if isinstance(result, Exception):
                LOG.warn("Celery call %s failed: %s" % (self.call, result))
                my_task.set_attribute(error=str(result))
                return False
            LOG.debug("Completed celery call %s with result=%s" % (self.call,
                    result))
            # Store under result_key, or spread a dict result over
            # attributes, or under the generic 'result' attribute.
            if self.result_key:
                my_task.set_attribute(**{self.result_key: result})
            else:
                if isinstance(result, dict):
                    my_task.set_attribute(**result)
                else:
                    my_task.set_attribute(**{'result': result})
            return True
        else:
            LOG.debug("async_call.ready()=%s. TryFire for '%s' "
                    "returning False" % (my_task.async_call.ready(),
                            my_task.get_name()))
            return False
Beispiel #29
0
def send_remote_task(i):
    """Dispatch a demo task: a hello e-mail when i == 0, else a long task.

    Returns a JSON response carrying the Celery task id.
    """
    if i == 0:
        message = {'recipient': '*****@*****.**',
                   'subject': 'hello',
                   'html': '<html><body><h1>hello</h1></body></html>',
                   'text': 'hello'}
        task = send_task('worker.send_email', [json.dumps(message)])
    else:
        task = send_task('worker.long_task', [i])
    return jsonify({'task_id': task.id})
Beispiel #30
0
 def _enqueue_uid_to_granulate(self, video_uid, filename, callback_url, callback_verb, video_link):
     """Enqueue the video-granulation task on the configured queue.

     Raises cyclone HTTP 500 when the task cannot be enqueued.
     """
     try:
         send_task('nsivideogranulate.tasks.VideoGranulation',
                   args=(self._task_queue, video_uid, filename, callback_url,
                         self.sam_settings, video_link, callback_verb),
                   queue=self._task_queue, routing_key=self._task_queue)
     except Exception:  # was a bare except
         log.msg('POST failed.')
         log.msg('Could not enqueue the video to granulate.')
         # BUG FIX: "cyclobe" was a typo for "cyclone" (used everywhere
         # else in this file), which made the error path itself raise a
         # NameError instead of the intended HTTPError.
         raise cyclone.web.HTTPError(500, 'Can not enqueue the video to granulate.')
Beispiel #31
0
def add_event_task(event):
    """Queue *event* for processing; tolerate a missing Redis broker."""
    retry_policy = {'max_retries': -1,
                    'interval_start': 0,
                    'interval_step': 0.01,
                    'interval_max': 0.01}
    try:
        execute.send_task('tasks.tasks.process_event', args=[event],
                          expires=datetime.now() + timedelta(seconds=3),
                          retry_policy=retry_policy)
    except ConnectionError:
        # in case we don't have redis running
        logger.error('Redis does not seem to be running')
Beispiel #32
0
 def save(self, notify=True, *args, **kwargs):
     """Persist the match, stamping creation/publish dates, and notify
     both teams when the match is first created (unless notify=False)."""
     is_new = self.id is None
     if is_new and not self.creation_date:
         # First save: record when the match entered the system.
         self.creation_date = timezone.now()
     if self.published and not self.publish_date:
         self.publish_date = timezone.now()
     super(Match, self).save(*args, **kwargs)
     if "notification" in settings.INSTALLED_APPS and notification and is_new and notify:
         send_task("tournaments.tasks.notify_match_creation",
                   [unicode(self), self.home_team_id, self.away_team_id])
Beispiel #33
0
 def save(self, notify=True, *args, **kwargs):
     """Save the match; on first creation optionally fire a notification
     task naming the home and away teams."""
     created = self.id is None
     if created and not self.creation_date:  # stamp creation on first save
         self.creation_date = timezone.now()
     if self.published and not self.publish_date:
         self.publish_date = timezone.now()
     super(Match, self).save(*args, **kwargs)
     if "notification" in settings.INSTALLED_APPS and notification and created and notify:
         payload = [unicode(self), self.home_team_id, self.away_team_id]
         send_task("tournaments.tasks.notify_match_creation", payload)
Beispiel #34
0
 def _enqueue_uid_to_granulate(self, uid, filename, callback_url, callback_verb, doc_link, expire):
     """Enqueue the document-granulation task on the configured queue.

     Raises cyclone HTTP 503 when the queue service cannot be reached.
     """
     try:
         send_task('nsicloudooomanager.tasks.GranulateDoc',
                   args=(self._task_queue, uid, filename, callback_url,
                         callback_verb, doc_link, expire,
                         self.cloudooo_settings, self.sam_settings),
                   queue=self._task_queue,
                   routing_key=self._task_queue)
     except Exception:  # was a bare except; don't swallow SystemExit et al.
         log.msg('POST failed!')
         log.msg("Couldn't connect to the queue service.")  # typo fixed
         raise cyclone.web.HTTPError(503, 'Queue service unavailable')
Beispiel #35
0
 def _enqueue_uid_to_metadata_extraction(self, uid, document_type, callback_url, callback_verb, expire):
     """Enqueue the metadata-extraction task on the configured queue.

     Raises cyclone HTTP 503 when the queue service cannot be reached.
     """
     try:
         send_task('nsicloudooomanager.tasks.ExtractMetadata',
                   args=(self._task_queue, uid, document_type,
                         callback_url, callback_verb, expire,
                         self.cloudooo_settings, self.sam_settings),
                   queue=self._task_queue,
                   routing_key=self._task_queue)
     except Exception:  # was a bare except; don't swallow SystemExit et al.
         log.msg('POST failed!')
         log.msg("Couldn't connect to the queue service.")  # typo fixed
         raise cyclone.web.HTTPError(503, 'Queue service unavailable')
Beispiel #36
0
def db_edit(request, slug):
    "Form to set a new password for the database"
    form = DBEditForm(request.POST or None)
    if not form.is_valid():
        # First render, or validation errors: show the form again.
        return render_to_response("limeade_mysql/db_edit.html", {"form": form},
                                  context_instance=RequestContext(request))
    task_kwargs = {"user": request.user.username,
                   "name": slug,
                   "password": form.cleaned_data["password"]}
    send_task("mysql.edit_db", kwargs=task_kwargs, routing_key="limeade.mysql")
    return redirect("limeade_mysql_db_list")
Beispiel #37
0
def update_epg(modeladmin, request, queryset):
    """Admin action: queue an EPG update task for every selected playlist.

    Falls back to an explanatory admin message when celery is not installed.
    """
    try:
        from celery.execute import send_task
        for playlist in queryset.all():
            send_task('playlist.tasks.update_playlist', [playlist.id])
        count = queryset.count()
        message_bit = '1 item was' if count == 1 else '%s items were' % count
        modeladmin.message_user(request, '%s queued for EPG update.' % message_bit)
    except ImportError:
        modeladmin.message_user(request, 'EPG update could not be queued - please install celery on this platform.')
Beispiel #38
0
    def save(self, *args, **kwargs):
        """Persist the repo; unless called with ``async_task=False``, also
        queue the fetch task for this repo's URL afterwards."""
        # Pop our custom flag (defaulting to True) so it never reaches
        # Django's save(); equivalent to the try/except KeyError dance.
        async_task = kwargs.pop('async_task', True)

        results = super(Repo, self).save(*args, **kwargs)

        if async_task:
            send_task('gungnir.projects.tasks.fetch_repo_for_existing_entry',
                      args=(self.application.pk, self.url))

        return results
Beispiel #39
0
    def form_valid(self, form):
        """Set the user to the current user before saving; update the existing
        checkin when one already exists for today."""
        self.object, was_created = Checkin.objects.update_or_create(
            user=self.request.user,
            date=timezone.now().date(),
            defaults=form.cleaned_data,
        )

        # Only a brand-new checkin triggers the notification task.
        if was_created:
            send_task(settings.OPEN_TEAM_STATUS_CHECKIN_TASK, (self.object.id,))

        redirect_url = reverse('checkin-day', kwargs={'day': 'today'})
        return HttpResponseRedirect(redirect_url)
Beispiel #40
0
def StaffSalaryUpdate(subcon_id):
    """This will call a php cli"""
    # Build the human-readable command description once instead of three times.
    description = '%s %s' % (settings.PHP_UPDATE_STAFF_SALARY, subcon_id)
    logging.info('Executing ' + description)
    send_task('skype_messaging.notify_devs', ['Executing ' + description])
    call(["php", settings.PHP_UPDATE_STAFF_SALARY, "%s" % subcon_id])
    send_task('skype_messaging.notify_devs', ['Executed ' + description])
Beispiel #41
0
def notify_csro(csro_email, doc_user):
    """Email *csro_email* that a staff member was forced out of a session
    because the client's load balance was depleted.

    ``doc_user`` is a dict-like document; missing 'working_details' fields
    are blanked, and a missing 'leads_id' triggers a dev alert task.
    Sends the mail over SES via STARTTLS.
    """
    subject = 'FORCED TIMEOUT DUE TO DEPLETED LOAD BALANCE'
    if settings.DEBUG:
        subject = 'TEST %s' % subject

    # dict.has_key() is removed in Python 3; the ``in`` operator is the
    # equivalent in both Python 2 and 3.
    if 'working_details' in doc_user:
        working_details = doc_user['working_details']
        client_fname = working_details['client_fname']
        client_lname = working_details['client_lname']
        job_designation = working_details['job_designation']
    else:
        client_fname = ''
        client_lname = ''
        job_designation = ''

    if 'leads_id' not in doc_user:
        send_task("notify_devs.send", ("ALERT CANNOT FIND leads_id", "Cannot find leads_id from rssc document %s" % doc_user))
        leads_id = 'Missing'
    else:
        leads_id = doc_user['leads_id']

    message = """Staff %s %s, %s <userid:%s, email:%s, skype:%s>
for Client %s %s <client_id:%s>
was forced timeout while working due to clients depleted load balance.

Timestamp: %s
    """ % (doc_user['fname'], doc_user['lname'], job_designation, 
        doc_user['reference_id'], doc_user['_id'], doc_user['skype_id'],
        client_fname, client_lname, leads_id, get_ph_time())

    msg = MIMEText(message)
    msg['Subject'] = subject
    msg['From'] = '*****@*****.**'
    msg['To'] = csro_email

    s = smtplib.SMTP(host = settings.SMTP_CONFIG_SES['server'],
        port = settings.SMTP_CONFIG_SES['port'])
    s.starttls()
    s.login(settings.SMTP_CONFIG_SES['username'],
        settings.SMTP_CONFIG_SES['password']
        )

    recipients = [csro_email]
    if settings.DEBUG:
        recipients = []

    recipients.append('*****@*****.**')    #TODO remove devs

    s.sendmail('*****@*****.**', 
        recipients,
        msg.as_string())
    s.quit()
Beispiel #42
0
def receivers():
    """List notification receivers; on POST, register a new one and queue a
    fetion contact-add task for its phone number."""
    current_page('receivers')
    if request.method == 'POST':
        mail = request.form['mail']
        phone = request.form['phone']
        new_receiver = Receiver(mail, phone)
        db.session.add(new_receiver)
        db.session.commit()
        send_task("tasks.fetion.add_contact", [phone])

    all_receivers = Receiver.query.order_by('id desc').all()
    return render_template('frontend/receivers.html', receivers=all_receivers)
Beispiel #43
0
def changePublisherStatus(request):
    """AJAX endpoint: change a publisher's OA status via a celery task.

    Returns plain-text 'OK' on success; a plain-text 404 when the publisher
    does not exist, the status is invalid, or the status is unchanged.
    """
    allowedStatuses = [s[0] for s in OA_STATUS_CHOICES]
    try:
        pk = request.POST.get('pk')
        publisher = Publisher.objects.get(pk=pk)
        status = request.POST.get('status')
        if status in allowedStatuses and status != publisher.oa_status:
            send_task('change_publisher_oa_status', [], {'pk':pk,'status':status})
            return HttpResponse('OK', content_type='text/plain')
        else:
            raise ObjectDoesNotExist
    except ObjectDoesNotExist:
        # BUG FIX: the original concatenated an undefined name ``message``,
        # which raised NameError instead of returning this 404 response.
        return HttpResponseNotFound('NOK: invalid publisher or status',
                                    content_type='text/plain')
Beispiel #44
0
def hey_back(self):
    """Reply to every pending Hello entry by queuing that plugin's ``hey``
    task, then mark the entry as done."""
    logger = self.get_logger()

    logger.debug("Check if someone said hello")

    pending = models.Hello.objects.filter(done=False)
    for mingle in pending:
        # Time to mingle back.
        logger.debug("Trying to say hello back to %s" % mingle.entry)
        send_task('rockit.plugins.%s.tasks.hey' % mingle.entry)
        mingle.done = True
        mingle.save()
Beispiel #45
0
def callback(ch, method, properties, data):
    """AMQP consumer callback: convert a user's voice recording to mp3/ogg.

    Skips the conversion while a per-user redis throttle key still exists;
    after a successful conversion the key is (re)set with an expiry to rate
    limit subsequent conversions. Dev notifications are sent at each stage.
    """
    logging.info(" [x] %r" % (data, ))
    send_task('skype_messaging.notify_devs', ['ConvertVoice %s' % data])
    json_data = json.loads(data)
    userid = json_data.get('userid')
    scale = json_data.get('scale')
    # PEP 8: compare against None with ``is`` / ``is not``, not ``==``.
    if scale is None:
        scale = 1

    r = redis.StrictRedis(host='localhost', port=6379, db=0)
    if r.get('mp3_conversion:%s' % userid) is not None:  # key found ignore
        send_task('skype_messaging.notify_devs', ['Skipped ConvertVoice %s' % data])
        return

    filename = ConvertVoice.download(userid)
    if filename is None:
        send_task('skype_messaging.notify_devs', ['Failed ConvertVoice %s' % data])
        return
    ConvertVoice.media_to_mp3_ogg(userid, scale, filename)
    ConvertVoice.move_mp3_ogg(userid)
    ConvertVoice.update_voice_path(userid)
    send_task('skype_messaging.notify_devs', ['Finished ConvertVoice %s' % data])

    # Throttle subsequent conversions for this user.
    r.set('mp3_conversion:%s' % userid, data)
    r.expire('mp3_conversion:%s' % userid, SECONDS_LIMIT_SUBSEQUENT_CONVERSION)
Beispiel #46
0
def getPosting(gs_job_titles_details_id):
    """Return the posting id matching *gs_job_titles_details_id*, or None
    (after notifying the devs) when no posting exists.
    """
    db = MySQLdb.connect(**settings.DB_ARGS)
    try:
        c = db.cursor()
        # SECURITY FIX: the original interpolated the id directly into the
        # SQL string (injection risk); use a DB-API parameterized query.
        sql = "SELECT p.id FROM posting AS p WHERE p.job_order_id = %s"
        c.execute(sql, (gs_job_titles_details_id,))
        result = dictfetchall(c)
    finally:
        # The original leaked the connection; always close it.
        db.close()

    if result:
        return result[0]["id"]

    send_task('skype_messaging.notify_devs',
              ["Job Order failed to sync: {'sql':" + sql])
    return None
Beispiel #47
0
def callback(ch, method, properties, body):
    """AMQP callback: queue a ScheduleActivation task for *body* with a
    fixed ETA."""
    logging.info(" [x] scheduling %r" % (body, ))
    subcontractors_temp = SubcontractorsTemp()
    # The dynamic ETA lookup is currently disabled in favour of a fixed date:
    # eta = subcontractors_temp.get_execution_date(body)
    # if eta is None:
    #     logging.info("Failed to schedule %s." % body)
    #     return
    eta = datetime(2012, 1, 1, 0, 0, 0)
    logging.info(' [x] sending task %s @ %s' % (body, eta))
    send_task('ScheduleActivation.ScheduleActivation', args=[body], eta=eta)
Beispiel #48
0
 def save_model(self, request, obj, form, change):
     """Save the plugin from the admin, refreshing its posts via celery when
     available and configured, otherwise synchronously."""
     if conf.GOSCALE_UPDATE_FROM_ADMIN:
         use_celery = False
         if 'goscale.tasks' in conf.CELERY_IMPORTS:
             try:
                 from celery.execute import send_task
             except ImportError:
                 # celery not installed: fall back to the synchronous update.
                 pass
             else:
                 use_celery = True
         if use_celery:
             send_task('goscale.tasks.update_goscale_plugin_posts', [obj.id])
         else:
             obj.update()
     super(GoscaleCMSPluginBase, self).save_model(request, obj, form, change)
Beispiel #49
0
 def _enqueue_uid_to_granulate(self, video_uid, filename, callback_url,
                               callback_verb, video_link):
     """Queue a VideoGranulation task for the video identified by *video_uid*.

     Raises ``cyclone.web.HTTPError(500)`` when the task cannot be enqueued.
     """
     try:
         send_task('nsivideogranulate.tasks.VideoGranulation',
                   args=(self._task_queue, video_uid, filename,
                         callback_url, self.sam_settings, video_link,
                         callback_verb),
                   queue=self._task_queue,
                   routing_key=self._task_queue)
     except Exception:
         # Was a bare ``except:``; Exception keeps the intended handling
         # without swallowing SystemExit/KeyboardInterrupt.
         log.msg('POST failed.')
         log.msg('Could not enqueue the video to granulate.')
         # BUG FIX: the original raised via the misspelled module ``cyclobe``,
         # producing a NameError instead of the intended HTTP 500.
         raise cyclone.web.HTTPError(
             500, 'Can not enqueue the video to granulate.')
Beispiel #50
0
    def save(self, *args, **kwargs):
        """Persist the build; unless called with ``async_task=False``, also
        queue the image-build task for this build's config."""
        # Pop our custom flag (defaulting to True) so it never reaches
        # Django's save(); equivalent to the original try/except KeyError.
        async_task = kwargs.pop('async_task', True)

        results = super(Build, self).save(*args, **kwargs)

        if async_task:
            send_task('gungnir.builds.tasks.build_image', args=[self.config.pk])

        return results
Beispiel #51
0
def harvestData(lrUrl, config):
    """Harvest one page from a Learning Registry node, dispatch a validation
    task per record, and recurse (via .delay) while a resumption token exists.
    """
    resp = urllib2.urlopen(lrUrl)
    data = json.load(resp)
    for i in data['listrecords']:
        envelope = i['record']['resource_data']
        send_task(config['validationTask'], [envelope, config])
    # dict.has_key() is removed in Python 3; dict.get() covers the missing-key
    # case and works in both Python 2 and 3.
    token = data.get('resumption_token')
    if token is not None and token != "null":
        urlParts = urlparse.urlparse(lrUrl)
        newQuery = urllib.urlencode({"resumption_token": token})
        lrUrl = urlparse.urlunparse((urlParts[0], urlParts[1], urlParts[2],
                                     urlParts[3], newQuery, urlParts[5]))
        harvestData.delay(lrUrl, config)
Beispiel #52
0
def execute_celery_task(ch, method, properties, body):
    """
    expects a json data
    body = dict(
        task = 'activity_tracker_notes.email_notes',
        args = []
    )
    """
    logging.info(" [x] received %s" % (body, ))
    payload = json.loads(body)
    task_name = payload['task']
    task_args = payload['args']
    logging.info(" [x] task %s" % (task_name, ))
    logging.info(" [x] args %s" % (task_args, ))
    send_task(task_name, args=task_args)
Beispiel #53
0
def instance_add(request):
    """View for adding a new instance.

    Builds the instance form (base images fetched from the cloud worker,
    products and SSH keys restricted to the requesting user), picks the best
    node for the product's resource limits, persists the instance, and then
    queues the actual creation task for the cloud worker.

    :param request: the request object

    :returns: an edit form template, or a redirect to the instance list
    """
    form = InstanceForm(request.POST or None)
    # Synchronous round-trip (.get() blocks) to the cloud worker for the
    # currently available base images.
    form.fields["base_image"].choices = send_task("cloud.list_base_images", routing_key="limeade.cloud").get()
    # Products owned by the user or the user's parent profile, and which
    # actually have cloud limits attached.
    form.fields["product"].queryset = Product.objects.filter(
        Q(owner=request.user.get_profile().parent) | Q(owner=request.user), limitset_cloud__isnull=False
    )
    form.fields["sshkeys"].queryset = SSHKey.objects.filter(owner=request.user)

    if form.is_valid():
        limits = form.cleaned_data["product"].limitset_cloud.get()
        # Pick the node with enough free capacity for these limits.
        node = get_best_node(limits.cpu_cores, limits.memory, limits.storage)
        if not node:
            messages.add_message(request, messages.ERROR, "We are currently over capacity. Please try again soon.")
            return redirect("limeade_cloud_instance_list")

        i = Instance(hostname=form.cleaned_data["hostname"])
        i.node = node
        i.owner = request.user
        i.save()  # generate a pk for the instance, so we can use the m2m field
        i.sshkeys = form.cleaned_data["sshkeys"]
        i.generate_mac_addr()
        i.save()

        # Hand the actual provisioning off to the cloud worker.
        send_task(
            "cloud.create_instance",
            kwargs={
                "base_image": form.cleaned_data["base_image"],
                "cpu_cores": limits.cpu_cores,
                "memory": limits.memory,
                "storage": limits.storage,
                "domain": i.domain,
                "instance": i.pk,
                "mac_addr": i.mac_addr,
            },
            routing_key="limeade.cloud",
        )

        return redirect("limeade_cloud_instance_list")

    return render_to_response(
        "limeade_cloud/instance_add.html", {"form": form}, context_instance=RequestContext(request)
    )
Beispiel #54
0
    def _create_webstorer_task(self, resource):
        """Queue a ``webstorer.upload`` celery task for *resource* and record
        the resulting task id in CKAN's task_status table.
        """
        # Site user: gives the background worker API access without a real
        # user session; defer_commit avoids committing inside this request.
        user = get_action('get_site_user')({'model': model,
                                            'ignore_auth': True,
                                            'defer_commit': True}, {})
        # Connection/auth details serialized for the worker process.
        context = json.dumps({
            'site_url': self.site_url,
            'apikey': user.get('apikey'),
            'username': user.get('name'),
            'webstore_url': self.webstore_url
        })
        data = json.dumps(resource_dictize(resource, {'model': model}))
        webstorer_task = send_task("webstorer.upload", [context, data])

        # update the task_status table
        webstorer_task_status = {
            'entity_id': resource.id,
            'entity_type': u'resource',
            'task_type': u'webstorer',
            'key': u'celery_task_id',
            'value': webstorer_task.task_id,
            'last_updated': datetime.now().isoformat()
        }

        archiver_task_context = {
            'model': model,
            'session': model.Session,
            'user': user.get('name'),
            'defer_commit': True
        }

        get_action('task_status_update')(archiver_task_context, webstorer_task_status)
Beispiel #55
0
def hotspotsRange(start_time, stop_time, location, **kwargs):
    """Run over a range of timesteps at 5 minute intervals in between,
    queuing one hotspots task per timestep; returns the task ids."""
    start = datetime.strptime(start_time, '%Y%m%d.%H%M%S')
    stop = datetime.strptime(stop_time, '%Y%m%d.%H%M%S')
    kwargs.update({'task_id': hotspotsRange.request.id})
    subtasks = []
    for ts in date_range(start, stop):
        async_result = send_task("cybercomq.gis.hotspotpysal.hotspots",
                                 args=(ts, location), kwargs=kwargs,
                                 queue="gis", track_started=True)
        subtasks.append(async_result.task_id)
    return subtasks
Beispiel #56
0
def main():
    """Apply a named task to every tracked document of a given doc_class."""
    parser = argparse.ArgumentParser(
        description='do a task for all documents in a doc_class',
    )
    parser.add_argument('task', type=str, help='task name to apply')
    parser.add_argument('doc_class', type=str,
                        help='doc_class to apply function to')
    args = parser.parse_args()

    docs = kernel.db.tracked.find({'doc_class': args.doc_class}, timeout=False)
    # Parenthesized form prints identically under Python 2's print statement.
    print('%s docs in %s' % (docs.count(), args.doc_class))

    for doc in docs:
        send_task(args.task, (doc['_id'], ))
Beispiel #57
0
    def test_regular_task(self):
        """End-to-end smoke test of a regular task: direct call, delay(),
        apply_async() with kwargs / eta / expires / countdown, send_task by
        name, queue purging, and backend result marking.
        """
        T1 = self.createTask('c.unittest.t.t1')
        self.assertIsInstance(T1, BaseTask)
        self.assertTrue(T1.run())
        self.assertTrue(isinstance(T1, Callable), 'Task class is callable()')
        self.assertTrue(T1(), 'Task class runs run() when called')

        consumer = T1.get_consumer()
        # Raw message receive is not implemented on the task consumer.
        with self.assertRaises(NotImplementedError):
            consumer.receive('foo', 'foo')
        consumer.purge()
        self.assertIsNone(consumer.queues[0].get())

        # Without arguments.
        presult = T1.delay()
        self.assertNextTaskDataEqual(consumer, presult, T1.name)

        # With arguments.
        presult2 = T1.apply_async(kwargs=dict(name='George Costanza'))
        self.assertNextTaskDataEqual(
            consumer, presult2, T1.name, name='George Costanza',
        )

        # send_task
        sresult = send_task(T1.name, kwargs=dict(name='Elaine M. Benes'))
        self.assertNextTaskDataEqual(
            consumer, sresult, T1.name, name='Elaine M. Benes',
        )

        # With eta.
        presult2 = T1.apply_async(
            kwargs=dict(name='George Costanza'),
            eta=now() + timedelta(days=1),
            expires=now() + timedelta(days=2),
        )
        self.assertNextTaskDataEqual(
            consumer, presult2, T1.name,
            name='George Costanza', test_eta=True, test_expires=True,
        )

        # With countdown.
        presult2 = T1.apply_async(kwargs=dict(name='George Costanza'),
                                  countdown=10, expires=12)
        self.assertNextTaskDataEqual(
            consumer, presult2, T1.name,
            name='George Costanza', test_eta=True, test_expires=True,
        )

        # Discarding all tasks.
        consumer.purge()
        T1.apply_async()
        self.assertEqual(consumer.purge(), 1)
        self.assertIsNone(consumer.queues[0].get())

        # A result only becomes successful once the backend marks it done.
        self.assertFalse(presult.successful())
        T1.backend.mark_as_done(presult.id, result=None)
        self.assertTrue(presult.successful())

        publisher = T1.get_publisher()
        self.assertTrue(publisher.exchange)
Beispiel #58
0
    def run(self, task, task_args, task_kwargs, task_queue, user, tags):
        """Submit *task* to celery asynchronously and log the submission to
        the configured MongoDB collection.

        Returns a dict with the submitted ``task_id``.
        """
        # Configure celery for this process.  NOTE(review): the original
        # bound the return value to an unused local; the side-effecting call
        # is kept, the dead binding is dropped.
        Celery().config_from_object(celeryconfig)
        from celery.execute import send_task

        # Submit task
        task_obj = send_task(task,
                             args=task_args,
                             kwargs=task_kwargs,
                             queue=task_queue,
                             track_started=True)
        # Audit record of the submission.
        task_log = {
            'task_id': task_obj.task_id,
            'user': user,
            'task_name': task,
            'args': task_args,
            'kwargs': task_kwargs,
            'queue': task_queue,
            'timestamp': datetime.now(),
            'tags': tags
        }
        self.db[self.database][self.collection].insert(task_log)

        return {"task_id": task_obj.task_id}
Beispiel #59
0
 def _notify_plugins(self, models, instance, notification):
     """Send a *notification* task to every plugin registered for *instance*,
     waiting (up to 15s each) so notifications are delivered sequentially.
     """
     for item in models.filter(holder=instance):
         t = send_task("%s.%s" % (item.target.entry, notification), [item.identifier])
         # The original bound the result to an unused local ``v``; the
         # blocking wait is kept, the dead binding is dropped.
         t.wait(timeout=15)
Beispiel #60
0
    def test_regular_task(self):
        """End-to-end smoke test of a regular task: direct call, delay(),
        apply_async() with kwargs / eta / expires / countdown, send_task by
        name, queue purging, and backend result marking.
        """
        T1 = self.createTask('c.unittest.t.t1')
        self.assertIsInstance(T1, BaseTask)
        self.assertTrue(T1.run())
        self.assertTrue(isinstance(T1, Callable), 'Task class is callable()')
        self.assertTrue(T1(), 'Task class runs run() when called')

        consumer = T1.get_consumer()
        # Raw message receive is not implemented on the task consumer.
        with self.assertRaises(NotImplementedError):
            consumer.receive('foo', 'foo')
        consumer.purge()
        self.assertIsNone(consumer.queues[0].get())

        # Without arguments.
        presult = T1.delay()
        self.assertNextTaskDataEqual(consumer, presult, T1.name)

        # With arguments.
        presult2 = T1.apply_async(kwargs=dict(name='George Costanza'))
        self.assertNextTaskDataEqual(
            consumer, presult2, T1.name, name='George Costanza',
        )

        # send_task
        sresult = send_task(T1.name, kwargs=dict(name='Elaine M. Benes'))
        self.assertNextTaskDataEqual(
            consumer, sresult, T1.name, name='Elaine M. Benes',
        )

        # With eta.
        presult2 = T1.apply_async(
            kwargs=dict(name='George Costanza'),
            eta=now() + timedelta(days=1),
            expires=now() + timedelta(days=2),
        )
        self.assertNextTaskDataEqual(
            consumer, presult2, T1.name,
            name='George Costanza', test_eta=True, test_expires=True,
        )

        # With countdown.
        presult2 = T1.apply_async(kwargs=dict(name='George Costanza'),
                                  countdown=10, expires=12)
        self.assertNextTaskDataEqual(
            consumer, presult2, T1.name,
            name='George Costanza', test_eta=True, test_expires=True,
        )

        # Discarding all tasks.
        consumer.purge()
        T1.apply_async()
        self.assertEqual(consumer.purge(), 1)
        self.assertIsNone(consumer.queues[0].get())

        # A result only becomes successful once the backend marks it done.
        self.assertFalse(presult.successful())
        T1.backend.mark_as_done(presult.id, result=None)
        self.assertTrue(presult.successful())

        publisher = T1.get_publisher()
        self.assertTrue(publisher.exchange)