Code Example #1
File: request.py Project: bobquest33/pyvac
    def render(self):

        req_id = self.request.params.get('request_id')
        req = Request.by_id(self.session, req_id)
        if not req:
            return ''
        reason = self.request.params.get('reason')

        req.reason = reason
        RequestHistory.new(self.session,
                           req,
                           req.status,
                           'DENIED',
                           self.user,
                           reason=reason)
        req.update_status('DENIED')
        # save who performed this action
        req.last_action_user_id = self.user.id

        self.session.flush()

        # call celery task directly, do not wait for polling
        from celery.registry import tasks
        from celery.task import subtask
        req_task = tasks['worker_denied']
        data = {'req_id': req.id}
        subtask(req_task).apply_async(kwargs={'data': data}, countdown=5)

        log.info('scheduling task worker_denied for %s' % data)

        return req.status
Code Example #2
File: tasks.py Project: airstrike/athena
    def run(self, opts, callback=None):
        logging.debug(getattr(self, 'upload', 'no self.upload'))
        logging.debug(opts)

        try:
            from pannel.converters import get_converter
            from pannel.models import Upload
            self.upload = Upload.objects.get(pk=opts['upload_pk'])
            self.upload.result = 'PROCESSING'
            self.upload.save()
            self.upload_pk = opts['upload_pk']
            self.opts = opts
            converter = get_converter(opts.get('converter', None))

            c = converter(**opts)
            result = c.run()

            opts.update({'result': result.name})

        except:
            if getattr(self, 'upload', None) is not None:
                self.upload.result = 'TASK ERROR'
                self.upload.save()
            raise

        if callback:
            subtask(callback).delay(opts)

        return result
Code Example #3
File: worker.py Project: nukah/csound-util
def convert(id, name, path, quality, callback = None):
    db = DB()
    log = analyze.get_logger()


    optdict = {
                "FFMPEG" : FFMPEG,
                "INPUT_FILE" : path,
                "FORMAT" : "flv",
                "QUALITY" : quality,
                "ADDITIONAL_OPTS" : "-y",
                "FILEPATH" : '%s/%s' % (SAVE_PATH, name)
               }
    options = "{FFMPEG} -i {INPUT_FILE} -sn -f {FORMAT} {QUALITY} {ADDITIONAL_OPTS} {FILEPATH}".format(**optdict)

    log.info("Converting process for [%s] starting with params [%s]" % (name, optdict))
    db.collection.update({'_id' : id}, {'$set' : {'convert.host' : HOST}})
    process = Popen(options, shell = True, stderr = PIPE, close_fds = True)
    output = process.stderr.read()
    match = SUCCESSFUL_CONVERSION_PATTERN.search(output)
    if not match:
        db.collection.update({'_id' : id }, {'$set' : {'converted' : 'False', 'convert.failed' : 'True', 'convert.last_retry' : datetime.now()}, '$inc' : {'convert.retries' : 1}})
        log.error("Converting process failed, retrying.")
    else:
        db.collection.update({'_id' : id }, {'$set' : {'converted' : 'True'}, '$unset' : { 'convert.failed' : 1, 'convert.retries' : 1, 'convert.last_retry' : 1}})
        log.info("Converting process finished successfully.")

    if callback is not None:
        subtask(callback).delay(path, name)
Code Example #4
File: request.py Project: grigouze/pyvac
    def render(self):

        req_id = self.request.params.get('request_id')
        req = Request.by_id(self.session, req_id)
        if not req:
            return ''

        data = {'req_id': req.id}

        if self.user.is_admin:
            req.update_status('APPROVED_ADMIN')
            # save who performed this action
            req.last_action_user_id = self.user.id

            task_name = 'worker_approved'
            settings = self.request.registry.settings
            with open(settings['pyvac.celery.yaml']) as fdesc:
                Conf = yaml.load(fdesc, YAMLLoader)
            data['caldav.url'] = Conf.get('caldav').get('url')
        else:
            req.update_status('ACCEPTED_MANAGER')
            # save who performed this action
            req.last_action_user_id = self.user.id
            task_name = 'worker_accepted'

        self.session.flush()

        # call celery task directly, do not wait for polling
        from celery.registry import tasks
        from celery.task import subtask
        req_task = tasks[task_name]

        subtask(req_task).delay(data=data)

        return req.status
Code Example #5
def fetch_rsv_from_cal(room, curr_motion_time, occupied_pct, callback=None):
    
    rc = None
    for r in ReaperConfig.objects.all():
        rc = r
        break
    
    #initialize google calendar api
    gcal = GCalendar(rc.g_consumer_key, rc.g_consumer_secret, rc.g_admin_user_email, rc.g_developer_key)
    
    if not gcal:
        print("Google Calendar API not initialized")
        return None

    #Get reservation if found at current time.     
    #event = gcal.getCurrentEventFromCalendar(room['calendar_id'])
    (event, next_event) = gcal.getCurrentOrNextEventFromCalendar(room['calendar_id'])
    
    if event and event.has_key('id'):
        rsv_status=True
    else:
        rsv_status=False
        
    print ("fetch_rsv_from_cal --> room_id: %d curr_motion_time: %r rsv_status: %d" 
                %(room['room_id'], curr_motion_time, rsv_status))
    
    
    if callback:
        subtask(callback).delay(room, rsv_status, event, curr_motion_time, occupied_pct, next_event)
        
    return rsv_status
Code Example #6
File: tasks.py Project: TDMangukiya/celery
def unlock_graph(result, callback, interval=1, propagate=False, max_retries=None):
    if result.ready():
        second_level_res = result.get()
        if second_level_res.ready():
            subtask(callback).delay(list(joinall(second_level_res, propagate=propagate)))
    else:
        unlock_graph.retry(countdown=interval, max_retries=max_retries)
Code Example #7
File: worker.py Project: nukah/csound-util
def analyze(name, path, aspect, height, oid, callback = None):
    db = DB().collection
    id = bson.ObjectId(oid = oid)
    height = height
    path = path
    name = "%s.%s" % (name, 'flv')
    quality = ""

    log = analyze.get_logger()

    if aspect in formats.keys():
        if height < QTypes['LQ']:
            quality = formats[aspect]['LD']
        if height >= QTypes['LQ'] and height <= QTypes['SQ']:
            quality = formats[aspect]['SD']
        if height >= QTypes['SQ'] and height <= QTypes['HQ']:
            quality = formats[aspect]['HD']
        if height > QTypes['HQ']:
            quality = formats[aspect]['HD']
    else:
        asp_size = aspect.split(':')
        w = 640
        h = (w * int(asp_size[1])) / int(asp_size[0])  # split() yields strings; convert before the math
        quality = '-s %dx%d' % (w, h)
    log.info("Starting analyze task with params %s [%s]" % (path, quality))

    if callback is not None:
        subtask(callback).delay(id, name, path, quality)
Code Example #8
File: tasks.py Project: westurner/celery
def unlock_graph(result, callback, interval=1, propagate=False,
        max_retries=None):
    if result.ready():
        second_level_res = result.get()
        if second_level_res.ready():
            subtask(callback).delay(list(joinall(
                second_level_res, propagate=propagate)))
    else:
        unlock_graph.retry(countdown=interval, max_retries=max_retries)
Code Example #9
def get_config(key, callback=None):
    cfg = configure()
    config_dir = cfg.get("general").get("config_dir")
    filename = os.path.join(config_dir, key + ".yml")
    if not os.path.exists(filename): return dict()
    stream = file(filename, "r")
    cfg = yaml.load(stream)
    if callback is not None:
        subtask(callback).delay(cfg)
    return cfg
Code Example #10
def fetch_occ_from_director(room,callback=None):
    print ("fetch_occ_from_director --> room_id: %d" %(room['room_id']))    
    
    (occupied_pct, motion_instant) = get_current_pct_occupancy(room)
    
    curr_motion_time = datetime.datetime.utcfromtimestamp(motion_instant).replace(tzinfo=pytz.timezone('utc'))
      
    if callback:
        subtask(callback).delay(room, curr_motion_time, occupied_pct)
    
    return True
Code Example #11
def process_room(room):
    # Create room status entry if not exists and commit !!
    # Call director api
    print ("process_room --> room_id: %d" %(room['room_id']))
        
    #check if room status record exists. If not create initial record
    create_room_status(room)
    
    result = fetch_occ_from_director.delay(room, callback=subtask(fetch_rsv_from_cal,
                                                                     callback=subtask(reap)))
    
    return result
Code Example #12
    def render(self):

        req_id = self.request.params.get('request_id')
        req = Request.by_id(self.session, req_id)
        if not req:
            return ''

        data = {'req_id': req.id}

        only_manager = False
        # we should handle the case where the admin is also a user manager
        if (self.user.ldap_user and (req.user.manager_dn == self.user.dn)
                and (req.status == 'PENDING')):
            only_manager = True

        if self.user.is_admin and not only_manager:
            # create history entry
            RequestHistory.new(self.session, req,
                               req.status, 'APPROVED_ADMIN',
                               self.user)
            req.update_status('APPROVED_ADMIN')
            # save who performed this action
            req.last_action_user_id = self.user.id

            task_name = 'worker_approved'
            settings = self.request.registry.settings
            with open(settings['pyvac.celery.yaml']) as fdesc:
                Conf = yaml.load(fdesc, YAMLLoader)
            data['caldav.url'] = Conf.get('caldav').get('url')
        else:
            # create history entry
            RequestHistory.new(self.session, req,
                               req.status, 'ACCEPTED_MANAGER',
                               self.user)
            req.update_status('ACCEPTED_MANAGER')
            # save who performed this action
            req.last_action_user_id = self.user.id

            task_name = 'worker_accepted'

        self.session.flush()

        # call celery task directly, do not wait for polling
        from celery.registry import tasks
        from celery.task import subtask
        req_task = tasks[task_name]

        subtask(req_task).apply_async(kwargs={'data': data}, countdown=5)

        log.info('scheduling task %s for req_id: %d' % (task_name,
                                                        data['req_id']))
        return req.status
Code Example #13
File: pipeline.py Project: domoritz/validitychecker
    def setUp(self):
        qobj = Query(query ='ice cream')
        qobj.save()

        number = 10
        # fetch_page_from_url -> parse_page -> store_in_db
        self.result = make_scholar_urls.delay(number, qobj, \
            callback=subtask(fetch_page_from_url, \
            callback=subtask(parse_scholar_page, \
            callback=subtask(store_in_db, credible=False))))
        self.result.get() # block

        self.articles = qobj.articles.all()
Code Example #14
File: pipeline.py Project: domoritz/validitychecker
    def setUp(self):
        qobj = Query(query='ice cream')
        qobj.save()

        number = 10
        # fetch_page_from_url -> parse_page -> store_in_db
        self.result = make_scholar_urls.delay(number, qobj, \
            callback=subtask(fetch_page_from_url, \
            callback=subtask(parse_scholar_page, \
            callback=subtask(store_in_db, credible=False))))
        self.result.get()  # block

        self.articles = qobj.articles.all()
Code Example #15
File: credentials.py Project: joaquinmorenoa/pyvac
    def render(self):

        if 'submit' in self.request.params:
            email = self.request.params.get('email', '')
            user = User.by_email(self.session, email)
            if user:
                passhash = uuid.uuid4().hex
                date_end = datetime.now() + relativedelta(seconds=86400)
                # create hash entry in database with a TTL of 1 day
                entry = PasswordRecovery(user_id=user.id,
                                         hash=passhash,
                                         date_end=date_end)
                self.session.add(entry)
                self.session.flush()

                # call celery send mail task directly
                from celery.registry import tasks
                from celery.task import subtask
                req_task = tasks['worker_mail']

                settings = self.request.registry.settings
                sender = 'pyvac@localhost'
                if 'pyvac.password.sender.mail' in settings:
                    sender = settings['pyvac.password.sender.mail']

                data = {
                    'sender':
                    sender,
                    'target':
                    user.email,
                    'subject':
                    'Password Recovery',
                    'content':
                    """Hello,

we send you this mail because you requested a password reset, to proceed please click the link below:
%s

Reminder, your login is: %s

""" % (route_url('change_password', self.request,
                 passhash=passhash), user.login)
                }

                subtask(req_task).delay(data=data)

                msg = 'Mail sent to %s for password recovery.' % user.email
                self.request.session.flash('info;%s' % msg)
                return HTTPFound(location=route_url('login', self.request))

        return {}
Code Example #16
File: tasks.py Project: rahulranjan07/docker.celery
def waste_time(n=12, callback=None):
    '''Emulate a long-running task'''

    for i in range(0, n):
        log1.info('Wasting some time (%d/%d)' % (i, n))
        time.sleep(5)

    if callback:
        log1.info('Finished task: About to invoke %r' % (callback))
        subtask(callback).delay()
    else:
        log1.info('Finished task')

    return {'status': 'wasted'}
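A minimal caller sketch for the task above (not from the project; notify_done is an assumed registered task in the same app): the callback is passed as a subtask signature so it stays serializable and can later be invoked with .delay() inside waste_time.

# hypothetical caller; notify_done is an assumed registered task
from celery.task import subtask

waste_time.delay(n=3, callback=subtask(notify_done))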
Code Example #17
def build_image(image_id, callback=None):
    logger.info("Build image %s start", image_id)
    image = OSImage.objects.get(id=image_id)
    image_yaml = ImageData({
        'base_image_url': image.base_image_url,
        'base_image_name': image.base_image_name,
        'new_image_name': image.name,
        'script_url': image.provisioning_script_url,
    }).as_yaml()
    img_msg = "Started image build %s" % image + '\n\n' + image_yaml
    send_event(str(image), img_msg, tags=['buildimage'])

    t0 = time.time()
    with tmpdir():
        try:
            with open('image.yaml', 'wb') as f:
                f.write(image_yaml)

            with remote_settings('localhost'):
                remote.build_image('image.yaml')

            # We should now have <image_name>.tar.gz and <image_name>.log
            # locally.
            img_file = image.name + '.tar.gz'
            img_storage_path = 'images/' + img_file
            with open(img_file, 'rb') as localfile:
                image.file.save(img_storage_path, File(localfile))

            image.active = True
        finally:
            logfile = image.name + '.log'
            try:
                # grab and store the compile log.
                with open(logfile, 'rb') as f:
                    logname = 'images/' + logfile
                    image.build_log.save(logname, File(f))
            except Exception:
                logger.info('Could not retrieve ' + logfile)
                raise
            finally:
                image.save()

    elapsed_time = time.time() - t0
    send_event(str(image),
               "Completed image %s in %d seconds" % (image, elapsed_time),
               tags=['buildimage', 'success'])

    # start callback if there is one.
    if callback is not None:
        subtask(callback).delay()
Code Example #18
File: tasks.py Project: drmalex07/docker.celery
def waste_time(n=12, callback=None): 
    '''Emulate a long-running task'''

    for i in range(0, n):
        log1.info('Wasting some time (%d/%d)' % (i, n))
        time.sleep(5)
    
    if callback:
        log1.info('Finished task: About to invoke %r' % (callback))
        subtask(callback).delay()
    else:
        log1.info('Finished task')
    
    return {'status': 'wasted'}
Code Example #19
File: request.py Project: grigouze/pyvac
    def render(self):
        try:
            form_date_from = self.request.params.get('date_from')
            if ' - ' not in form_date_from:
                msg = 'Invalid format for period.'
                self.request.session.flash('error;%s' % msg)
                return HTTPFound(location=route_url('home', self.request))

            dates = self.request.params.get('date_from').split(' - ')
            date_from = datetime.strptime(dates[0], '%d/%m/%Y')
            date_to = datetime.strptime(dates[1], '%d/%m/%Y')
            days = int(self.request.params.get('days'))

            vac_type = VacationType.by_name(self.session,
                                            self.request.params.get('type'))

            if days <= 0:
                msg = 'Invalid value for days.'
                self.request.session.flash('error;%s' % msg)
                return HTTPFound(location=route_url('home', self.request))

            request = Request(date_from=date_from,
                              date_to=date_to,
                              days=days,
                              vacation_type=vac_type,
                              status=u'PENDING',
                              user=self.user,
                              notified=False,
                              )
            self.session.add(request)
            self.session.flush()

            if request:
                msg = 'Request sent to your manager.'
                self.request.session.flash('info;%s' % msg)
                # call celery task directly, do not wait for polling
                from celery.registry import tasks
                from celery.task import subtask
                req_task = tasks['worker_pending']
                data = {'req_id': request.id}
                subtask(req_task).delay(data=data)

        except Exception as exc:
            log.error(exc)
            msg = ('An error has occured while processing this request: %r'
                   % exc)
            self.request.session.flash('error;%s' % msg)

        return HTTPFound(location=route_url('home', self.request))
Code Example #20
File: pipeline.py Project: domoritz/validitychecker
    def setUpClass(cls):
        query='ice shield'
        qobj, _ = Query.objects.get_or_create(query__iexact=query, defaults={'query':query})

        number = 10
        result = prepare_client.delay(number, qobj, \
            callback=subtask(search_soap, \
            callback=subtask(extract_data, \
            callback=subtask(store_in_db, credible=True))))

        cls.result = []
        while isinstance(result, EagerResult) or isinstance(result, AsyncResult):
            result = result.get()
            cls.result.append(result)
        cls.qobj = qobj
Code Example #21
File: owrb.py Project: ok-water-survey/owsq
def owrb_well_logs_save(database=config.owrb_database, collection=config.owrb_welllog_collection):
    #dcommons = datacommons.toolkit(username,password)
    db = Connection(config.mongo_host)
    db[database][collection].remove()
    #set geometries
    polydata = []
    for itm in db.ows.watersheds.find():
        polydata.append(itm)
    aquifer_poly = []
    for itm in db.ows.aquifers.find():
        aquifer_poly.append(itm)
    #load owrb well logs
    res = urllib2.urlopen(config.well_logs_url)
    data = json.loads(res.read())
    stask = []
    taskname_tmpl = 'owsq.data.owrb.owrb_well_logs_portal'
    for site in data["features"]:
        row_data = {}
        row_data = site["properties"]
        row_data['geometry'] = site['geometry']
        rowid = db[database][collection].save(row_data)
        stask.append(subtask(taskname_tmpl, args=(rowid,)))
    print 'Done with inserts, starting group jobs'
    job = group(stask)
    result = job.apply_async()
    aggregate_results = result.join()
    return "Success- All Well logs stored locally in Mongo(%s, %s) Total = %d" % (
    database, collection, sum(aggregate_results))
Code Example #22
File: scrape.py Project: domoritz/validitychecker
def get_wok_page(qobj, number, callback=None):
    logger = get_wok_page.get_logger()
    # get session id from db
    sobj, created = KeyValue.objects.get_or_create(key='SID_web')

    # lazy invalid function, invalid if older than 20 minutes
    valid = lambda: (datetime.now() - sobj.created_at).seconds/60 < 20

    if not created and valid():
        # get latest session id, avoid problems when no id is defined
        sessionid = sobj.value

        # initialize fetcher with SID!
        fetcher = IsiFetcher(sid=sessionid)
        logger.warning("SID for web from db: %s" % sessionid)
    else:
        # without SID/ new sid
        fetcher = IsiFetcher()

        # create new sid object
        sobj.value = fetcher.SID
        sobj.save()

        logger.warning("New SID from web. Got SID: %s" % fetcher.SID)

    query = urllib.unquote_plus(qobj.query)
    page = fetcher.fetch(query, number)

    if callback:
        return subtask(callback).delay(page, qobj)
    else:
        return page
Code Example #23
def prepare_client(number, qobj=None, callback=None):

    logger = prepare_client.get_logger()
    logger.info("Preaparing client")

    # get session id from db
    # not getting a new sid for each query avoids throttling
    sobj, created = KeyValue.objects.get_or_create(key='SID')

    # lazy invalid function, invalid if older than 45 minutes
    valid = lambda: (datetime.now() - sobj.created_at).seconds / 60 < 45

    soap = None
    if not created and valid():
        # get latest session id, avoid problems when no id is defined
        sessionid = sobj.value

        # initialize client with SID!
        soap = WokmwsSoapClient(sessionid)
        logger.warning("SID from db: %s" % sessionid)
    else:
        # without SID/ new sid
        soap = WokmwsSoapClient()

        # create new sid object
        sobj.value = soap.SID
        sobj.save()

        logger.warning("New authentication. Got SID: %s" % soap.SID)

    if callback:
        #return callback(soap, qobj, number)
        return subtask(callback).delay(soap, qobj, number)
    else:
        return soap
Code Example #24
    def run(self, *args, **kwargs):
        self.log = log
        # init database connection
        session = DBSession()

        # init conf
        conf = ConfCache()
        remconf = conf.get('reminder', {}).get('trial_thresholds', {})
        self.countries = remconf.get('countries')
        self.trial_thresholds = remconf.get('values')
        self.subject = conf.get('reminder', {}).get('subject', 'Reminder')

        self.log.info('reminder conf: %s / %s' %
                      (self.countries, self.trial_thresholds))

        if not self.countries or not self.trial_thresholds:
            self.log.error('configuration is missing for trial reminder.')
            return False

        datas = [self.get_data(session, country) for country in self.countries]

        # flatten the list
        datas = [item for sublist in datas for item in sublist]
        self.log.info('number of reminders to send: %d' % len(datas))

        for data in datas:
            async_result = subtask(WorkerTrialReminder).delay(data=data)
            self.log.info('task reminder scheduled %r' % async_result)

        return True
Code Example #25
File: fetch.py Project: domoritz/validitychecker
def extract_data(qobj, result, callback=None):
    logger = extract_data.get_logger()

    if not hasattr(result, "records"):
        logger.warning("Nothing found")
        return
    else:
        logger.info("Found: %s" % result.recordsFound)

    records = []
    for wos_record in result.records:

        record = {}
        record["title"] = wos_record.title[0][1][0]
        # record['url'] =
        # record['snippet'] =
        # record['source'] =
        record["authors"] = wos_record.authors[0][1]
        record["publish_date"] = date(
            int([x for x in wos_record.source if x[0] == "Published.BiblioYear"][0][1][0]), 1, 1
        )

        # convert name from Doe, J to J Doe
        record["authors"] = map(
            lambda author: " ".join(reversed(map(unicode.strip, author.split(",")))), record["authors"]
        )

        records.append(record)

    if callback:
        return subtask(callback).delay(qobj=qobj, records=records)
    else:
        return records
Code Example #26
def extract_data(qobj, result, callback=None):
    logger = extract_data.get_logger()

    if not hasattr(result, 'records'):
        logger.warning("Nothing found")
        return
    else:
        logger.info("Found: %s" % result.recordsFound)

    records = []
    for wos_record in result.records:

        record = {}
        record['title'] = wos_record.title[0][1][0]
        #record['url'] =
        #record['snippet'] =
        #record['source'] =
        record['authors'] = wos_record.authors[0][1]
        record['publish_date'] = date(
            int([
                x for x in wos_record.source if x[0] == 'Published.BiblioYear'
            ][0][1][0]), 1, 1)

        # convert name from Doe, J to J Doe
        record['authors'] = map(
            lambda author: ' '.join(
                reversed(map(unicode.strip, author.split(',')))),
            record['authors'])

        records.append(record)

    if callback:
        return subtask(callback).delay(qobj=qobj, records=records)
    else:
        return records
Code Example #27
File: test_cache.py Project: westurner/celery
    def test_on_chord_part_return(self, setresult):
        tb = CacheBackend(backend="memory://")

        deps = Mock()
        deps.total = 2
        setresult.restore.return_value = deps
        task = Mock()
        task.name = "foobarbaz"
        try:
            current_app.tasks["foobarbaz"] = task
            task.request.chord = subtask(task)
            task.request.taskset = "setid"

            tb.on_chord_apply(task.request.taskset, [])

            self.assertFalse(deps.join.called)
            tb.on_chord_part_return(task)
            self.assertFalse(deps.join.called)

            tb.on_chord_part_return(task)
            deps.join.assert_called_with(propagate=False)
            deps.delete.assert_called_with()

        finally:
            current_app.tasks.pop("foobarbaz")
Code Example #28
File: fetch.py Project: domoritz/validitychecker
def prepare_client(number, qobj=None, callback=None):

    logger = prepare_client.get_logger()
    logger.info("Preaparing client")

    # get session id from db
    # not getting a new sid for each query avoids throttling
    sobj, created = KeyValue.objects.get_or_create(key="SID")

    # lazy invalid function, invalid if older than 45 minutes
    valid = lambda: (datetime.now() - sobj.created_at).seconds / 60 < 45

    soap = None
    if not created and valid():
        # get latest session id, avoid problems when no id is defined
        sessionid = sobj.value

        # initialize client with SID!
        soap = WokmwsSoapClient(sessionid)
        logger.warning("SID from db: %s" % sessionid)
    else:
        # without SID/ new sid
        soap = WokmwsSoapClient()

        # create new sid object
        sobj.value = soap.SID
        sobj.save()

        logger.warning("New authentication. Got SID: %s" % soap.SID)

    if callback:
        # return callback(soap, qobj, number)
        return subtask(callback).delay(soap, qobj, number)
    else:
        return soap
Code Example #29
    def test_on_chord_part_return(self, setresult):
        from celery.registry import tasks
        from celery.task import subtask
        b = self.MockBackend()
        deps = Mock()
        deps.total = 10
        setresult.restore.return_value = deps
        b.client.incr.return_value = 1
        task = Mock()
        task.name = "foobarbaz"
        try:
            tasks["foobarbaz"] = task
            task.request.chord = subtask(task)

            b.on_chord_part_return(task)
            self.assertTrue(b.client.incr.call_count)

            b.client.incr.return_value = deps.total
            b.on_chord_part_return(task)
            deps.join.assert_called_with(propagate=False)
            deps.delete.assert_called_with()

            self.assertTrue(b.client.expire.call_count)
        finally:
            tasks.pop("foobarbaz")
Code Example #31
File: scrape.py Project: domoritz/validitychecker
def parse_wok_page(page, qobj, callback=None):
    logger = parse_wok_page.get_logger()

    parser = etree.HTMLParser()
    tree = etree.parse(StringIO(page), parser)

    elements = tree.xpath('//td[@class="summary_data"]')

    # to be returned
    records = []
    for element in elements:
        record = {}
        record['title'] = perform(element.xpath('a/value//text()'), a_join, unicode)
        record['source'] = perform(element.xpath('span[contains(text(),"Source")]/following-sibling::text()')[0], unicode, unicode.strip)
        record['authors'] = perform(element.xpath('span[contains(text(),"Author")]/following-sibling::text()')[0], unicode, a_split_semicolon, m_trim)
        record['publish_date'] = perform(element.xpath('span[contains(text(),"Published")]/following::text()')[1], lambda x: a_find(x, r'(\d{4})'),a_int, a_date)
        record['times_cited'] = perform(element.xpath('span[contains(text(),"Times Cited")]/following::text()')[1], a_trim, lambda s: s.replace(',',''), a_int)


        # remove et al
        record['authors'] = filter(lambda author: not author.startswith('et al'), record['authors'])

        # convert name from Doe, J to J Doe
        record['authors'] = map(lambda author: ' '.join(reversed(map(unicode.strip, author.split(' ')))), record['authors'])

        records.append(record)

    logger.warning("Got %d results for the query '%s' from isi/wok" % (len(records), qobj.query))

    if callback:
        return subtask(callback).delay(records=records, qobj=qobj)
    else:
        return records
Code Example #32
File: scrape.py Project: domoritz/validitychecker
def parse_scholar_page(url, page, qobj, callback=None):
    parser = etree.HTMLParser()
    tree = etree.parse(StringIO(page), parser)

    elements = tree.xpath("//body/div[@class='gs_r']")

    # to be returned
    records = []

    for element in elements:
        record = {}
        record['title'] = perform(element.xpath('h3[@class="gs_rt"]/a//text()'), a_join, unicode)
        record['url'] = perform(element.xpath('h3[@class="gs_rt"]/a/@href'), a_join, unicode)
        record['snippet'] = perform(element.xpath('div[@class="gs_rs"]//text()'), a_join, unicode)
        record['source'] = perform(element.xpath('div[@class="gs_a"]//text()'), a_join, lambda x: a_find(x, r'-\s+(.+)[,|-]\s+\d{4}'),  unicode)
        record['authors'] = perform(element.xpath('div[@class="gs_a"]//text()'), a_join, lambda x: a_find(x, r'\A(.+?)\s+-\s+'), unicode, a_split_komma, m_trim)
        record['publish_date'] = perform(element.xpath('div[@class="gs_a"]//text()'), a_join, lambda x: a_find(x, r'\s+(\d{4})\s+\-'),a_int, a_date)

        records.append(record)

    logger = parse_wok_page.get_logger()
    logger.warning("Got %d results for the query '%s' from scholar" % (len(records), qobj.query))

    if callback:
        return subtask(callback).delay(records, qobj)
    else:
        return url, records
Code Example #33
def get_wok_page(qobj, number, callback=None):
    logger = get_wok_page.get_logger()
    # get session id from db
    sobj, created = KeyValue.objects.get_or_create(key='SID_web')

    # lazy invalid function, invalid if older than 20 minutes
    valid = lambda: (datetime.now() - sobj.created_at).seconds / 60 < 20

    if not created and valid():
        # get latest session id, avoid problems when no id is defined
        sessionid = sobj.value

        # initialize fetcher with SID!
        fetcher = IsiFetcher(sid=sessionid)
        logger.warning("SID for web from db: %s" % sessionid)
    else:
        # without SID/ new sid
        fetcher = IsiFetcher()

        # create new sid object
        sobj.value = fetcher.SID
        sobj.save()

        logger.warning("New SID from web. Got SID: %s" % fetcher.SID)

    query = urllib.unquote_plus(qobj.query)
    page = fetcher.fetch(query, number)

    if callback:
        return subtask(callback).delay(page, qobj)
    else:
        return page
Code Example #34
File: test_redis.py Project: mozilla/firefox-flicks
    def test_on_chord_part_return(self, setresult):
        b = self.MockBackend()
        deps = Mock()
        deps.__len__ = Mock()
        deps.__len__.return_value = 10
        setresult.restore.return_value = deps
        b.client.incr.return_value = 1
        task = Mock()
        task.name = 'foobarbaz'
        try:
            current_app.tasks['foobarbaz'] = task
            task.request.chord = subtask(task)
            task.request.group = 'group_id'

            b.on_chord_part_return(task)
            self.assertTrue(b.client.incr.call_count)

            b.client.incr.return_value = len(deps)
            b.on_chord_part_return(task)
            deps.join.assert_called_with(propagate=False)
            deps.delete.assert_called_with()

            self.assertTrue(b.client.expire.call_count)
        finally:
            current_app.tasks.pop('foobarbaz')
Code Example #35
File: test_cache.py Project: Birdbird/celery
    def test_on_chord_part_return(self, setresult):
        tb = CacheBackend(backend='memory://', app=self.app)

        deps = Mock()
        deps.__len__ = Mock()
        deps.__len__.return_value = 2
        setresult.restore.return_value = deps
        task = Mock()
        task.name = 'foobarbaz'
        try:
            self.app.tasks['foobarbaz'] = task
            task.request.chord = subtask(task)

            gid, res = uuid(), [AsyncResult(uuid()) for _ in range(3)]
            task.request.group = gid
            tb.on_chord_apply(gid, {}, result=res)

            self.assertFalse(deps.join_native.called)
            tb.on_chord_part_return(task)
            self.assertFalse(deps.join_native.called)

            tb.on_chord_part_return(task)
            deps.join_native.assert_called_with(propagate=True)
            deps.delete.assert_called_with()

        finally:
            self.app.tasks.pop('foobarbaz')
Code Example #36
    def test_on_chord_part_return(self, setresult):
        tb = CacheBackend(backend='memory://')

        deps = Mock()
        deps.__len__ = Mock()
        deps.__len__.return_value = 2
        setresult.restore.return_value = deps
        task = Mock()
        task.name = 'foobarbaz'
        try:
            current_app.tasks['foobarbaz'] = task
            task.request.chord = subtask(task)

            gid, res = uuid(), [AsyncResult(uuid()) for _ in range(3)]
            task.request.group = gid
            tb.on_chord_apply(gid, {}, result=res)

            self.assertFalse(deps.join_native.called)
            tb.on_chord_part_return(task)
            self.assertFalse(deps.join_native.called)

            tb.on_chord_part_return(task)
            deps.join_native.assert_called_with(propagate=True)
            deps.delete.assert_called_with()

        finally:
            current_app.tasks.pop('foobarbaz')
Code Example #37
File: worker.py Project: lansolo99/pyvac
    def process(self, data):
        """ accepted by manager
        auto flag as accepted by HR
        """
        req = Request.by_id(self.session, data['req_id'])
        # after new field was added, it may not be set yet
        if not req.date_updated:
            return

        delta = datetime.now() - req.date_updated
        # after Request.date_updated + 3 days, auto accept it by HR
        if delta.days >= 3:
            # auto accept it as HR
            self.log.info('3 days passed, auto accept it by HR')

            # create history entry
            msg = 'Automatically accepted by HR after 3 days passed'
            # use error_message field, as it should not be used here
            # if it fails in ERROR it should be overwritten anyway
            # as the status will be changed from APPROVED_ADMIN to ERROR
            RequestHistory.new(self.session, req,
                               req.status, 'APPROVED_ADMIN',
                               user=None, error_message=msg)
            # update request status after sending email
            req.update_status('APPROVED_ADMIN')
            self.session.flush()
            transaction.commit()

            data['autoaccept'] = True
            async_result = subtask(WorkerApproved).delay(data=data)
            self.log.info('task scheduled %r' % async_result)
Code Example #38
File: test_redis.py Project: DotNetWebs/celery
    def test_on_chord_part_return(self, setresult):
        b = self.MockBackend()
        deps = Mock()
        deps.__len__ = Mock()
        deps.__len__.return_value = 10
        setresult.restore.return_value = deps
        b.client.incr.return_value = 1
        task = Mock()
        task.name = 'foobarbaz'
        try:
            current_app.tasks['foobarbaz'] = task
            task.request.chord = subtask(task)
            task.request.group = 'group_id'

            b.on_chord_part_return(task)
            self.assertTrue(b.client.incr.call_count)

            b.client.incr.return_value = len(deps)
            b.on_chord_part_return(task)
            deps.join_native.assert_called_with(propagate=True)
            deps.delete.assert_called_with()

            self.assertTrue(b.client.expire.call_count)
        finally:
            current_app.tasks.pop('foobarbaz')
Code Example #39
File: test_cache.py Project: EnTeQuAk/celery
    def test_on_chord_part_return(self, setresult):
        tb = CacheBackend(backend='memory://')

        deps = Mock()
        deps.__len__ = Mock()
        deps.__len__.return_value = 2
        setresult.restore.return_value = deps
        task = Mock()
        task.name = 'foobarbaz'
        try:
            current_app.tasks['foobarbaz'] = task
            task.request.chord = subtask(task)
            task.request.group = 'group_id'

            tb.on_chord_apply(task.request.group, [])

            self.assertFalse(deps.join.called)
            tb.on_chord_part_return(task)
            self.assertFalse(deps.join.called)

            tb.on_chord_part_return(task)
            deps.join.assert_called_with(propagate=False)
            deps.delete.assert_called_with()

        finally:
            current_app.tasks.pop('foobarbaz')
Code Example #40
File: poller.py Project: lansolo99/pyvac
    def run(self, *args, **kwargs):
        self.log = log
        # init database connection
        session = DBSession()

        req_accepted_notified = Request.by_status(session,
                                                  'ACCEPTED_MANAGER',
                                                  notified=True)
        self.log.info('number of ACCEPTED_NOTIFIED requests: %d' %
                      len(req_accepted_notified))

        req_list = []
        req_list.extend(req_accepted_notified)

        for req in req_list:
            self.log.info('selecting task for req type %r' % req.status)

            check_status = req.status
            if req.status == 'ACCEPTED_MANAGER' and req.notified:
                check_status = 'ACCEPTED_NOTIFIED'

            req_task = self.worker_tasks[check_status]
            self.log.info('task selected %r' % req_task.name)

            data = {
                'req_id': req.id,
            }

            async_result = subtask(req_task).delay(data=data)
            self.log.info('task scheduled %r' % async_result)

        return True
Code Example #41
File: test_cache.py Project: wiennat/celery
    def test_on_chord_part_return(self, setresult):
        tb = CacheBackend(backend='memory://')

        deps = Mock()
        deps.__len__ = Mock()
        deps.__len__.return_value = 2
        setresult.restore.return_value = deps
        task = Mock()
        task.name = 'foobarbaz'
        try:
            current_app.tasks['foobarbaz'] = task
            task.request.chord = subtask(task)
            task.request.group = 'group_id'

            tb.on_chord_apply(task.request.group, [])

            self.assertFalse(deps.join.called)
            tb.on_chord_part_return(task)
            self.assertFalse(deps.join.called)

            tb.on_chord_part_return(task)
            deps.join.assert_called_with(propagate=False)
            deps.delete.assert_called_with()

        finally:
            current_app.tasks.pop('foobarbaz')
Code Example #42
File: test_cache.py Project: sunliwen/celery
    def test_on_chord_part_return(self, setresult):
        tb = CacheBackend(backend="memory://")

        deps = Mock()
        deps.total = 2
        setresult.restore.return_value = deps
        task = Mock()
        task.name = "foobarbaz"
        try:
            current_app.tasks["foobarbaz"] = task
            task.request.chord = subtask(task)
            task.request.taskset = "setid"

            tb.on_chord_apply(task.request.taskset, [])

            self.assertFalse(deps.join.called)
            tb.on_chord_part_return(task)
            self.assertFalse(deps.join.called)

            tb.on_chord_part_return(task)
            deps.join.assert_called_with(propagate=False)
            deps.delete.assert_called_with()

        finally:
            current_app.tasks.pop("foobarbaz")
Code Example #43
def delete_proc(host, proc, callback=None, swarm_trace_id=None, retry=0):
    logger.info("[%s] Delete proc %s on host %s", swarm_trace_id, proc, host)

    # We want to retry this task a few times before giving up, but we
    # want to carry on with the swarm on failure, too.
    # Celery retry() method seems to re-raise the original exception,
    # which stops the whole swarm.
    MAX_RETRIES = 3
    RETRY_TIMEOUT = 60

    try:
        with remote_settings(host):
            with always_disconnect(host):
                remote.delete_proc(host, proc)

        send_event(Proc.name_to_shortname(proc),
                   'deleted %s on %s' % (proc, host),
                   tags=['proc', 'deleted'],
                   swarm_id=swarm_trace_id)

    except Exception as exc:
        logger.warning('[%s] Error while deleting proc %s on %s: %r',
                       swarm_trace_id, proc, host, exc)

        if retry >= MAX_RETRIES:
            raise

        send_event(Proc.name_to_shortname(proc),
                   'Error while deleting %s on %s: %r. Will retry.' %
                   (proc, host, exc),
                   tags=['proc', 'deleted', 'failed'],
                   swarm_id=swarm_trace_id)

        logger.warning('delete_proc: Retrying #%d', retry)
        delete_proc.apply_async(kwargs={
            'host': host,
            'proc': proc,
            'callback': callback,
            'swarm_trace_id': swarm_trace_id,
            'retry': retry + 1,
        },
                                countdown=RETRY_TIMEOUT)

    if callback is not None:
        logger.info("[%s] Delete proc calling subtask %s", swarm_trace_id,
                    callback)
        subtask(callback).delay()
Code Example #44
File: models.py Project: kkdeep/basket
    def retry(self):
        # Meet the new task,
        # same as the old task.
        new_task = subtask(self.name, args=self.args, kwargs=self.kwargs)
        # Queue the new task.
        new_task.apply_async()
        # Forget the old task
        self.delete()
Code Example #45
File: models.py Project: KKDeep/basket
    def retry(self):
        # Meet the new task,
        # same as the old task.
        new_task = subtask(self.name, args=self.args, kwargs=self.kwargs)
        # Queue the new task.
        new_task.apply_async()
        # Forget the old task
        self.delete()
Code Example #46
File: pipeline.py Project: domoritz/validitychecker
    def setUpClass(cls):
        query = 'ice shield'
        qobj, _ = Query.objects.get_or_create(query__iexact=query,
                                              defaults={'query': query})

        number = 10
        result = prepare_client.delay(number, qobj, \
            callback=subtask(search_soap, \
            callback=subtask(extract_data, \
            callback=subtask(store_in_db, credible=True))))

        cls.result = []
        while isinstance(result, EagerResult) or isinstance(
                result, AsyncResult):
            result = result.get()
            cls.result.append(result)
        cls.qobj = qobj
Code Example #47
def build_app(build_id, callback=None, swarm_trace_id=None):
    logger.info("[%s] Build %s start", swarm_trace_id, build_id)
    build = Build.objects.get(id=build_id)
    build.start()
    build.save()

    build_yaml = BuildData(get_build_parameters(build)).as_yaml()
    build_msg = "Started build %s" % build + '\n\n' + build_yaml
    send_event(str(build), build_msg, tags=['build'], swarm_id=swarm_trace_id)

    _do_build(build, build_yaml)

    # start callback if there is one.
    if callback is not None:
        subtask(callback).delay()

    # If there were any other swarms waiting on this build, kick them off
    build_start_waiting_swarms(build.id)
Code Example #48
File: main_download.py Project: ok-water-survey/owsq
def data_download(data, basedir='/data/static/', clustered=False, **kwargs):
    '''
        Download multiple data sets from multiple data sources. 
            Simple cart data: Example
                {"SCI-1":{"quantity":1,"id":"SCI-1","name":"North Canadian River at Shawnee, OK (07241800)",
                          "parameter":"Discharge, cubic feet per second",
                           "query":"{'source':'USGS',  'webservice_type':'uv','sites':'07241800','parameterCd':'00060','startDT':'2007-10-01','endDT':'2013-04-04'}"}
                }
        query['source'] is used to import a module which will have a save function. This function returns a url to the file just downloaded.
        filezip creates a zip file from the list of urls
        Task returns a url to the zip file of all data downloaded from different sources 
        Currently performing in a serial fashion. Need to update and perform with celery groups in which multiple parallel subtasks are generated.
        
    '''
    if not data:
        raise Exception('No Data')
    try:
        data = json.loads(data)
    except:
        data = ast.literal_eval(data)
    newDir = os.path.join(basedir, 'ows_tasks/', str(data_download.request.id))
    call(["mkdir", '-p', newDir])
    os.chdir(newDir)
    logger = open(os.path.join(newDir, 'task_log.txt'), 'w')
    # consolidate sources- creates list of shopping cart items
    data_by_source = {}
    for itm, value in data.items():
        value['query'] = ast.literal_eval(value['query'])
        if value['query']['source'] in data_by_source:
            data_by_source[value['query']['source']].append(value)
        else:
            data_by_source[value['query']['source']] = [value]
    stask = []
    taskname_tmpl = 'owsq.data.download.%s.save'
    for itm, value in data_by_source.items():
        logger.write(log_info_tpl % (itm, str(len(value)), 'Subtask Created'))
        stask.append(subtask(taskname_tmpl % (itm), args=(newDir, itm,), kwargs={'data_items': value}))
    job = group(stask)
    result = job.apply_async()
    logger.write(log_info_tpl1 % ('Subtask Submission', 'Subtask Running'))
    aggregate_results = result.join()
    logger.write(log_info_tpl1 % ('Data query Successful', 'Subtasks completed'))

    urls = []
    for res in aggregate_results:
        urls.extend(res)
    outname = zip_name_tpl % (datetime.datetime.now().isoformat())
    zipurl = 'http://%s/%s/%s' % (socket.gethostname(), 'request', outname)
    logger.write(log_info_tpl2 % ('Data Zip URL', zipurl, '30 days'))
    logger.close()
    if clustered:
        return filezip.makezip(urls, zip_name_tpl % (datetime.datetime.now().isoformat()),
                               os.path.join(basedir, 'request/'))
    else:
        return filezip.makezip(newDir, zip_name_tpl % (datetime.datetime.now().isoformat()),
                               os.path.join(basedir, 'request/'), local=True)
Code Example #49
def join_taskset(setid, callback, interval=10, max_retries=None, propagate=True):
    '''
    Task to poll if the TaskSet ``setid`` has finished.

    Pass results of the TaskSet to ``callback``.
    '''
    result = TaskSetResult.restore(setid)
    if result.ready():
        return subtask(callback).delay(result.join(propagate=propagate))
    join_taskset.retry(countdown=interval, max_retries=max_retries)
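A producer-side usage sketch for join_taskset under the pre-4.0 Celery API used throughout these examples (add and summarize are assumed registered tasks, not from the original project): the TaskSet result is saved so join_taskset can restore it by setid while it polls.

# hypothetical wiring; add and summarize are assumed tasks
from celery.task import subtask
from celery.task.sets import TaskSet

result = TaskSet(tasks=[add.subtask((i, i)) for i in range(10)]).apply_async()
result.save()  # persist so TaskSetResult.restore(setid) can find it
join_taskset.delay(result.taskset_id, callback=subtask(summarize))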
Code Example #50
File: scrape.py Project: domoritz/validitychecker
def make_scholar_urls(number, qobj, callback=None):
    query = urllib.quote_plus(qobj.query)

    step = 80 # max 100
    urls = ['http://scholar.google.com/scholar?as_sdt=1&as_vis=1&num='+str(step)+'&start='+str(start)+'&q='+query for start in range(0, number, step)]

    if callback:
        return [subtask(callback).delay(url, qobj) for url in urls]
    else:
        return urls
Code Example #51
File: request.py Project: doyousoft/pyvac
    def render(self):

        req_id = self.request.params.get('request_id')
        req = Request.by_id(self.session, req_id)
        if not req:
            return ''

        data = {'req_id': req.id}

        only_manager = False
        # we should handle the case where the admin is also a user manager
        if (self.user.ldap_user and (req.user.manager_dn == self.user.dn)
                and (req.status == 'PENDING')):
            only_manager = True

        if self.user.is_admin and not only_manager:
            req.update_status('APPROVED_ADMIN')
            # save who performed this action
            req.last_action_user_id = self.user.id

            task_name = 'worker_approved'
            settings = self.request.registry.settings
            with open(settings['pyvac.celery.yaml']) as fdesc:
                Conf = yaml.load(fdesc, YAMLLoader)
            data['caldav.url'] = Conf.get('caldav').get('url')
        else:
            req.update_status('ACCEPTED_MANAGER')
            # save who performed this action
            req.last_action_user_id = self.user.id
            task_name = 'worker_accepted'

        self.session.flush()

        # call celery task directly, do not wait for polling
        from celery.registry import tasks
        from celery.task import subtask
        req_task = tasks[task_name]

        subtask(req_task).apply_async(kwargs={'data': data}, countdown=5)

        log.info('scheduling task %s for %s' % (task_name, data))
        return req.status
Code Example #52
    def run(self, __review_list, __eatery_id, __callback):
        """
                celery -A ProcessingCeleryTask  worker -n MappingListWorker -Q MappingListQueue --concurrency=4 -P \
                        gevent  --loglevel=info --autoreload
                """
        self.start = time.time()
        callback = subtask(__callback)

        print __eatery_id
        return group(
            callback.clone([arg, __eatery_id]) for arg in __review_list)()
Code Example #53
File: credentials.py Project: grigouze/pyvac
    def render(self):

        if 'submit' in self.request.params:
            email = self.request.params.get('email', '')
            user = User.by_email(self.session, email)
            if user:
                passhash = uuid.uuid4().hex
                date_end = datetime.now() + relativedelta(seconds=86400)
                # create hash entry in database with a TTL of 1 day
                entry = PasswordRecovery(user_id=user.id,
                                         hash=passhash,
                                         date_end=date_end)
                self.session.add(entry)
                self.session.flush()

                # call celery send mail task directly
                from celery.registry import tasks
                from celery.task import subtask
                req_task = tasks['worker_mail']

                data = {
                    'sender': '*****@*****.**',
                    'target': user.email,
                    'subject': 'Password Recovery',
                    'content': """Hello,

we send you this mail because you requested a password reset, to proceed please click the link below:
%s

Reminder, your login is: %s

""" % (route_url('change_password', self.request, passhash=passhash), user.login)
                }

                subtask(req_task).delay(data=data)

                msg = 'Mail sent to %s for password recovery.' % user.email
                self.request.session.flash('info;%s' % msg)
                return HTTPFound(location=route_url('login', self.request))

        return {}
Code Example #54
File: credentials.py Project: doyousoft/pyvac
    def render(self):

        if "submit" in self.request.params:
            email = self.request.params.get("email", "")
            user = User.by_email(self.session, email)
            if user:
                passhash = uuid.uuid4().hex
                date_end = datetime.now() + relativedelta(seconds=86400)
                # create hash entry in database with a TTL of 1 day
                entry = PasswordRecovery(user_id=user.id, hash=passhash, date_end=date_end)
                self.session.add(entry)
                self.session.flush()

                # call celery send mail task directly
                from celery.registry import tasks
                from celery.task import subtask

                req_task = tasks["worker_mail"]

                data = {
                    "sender": "*****@*****.**",
                    "target": user.email,
                    "subject": "Password Recovery",
                    "content": """Hello,

we send you this mail because you requested a password reset, to proceed please click the link below:
%s

Reminder, your login is: %s

"""
                    % (route_url("change_password", self.request, passhash=passhash), user.login),
                }

                subtask(req_task).delay(data=data)

                msg = "Mail sent to %s for password recovery." % user.email
                self.request.session.flash("info;%s" % msg)
                return HTTPFound(location=route_url("login", self.request))

        return {}
Code Example #55
def swarm_route(swarm_id, correct_nodes, callback=None, swarm_trace_id=None):
    """
    Given a list of nodes for the current swarm, make sure those nodes and
    only those nodes are in the swarm's routing pool, if it has one.
    """
    # It's important that the correct_nodes list be passed to this function
    # from the uptest finisher, rather than having this function build that
    # list itself, because if it built the list itself it couldn't be sure that
    # all the nodes had been uptested.  It's possible that one could have crept
    # in through a non-swarm deployment, for example.
    logger.info("[%s] Swarm %s route", swarm_trace_id, swarm_id)

    swarm = Swarm.objects.get(id=swarm_id)
    if swarm.pool:
        # There's just the right number of procs.  Make sure the balancer is up
        # to date, but only if the swarm has a pool specified.

        current_nodes = set(balancer.get_nodes(swarm.balancer, swarm.pool))
        correct_nodes = set(correct_nodes)
        new_nodes = correct_nodes.difference(current_nodes)
        stale_nodes = current_nodes.difference(correct_nodes)

        if new_nodes:
            balancer.add_nodes(swarm.balancer, swarm.pool, list(new_nodes))

        if stale_nodes:
            balancer.delete_nodes(swarm.balancer, swarm.pool,
                                  list(stale_nodes))

        # Clean up pool in balancer
        balancer.delete_pool_if_empty(swarm.balancer, swarm.pool)

        msg = ('Routed swarm {}.  '
               'Nodes: current={} correct={} new={} stale={}'.format(
                   swarm, list(current_nodes), list(correct_nodes),
                   list(new_nodes), list(stale_nodes)))
        send_event(str(swarm), msg, tags=['route'], swarm_id=swarm_trace_id)

    if callback is not None:
        subtask(callback).delay()
Code Example #56
def make_scholar_urls(number, qobj, callback=None):
    query = urllib.quote_plus(qobj.query)

    step = 80  # max 100
    urls = [
        'http://scholar.google.com/scholar?as_sdt=1&as_vis=1&num=' +
        str(step) + '&start=' + str(start) + '&q=' + query
        for start in range(0, number, step)
    ]

    if callback:
        return [subtask(callback).delay(url, qobj) for url in urls]
    else:
        return urls
Code Example #57
def parse_wok_page(page, qobj, callback=None):
    logger = parse_wok_page.get_logger()

    parser = etree.HTMLParser()
    tree = etree.parse(StringIO(page), parser)

    elements = tree.xpath('//td[@class="summary_data"]')

    # to be returned
    records = []
    for element in elements:
        record = {}
        record['title'] = perform(element.xpath('a/value//text()'), a_join,
                                  unicode)
        record['source'] = perform(
            element.xpath(
                'span[contains(text(),"Source")]/following-sibling::text()')
            [0], unicode, unicode.strip)
        record['authors'] = perform(
            element.xpath(
                'span[contains(text(),"Author")]/following-sibling::text()')
            [0], unicode, a_split_semicolon, m_trim)
        record['publish_date'] = perform(
            element.xpath(
                'span[contains(text(),"Published")]/following::text()')[1],
            lambda x: a_find(x, r'(\d{4})'), a_int, a_date)
        record['times_cited'] = perform(
            element.xpath(
                'span[contains(text(),"Times Cited")]/following::text()')[1],
            a_trim, lambda s: s.replace(',', ''), a_int)

        # remove et al
        record['authors'] = filter(
            lambda author: not author.startswith('et al'), record['authors'])

        # convert name from Doe, J to J Doe
        record['authors'] = map(
            lambda author: ' '.join(
                reversed(map(unicode.strip, author.split(' ')))),
            record['authors'])

        records.append(record)

    logger.warning("Got %d results for the query '%s' from isi/wok" %
                   (len(records), qobj.query))

    if callback:
        return subtask(callback).delay(records=records, qobj=qobj)
    else:
        return records
Code Example #58
File: poller.py Project: bobquest33/pyvac
    def run(self, *args, **kwargs):
        self.log = log
        # init database connection
        session = DBSession()

        statuses = [
            'PENDING', 'ACCEPTED_MANAGER', 'DENIED', 'APPROVED_ADMIN',
            'CANCELED', 'ERROR'
        ]
        for status in statuses:
            requests = Request.by_status(session, status)
            self.log.info('number of requests for %s: %d' %
                          (status, len(requests)))

        req_accepted_notified = Request.by_status(session,
                                                  'ACCEPTED_MANAGER',
                                                  notified=True)
        self.log.info('number of ACCEPTED_NOTIFIED requests: %d' %
                      len(req_accepted_notified))

        # req_pending_notified = Request.by_status(session, 'PENDING',
        #                                          notified=True)
        # self.log.info('number of PENDING_NOTIFIED requests: %d' %
        #               len(req_pending_notified))

        req_list = []
        req_list.extend(req_accepted_notified)
        # req_list.extend(req_pending_notified)

        for req in req_list:
            self.log.info('selecting task for req type %r' % req.status)

            check_status = req.status
            if req.status == 'ACCEPTED_MANAGER' and req.notified:
                check_status = 'ACCEPTED_NOTIFIED'
            # if req.status == 'PENDING' and req.notified:
            #     check_status = 'PENDING_NOTIFIED'

            req_task = self.worker_tasks[check_status]
            self.log.info('task selected %r' % req_task.name)

            data = {
                'req_id': req.id,
            }

            async_result = subtask(req_task).delay(data=data)
            self.log.info('task scheduled %r' % async_result)

        return True
Code Example #59
def fetch_page_from_url(url, qobj, callback=None):

    logger = fetch_page_from_url.get_logger()
    logger.info("URL: %s" % url)

    headers = {'User-Agent': 'Mozilla/5.0'}
    req = urllib2.Request(url=url, headers=headers)
    response = urllib2.urlopen(req)
    page = response.read()

    if callback:
        # The callback may have been serialized with JSON,
        # so best practice is to convert the subtask dict back
        # into a subtask object.
        return subtask(callback).delay(url, page, qobj)
    else:
        return url, page
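For reference, a small sketch of what a JSON-serialized callback can look like when it reaches a task such as the one above, and how subtask() turns the plain dict back into a usable signature (the fully qualified task name shown is illustrative, not taken from the project):

# illustrative dict form of a serialized signature in pre-4.0 Celery
callback = {'task': 'scrape.parse_scholar_page', 'args': [], 'kwargs': {}, 'options': {}}
subtask(callback).delay(url, page, qobj)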