Example #1
def main():
    client = gearman.GearmanClient(['localhost:4730', 'otherhost:4730'])
    try:
        completed_job_request = client.submit_job("ToUpper",
                                                  "arbitrary binary data")
        check_request_status(completed_job_request)
    except Exception as e:
        print type(e)

    try:
        completed_job_request = client.submit_job("ToUpperTimeOut5",
                                                  "arbitrary binary data")
        check_request_status(completed_job_request)
    except Exception as e:
        print type(e)

    try:
        completed_job_request = client.submit_job("ToUpperTimeOut20",
                                                  "arbitrary binary data")
        check_request_status(completed_job_request)
    except Exception as e:
        print type(e)

    try:
        completed_job_request = client.submit_job("SysInfo", "")
        check_request_status(completed_job_request)
    except Exception as e:
        print type(e)

    try:
        completed_job_request = client.submit_job("MemInfo", "")
        check_request_status(completed_job_request)
    except Exception as e:
        print type(e)
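
These snippets (and several below) pass the returned GearmanJobRequest to a check_request_status() helper that is never shown. A minimal sketch of such a helper, assuming only the standard python-gearman request attributes (complete, timed_out, state, result) rather than anything from the original projects:

import gearman

def check_request_status(job_request):
    # Hypothetical helper, not part of the original snippets: report how a
    # GearmanJobRequest returned by submit_job() ended up.
    if job_request.complete:
        print("Job %s finished: state=%s, result=%r" % (
            job_request.job.unique, job_request.state, job_request.result))
    elif job_request.timed_out:
        print("Job %s timed out" % job_request.job.unique)
    elif job_request.state == gearman.JOB_UNKNOWN:
        print("Job %s: connection to the job server was lost" % job_request.job.unique)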
Example #2
def _perform_export(user, do_delete):
    hash_object = hashlib.new('sha256')
    hash_object.update(user.username + user.password)
    file_hash = hash_object.hexdigest()

    emails = list(user.confirmed_emails.values_list('email', flat=True))
    openids = list(user.confirmed_openids.values_list('openid', flat=True))

    photos = []
    for photo in user.photos.all():
        photo_details = (photo.filename, photo.format)
        photos.append(photo_details)

    gm_client = gearman.GearmanClient(settings.GEARMAN_SERVERS)
    workload = {
        'do_delete': do_delete,
        'file_hash': file_hash,
        'username': user.username,
        'emails': emails,
        'openids': openids,
        'photos': photos
    }
    gm_client.submit_job('exportaccount',
                         json.dumps(workload),
                         background=True,
                         wait_until_complete=False)

    download_url = settings.EXPORT_FILES_URL + file_hash + '.xml.gz'
    return download_url
Example #3
 def performTask(self):
     from archivematicaMCP import limitGearmanConnectionsSemaphore
     limitGearmanConnectionsSemaphore.acquire()
     gm_client = gearman.GearmanClient([
         archivematicaMCP.config.get('MCPServer', "MCPArchivematicaServer")
     ])
     data = {"createdDate": timezone.now().isoformat(' ')}
     data["arguments"] = self.arguments
     LOGGER.info('Executing %s %s', self.execute, data)
     completed_job_request = None
     failMaxSleep = 60
     failSleepInitial = 1
     failSleep = failSleepInitial
     failSleepIncrementor = 2
     while completed_job_request is None:
         try:
             completed_job_request = gm_client.submit_job(
                 self.execute.lower(), cPickle.dumps(data), self.UUID)
         except gearman.errors.ServerUnavailable:
             completed_job_request = None
             time.sleep(failSleep)
             if failSleep == failSleepInitial:
                 LOGGER.exception('Error submitting job. Retrying.')
             if failSleep < failMaxSleep:
                 failSleep += failSleepIncrementor
     limitGearmanConnectionsSemaphore.release()
     self.check_request_status(completed_job_request)
     gm_client.shutdown()
     LOGGER.debug('Finished performing task %s', self.UUID)
Example #4
def disable_active_user(modeladmin, request, queryset):
    if request.POST.get('confirmation', False):
        gm_client = gearman.GearmanClient(settings.GEARMAN_JOB_SERVERS)
        for user in queryset:
            gm_client.submit_job("delete_user",
                                 json.dumps({
                                     'user_id':
                                     user.id,
                                     'action':
                                     DELETE_USER_DELETE_SOUNDS_ACTION_NAME
                                 }),
                                 wait_until_complete=False,
                                 background=True)
        messages.add_message(
            request, messages.INFO,
            '%d users will be soft deleted asynchronously; related sounds are '
            'going to be deleted as well' % (queryset.count()))
        return HttpResponseRedirect(reverse('admin:auth_user_changelist'))

    params = [(k, v) for k in request.POST.keys()
              for v in request.POST.getlist(k)]
    tvars = {'anonymised': [], 'params': params}
    for obj in queryset:
        info = obj.profile.get_info_before_delete_user(remove_sounds=True)
        model_count = {
            model._meta.verbose_name_plural: len(objs)
            for model, objs in info['deleted'].model_objs.items()
        }
        anon = {'anonymised': []}
        anon['model_count'] = dict(model_count).items()
        anon['logic_deleted'] = info['logic_deleted']
        anon['name'] = info['anonymised']
        tvars['anonymised'].append(anon)

    return render(request, 'accounts/delete_confirmation.html', tvars)
Example #5
 def performTask(self):
     limitGearmanConnectionsSemaphore.acquire()
     gm_client = gearman.GearmanClient([django_settings.GEARMAN_SERVER])
     data = {"createdDate": timezone.now().isoformat(' ')}
     data["arguments"] = self.arguments
     data["alwaysCapture"] = self.alwaysCapture  # tells worker to always capture stdout
     LOGGER.info('Executing %s %s', self.execute, data)
     completed_job_request = None
     failMaxSleep = 60
     failSleepInitial = 1
     failSleep = failSleepInitial
     failSleepIncrementor = 2
     while completed_job_request is None:
         try:
             completed_job_request = gm_client.submit_job(
                 self.execute.lower(), cPickle.dumps(data), self.UUID)
         except gearman.errors.ServerUnavailable:
             completed_job_request = None
             time.sleep(failSleep)
             if failSleep == failSleepInitial:
                 LOGGER.exception('Error submitting job. Retrying.')
             if failSleep < failMaxSleep:
                 failSleep += failSleepIncrementor
     limitGearmanConnectionsSemaphore.release()
     self.check_request_status(completed_job_request)
     gm_client.shutdown()
     LOGGER.debug('Finished performing task %s', self.UUID)
Example #6
    def test_3(self):
        '''Submit to job queue with gevent.'''

        monkey.patch_all()
        gm_client = gearman.GearmanClient([self.gm_server])
        for _ in range(self.jobs):
            gm_client.submit_job("with_gevent", self.data, background=True)
Example #7
 def performTask(self):
     from archivematicaMCP import limitGearmanConnectionsSemaphore
     limitGearmanConnectionsSemaphore.acquire()
     gm_client = gearman.GearmanClient([archivematicaMCP.config.get('MCPServer', "MCPArchivematicaServer")])
     data = {"createdDate" : datetime.datetime.now().__str__()}
     data["arguments"] = self.arguments
     print '"'+self.execute+'"', data
     completed_job_request = None
     failMaxSleep = 60
     failSleepInitial = 1
     failSleep = failSleepInitial
     failSleepIncrementor = 2
     while completed_job_request is None:
         try:
             completed_job_request = gm_client.submit_job(self.execute.lower(), cPickle.dumps(data), self.UUID)
         #raise ServerUnavailable('Found no valid connections: %r' % self.connection_list)
         #ServerUnavailable: Found no valid connections: [<GearmanConnection localhost:4730 connected=False>]
         except gearman.errors.ServerUnavailable as inst:
             completed_job_request = None
             time.sleep(failSleep)
             if failSleep == failSleepInitial:
                 print >>sys.stderr, inst.args
                 print >>sys.stderr, "Retrying issueing gearman command."
             if failSleep < failMaxSleep:
                 failSleep += failSleepIncrementor
     limitGearmanConnectionsSemaphore.release()
     self.check_request_status(completed_job_request)
     gm_client.shutdown()
     print "DEBUG: FINISHED PERFORMING TASK: ", self.UUID
Example #8
    def crop(self, dimensions=None, links_to_create=None):
        if path.isfile(settings.USER_FILES_ROOT + self.full_filename()):
            return  # already done, skip

        if not path.isfile(settings.UPLOADED_FILES_ROOT +
                           self.full_filename()):
            return  # source image doesn't exist, can't crop it

        if not links_to_create:
            links_to_create = []

        x = y = w = h = 0
        if dimensions:
            x = dimensions['x']
            y = dimensions['y']
            w = dimensions['w']
            h = dimensions['h']

        # Queue a job for the cropping/resizing gearman worker
        gm_client = gearman.GearmanClient(settings.GEARMAN_SERVERS)
        workload = {
            'file_hash': self.filename,
            'format': self.format,
            'x': x,
            'y': y,
            'w': w,
            'h': h,
            'links': links_to_create
        }
        gm_client.submit_job('cropresize',
                             json.dumps(workload),
                             background=True,
                             wait_until_complete=False)
Example #9
 def list(self):
     gm_client = gearman.GearmanClient([self.server])
     completed_job_request = gm_client.submit_job("getJobsAwaitingApproval", "", None)
     if completed_job_request.state == gearman.JOB_COMPLETE:
         return cPickle.loads(completed_job_request.result)
     elif completed_job_request.state == gearman.JOB_FAILED:
         raise RPCError("getJobsAwaitingApproval failed (check MCPServer logs)")
Example #10
    def __init__(self,
                 core,
                 server='localhost:9092',
                 key='secret',
                 jobs=None,
                 repo_format="{0}",
                 default_repo=""):
        super(BuildPlugin, self).__init__(core)

        self.server = server
        self.key = key
        self.job_timeout = time()
        self.repo_format = repo_format
        self.default_repo = default_repo

        self.jobs = {}
        if not jobs:
            return
        for el in jobs:
            name = el.get('name', None)
            short = el.get('short', name)
            if not name:
                raise ConfigurationError('job must have name')
            self.jobs[short] = (name, el.text)

        self.gm_client = gearman.GearmanClient([self.server])
        self.running_jobs = []
        self.upload_job = None
Example #11
    def on_job_complete(self, current_job, job_result):
        resultObj = json.loads(job_result)
        
        if resultObj['files']['new'] or resultObj['files']['updated']:

            jobData = {'cruiseID':'', 'collectionSystemTransferID':'', 'files':{}}
            jobData['cruiseID'] = self.cruiseID
            jobData['collectionSystemTransferID'] = self.collectionSystemTransfer['collectionSystemTransferID']

            destDir = build_destDir(self).rstrip('/')
            jobData['files'] = resultObj['files']
            jobData['files']['new'] = [destDir + '/' + filename for filename in jobData['files']['new']]
            jobData['files']['updated'] = [destDir + '/' + filename for filename in jobData['files']['updated']]
                
            gm_client = gearman.GearmanClient([self.OVDM.getGearmanServer()])
            
            for task in self.OVDM.getTasksForHook('runCollectionSystemTransfer'):
                #print task
                submitted_job_request = gm_client.submit_job(task, json.dumps(jobData), background=True)
        
        # If the last part of the results failed
        if len(resultObj['parts']) > 0:
            if resultObj['parts'][-1]['result'] == "Fail": # Final Verdict
                #print "...but there was an error:"
                #print json.dumps(resultObj['parts'])
                self.OVDM.setError_collectionSystemTransfer(self.collectionSystemTransfer['collectionSystemTransferID'])
            else:
                self.OVDM.setIdle_collectionSystemTransfer(self.collectionSystemTransfer['collectionSystemTransferID'])
        else:
            self.OVDM.setIdle_collectionSystemTransfer(self.collectionSystemTransfer['collectionSystemTransferID'])

        print "Job: " + current_job.handle + ", " + self.collectionSystemTransfer['name'] + " transfer completed at: " + time.strftime("%D %T", time.gmtime())
            
        return super(OVDMGearmanWorker, self).send_job_complete(current_job, job_result)
Example #12
    def _rpc_sync_call(self,
                       ability,
                       data=None,
                       timeout=INFLIGHT_POLL_TIMEOUT):
        """Invoke remote method synchronously and with a deadline.

        When successful, it returns the payload of the response. Otherwise, it
        raises an exception. ``TimeoutError`` when the deadline was exceeded,
        ``RPCError`` when the worker failed abruptly, ``RPCServerError`` when
        the worker returned an error.
        """
        if data is None:
            data = b""
        elif "user_id" not in data:
            data["user_id"] = self.user.id
        client = gearman.GearmanClient([self.server])
        response = client.submit_job(
            ability,
            six.moves.cPickle.dumps(data, protocol=0),
            background=False,
            wait_until_complete=True,
            poll_timeout=timeout,
        )
        client.shutdown()
        if response.state == gearman.JOB_CREATED:
            raise TimeoutError(timeout)
        elif response.state != gearman.JOB_COMPLETE:
            raise RPCError("{} failed (check the logs)".format(ability))
        payload = six.moves.cPickle.loads(response.result)
        if isinstance(payload, dict) and payload.get("error", False):
            raise RPCServerError(payload)
        return payload
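
A brief usage sketch for the helper above; the ability name "getUnitStatus", the wrapper function, and its arguments are hypothetical, and only the exception behaviour follows the docstring:

def fetch_unit_status(rpc_client, unit_uuid):
    # Hypothetical caller: "getUnitStatus" is an illustrative ability name,
    # not one taken from the original project.
    try:
        return rpc_client._rpc_sync_call("getUnitStatus", {"uuid": unit_uuid})
    except TimeoutError:
        # The poll deadline passed before the worker replied.
        return None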
Example #13
 def delete_include_sounds(self, request, obj):
     username = obj.username
     if request.method == "POST":
         gm_client = gearman.GearmanClient(settings.GEARMAN_JOB_SERVERS)
         gm_client.submit_job("delete_user",
                              json.dumps({
                                  'user_id':
                                  obj.id,
                                  'action':
                                  DELETE_USER_DELETE_SOUNDS_ACTION_NAME
                              }),
                              wait_until_complete=False,
                              background=True)
         messages.add_message(
             request, messages.INFO, 'User \'%s\' will be soft deleted'
             ' asynchronously. Sounds and other related'
             ' content will be deleted.' % username)
         return HttpResponseRedirect(reverse('admin:auth_user_changelist'))
     info = obj.profile.get_info_before_delete_user(remove_sounds=True)
     model_count = {
         model._meta.verbose_name_plural: len(objs)
         for model, objs in info['deleted'].model_objs.items()
     }
     tvars = {'anonymised': []}
     anon = {}
     anon['model_count'] = dict(model_count).items()
     anon['logic_deleted'] = info['logic_deleted']
     anon['name'] = info['anonymised']
     tvars['anonymised'].append(anon)
     return render(request, 'accounts/delete_confirmation.html', tvars)
Example #14
def client(detect_type, filePs, seg_param, segpx=0, segopt=-1, od_conf=0):
    """
    :param detect_type: 1: detect, 2: seg, 3: detect + seg
    :param filePs:
    :param segpx: 0: do not return seg coordinates, 1: return coordinates
    :param segopt: -1: do not return the coverage ratio, 0: coverage ratio of the covered part, 1: coverage ratio of the exposed part
    :param od_conf: detection threshold as an integer percentage; 0 means use the default threshold
    :return:
    """
    print('seg_param -- {}'.format(seg_param))
    gm_client = gearman.GearmanClient(['127.0.0.1:4730'])
    if detect_type == 1:
        print('type {}'.format(detect_type))
        jobs = [
            dict(task='smart_site_det_car', data=json.dumps(obj=dict(path=filePs, od_conf=od_conf))),
            dict(task='smart_site_det_fog', data=json.dumps(obj=dict(path=filePs, od_conf=od_conf)))
        ]
        completed_job_request = gm_client.submit_multiple_jobs(jobs, poll_timeout=60)
    elif detect_type == 2:
        print('type {}'.format(detect_type))
        completed_job_request = gm_client.submit_job("smart_site_seg", json.dumps(obj=dict(path=filePs,
                                                                                           seg_param=seg_param,
                                                                                           )))
    elif detect_type == 3:
        print('type {}'.format(detect_type))
        jobs = [
            dict(task='smart_site_det_car', data=json.dumps(obj=dict(path=filePs, od_conf=od_conf))),
            dict(task='smart_site_det_fog', data=json.dumps(obj=dict(path=filePs, od_conf=od_conf))),
            dict(task='smart_site_seg',
                 data=json.dumps(obj=dict(path=filePs, seg_param=seg_param)))
        ]
        completed_job_request = gm_client.submit_multiple_jobs(jobs, poll_timeout=60)
    return check_request_status(completed_job_request, detect_type, filePs=filePs, seg_param=seg_param, segpx=segpx,
                                segopt=segopt)
Example #15
def getSensorData():
    gm_client = gearman.GearmanClient(gearmanServer)
    completed_job_request = gm_client.submit_job("iot.worker", 'temp')
    io = StringIO(completed_job_request.result)
    out = json.load(io)
    print (out)
    return out
Example #16
    def handle(self, *args, **options):
        try:
            job_data = ''

            if len(args) == 0:
                raise CommandError('At least task name must be provided.')

            task_name = '{0}@{1}'.format(
                args[0], get_namespace()) if get_namespace() else args[0]
            if len(args) > 1:
                job_data = args[1]

            self.stdout.write(
                'Submitting job: {0:s}, job data: {1:s}.\n'.format(
                    task_name, job_data if job_data else '(empty)'))

            client = gearman.GearmanClient(
                django_gearman_commands.settings.GEARMAN_SERVERS)
            is_foreground = options.get('foreground', True)
            result = client.submit_job(
                task_name,
                job_data,
                wait_until_complete=is_foreground,
                background=not is_foreground)

            self.stdout.write(
                'Job submission done, result: {0:s}.\n'.format(result))
        except:
            log.exception('Error when submitting gearman job')
            raise
Example #17
    def test_get_users(self):
        gm_client = gearman.GearmanClient(['localhost:4730'])
        agg = Aggregator(gm_client, None)

        user_data = agg.get_users()
        for i in range(len(user_data)):
            del user_data[i]['_id']
            if 'words' in user_data[i]:
                del user_data[i]['words']

        expected_response = [
            {
                u'username': u'iandioch',
                u'subscribed_feeds': [
                    u'https://news.ycombinator.com/rss'
                    ],
            },
            {
                u'username': u'sully',
                u'subscribed_feeds': [
                    u'https://news.ycombinator.com/rss',
                    u'http://spritesmods.com/rss.php',
                    u'http://dave.cheney.net/feed'
                    ],
            },
            {
                u'username': u'theotherguys',
                u'subscribed_feeds': [
                    u'https://news.ycombinator.com/rss',
                    u'http://spritesmods.com/rss.php'
                    ],
            }]

        self.assertEqual(user_data, expected_response)
Example #18
    def test_put_g2g(self):
        gm_client = gearman.GearmanClient(['localhost:4730'])
        agg = Aggregator(gm_client, None)

#        add_request = bson.BSON.encode({
#            'database':'feedlark',
#            'collecion':'g2g',
#            'data':{
#                'username':'******',
#                'test_parameter':'NOLO'
#                }
#            })
#        gm_client.submit_job('db-add',str(add_request))

        test_document = {
            'username': '******',
            'test_parameter': 'YOLO',
            }
        agg.put_g2g('iandioch', test_document)

        get_request = bson.BSON.encode({
            'database': 'feedlark',
            'collection': 'g2g',
            'query': {
                'username': '******',
                },
            'projection': {
                'test_parameter': 1,
                },
            })
        g2g_data = gm_client.submit_job('db-get', str(get_request)).result
        self.assertEqual(
            bson.BSON(g2g_data).decode()['docs'][0]['test_parameter'], 'YOLO')
Example #19
    def full_delete(self, request, obj):
        username = obj.username
        if request.method == "POST":
            gm_client = gearman.GearmanClient(settings.GEARMAN_JOB_SERVERS)
            gm_client.submit_job("delete_user",
                                 json.dumps({
                                     'user_id': obj.id,
                                     'action': "full_delete_user"
                                 }),
                                 wait_until_complete=False,
                                 background=True)
            messages.add_message(
                request, messages.INFO, 'User \'%s\' will be fully deleted '
                'asynchronously from the database' % (username))
            return HttpResponseRedirect(reverse('admin:auth_user_changelist'))

        info = obj.profile.get_info_before_delete_user(remove_sounds=False,
                                                       remove_user=True)
        model_count = {
            model._meta.verbose_name_plural: len(objs)
            for model, objs in info['deleted'].model_objs.items()
        }
        tvars = {'anonymised': []}
        anon = {}
        anon['model_count'] = dict(model_count).items()
        anon['name'] = info['anonymised']
        anon['deleted'] = True
        tvars['anonymised'].append(anon)
        return render(request, 'accounts/delete_confirmation.html', tvars)
Example #20
 def notifications(self):
     gm_client = gearman.GearmanClient([self.server])
     completed_job_request = gm_client.submit_job("getNotifications", "",
                                                  None)
     #self.check_request_status(completed_job_request)
     gm_client.shutdown()
     return cPickle.loads(completed_job_request.result)
Example #21
def disable_active_user_preserve_sounds(modeladmin, request, queryset):
    if request.POST.get('confirmation', False):
        gm_client = gearman.GearmanClient(settings.GEARMAN_JOB_SERVERS)
        for user in queryset:
            gm_client.submit_job("delete_user",
                                 json.dumps({
                                     'user_id':
                                     user.id,
                                     'action':
                                     DELETE_USER_KEEP_SOUNDS_ACTION_NAME
                                 }),
                                 wait_until_complete=False,
                                 background=True)
        messages.add_message(
            request, messages.INFO,
            '%d users will be soft deleted asynchronously' %
            (queryset.count()))
        return HttpResponseRedirect(reverse('admin:auth_user_changelist'))

    params = [(k, v) for k in request.POST.keys()
              for v in request.POST.getlist(k)]
    tvars = {'anonymised': [], 'params': params}
    for obj in queryset:
        info = obj.profile.get_info_before_delete_user(remove_sounds=False)
        tvars['anonymised'].append({'name': info['anonymised']})
    return render(request, 'accounts/delete_confirmation.html', tvars)
Example #22
 def __init__(self):
     self.updater = Updater(token=AUTH_TOKEN)
     self.dispatcher = self.updater.dispatcher
     self.gm_client = gearman.GearmanClient(
         [config['gearmanip'] + ':' + str(config['gearmanport'])])
     logging.basicConfig(
         format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
         level=logging.INFO)
Example #23
 def notifications(self):
     gm_client = gearman.GearmanClient([self.server])
     completed_job_request = gm_client.submit_job("getNotifications", "", None)
     gm_client.shutdown()
     if completed_job_request.state == gearman.JOB_COMPLETE:
         return cPickle.loads(completed_job_request.result)
     elif completed_job_request.state == gearman.JOB_FAILED:
         raise RPCError("getNotifications failed (check MCPServer logs)")
Example #24
 def execute(self, uuid, choice):
     gm_client = gearman.GearmanClient([self.server])
     data = {}
     data["jobUUID"] = uuid
     data["chain"] = choice
     data["agent"] = "338daf08-b1f8-4a3b-bcc7-6b005d976438"
     completed_job_request = gm_client.submit_job("approveJob", cPickle.dumps(data), None)
     #self.check_request_status(completed_job_request)
     return
Example #25
def connect2gearman():
    try:
        global gearman_client
        gearman_client = gearman.GearmanClient(['127.0.0.1'])
    except Exception as e:
        print "[ERROR GEARMAN]: %s" % (time.strftime(
            '%Y-%m-%d %H:%M:%S', time.localtime(time.time())))
        print "%s\n" % (str(e))
        sys.exit(1)
Example #26
def do_onethread(cmdlines):
    #     print cmdlines
    gm_client = gearman.GearmanClient([GearmanSrvIP])
    cnt = 0
    runcnt = 0
    global Jobs
    for line in cmdlines:
        while len(Jobs) > 8:
            break
            # not use now
            for job in Jobs:
                if gm_client.get_job_status(job).state != 'COMPLETE':
                    time.sleep(0.1)
                    continue
                print 'find one completed.'
                check_request_status(job)
                Jobs.remove(job)
                break
            time.sleep(0.1)

        #if cnt > 50:
        #    break
        #print 'No.',runcnt
        gearman_run_cmdline(gm_client, line)
        cnt += 1
        runcnt += 1
        #print 'Jobs', len(Jobs)
    print 'total', cnt
    print 'wait for all completed...'

    gm_client.wait_until_jobs_accepted(Jobs)
    #gm_client.wait_until_jobs_completed(Jobs, poll_timeout= 5.0)
    cnt_completed = 0
    while (True):
        #statuses = gm_client.get_job_statuses(Jobs)
        #print statuses
        #print len(Jobs)
        gm_client.wait_until_jobs_completed(Jobs, poll_timeout=1)
        cnt_completed = 0
        for job in Jobs:
            #wait_jobs = []
            #wait_jobs.append(job)
            #gm_client.wait_until_jobs_completed(wait_jobs)
            if job.complete:
                cnt_completed += 1
        print cnt_completed, '/', cnt, 'done!\r',
        if (cnt_completed >= cnt):
            break
        time.sleep(30)

    gm_client.wait_until_jobs_completed(Jobs)
    for job in Jobs:
        check_request_status(job)
    #    print gm_client.get_job_status(job)

    print 'done.'
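
Example #26 relies on a gearman_run_cmdline() helper that is not shown. A minimal sketch, assuming each command line is submitted as a non-background, non-blocking job and tracked in the same global Jobs list (the task name "run_cmdline" is invented for illustration):

def gearman_run_cmdline(gm_client, cmdline):
    # Hypothetical helper: submit the command line without blocking, keeping the
    # request object so the loop above can poll it with wait_until_jobs_completed().
    job_request = gm_client.submit_job("run_cmdline", cmdline,
                                       background=False,
                                       wait_until_complete=False)
    Jobs.append(job_request)
    return job_request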
Example #27
 def handle(self, *args, **options):
     gm_client = gearman.GearmanClient(settings.GEARMAN_JOB_SERVERS)
     for sound in Sound.objects.filter(analysis_state='OK',
                                       moderation_state='OK'):
         # we avoid saving the sound as currently this triggers crc calculation
         # also with wait_until_complete=True we avoid processing all sounds at once in gm client machine
         gm_client.submit_job("analyze_sound",
                              str(sound.id),
                              wait_until_complete=True,
                              background=True)
Example #28
 def execute(self, uuid, choice, uid=None):
     gm_client = gearman.GearmanClient([self.server])
     data = {}
     data["jobUUID"] = uuid
     data["chain"] = choice
     if uid is not None:
         data["uid"] = uid
     gm_client.submit_job("approveJob", cPickle.dumps(data), None)
     gm_client.shutdown()
     return
Example #29
 def execute(self, uuid, choice):
     gm_client = gearman.GearmanClient([self.server])
     data = {}
     data["jobUUID"] = uuid
     data["chain"] = choice
     data["uid"] = "1"
     completed_job_request = gm_client.submit_job("approveJob",
                                                  cPickle.dumps(data), None)
     #self.check_request_status(completed_job_request)
     return
Example #30
 def process(self, force=False):
     gm_client = gearman.GearmanClient(settings.GEARMAN_JOB_SERVERS)
     if force or self.processing_state != "OK":
         self.set_processing_ongoing_state("QU")
         gm_client.submit_job("process_sound", str(self.id), wait_until_complete=False, background=True)
         audio_logger.info("Send sound with id %s to queue 'process'" % self.id)
     if force or self.analysis_state != "OK":
         self.set_analysis_state("QU")
         gm_client.submit_job("analyze_sound", str(self.id), wait_until_complete=False, background=True)
         audio_logger.info("Send sound with id %s to queue 'analyze'" % self.id)