Example #1
File: tasks.py Project: calaniz/TASTE
 def __init__(self, test_run, browser, platform):
     self.test_run = test_run
     self.vm_name = "cobs_thread%s_%s" % (current_process().index, test_run)
     self.vm_host = socket.gethostname()
     self.browser = browser.lower()
     self.platform = platform
     self.index = current_process().index + 1
     self.rdp_port = str(9001 + current_process().index)
Example #2
def configure_workers(sender, signal):
    # print "worker init" + str(os.getpid())
    # Make classifier.
    # model_def = "/home/mythxcq/caffe_person_classification_models/google_net/deploy_112.prototxt"
    # pretrained_model = "/home/mythxcq/caffe_person_classification_models/google_net/finetune_person_googlenet_112.caffemodel"
    # caffe.set_mode_gpu()
    # caffe.set_device(current_process().index)
    global classifier
    mean = np.array([104,117,123])
    classifier = caffe.Classifier(model_def, pretrained_model,
            mean=mean, input_scale=None, raw_scale=255.0)
    classifier.index = current_process().index
    print current_process().index
    print classifier
Example #3
def get_redis_for_stream():
    global REDIS_CONNECTION_STREAM
    if REDIS_CONNECTION_STREAM is None:
        REDIS_CONNECTION_STREAM = redis.StrictRedis.from_url(config.STREAM_URL)
        p = current_process()
        REDIS_CONNECTION_STREAM.client_setname("stream:%s" % p.name)
    return REDIS_CONNECTION_STREAM
Example #4
def detect_image(net, im):
    """Detect object classes in an image using pre-computed object proposals."""

    # Detect all object classes and regress object bounds
    timer = Timer()
    timer.tic()
    scores, boxes = im_detect(net, im)
    timer.toc()
    print(str(current_process().index)+' Detection took {:.3f}s for '
           '{:d} object proposals').format(timer.total_time, boxes.shape[0])

    # Visualize detections for each class
    # CONF_THRESH = 0.0

    CONF_THRESH = 0.4
    NMS_THRESH = 0.1
    person_idx = CLASSES.index('person')
    person_boxes = boxes[:, 4*person_idx:4*(person_idx + 1)]
    person_scores = scores[:, person_idx]
    person_dets = np.hstack((person_boxes,
                      person_scores[:, np.newaxis])).astype(np.float32)
    person_keep = nms(person_dets, NMS_THRESH)
    person_dets = person_dets[person_keep, :]
    # inds = np.where(dets[:, -1] >= CONF_THRESH)[0]
    person_dets = person_dets[np.where(person_dets[:, -1] >= CONF_THRESH)]
    return person_dets
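
A hedged usage sketch of the detector above (the image path and the preloaded net are hypothetical; it assumes the usual Faster R-CNN helpers such as im_detect, nms and CLASSES are importable in the worker):

    import cv2

    im = cv2.imread('/tmp/frame.jpg')      # hypothetical test frame
    person_dets = detect_image(net, im)    # net: a caffe Net loaded once per worker
    for x1, y1, x2, y2, score in person_dets:
        # each row is [x1, y1, x2, y2, confidence] as float32
        print('person at (%d, %d, %d, %d), conf=%.2f' % (x1, y1, x2, y2, score))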
Example #5
    def test_worker_int_handler_only_stop_MainProcess(self):
        try:
            import _multiprocessing  # noqa
        except ImportError:
            raise SkipTest("only relevant for multiprocessing")
        process = current_process()
        name, process.name = process.name, "OtherProcess"
        with patch("celery.apps.worker.active_thread_count") as c:
            c.return_value = 3
            try:
                worker = self._Worker()
                handlers = self.psig(cd.install_worker_int_handler, worker)
                handlers["SIGINT"]("SIGINT", object())
                self.assertTrue(state.should_stop)
            finally:
                process.name = name
                state.should_stop = False

        with patch("celery.apps.worker.active_thread_count") as c:
            c.return_value = 1
            try:
                worker = self._Worker()
                handlers = self.psig(cd.install_worker_int_handler, worker)
                with self.assertRaises(SystemExit):
                    handlers["SIGINT"]("SIGINT", object())
            finally:
                process.name = name
                state.should_stop = False
Example #6
 def test_worker_term_hard_handler_only_stop_MainProcess(self):
     try:
         import _multiprocessing  # noqa
     except ImportError:
         raise SkipTest('only relevant for multiprocessing')
     process = current_process()
     name, process.name = process.name, 'OtherProcess'
     try:
         with patch('celery.apps.worker.active_thread_count') as c:
             c.return_value = 3
             worker = self._Worker()
             handlers = self.psig(
                     cd.install_worker_term_hard_handler, worker)
             try:
                 handlers['SIGQUIT']('SIGQUIT', object())
                 self.assertTrue(state.should_terminate)
             finally:
                 state.should_terminate = False
         with patch('celery.apps.worker.active_thread_count') as c:
             c.return_value = 1
             worker = self._Worker()
             handlers = self.psig(
                     cd.install_worker_term_hard_handler, worker)
             with self.assertRaises(SystemTerminate):
                 handlers['SIGQUIT']('SIGQUIT', object())
     finally:
         process.name = name
Example #7
 def test_worker_term_hard_handler_only_stop_MainProcess(self):
     try:
         import _multiprocessing  # noqa
     except ImportError:
         raise SkipTest('only relevant for multiprocessing')
     process = current_process()
     name, process.name = process.name, 'OtherProcess'
     try:
         with patch('celery.apps.worker.active_thread_count') as c:
             c.return_value = 3
             worker = self._Worker()
             handlers = self.psig(
                 cd.install_worker_term_hard_handler, worker)
             try:
                 handlers['SIGQUIT']('SIGQUIT', object())
                 self.assertTrue(state.should_terminate)
             finally:
                 state.should_terminate = False
         with patch('celery.apps.worker.active_thread_count') as c:
             c.return_value = 1
             worker = self._Worker()
             handlers = self.psig(
                 cd.install_worker_term_hard_handler, worker)
             with self.assertRaises(WorkerTerminate):
                 handlers['SIGQUIT']('SIGQUIT', object())
     finally:
         process.name = name
Example #8
    def format(self, record):
        levelname = record.levelname
        color = self.colors.get(levelname)

        if self.use_color and color:
            msg = record.msg
            try:
                # safe_str will repr the color object
                # and color will break on non-string objects
                # so need to reorder calls based on type.
                # Issue #427
                if isinstance(msg, string_t):
                    record.msg = str_t(color(safe_str(msg)))
                else:
                    record.msg = safe_str(color(msg))
            except Exception as exc:
                record.msg = '<Unrepresentable {0!r}: {1!r}>'.format(
                    type(msg), exc)
                record.exc_info = True

        if not PY3 and 'processName' not in record.__dict__:
            # Very ugly, but have to make sure processName is supported
            # by foreign logger instances.
            # (processName is always supported by Python 2.7)
            process_name = current_process and current_process()._name or ''
            record.__dict__['processName'] = process_name
        return safe_str(logging.Formatter.format(self, record))
Example #9
def module_load_init(**__):
    global analyzer
    worker_index = current_process().index

    print("====================")
    print(" Worker Id: {0}".format(worker_index))
    print("====================")
Example #10
    def test_worker_int_handler_only_stop_MainProcess(self):
        try:
            import _multiprocessing  # noqa
        except ImportError:
            raise SkipTest('only relevant for multiprocessing')
        process = current_process()
        name, process.name = process.name, 'OtherProcess'
        with patch('celery.apps.worker.active_thread_count') as c:
            c.return_value = 3
            try:
                worker = self._Worker()
                handlers = self.psig(cd.install_worker_int_handler, worker)
                handlers['SIGINT']('SIGINT', object())
                self.assertTrue(state.should_stop)
            finally:
                process.name = name
                state.should_stop = None

        with patch('celery.apps.worker.active_thread_count') as c:
            c.return_value = 1
            try:
                worker = self._Worker()
                handlers = self.psig(cd.install_worker_int_handler, worker)
                with self.assertRaises(WorkerShutdown):
                    handlers['SIGINT']('SIGINT', object())
            finally:
                process.name = name
                state.should_stop = None
Example #11
def get_redis_for_http_cache():
    global REDIS_CONNECTION_HTTP_CACHE
    if REDIS_CONNECTION_HTTP_CACHE is None:
        REDIS_CONNECTION_HTTP_CACHE = redis.StrictRedis.from_url(config.HTTP_CACHE_URL)
        p = current_process()
        REDIS_CONNECTION_HTTP_CACHE.client_setname("http-cache:%s" % p.name)
    return REDIS_CONNECTION_HTTP_CACHE
Example #12
    def format(self, record):
        levelname = record.levelname
        color = self.colors.get(levelname)

        if self.use_color and color:
            msg = record.msg
            try:
                # safe_str will repr the color object
                # and color will break on non-string objects
                # so need to reorder calls based on type.
                # Issue #427
                if isinstance(msg, string_t):
                    record.msg = str_t(color(safe_str(msg)))
                else:
                    record.msg = safe_str(color(msg))
            except Exception as exc:
                record.msg = '<Unrepresentable {0!r}: {1!r}>'.format(
                    type(msg), exc)
                record.exc_info = True

        if not PY3 and 'processName' not in record.__dict__:
            # Very ugly, but have to make sure processName is supported
            # by foreign logger instances.
            # (processName is always supported by Python 2.7)
            process_name = current_process and current_process()._name or ''
            record.__dict__['processName'] = process_name
        return safe_str(logging.Formatter.format(self, record))
Example #13
    def test_worker_int_handler_only_stop_MainProcess(self):
        try:
            import _multiprocessing  # noqa
        except ImportError:
            raise SkipTest("only relevant for multiprocessing")
        process = current_process()
        name, process.name = process.name, "OtherProcess"
        with patch("celery.apps.worker.active_thread_count") as c:
            c.return_value = 3
            try:
                worker = self._Worker()
                handlers = self.psig(cd.install_worker_int_handler, worker)
                handlers["SIGINT"]("SIGINT", object())
                self.assertTrue(state.should_stop)
            finally:
                process.name = name
                state.should_stop = False

        with patch("celery.apps.worker.active_thread_count") as c:
            c.return_value = 1
            try:
                worker = self._Worker()
                handlers = self.psig(cd.install_worker_int_handler, worker)
                with self.assertRaises(SystemExit):
                    handlers["SIGINT"]("SIGINT", object())
            finally:
                process.name = name
                state.should_stop = False
Example #14
def Grade(submissionIndex, submissionCount, problemIndex, filePath,
          problemPath, gradeMethod, caseCount, limitTime, limitMemory, usingLang,
          version, problemName):
    worker_num = current_process().index % MAX_CONTAINER_COUNT + 1
    
    saveDirectoryName = "%i_%i" % (submissionIndex, submissionCount)
    
    sharingDirName = "/mnt/shared/tempdir/%s" % (saveDirectoryName)
    
    argsList = "%s %s %s %s %i %i %i %s %s %s" % (filePath, problemPath,
                                                  saveDirectoryName, gradeMethod,
                                                  caseCount, limitTime,
                                                  limitMemory, usingLang,
                                                  version, problemName)
    
    containerCommand = "%s%i %s" % ('sudo docker exec grade_container', worker_num,
                                   'python /gradeprogram/rungrade.py ')
    
    print 'program start'
    
    try:
        call(containerCommand + argsList, shell=True)
        UpdateResult(submissionIndex, submissionCount, problemIndex, sharingDirName)
    except Exception as e:
        print e
        UpdateResult(submissionIndex, submissionCount, problemIndex)
        Restart(worker_num)
Example #15
def Grade(filePath, problemPath, stdNum, problemNum, gradeMethod, caseCount,
          limitTime, limitMemory, usingLang, version, courseNum, submitCount,
          problemName):
    worker_num = current_process().index % MAX_CONTAINER_COUNT + 1
    
    saveDirectoryName = "%s_%s_%s_%i" % (stdNum, problemNum, courseNum, submitCount)
    sharingDirName = "%s%i/%s" % (ROOT_CONTAINER_DIRECTORY, worker_num,
                            saveDirectoryName)
    argsList = "%s %s %s %s %i %i %i %s %s %s" % (filePath, problemPath,
                                                  saveDirectoryName, gradeMethod,
                                                  caseCount, limitTime,
                                                  limitMemory, usingLang,
                                                  version, problemName)
    containerCommand = "%s%i %s" % ('sudo docker exec grade_container', worker_num,
                                   'python /gradeprogram/rungrade.py ')

    
    call('sudo mkdir ' + sharingDirName, shell = True)
    print 'program start'
    
    message = Popen(containerCommand + argsList, shell=True, stdout=PIPE)
    
    for i in xrange(limitTime*100):
        if message.poll() == None: 
            time.sleep(0.01)
        else:
            messageLines = message.stdout.readlines()
            UpdateResult(messageLines[-1], stdNum, problemNum, courseNum, submitCount)
            break
    else:
        UpdateResult('SERVER_ERROR', stdNum, problemNum, courseNum, submitCount)
    
    call('sudo rm -rf ' + sharingDirName, shell = True)
Example #16
def Grade(submissionIndex, submissionCount, problemIndex, filePath,
          problemPath, gradeMethod, caseCount, limitTime, limitMemory,
          usingLang, version, problemName):
    worker_num = current_process().index % MAX_CONTAINER_COUNT + 1

    saveDirectoryName = "%i_%i" % (submissionIndex, submissionCount)

    sharingDirName = "/mnt/shared/tempdir/%s" % (saveDirectoryName)

    argsList = "%s %s %s %s %i %i %i %s %s %s" % (
        filePath, problemPath, saveDirectoryName, gradeMethod, caseCount,
        limitTime, limitMemory, usingLang, version, problemName)

    containerCommand = "%s%i %s" % ('sudo docker exec grade_container',
                                    worker_num,
                                    'python /gradeprogram/rungrade.py ')

    print 'program start'

    try:
        call(containerCommand + argsList, shell=True)
        UpdateResult(submissionIndex, submissionCount, problemIndex,
                     sharingDirName)
    except Exception as e:
        print e
        UpdateResult(submissionIndex, submissionCount, problemIndex)
        Restart(worker_num)
Example #17
File: tasks.py Project: almasudme/hpc
def parser(file_name, class_name, start):
    # function to load files and extract features
    # file_name = os.path.join('train/Train', str(row.ID) + '.wav')

    # handle exception to check if there isn't a file which is corrupted
    try:
        # here kaiser_fast is a technique used for faster extraction
        X, sample_rate = librosa.load(file_name, res_type='kaiser_fast')
        # we extract mfcc feature from data
        mfccs = np.mean(librosa.feature.mfcc(y=X, sr=sample_rate, n_mfcc=40).T,
                        axis=0)
    except Exception as e:
        print("Error encountered while parsing file: ", file_name)
        # logger.info("Error encountered while parsing file: ", file_name)
        return None, None

    feature = mfccs
    duration = time.time() - start
    line_array = np.append(feature, class_name + "\n")
    line = ",".join(line_array)

    with open(os.path.join('csv_data',
                           str(current_process().index) + '.csv'), 'a') as f1:
        f1.write(line)

    return
Example #18
def init_gpu():
    global base_model, image_shape
    if base_model is None:
        base_model = tf.keras.applications.InceptionV3(include_top=True,
                                                       weights='imagenet')
        image_shape = base_model.inputs[0].get_shape().as_list()
        print("Worker {} ready".format(current_process().index))
Example #19
File: tasks.py Project: hbatyrkhan/mycms
def script_test(instance, script_line, gen_source, gen_name, test,
                full_subtask):
    sandbox = Sandbox()
    sandbox.init(current_process().index)

    # CREATE FILES
    with open(path_join('.', 'problem', 'static', 'problem', 'testlib.h'),
              'r') as testlib_file:
        testlib = testlib_file.read()
    sandbox.create_file(gen_name + '.cpp', str(gen_source), is_public=0)
    sandbox.create_file('testlib.h', str(testlib), is_public=0)

    # COMPILE
    sandbox.run_cmd('g++ -o ' + path_join('box', gen_name) +
                    ' -std=c++11 -DONLINE_JUDGE ' + gen_name + '.cpp ' +
                    'testlib.h')

    # GET TEST
    out, err = sandbox.run_exec(exec=gen_name,
                                cmd=' '.join(script_line.split()[1:]),
                                dirs=[('/box', 'box', 'rw')],
                                meta_file=sandbox.get_box_dir('meta'),
                                time_limit=10,
                                memory_limit=128)

    instance.test_set.create(input=out.decode('utf-8'),
                             test_id=test['index'],
                             in_statement=test['useInStatements'],
                             subtask=full_subtask)
    sandbox.cleanup()
Example #20
File: worker.py Project: clvrobj/celery
 def _handle_request(signum, frame):
     if current_process()._name == "MainProcess":
         if callback:
             callback(worker)
         print("celeryd: %s shutdown (MainProcess)" % types[how])
         getattr(worker, how)(in_sighandler=True)
     raise exc()
Example #21
def get_jobs_from_zipfile(zipfileobject, guid):
    """Get a list of xml documents representing all the current jobs.

    Input:
        :guid: A guid used to access the jobsfs server.
    :return: [lxml.eTree, lxml.eTree,...]"""
    logger.debug("Getting current Jobs for guid: %s", guid)

    # Get current worker process id, to prevent race conditions.
    try:
        p = current_process()
        process_id = p.index
    except AttributeError:
        process_id = 0

    # Delete any existing data and use the guid to create a unique folder.
    directory = "/tmp/%s/%s" % (process_id, guid)
    prefix = os.path.commonprefix(['/tmp/%s' % process_id, os.path.abspath(directory)])
    assert prefix == '/tmp/%s' % process_id, "Directory should be located in /tmp/%s" % process_id

    if os.path.exists(directory):
        shutil.rmtree(directory)
    os.makedirs(directory)

    # Write zipfile to filesystem
    filename = os.path.join(directory, '%s.zip' % guid)
    with open(filename, 'wb') as f:
        for chunk in iter(lambda: zipfileobject.read(1024 * 16), ''):
            f.write(chunk)

    # Extact all files from zipfile.
    # Note: Using try/finally because zipfile doesn't support context managers
    #       until python 2.7.  Upon migration to python 2.7, can be replaced.
    zf = zipfile.ZipFile(filename)
    try:
        zf.extractall(directory)
    finally:
        zf.close()

    # Process the files.
    active_directory = os.path.join(directory, '%s' % guid)
    files = sorted(os.listdir(active_directory))
    logger.info("Found %s jobs for guid %s", len(files), guid)
    for f in files:
        path = os.path.join(active_directory, f)
        if os.path.isdir(path):
            logger.warn("Found folder '%s' inside active jobs for JSID: %s",
                        f, guid)
            continue
        with open(path) as _f:
            try:
                yield etree.fromstring(_f.read())
            except Exception as e:
                logger.error("Unable to parse XML document for job %s", path)
                logger.exception(e)
                raise

    # clean up after ourselves.
    shutil.rmtree(directory)
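
Because the function above is a generator, a caller streams the parsed documents, and the temporary directory is only removed after the last one has been consumed. A minimal usage sketch (the counting helper is hypothetical, not part of the quoted project):

    def count_jobs(zipfileobject, guid):
        total = 0
        for doc in get_jobs_from_zipfile(zipfileobject, guid):
            total += 1   # each item is an lxml element parsed from one job file
        return total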
Example #22
async def get_aredis_for_cache():
    global AREDIS_CONNECTION_CACHE
    if AREDIS_CONNECTION_CACHE is None:
        AREDIS_CONNECTION_CACHE = aredis.StrictRedis.from_url(
            config.STORAGE_URL, decode_responses=True)
        p = current_process()
        await AREDIS_CONNECTION_CACHE.client_setname("cache:%s" % p.name)
    return AREDIS_CONNECTION_CACHE
Example #23
File: tasks.py Project: calaniz/TASTE
 def check_for_failed_vm(self):
     r = self.__execute_cmd(['VBoxManage', 'list', 'vms'])
     x = re.findall('cobs_thread%s_[0-9a-zA-Z]*' % current_process().index,
                    r[0])
     if len(x) > 0:
         self.__vm_log(
             "warning",
             "A failed VM was found before starting test. Cleaning up...")
         self.remove_vm(x[0])
Example #24
 def get_celery_hostname():
     """
     Interact with the current process to determine the celery assigned hostname.
     :return: The assigned hostname, and additionally store it in Shared
     """
     p = current_process()
     hostname = p.initargs[1]
     Shared.hostname = hostname
     return hostname
Example #25
    def set_mp_process_title(progname, info=None, hostname=None):  # noqa
        """Set the ps name using the multiprocessing process name.

        Only works if :mod:`setproctitle` is installed.

        """
        if hostname:
            progname = "{0}: {1}".format(progname, hostname)
        return set_process_title("{0}:{1}".format(progname, current_process().name), info=info)
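
A hedged usage sketch for the helper above (the values are made up; it only has a visible effect when the setproctitle package is installed):

    # called from inside a pool process, e.g. a worker-init signal handler
    set_mp_process_title('celeryd', hostname='worker1.example.com')
    # `ps` would then show something like:
    #   celeryd: worker1.example.com:PoolWorker-1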
Example #26
    def set_mp_process_title(progname, info=None, hostname=None):  # noqa
        """Set the ps name using the multiprocessing process name.

        Only works if :mod:`setproctitle` is installed.

        """
        if hostname:
            progname = "%s@%s" % (progname, hostname.split(".")[0])
        return set_process_title("%s:%s" % (progname, current_process().name), info=info)
Example #27
def get_redis_for_cache():
    global REDIS_CONNECTION_CACHE
    if REDIS_CONNECTION_CACHE is None:
        REDIS_CONNECTION_CACHE = redis.StrictRedis.from_url(
            config.STORAGE_URL, decode_responses=True,
        )
        p = current_process()
        REDIS_CONNECTION_CACHE.client_setname("cache:%s" % p.name)
    return REDIS_CONNECTION_CACHE
Example #28
    def set_mp_process_title(progname, info=None, hostname=None):  # noqa
        """Set the ps name using the multiprocessing process name.

        Only works if :mod:`setproctitle` is installed.

        """
        if hostname:
            progname = '%s@%s' % (progname, hostname.split('.')[0])
        return set_process_title(
            '%s:%s' % (progname, current_process().name), info=info)
Example #29
File: platforms.py Project: xacce/celery
    def set_mp_process_title(progname, info=None, hostname=None):  # noqa
        """Set the ps name using the multiprocessing process name.

        Only works if :mod:`setproctitle` is installed.

        """
        if hostname:
            progname = '{0}: {1}'.format(progname, hostname)
        return set_process_title(
            '{0}:{1}'.format(progname, current_process().name), info=info)
Example #30
 def get_value(self, *args, **kwargs):
     # see celery.utils.log
     try:
         from billiard import current_process
         result = current_process()._name
         if self.skip_main_process and result == 'MainProcess':
             return
         return result
     except Exception:
         return
Example #31
 def _handle_request(signum, frame):
     process_name = current_process()._name
     if not process_name or process_name == "MainProcess":
         if callback:
             callback(worker)
         print("celeryd: %s shutdown (%s)" % (
             types[how],
             process_name,
         ))
         getattr(worker, how)(in_sighandler=True)
     raise exc()
Example #32
def detect_image(net, im):
    """Detect object classes in an image using pre-computed object proposals."""

    # Detect all object classes and regress object bounds
    timer = Timer()
    timer.tic()
    scores, boxes = im_detect(net, im)
    timer.toc()
    print(str(current_process().index)+' Detection took {:.3f}s for '
           '{:d} object proposals').format(timer.total_time, boxes.shape[0])

    # Visualize detections for each class
    # CONF_THRESH = 0.0

    CONF_THRESH = 0.4
    NMS_THRESH = 0.1
    active_thds = {}
    label_alias = {}
    if hasattr(model_config, 'active_thds'):
        active_thds = model_config.active_thds
    if hasattr(model_config, 'default_thd'):
        CONF_THRESH = model_config.default_thd
    if hasattr(model_config, 'label_alias'):
        label_alias = model_config.label_alias

    targets = []
    for ac in model_config.active_classes:
        target_idx = CLASSES.index(ac)
        if len(boxes[1]) == 8:
            target_boxes = boxes[:, 4:8]
        elif len(boxes[1]) == len(CLASSES)*4:
            target_boxes = boxes[:, 4*target_idx:4*(target_idx + 1)]
        else:
            error
        target_scores = scores[:, target_idx]
        target_dets = np.hstack((target_boxes,
                      target_scores[:, np.newaxis])).astype(np.float32)
        target_keep = nms(target_dets, NMS_THRESH)
        target_dets = target_dets[target_keep, :]
        conf_thd = CONF_THRESH
        if ac in active_thds:
            conf_thd = active_thds[ac]
        target_dets = target_dets[np.where(target_dets[:, -1] >= conf_thd)]
        for r in target_dets:
            x = (int)(r[0].item())
            y = (int)(r[1].item())
            w = (int)(r[2].item())-x
            h = (int)(r[3].item())-y
            ac_alias = ac
            if ac in label_alias:
                ac_alias = label_alias[ac]
            targets.append({'x':x,'y':y,'w':w,'h':h, 'label': ac_alias, 'conf': int(100.0*r[4])})
    return targets
Example #33
    def set_mp_process_title(progname, info=None, hostname=None,  # noqa
            rate_limit=False):
        """Set the ps name using the multiprocessing process name.

        Only works if :mod:`setproctitle` is installed.

        """
        if not rate_limit or _setps_bucket.can_consume(1):
            if hostname:
                progname = "%s@%s" % (progname, hostname.split(".")[0])
            return set_process_title(
                "%s:%s" % (progname, current_process().name), info=info)
Example #34
def pdfgen(odtout,user,projectname):
      print 'inside pdf gen'+str(odtout[1])
      while True:
           p = Popen('unoconv --format pdf -p 222'+str(current_process().index)+' --output '+str(odtout[0])+'pdfs/'+str(odtout[2])+'.pdf '+' '+str(odtout[1]),shell=True,stdout=PIPE,stderr=PIPE)
           err = p.communicate()
           print 'asdas'
           if str(err) == "('', '')" :
               print "success"
               break
           else :
               print str(err)+'error'
               break
Example #35
 def _handle_request(*args):
     with in_sighandler():
         from celery.worker import state
         if current_process()._name == 'MainProcess':
             if callback:
                 callback(worker)
             safe_say('worker: {0} shutdown (MainProcess)'.format(how))
         if active_thread_count() > 1:
             setattr(state, {'Warm': 'should_stop',
                             'Cold': 'should_terminate'}[how], True)
         else:
             raise exc()
Example #36
File: worker.py Project: nott/celery
    def _handle_request(signum, frame):
        with in_sighandler():
            from celery.worker import state

            if current_process()._name == "MainProcess":
                if callback:
                    callback(worker)
                safe_say("worker: {0} shutdown (MainProcess)".format(how))
            if active_thread_count() > 1:
                setattr(state, {"Warm": "should_stop", "Cold": "should_terminate"}[how], True)
            else:
                raise exc()
Example #37
File: worker.py Project: jarieb/celery
 def _handle_request(*args):
     with in_sighandler():
         from celery.worker import state
         if current_process()._name == 'MainProcess':
             if callback:
                 callback(worker)
             safe_say('worker: {0} shutdown (MainProcess)'.format(how))
         if active_thread_count() > 1:
             setattr(state, {'Warm': 'should_stop',
                             'Cold': 'should_terminate'}[how], True)
         else:
             raise exc()
Example #38
File: platforms.py Project: kertz/celery
    def set_mp_process_title(progname, info=None, hostname=None, rate_limit=False):  # noqa
        """Set the ps name using the multiprocessing process name.

        Only works if :mod:`setproctitle` is installed.

        """
        if not rate_limit or _setps_bucket.can_consume(1):
            from billiard import current_process

            if hostname:
                progname = "{0}@{1}".format(progname, hostname.split(".")[0])
            return set_process_title("{0}:{1}".format(progname, current_process().name), info=info)
Example #39
    def init(self, taskstate_id=None, apikey_id=None):
        """
        Tasks should call this in their run() method to initialise stuff.
        Returns False if anything bad happens.
        """

        # Set our process ID if it hasn't been set yet
        global this_process
        if this_process is None:
            this_process = int(current_process()._name.split('-')[1])

            # Sleep for staggered worker startup
            if settings.STAGGER_APITASK_STARTUP:
                sleep_for = (this_process - 1) * 2
                self._logger.warning('Worker #%d staggered startup: sleeping for %d seconds', this_process, sleep_for)
                time.sleep(sleep_for)

        # Clear the current query information so we don't bloat
        if settings.DEBUG:
            for db in settings.DATABASES.keys():
                connections[db].queries = []

        self._started = time.time()
        self._api_log = []
        self._cache_delta = None

        self._taskstate = None
        self.apikey = None
        self.root = None

        # Fetch TaskState
        if taskstate_id is not None:
            try:
                self._taskstate = TaskState.objects.get(pk=taskstate_id)
            except TaskState.DoesNotExist:
                self.log_error('Task not starting: TaskState %d has gone missing', taskstate_id)
                return False

        # Fetch APIKey
        if apikey_id is not None:
            try:
                self.apikey = APIKey.objects.get(pk=apikey_id)
            except APIKey.DoesNotExist:
                return False
            else:
                # No longer a valid key?
                if not self.apikey.valid:
                    return False

                # Needs APIKeyInfo?
                if self.apikey.needs_apikeyinfo and getattr(self, 'name') != 'thing.api_key_info':
                    return False
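
The docstring above says tasks should call init() from their run() method; a hedged sketch of such a caller (the task class is hypothetical and assumes init() is provided by a shared base or mixin):

    from celery import Task

    class APIUpdateTask(Task):
        def run(self, taskstate_id=None, apikey_id=None):
            if self.init(taskstate_id=taskstate_id, apikey_id=apikey_id) is False:
                return   # init() returns False when anything bad happens
            # ... continue with the API work using self.apikey and self._taskstate ...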
Example #40
def execute_event_auto(self, event):
    eventStartTime = int(round(time.time() * 1000))
    chrome_options = Options()
    chrome_options.add_argument("--headless")
    chrome_options.add_argument("--user-data-dir=/tmp/chromium-user-dir")
    chrome_options.add_argument("--user-agent=TweetoasterAutomaticMode")
    if (chrome_auto_port != None):
        chrome_options.add_experimental_option(
            "debuggerAddress",
            "127.0.0.1:" + str(chrome_auto_port[current_process().index]))

    # add a UA so the requests trigger Google Analytics
    # chrome_options.add_argument("--proxy-server=127.0.0.1:12333")
    driver_frontend = webdriver.Chrome(options=chrome_options)
    try:
        processor = TweetProcess(driver_frontend)
        param = {
            'tweet': event['tweet'],
            'template': event['template'],
            'out': 1
        }
        if event['translate'] != '':
            param['translate'] = event['translate']
        if 'noLikes' in event and event['noLikes']:
            param['noLikes'] = event['noLikes']
        driver_frontend.get(self_url + "?" +
                            parse.urlencode(param).replace("+", "%20"))
        # time.sleep(20)
        try:
            WebDriverWait(driver_frontend, 60, 0.5).until(
                EC.presence_of_element_located((By.CSS_SELECTOR, 'canvas')))
        except:
            driver_frontend.save_screenshot(
                f'Matsuri_translation/frontend/cache/LastErrorAuto.png')
        finally:
            filename = processor.save_screenshots_auto(eventStartTime)
            try:
                event["filename"] = filename
                insert_text_chunk(
                    f'Matsuri_translation/frontend/cache/{filename}.png',
                    f'Matsuri_translation/frontend/cache/{filename}.png',
                    json.dumps(event).encode("utf-8"))
            except:
                print("error in metadata")
    except:
        driver_frontend.save_screenshot(
            f'Matsuri_translation/frontend/cache/LastErrorAuto.png')
    finally:
        # time.sleep(5)

        driver_frontend.quit()
    return filename
Example #41
def task_prerun(task_id=None, task=None, args=None, kwargs=None, **kw):
    meta = args[0]
    if meta["profile"]:
        config = meta["config"]
        profile_log = config.get("profile")
        logger = sync_logging.get_sync_logger(profile_log)
        logger.info("task_prerun",
                    event_id=task_id,
                    event_name=task.name,
                    path=meta.get("path"),
                    target=meta.get("target"),
                    hostname=task.request.hostname,
                    index=current_process().index)
Example #42
 def get_value(self, *args, **kwargs):  # pylint: disable=arguments-differ
     # see celery.utils.log
     try:
         try:
             from billiard import current_process
         except Exception:
             from billiard.process import current_process
         result = current_process()._name
         if self.skip_main_process and result == 'MainProcess':
             return None
         return result
     except Exception:
         return None
Example #43
def execute_event(self, event):
    logger.info(execute_event.name)
    logger.info(self.request)
    logger.info(current_process().index)
    chrome_options = Options()
    chrome_options.add_argument("--headless")
    chrome_options.add_argument('--disable-dev-shm-usage')
    if (chrome_twitter_port != None):
        logger.info('chrome_twitter_port ' + str(chrome_twitter_port))
        chrome_options.add_experimental_option(
            "debuggerAddress",
            "127.0.0.1:" + str(chrome_twitter_port[current_process().index]))
    chrome_options.add_argument("--no-sandbox")
    try:
        driver = webdriver.Chrome(options=chrome_options)
    except Exception as e:
        import traceback
        logger.error(e)
        traceback.print_exc()
        raise e
    filename = 'success|[]'

    logger.info("tweet.execute_event.chrome_started")
    try:
        processor = TweetProcess(driver)
        processor.open_page(event['url'])
        logger.info("tweet.execute_event.page_opened")
        processor.modify_tweet()
        logger.info("tweet.execute_event.js_executed")
        # processor.scroll_page_to_tweet(event['fast'])
        filename = processor.save_screenshots(event['fast'])
        logger.info("tweet.execute_event.png_get")
    except:
        return 'LastError|[]'
    finally:
        driver.quit()

    return filename
Example #44
File: worker.py Project: tigrang/celery
 def _handle_request(signum, frame):
     set_in_sighandler(True)
     try:
         from celery.worker import state
         if current_process()._name == "MainProcess":
             if callback:
                 callback(worker)
             safe_say("celeryd: %s shutdown (MainProcess)" % how)
         if how == "Warm":
             state.should_stop = True
         elif how == "Cold":
             state.should_terminate = True
     finally:
         set_in_sighandler(False)
Example #45
def _app_or_default_trace(app=None):  # pragma: no cover
    from traceback import print_stack
    from billiard import current_process
    if app is None:
        if getattr(state._tls, "current_app", None):
            print("-- RETURNING TO CURRENT APP --")  # noqa+
            print_stack()
            return state._tls.current_app
        if current_process()._name == "MainProcess":
            raise Exception("DEFAULT APP")
        print("-- RETURNING TO DEFAULT APP --")      # noqa+
        print_stack()
        return state.default_app
    return app
Example #46
def module_load_init(**__):
    global analyzer
    worker_index = current_process().index

    print("====================")
    print(" Worker Id: {0}".format(worker_index))
    print("====================")

    # TODO:
    #   - Add your model
    #   - You can use worker_index if you need to get and set gpu_id
    #       - ex) gpu_id = worker_index % TOTAL_GPU_NUMBER
    from Modules.vis_detection.main import vis_det
    analyzer = vis_det()
Example #47
def initializer():
    try:
        pid = current_process().name.split("-", 1)[1].split(":", 1)[1]
        process_id = (int(pid)-1) % NUM_REDDIT_INSTANCES
    except:
        pid = current_process().name.split("-", 1)[1]
        process_id = (int(pid)-1) % NUM_REDDIT_INSTANCES
    worker_id = process_id
    global reddit
    tries = 0
    while True:
        try:
            reddit = reddit_objs[worker_id]
            break
        except:
            try:
                reddit = init_reddit(worker_id)
                break
            except:
                worker_id += 1 % NUM_REDDIT_INSTANCES
                tries += 1
                if tries > 2 * NUM_REDDIT_INSTANCES:
                    raise Exception('reddit instance error')
Example #48
def _app_or_default_trace(app=None):  # pragma: no cover
    from traceback import print_stack
    from billiard import current_process
    if app is None:
        if getattr(state._tls, "current_app", None):
            print("-- RETURNING TO CURRENT APP --")  # noqa+
            print_stack()
            return state._tls.current_app
        if current_process()._name == "MainProcess":
            raise Exception("DEFAULT APP")
        print("-- RETURNING TO DEFAULT APP --")      # noqa+
        print_stack()
        return state.default_app
    return app
Example #49
def module_load_init(**__):
    #global analyzer
    global db
    worker_index = current_process().index

    print("====================")
    print(" Worker Id: {0}".format(worker_index))
    print("====================")

    # TODO:
    #    - load DB
    load = datetime.datetime.now()
    db = load_features()
    print("load-time: {}".format(datetime.datetime.now() - load))
Example #50
def _app_or_default_trace(app=None):  # pragma: no cover
    from traceback import print_stack
    from billiard import current_process
    if app is None:
        if getattr(_state._tls, 'current_app', None):
            print('-- RETURNING TO CURRENT APP --')  # noqa+
            print_stack()
            return _state._tls.current_app
        if current_process()._name == 'MainProcess':
            raise Exception('DEFAULT APP')
        print('-- RETURNING TO DEFAULT APP --')      # noqa+
        print_stack()
        return _state.default_app
    return app
Example #51
def _app_or_default_trace(app=None):  # pragma: no cover
    from traceback import print_stack
    from billiard import current_process
    if app is None:
        if getattr(_state._tls, 'current_app', None):
            print('-- RETURNING TO CURRENT APP --')  # noqa+
            print_stack()
            return _state._tls.current_app
        if current_process()._name == 'MainProcess':
            raise Exception('DEFAULT APP')
        print('-- RETURNING TO DEFAULT APP --')  # noqa+
        print_stack()
        return _state.default_app
    return app
Example #52
File: worker.py Project: rwillmer/celery
 def _handle_request(signum, frame):
     set_in_sighandler(True)
     try:
         from celery.worker import state
         if current_process()._name == 'MainProcess':
             if callback:
                 callback(worker)
             safe_say('celeryd: %s shutdown (MainProcess)' % how)
         if active_thread_count() > 1:
             setattr(state, {'Warm': 'should_stop',
                             'Cold': 'should_terminate'}[how], True)
         else:
             raise exc()
     finally:
         set_in_sighandler(False)
Example #53
def submit_code(self, code, email, klee_args, endpoint):
    # name will hold the name of the current worker, which is in the format
    # celery@name, so we split at @ and take the second part
    name = current_process().initargs[1].split('@')[1]
    with WorkerRunner(self.request.id, endpoint, worker_name=name) as runner:
        try:
            runner.run(code, email, klee_args)
        except SoftTimeLimitExceeded:
            result = {
                'klee_run': {
                    'output': 'Job exceeded time limit of '
                              '{} seconds'.format(worker_config.timeout)
                }
            }
            runner.send_notification('job_failed', result)
Example #54
 def test_worker_term_handler_only_stop_MainProcess(self):
     try:
         import _multiprocessing  # noqa
     except ImportError:
         raise SkipTest("only relevant for multiprocessing")
     process = current_process()
     name, process.name = process.name, "OtherProcess"
     try:
         worker = self._Worker()
         handlers = self.psig(cd.install_worker_term_handler, worker)
         handlers["SIGTERM"]("SIGTERM", object())
         self.assertTrue(state.should_stop)
     finally:
         process.name = name
         state.should_stop = False
Example #55
 def test_worker_term_handler_only_stop_MainProcess(self):
     try:
         import _multiprocessing  # noqa
     except ImportError:
         raise SkipTest("only relevant for multiprocessing")
     process = current_process()
     name, process.name = process.name, "OtherProcess"
     try:
         worker = self._Worker()
         handlers = self.psig(cd.install_worker_term_handler, worker)
         with self.assertRaises(SystemExit):
             handlers["SIGTERM"]("SIGTERM", object())
         self.assertFalse(worker.stopped)
     finally:
         process.name = name
Example #56
    def _handle_request(*args):
        set_in_sighandler(True)
        try:
            from celery.worker import state

            if current_process()._name == "MainProcess":
                if callback:
                    callback(worker)
                    safe_say("celeryd: %s shutdown (MainProcess)" % how)
            if active_thread_count() > 1:
                setattr(state, {"Warm": "should_stop", "Cold": "should_terminate"}[how], True)
            else:
                raise exc()
        finally:
            set_in_sighandler(False)
Example #57
def generate(templatename,jsonpath,projectname,user,arr=[]):
      print 'generate task was called'
      print str(current_process().index) +"aadad"
      uri="http://localhost:4000/fetchRange/"
      data = {"start":arr[0], "end":arr[1]}
      print data
      headers = {'Content-Type': 'application/json'}
      payload = json.dumps(data)
      try:
          r = requests.post(uri, data=payload,headers=headers)
          jsonrows= json.loads(r.text)
      except:
          print 'error fetching json'
      for i in range(arr[0],arr[1]):
          print i
          render.delay(templatename,json.loads(jsonrows[str(i)]),projectname,user)
Example #58
File: log.py Project: n1ywb/celery
    def format(self, record):
        levelname = record.levelname
        color = self.colors.get(levelname)

        if self.use_color and color:
            try:
                record.msg = safe_str(str_t(color(record.msg)))
            except Exception as exc:
                record.msg = "<Unrepresentable {0!r}: {1!r}>".format(type(record.msg), exc)
                record.exc_info = True

        if not is_py3k and "processName" not in record.__dict__:
            # Very ugly, but have to make sure processName is supported
            # by foreign logger instances.
            # (processName is always supported by Python 2.7)
            process_name = current_process and current_process()._name or ""
            record.__dict__["processName"] = process_name
        return safe_str(logging.Formatter.format(self, record))
Example #59
File: rdb.py Project: Yight/InfoSecurity
 def get_avail_port(self, host, port, search_limit=100, skew=+0):
     try:
         _, skew = current_process().name.split("-")
         skew = int(skew)
     except ValueError:
         pass
     this_port = None
     for i in xrange(search_limit):
         _sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
         this_port = port + skew + i
         try:
             _sock.bind((host, this_port))
         except socket.error, exc:
             if exc.errno in [errno.EADDRINUSE, errno.EINVAL]:
                 continue
             raise
         else:
             return _sock, this_port