Ejemplo n.º 1
0
	def kill(self, sig=signal.SIGTERM):
		"""Send *sig* (default: SIGTERM) to the worker process unless it already finished.

		A process that has already disappeared (OSError with errno.ESRCH) is
		treated as success; any other OSError is re-raised. After swallowing
		ESRCH the stored exception state is cleared, matching the handling in
		the other kill/except blocks of this codebase.
		"""
		if not self._event_finished.is_set():
			try:
				os.kill(self._pid, sig)
			except OSError:
				if get_current_exception().errno != errno.ESRCH:  # errno.ESRCH: no such process (already dead)
					raise
				clear_current_exception()
Ejemplo n.º 2
0
	def process(self, dn):
		"""Collect per-file output metadata from the job info in directory *dn*.

		Merges the legacy 'FILE<idx>' entries and the newer
		'OUTPUT_FILE_<idx>_<property>' entries of the raw job data into a
		list of property dictionaries; returns None when the job information
		could not be processed.
		"""
		job_info_dict = None
		try:
			job_info_dict = JobInfoProcessor.process(self, dn)
		except JobResultError:
			logging.getLogger('jobs.results').warning('Unable to process job information',
				exc_info=get_current_exception())
			clear_current_exception()
		if not job_info_dict:
			return None
		job_data_dict = job_info_dict[JobResult.RAW]
		result = {}

		def _iter_prefixed(prefix):
			# yield (key, value) pairs of the raw job data whose key starts with prefix
			for key_value in job_data_dict.items():
				if key_value[0].startswith(prefix):
					yield key_value

		# legacy format: FILE<idx> = "<hash>  <local name>  <dest name>  <path>"
		legacy_columns = [FileInfo.Hash, FileInfo.NameLocal, FileInfo.NameDest, FileInfo.Path]
		for (raw_key, raw_value) in _iter_prefixed('FILE'):
			idx = int(raw_key.replace('FILE', '') or '0')  # bare 'FILE' key denotes index 0
			result[idx] = dict(izip(legacy_columns, raw_value.strip('"').split('  ')))
		# current format: OUTPUT_FILE_<idx>_<property> = <value>
		for (raw_key, raw_value) in _iter_prefixed('OUTPUT_FILE'):
			(idx_str, prop_str) = raw_key.replace('OUTPUT_FILE_', '').split('_')
			if isinstance(raw_value, str):
				raw_value = raw_value.strip('"')
			prop_str = prop_str.lower().replace('dest', 'namedest').replace('local', 'namelocal')
			result.setdefault(int(idx_str), {})[FileInfo.str2enum(prop_str)] = raw_value
		return list(result.values())
Ejemplo n.º 3
0
 def process(self, dn):
     """Collect per-file output information from the job info in *dn*.

     Handles both the legacy 'FILE<idx>' entries and the newer
     'OUTPUT_FILE_<idx>_<property>' entries of the raw job data and
     returns a list of property dictionaries (None if the job info
     could not be processed).
     """
     jobInfo = None
     try:
         jobInfo = JobInfoProcessor.process(self, dn)
     except JobResultError:
         logging.getLogger('jobs.results').warning(
             'Unable to process job information',
             exc_info=get_current_exception())
         clear_current_exception()  # consume the handled exception state
     if jobInfo:
         jobData = jobInfo[JobResult.RAW]
         result = {}
         # parse old job info data format for files
         oldFileFormat = [
             FileInfoProcessor.Hash, FileInfoProcessor.NameLocal,
             FileInfoProcessor.NameDest, FileInfoProcessor.Path
         ]
         for (fileKey, fileData) in ifilter(
                 lambda key_value: key_value[0].startswith('FILE'),
                 jobData.items()):
             # an empty suffix (plain 'FILE' key) denotes index 0
             fileIdx = fileKey.replace('FILE', '') or '0'
             result[int(fileIdx)] = dict(
                 izip(oldFileFormat,
                      fileData.strip('"').split('  ')))
         # parse new job info data format
         for (fileKey, fileData) in ifilter(
                 lambda key_value: key_value[0].startswith('OUTPUT_FILE'),
                 jobData.items()):
             (fileIdx, fileProperty) = fileKey.replace('OUTPUT_FILE_',
                                                       '').split('_')
             if isinstance(fileData, str):
                 fileData = fileData.strip('"')
             result.setdefault(
                 int(fileIdx),
                 {})[FileInfoProcessor.str2enum(fileProperty)] = fileData
         return list(result.values())
Ejemplo n.º 4
0
	def kill(self):
		"""Terminate the wrapped subprocess with SIGTERM.

		A process that has already exited (OSError with errno.ESRCH) is
		silently ignored; any other OSError propagates to the caller.
		"""
		try:
			os.kill(self.proc.pid, signal.SIGTERM)
		except OSError:
			exc = get_current_exception()
			# errno.ESRCH: no such process (already dead)
			if exc.errno != errno.ESRCH:
				raise
			clear_current_exception()
Ejemplo n.º 5
0
 def kill(self, sig=signal.SIGTERM):
     """Deliver *sig* (SIGTERM by default) to the worker process.

     Does nothing once the finished event is set. A worker that has
     already exited (errno.ESRCH) is ignored; other OSErrors propagate.
     """
     if self._event_finished.is_set():
         return
     try:
         os.kill(self._pid, sig)
     except OSError:
         # errno.ESRCH: no such process (already dead)
         if get_current_exception().errno == errno.ESRCH:
             return
         raise
Ejemplo n.º 6
0
def _get_job_selector_and_task(config, job_selector_str, require_task):
	"""Build a (task, job_selector) pair for *job_selector_str*.

	When *require_task* is False, first attempt to construct the selector
	without instantiating the task; only if that fails with a nested
	TaskNeededException (or when a task is explicitly required) is the
	full workflow created to obtain the task.
	"""
	if not require_task:
		try:  # try to build job selector without task
			return (None, JobSelector.create(job_selector_str))
		except Exception:
			exc = get_current_exception()
			if not except_nested(TaskNeededException, exc):
				raise  # unrelated failure - propagate to caller
			clear_current_exception()  # selector needs a task - fall through
	task = gc_create_workflow(config).task
	return (task, JobSelector.create(job_selector_str, task=task))
Ejemplo n.º 7
0
	def _run_thread(self, token, fun, args, kwargs):
		"""Thread entry point: run *fun*, record any exception, signal completion.

		Installs the debug trace function (if one is configured), invokes
		fun(*args, **kwargs), collects a raised exception under the pool
		lock, then unregisters the thread token and wakes waiters via the
		notify event.
		"""
		tracer = get_trace_fun()
		if tracer:
			sys.settrace(tracer)
		try:
			fun(*args, **kwargs)
		except Exception:
			with_lock(self._lock, self._collect_exc, token, get_current_exception())
		with_lock(self._lock, self._unregister_token, token)
		with_lock(self._lock, self._notify.set)
Ejemplo n.º 8
0
    def process(self, dn):
        """Assemble output-file metadata for the job in directory *dn*.

        Reads the raw job data produced by JobInfoProcessor and folds both
        the old ('FILE<idx>') and the new ('OUTPUT_FILE_<idx>_<prop>') key
        formats into a list of per-file property dictionaries. Returns None
        when the job information cannot be read.
        """
        try:
            job_info_dict = JobInfoProcessor.process(self, dn)
        except JobResultError:
            logging.getLogger('jobs.results').warning(
                'Unable to process job information',
                exc_info=get_current_exception())
            clear_current_exception()
            job_info_dict = None
        if not job_info_dict:
            return None
        job_data_dict = job_info_dict[JobResult.RAW]
        result = {}
        # legacy entries: FILE<idx> = "<hash>  <local>  <dest>  <path>"
        old_fmt_header = [FileInfo.Hash, FileInfo.NameLocal,
                          FileInfo.NameDest, FileInfo.Path]
        for (file_key, file_data) in job_data_dict.items():
            if not file_key.startswith('FILE'):
                continue
            file_idx = file_key.replace('FILE', '') or '0'  # bare 'FILE' -> index 0
            fields = file_data.strip('"').split('  ')
            result[int(file_idx)] = dict(izip(old_fmt_header, fields))
        # current entries: OUTPUT_FILE_<idx>_<prop> = <value>
        for (file_key, file_data) in job_data_dict.items():
            if not file_key.startswith('OUTPUT_FILE'):
                continue
            (file_idx, file_prop) = file_key.replace('OUTPUT_FILE_', '').split('_')
            if isinstance(file_data, str):
                file_data = file_data.strip('"')
            file_prop = file_prop.lower().replace('dest', 'namedest').replace(
                'local', 'namelocal')
            result.setdefault(int(file_idx), {})[FileInfo.str2enum(file_prop)] = file_data
        return list(result.values())
Ejemplo n.º 9
0
	def process(self, dn):
		"""Gather output file properties from the job info in directory *dn*.

		Supports the legacy 'FILE<idx>' entries and the newer
		'OUTPUT_FILE_<idx>_<property>' entries of the raw job data;
		returns a list of property dictionaries or None when no job
		information is available.
		"""
		jobInfo = None
		try:
			jobInfo = JobInfoProcessor.process(self, dn)
		except JobResultError:
			logging.getLogger('jobs.results').warning('Unable to process job information', exc_info = get_current_exception())
			clear_current_exception()  # consume the handled exception state
		if jobInfo:
			jobData = jobInfo[JobResult.RAW]
			result = {}
			# parse old job info data format for files
			oldFileFormat = [FileInfoProcessor.Hash, FileInfoProcessor.NameLocal, FileInfoProcessor.NameDest, FileInfoProcessor.Path]
			for (fileKey, fileData) in ifilter(lambda key_value: key_value[0].startswith('FILE'), jobData.items()):
				fileIdx = fileKey.replace('FILE', '') or '0'  # plain 'FILE' key denotes index 0
				result[int(fileIdx)] = dict(izip(oldFileFormat, fileData.strip('"').split('  ')))
			# parse new job info data format
			for (fileKey, fileData) in ifilter(lambda key_value: key_value[0].startswith('OUTPUT_FILE'), jobData.items()):
				(fileIdx, fileProperty) = fileKey.replace('OUTPUT_FILE_', '').split('_')
				if isinstance(fileData, str):
					fileData = fileData.strip('"')
				result.setdefault(int(fileIdx), {})[FileInfoProcessor.str2enum(fileProperty)] = fileData
			return list(result.values())
Ejemplo n.º 10
0
	def _run_thread(self, token, fun, args, kwargs):
		try:
			fun(*args, **kwargs)
		except Exception:
			self._lock.acquire()
			try:
				self._ex_collector.collect(logging.ERROR, 'Exception in thread %r', self._token_desc[token], exc_info = get_current_exception())
			finally:
				self._lock.release()
		self._lock.acquire()
		try:
			self._token_time.pop(token, None)
			self._token_desc.pop(token, None)
		finally:
			self._lock.release()
		self._notify.set()