コード例 #1
0
    def clean(self):
        """Delete compiled Python artefacts and the output directories of the selected tests.

        For each test descriptor matched by the command-line arguments this removes
        (when ``self.all`` is set) the compiled-module artefacts - the ``__pycache__``
        directory on Python 3, or the sibling ``.pyc`` file on Python 2 - and then
        purges the test's output subdirectory if it exists.
        """
        descriptors = createDescriptors(self.arguments, None, [], [], None,
                                        self.workingDir)
        for descriptor in descriptors:
            if self.all:
                if sys.version_info >= (3, ):
                    # Python 3 keeps compiled modules in a __pycache__ directory
                    cache = os.path.join(os.path.dirname(descriptor.module),
                                         "__pycache__")
                    if os.path.exists(cache):
                        log.info("Deleting pycache: " + cache)
                        self.purgeDirectory(cache, True)
                else:
                    # Python 2 writes a .pyc file next to the module
                    path = descriptor.module + ".pyc"
                    try:
                        # bug fix: use lstat (not stat) so symlinks can be detected;
                        # os.stat follows the link, so S_ISLNK could never be true
                        mode = os.lstat(path)[stat.ST_MODE]
                        if stat.S_ISLNK(mode):
                            os.unlink(path)
                        if stat.S_ISREG(mode):
                            os.remove(path)
                        log.info("Deleting compiled module: " + path)
                    except Exception:
                        # best-effort cleanup: a missing/unreadable .pyc is not fatal
                        log.debug("Error deleting compiled module: " + path)

            pathToDelete = os.path.join(descriptor.output, self.outsubdir)
            if os.path.exists(pathToDelete):
                log.info("Deleting output directory: " + pathToDelete)
                self.purgeDirectory(pathToDelete, True)
            else:
                log.debug("Output directory does not exist: " + pathToDelete)
コード例 #2
0
	def assertLineCount(self, file, filedir=None, expr='', condition=">=1", ignores=None, encoding=None, **xargs):
		"""Perform a validation assert on the number of lines in a text file matching a specific regular expression.
		
		This method will add a C{PASSED} outcome to the outcome list if the number of lines in the 
		input file matching the specified regular expression evaluate to true when evaluated against 
		the supplied condition.
		
		@param file: The basename of the file used in the line count
		@param filedir: The dirname of the file (defaults to the testcase output subdirectory)
		@param expr: The regular expression string used to match a line of the input file
		@param condition: The condition to be met for the number of lines matching the regular expression, 
		expressed as a Python comparison suffix, e.g. ">=1" (evaluated with eval)
		@param ignores: A list of regular expressions that will cause lines to be excluded from the count
		@param encoding: The encoding to use to open the file. 
		The default value is None which indicates that the decision will be delegated 
		to the L{getDefaultFileEncoding()} method. 
		@param xargs: Variable argument list (see class description for supported parameters)
				
		"""
		if filedir is None: filedir = self.output
		f = os.path.join(filedir, file)

		try:
			numberLines = linecount(f, expr, ignores=ignores, encoding=encoding or self.getDefaultFileEncoding(f))
			log.debug("Number of matching lines is %d"%numberLines)
		except Exception:
			# e.g. missing file or encoding error; record BLOCKED rather than aborting the whole run
			log.warn("caught %s: %s", sys.exc_info()[0], sys.exc_info()[1], exc_info=1)
			msg = self.__assertMsg(xargs, 'Line count on %s for %s%s '%(file, quotestring(expr), condition))
			self.addOutcome(BLOCKED, '%s failed due to %s: %s'%(msg, sys.exc_info()[0], sys.exc_info()[1]), abortOnError=self.__abortOnError(xargs))
		else:
			# NOTE(review): condition is passed straight to eval(), so it must only ever
			# come from trusted test code, never from external input
			if (eval("%d %s" % (numberLines, condition))):
				msg = self.__assertMsg(xargs, 'Line count on input file %s' % file)
				self.addOutcome(PASSED, msg, abortOnError=self.__abortOnError(xargs))
			else:
				# include the actual count in the failure message to aid diagnosis
				msg = self.__assertMsg(xargs, 'Line count on %s for %s%s (actual =%d) '%(file, quotestring(expr), condition, numberLines))
				self.addOutcome(FAILED, msg, abortOnError=self.__abortOnError(xargs))
コード例 #3
0
def getmatches(file, regexpr, ignores=None, encoding=None):
	"""Look for matches on a regular expression in an input file, return a sequence of the matches.
	
	@param file: The full path to the input file
	@param regexpr: The regular expression used to search for matches
	@param ignores: A list of regexes which will cause matches to be discarded
	@param encoding: Specifies the encoding to be used for opening the file, or None for default. 
	@return: A list of the match objects 
	@rtype: list
	@raises FileNotFoundException: Raised if the input file does not exist
	
	"""
	matches = []
	rexp = re.compile(regexpr)
	# hoist compilation of the ignore expressions out of the per-line loop
	ignores = [re.compile(i) for i in (ignores or [])]
	
	log.debug("Looking for expression \"%s\" in input file %s" %(regexpr, file))
	
	if not os.path.exists(file):
		raise FileNotFoundException("unable to find file %s" % (os.path.basename(file)))
	with openfile(file, 'r', encoding=encoding) as f:
		for l in f:
			match = rexp.search(l)
			if match is None: continue
			# discard matches on lines matching any of the ignore expressions
			if any(i.search(l) for i in ignores): continue
			
			log.debug(("Found match for line: %s" % l).rstrip())
			matches.append(match)
	return matches
コード例 #4
0
def logContents(message, list):
	"""Log a list of strings at DEBUG level, prepending the line number to each line.
	
	@param message: A message logged once before the numbered lines
	@param list: The list of strings to log
	"""
	log.debug(message)
	# enumerate replaces the previous manually-incremented counter
	for count, line in enumerate(list, start=1):
		log.debug(("  Line %-5d:  %s" % (count, line)).rstrip())
コード例 #5
0
def logContents(message, list):
	"""Log a list of strings, prepending the line number to each line in the log output.
	
	:param list: The list of strings to log
	"""
	# bail out early so no formatting work is done when DEBUG logging is off
	if not log.isEnabledFor(logging.DEBUG): return
	log.debug(message)
	for lineno, line in enumerate(list, 1):
		log.debug(("  Line %-5d:  %s" % (lineno, line)).rstrip())
コード例 #6
0
def getmatches(file,
               regexpr,
               ignores=None,
               encoding=None,
               flags=0,
               mappers=[],
               returnFirstOnly=False):
    """Search an input file for matches of a regular expression and return them.

	If returnFirstOnly=True, reading stops at the first match and just that match 
	(or None) is returned; otherwise all matches are returned as a list.
	
	:param file: The full path to the input file
	:param regexpr: The regular expression used to search for matches
	:param mappers: A list of lambdas or generator functions used to pre-process the file's lines before looking for matches. 
	:param ignores: A list of regexes which will cause matches to be discarded. These are applied *after* any mappers. 
	:param encoding: Specifies the encoding to be used for opening the file, or None for default. 
	:param returnFirstOnly: If True, stops reading the file as soon as the first match is found and returns it. 
	:return: A list of the match objects, or the match object or None if returnFirstOnly is True
	:rtype: list
	:raises FileNotFoundException: Raised if the input file does not exist
	
	"""
    rexp = re.compile(regexpr, flags=flags)

    log.debug("Looking for expression \"%s\" in input file %s" %
              (regexpr, file))

    # a lone string would otherwise be treated as a sequence of single-character
    # ignore expressions, silently discarding far too many lines
    if isinstance(ignores, str):
        ignores = [ignores]
    ignorePatterns = [re.compile(i, flags=flags) for i in (ignores or [])]

    if not pathexists(file):
        raise FileNotFoundException("unable to find file \"%s\"" % (file))

    results = []
    with openfile(file, 'r', encoding=encoding) as f:
        for line in applyMappers(f, mappers):
            m = rexp.search(line)
            if m is None: continue
            if any(p.search(line) for p in ignorePatterns): continue

            log.debug(("Found match for line: %s" % line).rstrip())
            if returnFirstOnly is True: return m
            results.append(m)
    if returnFirstOnly is True: return None
    return results
コード例 #7
0
	def assertDiff(self, file1, file2, filedir1=None, filedir2=None, ignores=[], sort=False, replace=[], includes=[], encoding=None, **xargs):
		"""Perform a validation assert on the comparison of two input text files.
		
		This method performs a file comparison on two input files. The files are pre-processed prior to the 
		comparison to either ignore particular lines, sort their constituent lines, replace matches to regular 
		expressions in a line with an alternate value, or to only include particular lines. Should the files 
		after pre-processing be equivalent a C{PASSED} outcome is added to the test outcome list, otherwise
		a C{FAILED} outcome is added.
		
		@param file1: The basename of the first file used in the file comparison
		@param file2: The basename of the second file used in the file comparison (often a reference file)
		@param filedir1: The dirname of the first file (defaults to the testcase output subdirectory)
		@param filedir2: The dirname of the second file (defaults to the testcase reference directory)
		@param ignores: A list of regular expressions used to denote lines in the files which should be ignored
		@param sort: Boolean flag to indicate if the lines in the files should be sorted prior to the comparison
		@param replace: List of tuples of the form ('regexpr', 'replacement'). For each regular expression in the 
			list, any occurences in the files is replaced with the replacement value prior to the comparison being 
			carried out. This is often useful to replace timestamps in logfiles etc.
		@param includes: A list of regular expressions used to denote lines in the files which should be used in the 
			comparison. Only lines which match an expression in the list are used for the comparison
		@param encoding: The encoding to use to open the file. 
		The default value is None which indicates that the decision will be delegated 
		to the L{getDefaultFileEncoding()} method. 
		@param xargs: Variable argument list (see class description for supported parameters)
				
		"""
		if filedir1 is None: filedir1 = self.output
		if filedir2 is None: filedir2 = self.reference
		f1 = os.path.join(filedir1, file1)
		f2 = os.path.join(filedir2, file2)

		log.debug("Performing file comparison:")
		log.debug("  file1:       %s" % file1)
		log.debug("  filedir1:    %s" % filedir1)
		log.debug("  file2:       %s" % file2)
		log.debug("  filedir2:    %s" % filedir2)
		
		msg = self.__assertMsg(xargs, 'File comparison between %s and %s'%(file1, file2))
		# a unified diff is written to the test output dir so failures can be diagnosed later
		unifiedDiffOutput=os.path.join(self.output, os.path.basename(f1)+'.diff')
		result = False
		try:
			result = filediff(f1, f2, ignores, sort, replace, includes, unifiedDiffOutput=unifiedDiffOutput, encoding=encoding or self.getDefaultFileEncoding(f1))
		except Exception:
			# e.g. one of the files is missing; record BLOCKED rather than aborting
			log.warn("caught %s: %s", sys.exc_info()[0], sys.exc_info()[1], exc_info=1)
			self.addOutcome(BLOCKED, '%s failed due to %s: %s'%(msg, sys.exc_info()[0], sys.exc_info()[1]), abortOnError=self.__abortOnError(xargs))
		else:
			try:
				self.addOutcome(PASSED if result else FAILED, msg, abortOnError=self.__abortOnError(xargs))
			finally:
				# log the diff even if addOutcome aborts the test, to aid debugging
				if not result:
					self.logFileContents(unifiedDiffOutput, encoding=encoding or self.getDefaultFileEncoding(f1))
コード例 #8
0
	def assertOrderedGrep(self, file, filedir=None, exprList=[], contains=True, encoding=None, **xargs):
		"""Perform a validation assert on a list of regular expressions occurring in specified order in a text file.
		
		With C{contains}=True a C{PASSED} outcome is appended if every regular expression in 
		C{exprList} is found in the file in the listed order, and C{FAILED} otherwise. With 
		C{contains}=False the outcomes are reversed: C{PASSED} only if the expressions are 
		NOT all seen in that order.
		
		@param file: The basename of the file used in the ordered grep
		@param filedir: The dirname of the file (defaults to the testcase output subdirectory)
		@param exprList: A list of regular expressions which should occur in the file in the order they appear in the list
		@param contains: Boolean flag to denote if the expressions should or should not be seen in the file in the order specified
		@param encoding: The encoding to use to open the file. 
		The default value is None which indicates that the decision will be delegated 
		to the L{getDefaultFileEncoding()} method. 
		@param xargs: Variable argument list (see class description for supported parameters)
				
		"""
		if filedir is None: filedir = self.output
		f = os.path.join(filedir, file)
	
		log.debug("Performing ordered grep on file:")
		log.debug("  file:       %s" % file)
		log.debug("  filedir:    %s" % filedir)
		for e in exprList: log.debug("  exprList:   %s" % e)
		log.debug("  contains:   %s" % LOOKUP[contains])
		
		msg = self.__assertMsg(xargs, 'Ordered grep on input file %s' % file)
		expr = None
		try:
			expr = orderedgrep(f, exprList, encoding=encoding or self.getDefaultFileEncoding(f))
		except Exception:
			log.warn("caught %s: %s", sys.exc_info()[0], sys.exc_info()[1], exc_info=1)
			self.addOutcome(BLOCKED, '%s failed due to %s: %s'%(msg, sys.exc_info()[0], sys.exc_info()[1]), abortOnError=self.__abortOnError(xargs))
		else:
			# orderedgrep returns None when every expression was found in order;
			# pass when that result agrees with the contains flag
			result = PASSED if (expr is None) == contains else FAILED

			if result == FAILED and expr: 
				msg += ' failed on expression \"%s\"'% expr
			self.addOutcome(result, msg, abortOnError=self.__abortOnError(xargs))
コード例 #9
0
def safeEval(expr,
             errorMessage='Failed to evaluate "{expr}" due to {error}',
             emptyNamespace=False,
             extraNamespace=None):
    """
	Executes eval(...) on the specified string expression, using a controlled globals()/locals() environment to 
	ensure we do not break compatibility between PySys versions, and that a sensible set of PySys constants and 
	modules are available. 
	
	Unless ``emptyNamespace=True``, the global environment used for evaluation includes the 
	``os.path``, ``math``, ``sys``, ``re``, ``json``, and ``locale`` 
	standard Python modules, as well as the ``pysys`` module and the contents of the `pysys.constants` module, e.g. ``IS_WINDOWS``. 
	
	If necessary, symbols for additional modules can be imported dynamically using ``import_module``:: 
	
		x = safeEval("import_module('difflib').get_close_matches('app', ['apple', 'orange', 'applic']")
	
	If an error occurs, an exception is raised that includes the expression in its message. 
	
	:param str expr: The string to be evaluated.
	
	:param str errorMessage: The string used for the raised exception message if an exception is thrown by eval, 
		where ``{expr}`` will be replaced with the actual expression and ``{error}`` with the error message. 
		
	:param bool emptyNamespace: By default a default namespace is provided including the symbols described above such as 
		``os.path``, ``pysys``, etc. Set this to True to start with a completely empty namespace with no symbols defined. 

	:param dict[str,obj] extraNamespace: A dict of string names and Python object values to be included in the globals 
		environment used to evaluate this string. The default of None is equivalent to passing an empty dict. 
		
	.. versionadded:: 2.0
	"""
    # None (not a mutable {}) is used as the default to follow best practice;
    # behavior is identical since both are falsy
    env = {} if emptyNamespace else globals()
    if extraNamespace:
        # copy before updating so the module globals are never mutated
        env = dict(env)
        env.update(extraNamespace)
    try:
        # NOTE: eval of an arbitrary string is inherently unsafe if expr could
        # come from an untrusted source; callers must only pass trusted input
        return eval(expr, env)
    except Exception as e:
        log.debug('Evaluation of %r failed: ', expr, exc_info=True)
        # nb: must use .replace() not .format() since errorMessage could include {...} string literals
        raise SafeEvalException(
            errorMessage.replace(
                '{expr}',
                expr.replace('\n', ' ').replace('\r', '').strip()).replace(
                    '{error}', '%s - %s' %
                    (e.__class__.__name__, e or '<no message>')).strip())
コード例 #10
0
ファイル: threadpool.py プロジェクト: Stringybell/Password
    def __init__(self, requests_queue, results_queue, poll_timeout=5, **kwds):
        """Class constructor.
		
		:param requests_queue: Reference to the threadpool's request queue
		:param results_queue: Reference to the threadpool's results queue
		:param poll_timeout: The timeout when trying to obtain a request from the request queue
		:param kwds: Variable arguments to be passed to the threading.Thread constructor
		
		"""
        threading.Thread.__init__(self, **kwds)
        log.debug("[%s] Creating thread for test execution" % self.getName())
        self.setDaemon(1)
        self._requests_queue = requests_queue
        self._results_queue = results_queue
        self._poll_timeout = poll_timeout
        self._dismissed = threading.Event()
        self.start()
コード例 #11
0
def getmatches(file, regexpr, ignores=None, encoding=None, flags=0, mappers=[]):
	"""Look for matches on a regular expression in an input file, return a sequence of the matches.
	
	:param file: The full path to the input file
	:param regexpr: The regular expression used to search for matches
	:param ignores: A list of regexes which will cause matches to be discarded
	:param encoding: Specifies the encoding to be used for opening the file, or None for default. 
	:param flags: Zero or more re.X flags used when compiling the expressions
	:param mappers: A list of callables used to pre-process each line; a mapper returning None drops the line
	:return: A list of the match objects 
	:rtype: list
	:raises FileNotFoundException: Raised if the input file does not exist
	
	"""
	matches = []
	# bug fix: flags was previously hard-coded to 0 here, silently ignoring the flags parameter
	rexp = re.compile(regexpr, flags=flags)
	
	log.debug("Looking for expression \"%s\" in input file %s" %(regexpr, file))

	if isinstance(ignores, str):
		# guard against a lone string: iterating it would ignore lines containing
		# any single character from that string
		ignores = [ignores]
	ignores = [re.compile(i, flags=flags) for i in (ignores or [])]

	# allow None entries in mappers as a convenient no-op
	if None in mappers: mappers = [m for m in mappers if m]

	if not pathexists(file):
		raise FileNotFoundException("unable to find file \"%s\"" % (file))
	with openfile(file, 'r', encoding=encoding) as f:
		for l in f:
			# pre-process the line through each mapper; a None result drops the line
			for mapper in mappers:
				l = mapper(l)
				if l is None: break
			if l is None: continue

			match = rexp.search(l)
			if match is None: continue
			if any(i.search(l) for i in ignores): continue
			
			log.debug(("Found match for line: %s" % l).rstrip())
			matches.append(match)
	return matches
コード例 #12
0
	def assertLastGrep(self, file, filedir=None, expr='', contains=True, ignores=[], includes=[], encoding=None, **xargs):
		"""Perform a validation assert on a regular expression occurring in the last line of a text file.
		
		When the C{contains} input argument is set to true, this method will add a C{PASSED} outcome 
		to the test outcome list if the supplied regular expression is seen in the file; otherwise a 
		C{FAILED} outcome is added. Should C{contains} be set to false, a C{PASSED} outcome will only 
		be added should the regular expression not be seen in the file.
		
		@param file: The basename of the file used in the grep
		@param filedir: The dirname of the file (defaults to the testcase output subdirectory)
		@param expr: The regular expression to check for in the last line of the file
		@param contains: Boolean flag to denote if the expression should or should not be seen in the file
		@param ignores: A list of regular expressions used to denote lines in the file which should be ignored
		@param includes: A list of regular expressions used to denote lines in the file which should be used in the assertion.
		@param encoding: The encoding to use to open the file. 
		The default value is None which indicates that the decision will be delegated 
		to the L{getDefaultFileEncoding()} method. 
		@param xargs: Variable argument list (see class description for supported parameters)
				
		"""
		if filedir is None: filedir = self.output
		f = os.path.join(filedir, file)

		log.debug("Performing grep on file:")
		log.debug("  file:       %s" % file)
		log.debug("  filedir:    %s" % filedir)
		log.debug("  expr:       %s" % expr)
		log.debug("  contains:   %s" % LOOKUP[contains])

		msg = self.__assertMsg(xargs, 'Grep on last line of %s %s %s'%(file, 'contains' if contains else 'not contains', quotestring(expr)))
		try:
			# lastgrep returns whether expr matched the last (non-ignored) line;
			# comparing against contains yields the pass/fail result directly
			result = lastgrep(f, expr, ignores, includes, encoding=encoding or self.getDefaultFileEncoding(f)) == contains
		except Exception:
			# e.g. missing file; record BLOCKED rather than aborting
			log.warn("caught %s: %s", sys.exc_info()[0], sys.exc_info()[1], exc_info=1)
			self.addOutcome(BLOCKED, '%s failed due to %s: %s'%(msg, sys.exc_info()[0], sys.exc_info()[1]), abortOnError=self.__abortOnError(xargs))
		else:
			# use a shorter message for the success case
			if result: msg = self.__assertMsg(xargs, 'Grep on input file %s' % file)
			self.addOutcome(PASSED if result else FAILED, msg, abortOnError=self.__abortOnError(xargs))
コード例 #13
0
	def stop(self, timeout=TIMEOUTS['WaitForProcessStop'], hard=False):
		"""Stop a process running.
		
		Uses SIGTERM to give processes a chance to gracefully exit including dump code coverage information if needed. 
		
		@param timeout: Number of seconds to wait for the process to stop before giving up
		@param hard: If True, send SIGKILL instead of SIGTERM, giving the process no chance to shut down gracefully
		@raise ProcessError: Raised if an error occurred whilst trying to stop the process
		
		"""
		# PySys has always done a non-hard SIGTERM on Unix; so far this seems ok but could cause problems for 
		# poorly behaved processes that don't SIGTERM cleanly
		
		sig = signal.SIGKILL if hard else signal.SIGTERM
		
		try:
			with self.__lock:
				# already terminated - nothing to do
				if self.exitStatus is not None: return 
				
				# do the kill before the killpg, as there's a small race in which we might try to stop a process 
				# before it has added itself to its own process group, in which case this is essential to avoid 
				# leaking
				os.kill(self.pid, sig)
				
				# nb assuming setpgrp was called when we forked, this will signal the entire process group, 
				# so any children are also killed; small chance this could fail if the process was stopped 
				# before it had a chance to create its process group
				if not self.disableKillingChildProcesses:
					try:
						os.killpg(self.pid, sig)
					except Exception as ex: # pragma: no cover
						# Best not to worry about these
						log.debug('Failed to kill process group (but process itself was killed fine) for %s: %s', self, ex)
			
			try:
				self.wait(timeout=timeout)
			except Exception as ex: # pragma: no cover
				# if it times out on SIGTERM, do our best to SIGKILL it anyway to avoid leaking processes, but still report as an error
				if sig != signal.SIGKILL:
					log.warning('Failed to SIGTERM process %r, will now SIGKILL the process group before re-raising the exception', self)
					try:
						os.killpg(self.pid, signal.SIGKILL)
					except Exception as ex2:
						log.debug('Failed to SIGKILL process group %r: %s', self, ex2)
				
				raise
		except Exception as ex: # pragma: no cover
			# wrap any failure in ProcessError so callers get a consistent exception type
			log.debug('Failed to stop process %r: ', self, exc_info=True)
			raise ProcessError("Error stopping process %r due to %s: %s"%(self, type(ex).__name__, ex))
コード例 #14
0
ファイル: console_clean.py プロジェクト: Stringybell/Password
    def clean(self):
        """Delete compiled Python artefacts and output directories for every selected test.

        Honours per-mode output directories (``~mode`` suffix) when the project
        enables ``supportMultipleModesPerRun``.
        """
        Project.findAndLoadProject(outdir=self.outsubdir)

        descriptors = createDescriptors(self.arguments,
                                        None, [], [],
                                        None,
                                        self.workingDir,
                                        expandmodes=False)
        supportMultipleModesPerRun = Project.getInstance().getProperty(
            'supportMultipleModesPerRun', True)

        for descriptor in descriptors:
            if self.all:
                # remove compiled Python artefacts for the test module
                modulepath = os.path.join(descriptor.testDir,
                                          descriptor.module)
                cache = os.path.join(os.path.dirname(modulepath),
                                     "__pycache__")
                if os.path.isdir(cache):
                    log.info("Deleting pycache: " + cache)
                    deletedir(cache)
                else:
                    log.debug('__pycache__ does not exist: %s', cache)
                path = modulepath + ".pyc"
                if os.path.exists(path):
                    log.info("Deleting compiled Python module: " + path)
                    os.remove(path)
                else:
                    log.debug('.pyc does not exist: %s', path)

            for mode in (descriptor.modes or [None]):
                pathToDelete = os.path.join(descriptor.testDir,
                                            descriptor.output, self.outsubdir)

                if os.path.isabs(self.outsubdir
                                 ):  # must delete only the selected testcase
                    pathToDelete += "/" + descriptor.id

                if supportMultipleModesPerRun and mode:
                    # per-mode output directories carry a "~mode" suffix
                    pathToDelete += '~' + mode

                if os.path.exists(pathToDelete):
                    log.info("Deleting output directory: " + pathToDelete)
                    deletedir(pathToDelete)
                else:
                    log.debug("Output directory does not exist: " +
                              pathToDelete)
コード例 #15
0
ファイル: console_make.py プロジェクト: pysys-test/pysys-test
    def makeTest(self):
        """
		Uses the previously parsed arguments to create a new test (or related asset) on disk in ``self.dest``. 
		
		Can be overridden if additional post-processing steps are required for some templates. 
		"""
        # pick the named template, or the first (default) one if none was specified
        templates = self.getTemplates()
        if self.template:
            tmp = [t for t in templates if t['name'] == self.template]
            if len(tmp) != 1:
                raise UserError(
                    'Cannot find a template named "%s"; available templates for this project and directory are: %s'
                    % (self.template, ', '.join(t['name'] for t in templates)))
            tmp = tmp[0]
        else:
            tmp = templates[0]  # pick the default

        log.debug('Using template: \n%s', json.dumps(tmp, indent='  '))
        dest = self.dest
        print("Creating %s using template %s ..." % (dest, tmp['name']))
        assert tmp['isTest']  # not implemented for other asset types yet

        if os.path.exists(dest):
            raise UserError('Cannot create %s as it already exists' % dest)

        mkdir(dest)

        if not tmp['replace']:
            # use defaults unless user explicitly defines one or more, to save user having to keep redefining the standard ones
            tmp['replace'] = [
                ['@@DATE@@', '@{DATE}'],
                ['@@USERNAME@@', '@{USERNAME}'],
                ['@@DIR_NAME@@', '@{DIR_NAME}'],
                ['@@DEFAULT_DESCRIPTOR@@', '@{DEFAULT_DESCRIPTOR}'],
                [
                    '@@DEFAULT_DESCRIPTOR_MINIMAL@@',
                    '@{DEFAULT_DESCRIPTOR_MINIMAL}'
                ],
                ['@@LINE_LENGTH_GUIDE@@', '@{LINE_LENGTH_GUIDE}'],
            ]

        # extract the descriptor header (everything before the first "import") from the
        # bundled default test template, and substitute its placeholder tokens
        with open(
                self.project.pysysTemplatesDir + '/default-test/pysystest.py',
                'rb') as f:
            DEFAULT_DESCRIPTOR = f.read()
            DEFAULT_DESCRIPTOR = DEFAULT_DESCRIPTOR[:DEFAULT_DESCRIPTOR.find(
                b'import')].rstrip().decode('ascii')
            DEFAULT_DESCRIPTOR = DEFAULT_DESCRIPTOR.replace(
                '@@DATE@@', '@{DATE}')
            DEFAULT_DESCRIPTOR = DEFAULT_DESCRIPTOR.replace(
                '@@USERNAME@@', '@{USERNAME}')
            DEFAULT_DESCRIPTOR = DEFAULT_DESCRIPTOR.replace(
                '@@LINE_LENGTH_GUIDE@@', '@{LINE_LENGTH_GUIDE}')

        # minimal variant: keep only non-descriptor lines (plus the skipped-reason line)
        DEFAULT_DESCRIPTOR_MINIMAL = '\n'.join([
            l for l in DEFAULT_DESCRIPTOR.split('\n')
            if ((l.startswith('#__pysys_skipped_reason__')
                 or not l.startswith('#__pysys_')))
        ])

        # build (compiled regex, replacement bytes) pairs for the file-copy step below
        replace = [
            (
                re.compile(r1.encode('ascii')),
                r2  # in addition to ${...} project properties, add some that are especially useful here
                .replace('@{DEFAULT_DESCRIPTOR}',
                         DEFAULT_DESCRIPTOR.replace('\\', '\\\\')).replace(
                             '@{DEFAULT_DESCRIPTOR_MINIMAL}',
                             DEFAULT_DESCRIPTOR_MINIMAL.replace('\\', '\\\\')
                         ).replace('@{DATE}', self.project.startDate).replace(
                             '@{USERNAME}', self.project.username).replace(
                                 '@{DIR_NAME}',
                                 os.path.basename(dest)).replace(
                                     '@{LINE_LENGTH_GUIDE}',
                                     self.project.getProperty(
                                         "pysystestTemplateLineLengthGuide",
                                         80 * "=")).
                encode(
                    'utf-8'
                )  # non-ascii chars are unlikely, but a reasonable default is to use utf-8 to match typical XML
            ) for (r1, r2) in tmp['replace']
        ]

        log.debug('Using replacements: %s', replace)

        # copy every template file/dir into the destination, applying the replacements
        for c in tmp['copy']:
            target = dest + os.sep + os.path.basename(c)
            if os.path.basename(c) == tmp['testOutputDir']:
                log.debug("  Not copying dir %s" % target)
                continue
            if os.path.exists(target):
                raise Exception('Cannot copy to %s as it already exists' %
                                target)
            self.copy(c, target, replace)
            print("  Copied %s%s" %
                  (target, os.sep + '*' if os.path.isdir(target) else ''))

        # create any additional (relative) directories the template asks for
        for d in tmp['mkdir']:
            if os.path.isabs(d):
                log.debug('Skipping creation of absolute directory: %s', d)
            else:
                mkdir(dest + os.sep + d)

        return dest
コード例 #16
0
	def assertGrep(self, file, filedir=None, expr='', contains=True, ignores=None, literal=False, encoding=None, **xargs):
		"""Perform a validation assert on a regular expression occurring in a text file.
		
		When the C{contains} input argument is set to true, this method will add a C{PASSED} outcome 
		to the test outcome list if the supplied regular expression is seen in the file; otherwise a 
		C{FAILED} outcome is added. Should C{contains} be set to false, a C{PASSED} outcome will only 
		be added should the regular expression not be seen in the file.
		
		@param file: The basename of the file used in the grep
		@param filedir: The dirname of the file (defaults to the testcase output subdirectory)
		@param expr: The regular expression to check for in the file (or a string literal if literal=True). 
			If the match fails, the matching regex will be reported as the test outcome
		@param contains: Boolean flag to denote if the expression should or should not be seen in the file
		@param ignores: Optional list of regular expressions that will be 
			ignored when reading the file. 
		@param literal: By default expr is treated as a regex, but set this to True to pass in 
			a string literal instead
		@param encoding: The encoding to use to open the file. 
		The default value is None which indicates that the decision will be delegated 
		to the L{getDefaultFileEncoding()} method. 
		@param xargs: Variable argument list (see class description for supported parameters)
				
		"""
		if filedir is None: filedir = self.output
		f = os.path.join(filedir, file)

		if literal:
			# convert the literal string into an equivalent regular expression
			def escapeRegex(expr):
				# use our own escaping as re.escape makes the string unreadable
				regex = expr
				expr = ''
				for c in regex:
					if c in '\\{}[]+?^$':
						expr += '\\'+c
					elif c in '().*/':
						expr += '['+c+']' # more readable
					else:
						expr += c
				return expr
			expr = escapeRegex(expr)

		log.debug("Performing grep on file:")
		log.debug("  file:       %s" % file)
		log.debug("  filedir:    %s" % filedir)
		log.debug("  expr:       %s" % expr)
		log.debug("  contains:   %s" % LOOKUP[contains])
		try:
			# returnMatch=True gives us the match object so failures can report the offending text
			result = filegrep(f, expr, ignores=ignores, returnMatch=True, encoding=encoding or self.getDefaultFileEncoding(f))
		except Exception:
			# e.g. missing file; record BLOCKED rather than aborting
			log.warn("caught %s: %s", sys.exc_info()[0], sys.exc_info()[1], exc_info=1)
			msg = self.__assertMsg(xargs, 'Grep on %s %s %s'%(file, 'contains' if contains else 'does not contain', quotestring(expr) ))
			self.addOutcome(BLOCKED, '%s failed due to %s: %s'%(msg, sys.exc_info()[0], sys.exc_info()[1]), abortOnError=self.__abortOnError(xargs))
		else:
			# short message if it succeeded, more verbose one if it failed to help you understand why, 
			# including the expression it found that should not have been there
			outcome = PASSED if (result!=None) == contains else FAILED
			if outcome == PASSED: 
				msg = self.__assertMsg(xargs, 'Grep on input file %s' % file)
			else:
				msg = self.__assertMsg(xargs, 'Grep on %s %s %s'%(file, 'contains' if contains else 'does not contain', 
					quotestring(result.group(0) if result else expr) ))
			self.addOutcome(outcome, msg, abortOnError=self.__abortOnError(xargs))
コード例 #17
0
    def parseArgs(self, args, printXOptions=None):
        """Parse the pysys launcher command line arguments and configure this launcher from them.

        Prepends any arguments from the PYSYS_DEFAULT_ARGS environment variable, expands the
        --ci convenience alias, processes each option via getopt (configuring logging verbosity,
        modes, threads, -X user options, etc), loads the project, and builds the sorted list of
        test descriptors to execute.

        @param args: The list of command line argument strings (excluding the program name).
        @param printXOptions: Optional callable forwarded to L{printUsage} for printing
            additional -X option documentation.
        @return: A tuple of (record, purge, cycle, runnermode, threads, outsubdir,
            descriptors, userOptions).
        """
        # add any default args first; shlex.split does a great job of providing consistent parsing from str->list,
        # but need to avoid mangling \'s on windows; since this env var will be different for each OS no need for consistent win+unix behaviour
        if os.getenv('PYSYS_DEFAULT_ARGS', ''):
            log.info('Using PYSYS_DEFAULT_ARGS = %s' %
                     os.environ['PYSYS_DEFAULT_ARGS'])
            args = shlex.split(os.environ['PYSYS_DEFAULT_ARGS'].replace(
                os.sep, os.sep * 2 if os.sep == '\\' else os.sep)) + args

        printLogsDefault = PrintLogs.ALL
        if '--ci' in args:
            # to ensure identical behaviour, set these as if on the command line
            # (printLogs we don't set here since we use the printLogsDefault mechanism to allow it to be overridden
            # by CI writers and/or the command line; note that setting --mode=ALL would be incorrect if
            # supportMultipleModesPerRun=false but that's a legacy options so we raise an exception later if this happened)
            args = [
                '--purge', '--record', '-j0', '--type=auto', '--mode=ALL',
                '-XcodeCoverage'
            ] + args
            printLogsDefault = PrintLogs.FAILURES

        try:
            optlist, self.arguments = getopt.gnu_getopt(
                args, self.optionString, self.optionList)
        except Exception:
            log.warn("Error parsing command line arguments: %s" %
                     (sys.exc_info()[1]))
            sys.exit(1)

        log.debug('PySys arguments: tests=%s options=%s', self.arguments,
                  optlist)

        printLogs = None
        defaultAbortOnError = None

        logging.getLogger('pysys').setLevel(logging.INFO)

        # as a special case, set a non-DEBUG log level for the implementation of assertions
        # so that it doesn't get enabled with -vDEBUG only -vassertions=DEBUG
        # as it is incredibly verbose and slow and not often useful
        logging.getLogger('pysys.assertions').setLevel(logging.INFO)

        for option, value in optlist:
            if option in ("-h", "--help"):
                self.printUsage(printXOptions)

            elif option in ['--ci']:
                continue  # handled above

            elif option in ("-r", "--record"):
                self.record = True

            elif option in ("-p", "--purge"):
                self.purge = True

            elif option in ("-v", "--verbosity"):
                # either a plain level ("-vDEBUG") or a per-category level ("-vmycat=DEBUG")
                verbosity = value
                if '=' in verbosity:
                    loggername, verbosity = value.split('=')
                    assert not loggername.startswith(
                        'pysys.'
                    ), 'The "pysys." prefix is assumed and should not be explicitly specified'
                    if loggername.startswith('python:'):
                        loggername = loggername[len('python:'):]
                        assert not loggername.startswith(
                            'pysys'
                        ), 'Cannot use python: with pysys.*'  # would produce a duplicate log handler
                        # in the interests of performance and simplicity we normally only add the pysys.* category
                        logging.getLogger(loggername).addHandler(
                            pysys.internal.initlogging.pysysLogHandler)
                    else:
                        loggername = 'pysys.' + loggername
                else:
                    loggername = None

                if verbosity.upper() == "DEBUG":
                    verbosity = logging.DEBUG
                elif verbosity.upper() == "INFO":
                    verbosity = logging.INFO
                elif verbosity.upper() == "WARN":
                    verbosity = logging.WARN
                elif verbosity.upper() == "CRIT":
                    verbosity = logging.CRITICAL
                else:
                    log.warn('Invalid log level "%s"' % verbosity)
                    sys.exit(1)

                if loggername is None:
                    # when setting global log level to a higher level like WARN etc we want to affect stdout but
                    # not necessarily downgrade the root level (would make run.log less useful and break
                    # some PrintLogs behaviour)
                    stdoutHandler.setLevel(verbosity)
                    if verbosity == logging.DEBUG:
                        logging.getLogger('pysys').setLevel(logging.DEBUG)
                else:
                    # for specific level setting we need the opposite - only change stdoutHandler if we're
                    # turning up the logging (since otherwise it wouldn't be seen) but also change the specified level
                    logging.getLogger(loggername).setLevel(verbosity)

            elif option in ("-a", "--type"):
                self.type = value
                if self.type not in ["auto", "manual"]:
                    log.warn(
                        "Unsupported test type - valid types are auto and manual"
                    )
                    sys.exit(1)

            elif option in ("-t", "--trace"):
                self.trace = value

            elif option in ("-i", "--include"):
                self.includes.append(value)

            elif option in ("-e", "--exclude"):
                self.excludes.append(value)

            elif option in ("-c", "--cycle"):
                try:
                    self.cycle = int(value)
                except Exception:
                    print(
                        "Error parsing command line arguments: A valid integer for the number of cycles must be supplied"
                    )
                    sys.exit(1)

            elif option in ("-o", "--outdir"):
                value = os.path.normpath(value)
                # normalize long-path-safe form on Windows unless the caller already used \\?\ explicitly
                if os.path.isabs(value) and not value.startswith('\\\\?\\'):
                    value = fromLongPathSafe(toLongPathSafe(value))
                self.outsubdir = value

            elif option in ("-m", "--mode", "--modeinclude"):
                self.modeinclude = self.modeinclude + [
                    x.strip() for x in value.split(',')
                ]

            elif option in ["--modeexclude"]:
                self.modeexclude = self.modeexclude + [
                    x.strip() for x in value.split(',')
                ]

            elif option in ["-n", "-j", "--threads"]:
                # accepts an absolute count, "auto"/0 for the default, or "xN" as a CPU multiplier
                N_CPUS = multiprocessing.cpu_count()
                if value.lower() == 'auto': value = '0'
                if value.lower().startswith('x'):
                    self.threads = max(1, int(float(value[1:]) * N_CPUS))
                else:
                    self.threads = int(value)
                    if self.threads <= 0:
                        self.threads = int(
                            os.getenv('PYSYS_DEFAULT_THREADS', N_CPUS))

            elif option in ("-b", "--abort"):
                defaultAbortOnError = str(value.lower() == 'true')

            elif option in ["-g", "--progress"]:
                self.progress = True

            elif option in ["--printLogs"]:
                printLogs = getattr(PrintLogs, value.upper(), None)
                if printLogs is None:
                    print(
                        "Error parsing command line arguments: Unsupported --printLogs value '%s'"
                        % value)
                    sys.exit(1)

            elif option in ["-X"]:
                if '=' in value:
                    key, value = value.split('=', 1)
                else:
                    key, value = value, 'true'

                # best not to risk unintended consequences with matching of other types, but for boolean
                # it's worth it to resolve the inconsistent behaviour of -Xkey=true and -Xkey that existed until 1.6.0,
                # and because getting a bool where you expected a string is a bit more likely to give an exception
                # and be noticed that getting a string where you expected a boolean (e.g. the danger of if "false":)
                if value.lower() == 'true':
                    value = True
                elif value.lower() == 'false':
                    value = False

                self.userOptions[key] = value

            elif option in ("-y", "--validateOnly"):
                self.userOptions['validateOnly'] = True

            elif option in ("-G", "--grep"):
                self.grep = value

            else:
                print("Unknown option: %s" % option)
                sys.exit(1)

        # log this once we've got the log levels setup
        log.debug('PySys is installed at: %s; python from %s',
                  os.path.dirname(pysys.__file__), sys.executable)

        # retained for compatibility, but PYSYS_DEFAULT_ARGS is a better way to achieve the same thing
        if os.getenv('PYSYS_PROGRESS', '').lower() == 'true':
            self.progress = True

        # special hidden dict of extra values to pass to the runner, since we can't change
        # the public API now
        self.userOptions['__extraRunnerOptions'] = {
            'progressWritersEnabled': self.progress,
            'printLogs': printLogs,
            'printLogsDefault':
            printLogsDefault,  # to use if not provided by a CI writer or cmdline
        }

        # load project AFTER we've parsed the arguments, which opens the possibility of using cmd line config in
        # project properties if needed
        Project.findAndLoadProject(outdir=self.outsubdir)

        if defaultAbortOnError is not None:
            setattr(Project.getInstance(), 'defaultAbortOnError',
                    defaultAbortOnError)
        if '--ci' in args and not Project.getInstance().getProperty(
                'supportMultipleModesPerRun', True):
            raise UserError(
                'Cannot use --ci option with a legacy supportMultipleModesPerRun=false project'
            )

        descriptors = createDescriptors(self.arguments,
                                        self.type,
                                        self.includes,
                                        self.excludes,
                                        self.trace,
                                        self.workingDir,
                                        modeincludes=self.modeinclude,
                                        modeexcludes=self.modeexclude,
                                        expandmodes=True)
        descriptors.sort(
            key=lambda d: [d.executionOrderHint, d._defaultSortKey])

        # No exception handler above, as any createDescriptors failure is really a fatal problem that should cause us to
        # terminate with a non-zero exit code; we don't want to run no tests without realizing it and return success

        if self.grep:
            regex = re.compile(self.grep, flags=re.IGNORECASE)
            descriptors = [
                d for d in descriptors
                if (regex.search(d.id) or regex.search(d.title))
            ]

        runnermode = self.modeinclude[0] if len(
            self.modeinclude
        ) == 1 else None  # used when supportMultipleModesPerRun=False
        return self.record, self.purge, self.cycle, runnermode, self.threads, self.outsubdir, descriptors, self.userOptions
コード例 #18
0
def filediff(file1,
             file2,
             ignore=None,
             sort=True,
             replacementList=None,
             include=None,
             unifiedDiffOutput=None,
             encoding=None):
    """Perform a file comparison between two (preprocessed) input files, returning true if the files are equivalent.

	The method reads in the files and loads the contents of each as a list of strings. The two files are 
	said to be equal if the two lists are equal. The method allows for preprocessing of the string lists 
	to trim down their contents prior to the comparison being performed. Preprocessing is either to remove 
	entries from the lists which match any entry in a set of regular expressions, include only lines which 
	match any entry in a set of regular expressions, replace certain keywords in the string values of each list
	with a set value (e.g. to replace time stamps etc), or to sort the lists before the comparison (e.g. where 
	determinism may not exist). Verbose logging of the method occurs at DEBUG level showing the contents of the 
	processed lists prior to the comparison being performed.  
	
	@param file1: The full path to the first file to use in the comparison
	@param file2: The full path to the second file to use in the comparison, typically a reference file
	@param ignore: A list of regular expressions which remove entries in the input file contents before making the comparison
	@param sort: Boolean to sort the input file contents before making the comparison
	@param replacementList: A list of tuples (key, value) where matches to key are replaced with value in the input file contents before making the comparison
	@param include: A list of regular expressions used to select lines from the input file contents to use in the comparison 
	@param unifiedDiffOutput: If specified, indicates the full path of a file to which unified diff output will be written, 
		if the diff fails. 
	@param encoding: Specifies the encoding to be used for opening the file, or None for default. 
	
	@return: success (True / False)
	@rtype: boolean
	@raises FileNotFoundException: Raised if either of the files do not exist

	"""
    # None defaults avoid the mutable-default-argument pitfall (a shared list across calls)
    if ignore is None: ignore = []
    if replacementList is None: replacementList = []
    if include is None: include = []

    # fail fast if either input is missing
    for path in (file1, file2):
        if not os.path.exists(path):
            raise FileNotFoundException("unable to find file %s" %
                                        (os.path.basename(path)))

    def _readStrippedLines(path):
        # load the file contents as a list of whitespace-stripped lines
        with openfile(path, 'r', encoding=encoding) as fh:
            return [line.strip() for line in fh]

    list1 = _readStrippedLines(file1)
    list2 = _readStrippedLines(file2)

    # preprocessing: drop ignored lines, keep only included lines, apply keyword
    # replacements, then optionally sort to tolerate non-deterministic ordering
    list1 = trimContents(list1, ignore, exclude=True)
    list2 = trimContents(list2, ignore, exclude=True)
    list1 = trimContents(list1, include, exclude=False)
    list2 = trimContents(list2, include, exclude=False)
    list1 = replace(list1, replacementList)
    list2 = replace(list2, replacementList)
    if sort:
        list1.sort()
        list2.sort()

    logContents(
        "Contents of %s after pre-processing;" % os.path.basename(file1),
        list1)
    logContents(
        "Contents of %s after pre-processing;" % os.path.basename(file2),
        list2)
    if not list1 and not list2:
        # maybe this should be an exception... it's probably not what was intended
        log.warn(
            'File comparison pre-processing has filtered out all lines from the files to be diffed, please check if this is intended: %s, %s',
            os.path.basename(file1), os.path.basename(file2))

    if list1 != list2:
        log.debug("Unified diff between pre-processed input files;")
        l1 = ["%s\n" % i for i in list1]
        l2 = ["%s\n" % i for i in list2]

        # nb: have to switch 1 and 2 around to get the right diff for a typical output,ref file pair
        diff = ''.join(
            difflib.unified_diff(
                l2,
                l1,
                fromfile='%s (%d lines)' %
                (os.path.basename(file2), len(l2)),
                tofile='%s (%d lines)' %
                (os.path.basename(file1), len(l1)),
            ))
        if unifiedDiffOutput:
            with openfile(unifiedDiffOutput, 'w', encoding=encoding) as f:
                f.write(diff)
        for line in diff.split('\n'):
            log.debug("  %s", line)

    return list1 == list2
コード例 #19
0
    def __init__(self,
                 command,
                 arguments,
                 environs,
                 workingDir,
                 state,
                 timeout,
                 stdout=None,
                 stderr=None,
                 displayName=None):
        """Create an instance of the process wrapper.
		
		@param command:  The full path to the command to execute
		@param arguments:  A list of arguments to the command
		@param environs:  A dictionary of environment variables (key, value) for the process context execution
		@param workingDir:  The working directory for the process
		@param state:  The state of the process (L{pysys.constants.FOREGROUND} or L{pysys.constants.BACKGROUND}
		@param timeout:  The timeout in seconds to be applied to the process
		@param stdout:  The full path to the filename to write the stdout of the process
		@param stderr:  The full path to the filename to write the sdterr of the process
		@param displayName: Display name for this process

		"""
        self.displayName = displayName if displayName else os.path.basename(
            command)
        self.command = command
        self.arguments = arguments
        self.environs = {}
        for key in environs:
            self.environs[_stringToUnicode(key)] = _stringToUnicode(
                environs[key])
        self.workingDir = workingDir
        self.state = state
        self.timeout = timeout

        # 'publicly' available data attributes set on execution
        self.pid = None
        self.exitStatus = None

        # print process debug information
        log.debug("Process parameters for executable %s" %
                  os.path.basename(self.command))
        log.debug("  command      : %s", self.command)
        for a in self.arguments:
            log.debug("  argument     : %s", a)
        log.debug("  working dir  : %s", self.workingDir)
        log.debug("  stdout       : %s", stdout)
        log.debug("  stderr       : %s", stderr)
        keys = list(self.environs.keys())
        keys.sort()
        for e in keys:
            log.debug("  environment  : %s=%s", e, self.environs[e])

        # private
        self._outQueue = None
コード例 #20
0
ファイル: console_make.py プロジェクト: pysys-test/pysys-test
    def getTemplates(self):
        """Return the list of 'make test' template dictionaries applicable to the parent directory.

        Gathers templates from the built-in defaults, the project-level directory configuration,
        and every C{pysysdirconfig.xml} found between the project root and the target directory
        (inclusive), with templates defined in deeper directories taking priority over (and
        listed before) those from shallower levels. Returns None if the target directory is
        outside the project root, which disables directory-config processing.
        """
        project = self.project
        projectroot = os.path.normpath(os.path.dirname(project.projectFile))
        # NOTE: 'dir' shadows the builtin; kept as-is (doc-only change)
        dir = self.parentDir

        DIR_CONFIG_DESCRIPTOR = 'pysysdirconfig.xml'
        if not project.projectFile or not dir.startswith(projectroot):
            log.debug(
                'Project file does not exist under "%s" so processing of %s files is disabled',
                dir, DIR_CONFIG_DESCRIPTOR)
            return None

        from pysys.config.descriptor import _XMLDescriptorParser  # uses a non-public API, so please don't copy this into your own test maker

        # load any descriptors between the project dir up to (AND including) the dir we'll be walking
        searchdirsuffix = dir[len(projectroot) + 1:].split(
            os.sep) if len(dir) > len(projectroot) else []

        DEFAULT_DESCRIPTOR = _XMLDescriptorParser.DEFAULT_DESCRIPTOR

        def expandAndValidateTemplate(t, defaults):
            # Normalizes one template dict in place: validates its name, fills in the
            # 'mkdir'/'testOutputDir' defaults from the directory descriptor, expands and
            # globs 'copy' paths, and validates each 'replace' regex. Raises UserError on
            # any invalid configuration.
            source = t.get('source', '<unknown source>')
            if defaults is None: defaults = DEFAULT_DESCRIPTOR

            if t['name'].lower().replace('_', '').replace(' ',
                                                          '') != t['name']:
                raise UserError(  # enforce this to make them easy to type on cmd line, and consistent
                    "Invalid template name \"%s\" - must be lowercase and use hyphens not underscores/spaces for separating words, in \"%s\""
                    % (t['name'], source))

            source = t.get('source', None)
            if t['mkdir'] is None:
                t['mkdir'] = [defaults.output, defaults.reference]
                # only auto-create the input dir if it isn't configured as the
                # "use test dir unless an Input dir exists" sentinel value
                if defaults.input not in [
                        '!Input_dir_if_present_else_testDir!',
                        '!INPUT_DIR_IF_PRESENT_ELSE_TEST_DIR!'
                ]:
                    t['mkdir'].append(defaults.input)

            t['testOutputDir'] = defaults.output

            # resolve each 'copy' entry relative to the descriptor that defined it,
            # with project property expansion
            t['copy'] = [
                os.path.normpath(
                    os.path.join(
                        os.path.dirname(source) if source else '',
                        project.expandProperties(x).strip()))
                for x in t['copy']
            ]
            copy = []
            for c in t['copy']:
                globbed = glob.glob(c)
                if not globbed:
                    raise UserError(
                        'Cannot find any file or directory "%s" in maker template "%s" of "%s"'
                        % (c, t['name'], source))
                copy.extend(globbed)
            t['copy'] = copy

            # expand properties in replacement values, and fail fast on invalid regexes
            t['replace'] = [(r1, project.expandProperties(r2))
                            for (r1, r2) in t['replace']]
            for r1, r2 in t['replace']:
                try:
                    re.compile(r1)
                except Exception as ex:
                    raise UserError(
                        'Invalid replacement regular expression "%s" in maker template "%s" of "%s": %s'
                        % (r1, t['name'], source, ex))

            return t

        # start with the built-ins and project
        templates = [
            expandAndValidateTemplate(t, project._defaultDirConfig)
            for t in self.__PYSYS_DEFAULT_TEMPLATES
        ]
        if project._defaultDirConfig:
            templates = [
                expandAndValidateTemplate(t, project._defaultDirConfig)
                for t in project._defaultDirConfig._makeTestTemplates
            ] + templates

        # walk from the project root down to (and including) the target dir,
        # merging in templates from each pysysdirconfig.xml found along the way
        parentDirDefaults = None
        for i in range(len(searchdirsuffix) + 1):  # up to AND including dir
            if i == 0:
                currentdir = projectroot
            else:
                currentdir = projectroot + os.sep + os.sep.join(
                    searchdirsuffix[:i])

            if pathexists(currentdir + os.sep + DIR_CONFIG_DESCRIPTOR):
                parentDirDefaults = _XMLDescriptorParser.parse(
                    currentdir + os.sep + DIR_CONFIG_DESCRIPTOR,
                    parentDirDefaults=parentDirDefaults,
                    istest=False,
                    project=project)
                newtemplates = [
                    expandAndValidateTemplate(t, parentDirDefaults)
                    for t in parentDirDefaults._makeTestTemplates
                ]
                log.debug(
                    'Loaded directory configuration descriptor from %s: \n%s',
                    currentdir, parentDirDefaults)

                # Add in existing templates from higher levels, but de-dup'd, giving priority to the latest defined template, and also putting the latest ones at the top of the list
                # for increased prominence
                for deftmpl in templates:
                    if not any(tmpl['name'] == deftmpl['name']
                               for tmpl in newtemplates):
                        newtemplates.append(deftmpl)
                templates = newtemplates

        log.debug('Loaded templates: \n%s', json.dumps(templates, indent='  '))
        return templates