Code example #1
File: linter.py  Project: nils-wisiol/pypuf
def main(arguments):
    """
    This function runs the code analysis on every python file under a given path.
    :param arguments: list of str
                      Command line arguments: the path to analyse, the maximal number of characters
                      allowed per line of code, and patterns used to skip certain paths.
    """
    parser = ArgumentParser(
        usage='This tool can be used to check the python code in a directory for style\nviolations. '
              'The script\'s default path is the directory in which this script is\nlocated.'
    )
    parser.add_argument(
        '-p', '--path', help='a path to the directory where the python code should be analyzed '
                             '(default path is ".")', default='.', type=str, dest='path',
    )
    parser.add_argument(
        '-l', '--max-line-length', help='maximal number of characters for each line (default is 120)',
        default='120', type=int, dest='max_line_length',
    )
    parser.add_argument(
        '-e', '--exclude_patterns', help='patterns to identify directories or files which should not be checked '
                                         '(default: .env env venv .venv)',
        default=['.env', 'env', 'venv', '.venv'], type=str, dest='exclude_patterns', nargs='+',
    )
    args = parser.parse_args(arguments)

    # set maximal line length
    max_line_length = '--max-line-length={0}'.format(args.max_line_length)

    # set paths which should not be checked
    excludes = '--exclude={0}'.format(','.join(args.exclude_patterns))

    # run pycodestyle check
    pycodestyle_returncode = call([executable, '-m', 'pycodestyle', max_line_length, excludes, args.path])

    # collect python files, walking subdirectories by default but skipping excluded paths
    files_to_check = []
    for root, _, file_names in walk(args.path):
        # skip paths that contain any of the exclude patterns
        if reduce((lambda x, y: x or y), [pattern in root for pattern in args.exclude_patterns]):
            continue

        for filename in fn_filter(file_names, '*.py'):
            if not reduce((lambda x, y: x or y), [pattern in filename for pattern in args.exclude_patterns]):
                files_to_check.append(path.join(root, filename))

    # if the path is a single file
    if not files_to_check:
        files_to_check = [args.path]

    pylint_cmd = [executable, '-m', 'pylint', max_line_length, '--disable=R']
    pylint_cmd.extend(files_to_check)
    # run pylint check
    pylint_returncode = call(pylint_cmd)

    returncode = 0
    if pycodestyle_returncode != 0 or pylint_returncode != 0:
        returncode = 1

    exit(returncode)
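The snippet above relies on names imported elsewhere in linter.py. A minimal sketch of the surrounding boilerplate, assuming the usual standard-library sources for each name (the project's actual import list and entry point may differ):

from argparse import ArgumentParser
from fnmatch import filter as fn_filter
from functools import reduce
from os import path, walk
from subprocess import call
from sys import argv, executable, exit


if __name__ == '__main__':
    main(argv[1:])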
Code example #2
File: Pygemony.py  Project: onemoretime/pygemony
    def parse_by_extension(self, files):
        """
        Filters the directory listing for files with an acceptable
        extension. The accepted extensions are determined by data returned from GitHub
        on the languages used in the project.

        :param list files: The list of all files in the current repository
        :rtype: generator(str)
        :return: Yields the files that are acceptable to parse.
        """
        for lang in self.language:
            for ext in lang.file_exts:
                for file_ in fn_filter(files, ext):
                    yield file_
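Here fn_filter is fnmatch.filter imported under an alias (as shown in code example #4 below), so each entry in lang.file_exts is presumably a glob pattern such as '*.py' rather than a bare extension. A small, self-contained illustration of that matching behaviour (the file names are made up):

from fnmatch import filter as fn_filter

files = ['setup.py', 'README.md', 'pygemony/core.py', 'docs/conf.txt']
print(fn_filter(files, '*.py'))   # ['setup.py', 'pygemony/core.py']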
Code example #3
def sum_transport_in_straits(runpath, monthly_average=False):

    strait = set_strait_info()
    nstraits = len(strait)
    # Calculate transport in each of the straits
    for sidx in range(0, nstraits):
        strait[sidx].transport = 0.
        if strait[sidx].is_zonal and strait[sidx].is_meridional:
            u_file = fn_filter(listdir(runpath),
                               '*' + strait[sidx].mom6_name + '_U.nc')
            v_file = fn_filter(listdir(runpath),
                               '*' + strait[sidx].mom6_name + '_V.nc')
            if (len(u_file) == 0 and len(v_file) == 0):
                print(("Warning: File not found for %s" %
                       strait[sidx].mom6_name))
                continue
            u_vargroup = Dataset(runpath + '/' + u_file[0]).variables
            v_vargroup = Dataset(runpath + '/' + v_file[0]).variables
        elif strait[sidx].is_zonal:
            v_file = fn_filter(listdir(runpath),
                               '*' + strait[sidx].mom6_name + '*.nc')
            if (len(v_file) == 0):
                print(("Warning: File not found for %s" %
                       strait[sidx].mom6_name))
                continue
            v_vargroup = Dataset(runpath + '/' + v_file[0]).variables
        elif strait[sidx].is_meridional:
            u_file = fn_filter(listdir(runpath),
                               '*' + strait[sidx].mom6_name + '*.nc')
            if (len(u_file) == 0):
                print(("Warning: File not found for %s" %
                       strait[sidx].mom6_name))
                continue
            u_vargroup = Dataset(runpath + '/' + u_file[0]).variables

        if strait[sidx].is_zonal:
            strait[sidx].time = v_vargroup['time'][:]
            # Need to find the first interface deeper than or equal to the requested z-limit. If deeper, then we'll need to
            # scale the next layer back and zero out the rest of the column
            vmo = v_vargroup['vmo'][:, :, :, :]
            if strait[sidx].zlim > 0.:
                z_i = v_vargroup['z_i'][:]
                zidx = np.sum(z_i <= strait[sidx].zlim) - 1
                if z_i[zidx] < strait[sidx].zlim:
                    # Scale back transport in the next layer: frac is the fraction of
                    # the layer that should be included in the calculation.
                    frac = (z_i[zidx + 1] - strait[sidx].zlim) / (z_i[zidx + 1] - z_i[zidx])
                    vmo[:, zidx + 1, :] = vmo[:, zidx + 1, :] * frac
                    # All layers below do not contribute.
                    vmo[:, zidx + 2:, :] = 0.
            strait[sidx].transport += vmo.sum(axis=(1, 2, 3))
            Dataset(runpath + '/' + v_file[0]).close()

        if strait[sidx].is_meridional:
            strait[sidx].time = u_vargroup['time'][:]
            umo = u_vargroup['umo'][:, :, :, :]
            if strait[sidx].zlim > 0.:
                z_i = u_vargroup['z_i'][:]
                zidx = np.sum(z_i <= strait[sidx].zlim) - 1
                if z_i[zidx] < strait[sidx].zlim:
                    frac = (z_i[zidx + 1] - strait[sidx].zlim) / (z_i[zidx + 1] - z_i[zidx])
                    umo[:, zidx + 1, :] = umo[:, zidx + 1, :] * frac
                    umo[:, zidx + 2:, :] = 0.
            strait[sidx].transport += umo.sum(axis=(1, 2, 3))
            Dataset(runpath + '/' + u_file[0]).close()

        if monthly_average:
            strait[sidx].transport = make_monthly_averages(
                strait[sidx].transport)
            strait[sidx].time = make_monthly_averages(strait[sidx].time)
        ntime = strait[sidx].time.size
        time = strait[sidx].time

    transport_array = np.zeros((ntime, nstraits))
    for sidx in range(0, nstraits):
        transport_array[:, sidx] = strait[sidx].transport
    return time, transport_array, strait
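A hedged sketch of how this function might be driven; the run directory below is hypothetical, and set_strait_info() plus the MOM6 output files come from the surrounding project, so only the documented return values (time vector, per-strait transport array, strait metadata) are assumed:

# Hypothetical driver; the run directory layout is project-specific.
time, transport, straits = sum_transport_in_straits('/path/to/run', monthly_average=True)
for sidx, info in enumerate(straits):
    print(info.mom6_name, transport[:, sidx].mean())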
Code example #4
import ntpath
import os

from fnmatch import filter as fn_filter

from django.conf import settings
from django.db import models
from django.utils.translation import ugettext_lazy as _

path1 = os.path.join(os.getcwd(), "executor/tests")
path2 = os.path.join(os.getcwd(), 'users/tests')

pattern = "test_*.py"

adjunct_path = [
    os.path.join(path2, f) for f in fn_filter(os.listdir(path2), pattern)
]
paths = [os.path.join(path1, f) for f in fn_filter(os.listdir(path1), pattern)]

paths.extend(adjunct_path)


class Executor(models.Model):
    """
    Executor holds all crucial data related to a test executor in the database.
    Fields:
        tester: the user initiating a test
        environment_id: a unique integer identifying the environment in which each test is executed
        file: a m2m field; each related file contains tests written in python, and each executor can have more than one file
        created_at: a DateTimeField recording when each Executor (test) was created
        test_log: the corresponding logs for each test
    """
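The module-level collection of test files above could be generalised into a small helper; this is only a sketch of the same fn_filter pattern using the imports already shown, and collect_test_files plus the directory list are hypothetical names, not part of the project:

def collect_test_files(app_dirs, pattern="test_*.py"):
    # Gather test modules matching the glob pattern from each app directory.
    found = []
    for app_dir in app_dirs:
        base = os.path.join(os.getcwd(), app_dir)
        found.extend(os.path.join(base, f) for f in fn_filter(os.listdir(base), pattern))
    return found

# e.g. paths = collect_test_files(["executor/tests", "users/tests"])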
Code example #5
def get_file_names(file_dir=CSV_DIR, ext='*.csv'):
    """Return the names of files in ``file_dir`` matching the glob pattern ``ext``."""
    return fn_filter(os.listdir(file_dir), ext)
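For example, with a hypothetical directory of exports:

csv_names = get_file_names('/path/to/exports')             # e.g. ['a.csv', 'b.csv']
json_names = get_file_names('/path/to/exports', '*.json')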
Code example #6
File: Pygemony.py  Project: lucode/pygemony
    def parse_by_extension(self, files):
        for lang in self.language:
            for ext in lang.file_exts:
                for file_ in fn_filter(files, ext):
                    yield file_
Code example #7
def sum_transport_in_straits(runpath, monthly_average=False):

  strait = set_strait_info()
  nstraits = len(strait)
  # Calculate transport in each of the straits
  for sidx in range(0,nstraits):
    strait[sidx].transport = 0.
    if strait[sidx].is_zonal and strait[sidx].is_meridional:
      u_file = fn_filter(listdir(runpath), '*' + strait[sidx].mom6_name + '_U.nc')
      v_file = fn_filter(listdir(runpath), '*' + strait[sidx].mom6_name + '_V.nc')
      if (len(u_file)==0 and len(v_file)==0):
        print("Warning: File not found for %s" % strait[sidx].mom6_name)
        continue
      u_vargroup = Dataset(runpath+'/'+u_file[0]).variables
      v_vargroup = Dataset(runpath+'/'+v_file[0]).variables
    elif strait[sidx].is_zonal:
      v_file = fn_filter(listdir(runpath), '*' + strait[sidx].mom6_name + '*.nc')
      if (len(v_file)==0):
        print("Warning: File not found for %s" % strait[sidx].mom6_name)
        continue
      v_vargroup = Dataset(runpath+'/'+v_file[0]).variables
    elif strait[sidx].is_meridional:
      u_file = fn_filter(listdir(runpath), '*' + strait[sidx].mom6_name + '*.nc')
      if (len(u_file)==0):
        print("Warning: File not found for %s" % strait[sidx].mom6_name)
        continue
      u_vargroup = Dataset(runpath+'/'+u_file[0]).variables

    # Need to find the first interface deeper than or equal to the requested z-limit. If deeper, then we'll need to scale the
    # bottommost part of the column
    if strait[sidx].is_zonal:
      strait[sidx].time = v_vargroup['time'][:]
      vmo = v_vargroup['vmo'][:,:,:,:]
      if strait[sidx].zlim > 0.:
        z_i = v_vargroup['z_i'][:]
        zidx = np.sum(z_i<strait[sidx].zlim)
        if z_i[zidx] > strait[sidx].zlim:
          frac = min(1., (z_i[zidx] - strait[sidx].zlim)/(z_i[zidx]-z_i[zidx-1]))
          vmo[:,-1,:] = vmo[:,-1,:]*frac
      strait[sidx].transport += vmo.sum(axis=(1,2,3))
      Dataset(runpath+'/'+v_file[0]).close()

    if strait[sidx].is_meridional:
      strait[sidx].time = u_vargroup['time'][:]
      umo = u_vargroup['umo'][:,:,:,:]
      if strait[sidx].zlim > 0.:
        z_i = u_vargroup['z_i'][:]
        zidx = np.sum(z_i<strait[sidx].zlim)
        if z_i[zidx] > strait[sidx].zlim:
          frac = min(1., (z_i[zidx] - strait[sidx].zlim)/(z_i[zidx]-z_i[zidx-1]))
          umo[:,-1,:] = umo[:,-1,:]*frac
      strait[sidx].transport += umo.sum(axis=(1,2,3))
      Dataset(runpath+'/'+u_file[0]).close()
    if monthly_average:
      strait[sidx].transport = make_monthly_averages(strait[sidx].transport)
      strait[sidx].time = make_monthly_averages(strait[sidx].time)
    ntime = strait[sidx].time.size
    time = strait[sidx].time

  transport_array = np.zeros((ntime, nstraits))
  for sidx in range(0,nstraits):
    transport_array[:,sidx] = strait[sidx].transport
  return time, transport_array, strait
Code example #8
File: page_navigator.py  Project: welchbj/almanac
    def match(self, pattern: str) -> Iterable[AbstractPage]:
        """Match stored pages against ``fnmatch`` patterns."""
        paths: List[str] = [page.path for page in self._page_table.keys()]
        for match in sorted(fn_filter(paths, pattern)):
            yield self[match]
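The patterns here follow fnmatch semantics, where '*' also matches path separators, so a single pattern can span several path segments. A standalone sketch of the underlying filtering (the page paths are invented):

from fnmatch import filter as fn_filter

paths = ['/apps/api/settings', '/apps/web/settings', '/docs/cli']
print(sorted(fn_filter(paths, '/apps/*/settings')))
# ['/apps/api/settings', '/apps/web/settings']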