Example #1
def find_files(paths):
    result = []
    basePath = Path('nuxeo-tools-hooks/nxtools/hooks')
    for matches in [basePath.glob(pattern) for pattern in paths]:
        result += matches

    return [str(path.relative_to(basePath))
            for path in result
            if not path.relative_to(basePath).match('tests/**/*')]
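A hedged usage sketch: the glob patterns are whatever the caller passes in, so the patterns below are purely illustrative and assume the hard-coded checkout from the example exists.

hook_sources = find_files(['**/*.py', 'templates/*.html'])
print(hook_sources[:5])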
Example #2
def load_legacy(filename):
    m = Path(filename)
    name = m.stem
    d = {}
    c = count()
    r = True

    def num(s):
        try:
            return int(s)
        except ValueError:
            return float(s)

    with m.open() as f:
        while r:
            next(c)
            r = re.search(r"([^\d\W]+)\s+(-*\d+\.*\d*)", f.readline())
            if r:
                d[r.groups()[0]] = num(r.groups()[1])
    l = next(c) - 1
    data = np.loadtxt(str(m.resolve()), skiprows=l)
    dataset = NpDataset(data, resolution=d["cellsize"])
    if "UTMzone" in d:
        gp = GeoPoint(UTM("UTMzone"), d["xllcorner"], d["yllcorner"])
    else:
        gp = GeoPoint(UTM(1), d["xllcorner"], d["yllcorner"])
    return GridMesh(gp, dataset)
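The header loop stops at the first line that is not a `key value` pair, and everything after it is handed to `np.loadtxt` via skiprows. A minimal input the parser would accept might look like the sketch below; any header keys beyond the ones the code actually reads are an assumption.

# Hypothetical input for load_legacy(); only cellsize/xllcorner/yllcorner
# (and optionally a numeric UTMzone) are required by the code above.
SAMPLE_GRID = """\
xllcorner 500000.0
yllcorner 4649776.0
cellsize 30.0
1 2 3
4 5 6
"""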
Example #3
    def __init__(self):
        self.defaults = {
            'configuration': None,
            'platforms': [],
        }
        self.xcode = None
        self.repo_overrides = dict()

        self.root_path = Path.cwd()  # type: Path

        self.library_directory = Path(os.path.expanduser('~/Library/io.schwa.Punic'))
        if not self.library_directory.exists():
            self.library_directory.mkdir(parents=True)
        self.repo_cache_directory = self.library_directory / 'repo_cache'
        if not self.repo_cache_directory.exists():
            self.repo_cache_directory.mkdir(parents=True)
        self.punic_path = self.root_path / 'Carthage'
        self.build_path = self.punic_path / 'Build'
        self.checkouts_path = self.punic_path / 'Checkouts'

        self.derived_data_path = self.library_directory / "DerivedData"

        runner.cache_path = self.library_directory / "cache.shelf"

        self.can_fetch = False
        self.xcode = Xcode.default()

        # Read in defaults from punic.yaml
        self.read(Path('punic.yaml'))
Example #4
class TermiusApp(App):
    """Class for CLI application."""

    def __init__(self):
        """Construct new CLI application."""
        super(TermiusApp, self).__init__(
            description='Termius app',
            version=__version__,
            command_manager=CommandManager('termius.handlers'),
        )
        self.configure_signals()
        self.directory_path = Path(expanduser('~/.{}/'.format(self.NAME)))
        if not self.directory_path.is_dir():
            self.directory_path.mkdir(parents=True)

    def configure_logging(self):
        """Change logging level for request package."""
        super(TermiusApp, self).configure_logging()
        logging.getLogger('requests').setLevel(logging.WARNING)
        return

    # pylint: disable=no-self-use
    def configure_signals(self):
        """Bind subscribers to signals."""
        post_create_instance.connect(store_ssh_key, sender=SshKey)
        post_update_instance.connect(store_ssh_key, sender=SshKey)
        post_delete_instance.connect(delete_ssh_key, sender=SshKey)

        post_logout.connect(clean_data)
Example #5
def archive_resource(resource, destination):
    """
    Write an archive of a resource
    """
    archive_path = Path(destination, resource.basename)
    if resource.is_repo:
        temp_dir = tempfile.mkdtemp(prefix='clone-')
        try:
            repo = clone(resource.url, temp_dir, resource.commitish)
            logging.debug("Archiving %s@%s to %s", resource.url,
                          resource.commitish, archive_path)
            with archive_path.open("wb") as output:
                repo.archive(output, treeish=str(resource.commitish),
                             prefix=resource.prefix)
        finally:
            shutil.rmtree(temp_dir, ignore_errors=True)
    else:
        url = urlparse(resource.url)
        if url.scheme in SUPPORTED_URL_SCHEMES:
            logging.debug("Fetching %s to %s", resource.url, archive_path)
            fetch_url(url, str(archive_path), 5)
        elif url.scheme in ['', 'file'] and url.netloc == '':
            logging.debug("Copying %s to %s", url.path, archive_path)
            shutil.copyfile(url.path, str(archive_path))
    # else: UnsupportedScheme

    return archive_path
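A hedged usage sketch: the real resource object comes from elsewhere in the project, so the namedtuple below is only a hypothetical stand-in carrying the attributes the function reads.

from collections import namedtuple

# Hypothetical stand-in for the project's resource class.
Resource = namedtuple('Resource', 'url basename is_repo commitish prefix')
res = Resource(url='file:///tmp/example.tar.gz', basename='example.tar.gz',
               is_repo=False, commitish=None, prefix=None)
# archive_resource(res, '/tmp/archives')  # would copy the file into /tmp/archives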
Example #6
def apply_patchqueue(base_repo, pq_repo, prefix):
    """
    Link and then apply a patchqueue repository to a source repository
    """
    status_path = Path(pq_repo.working_dir, prefix, 'status')
    patches_link = Path(base_repo.git_dir, 'patches',
                        base_repo.active_branch.name)

    # make the directory tree for the patches within the base repo
    # pylint: disable=no-member
    patches_link.parent.mkdir(parents=True)

    # link the patchqueue directory for the base repo branch
    rel_path = relpath(str(status_path.parent), str(patches_link.parent))
    patches_link.symlink_to(rel_path)

    # create an empty status file
    with status_path.open('w'):
        pass

    patches = subprocess.check_output(['guilt', 'series'],
                                      cwd=base_repo.working_dir)
    if patches:
        subprocess.check_call(['guilt', 'push', '--all'],
                              cwd=base_repo.working_dir)
Example #7
def new_page():
    from string import Template     # Use Python templates, not Mako templates

    slug = input('Slug for page: ')
    title = input('Title of page: ')
    template = input('Template to inherit from (default is example.html): ')

    new_dir = Path('site') / slug
    if new_dir.exists():
        print('\nDirectory %s already exists, aborting' % new_dir)
        return
    new_dir.mkdir()

    html_file = new_dir / 'index.html'
    with html_file.open('w') as fp:
        fp.write(Template(NEW_PAGE_HTML_TEMPLATE).substitute(
            title=repr(title.strip()), template=template.strip() or 'example.html'))

    js_file = new_dir / 'app.es6'
    with js_file.open('w') as fp:
        class_name = ''.join(s.capitalize() for s in title.split(' '))
        fp.write(Template(NEW_PAGE_JS_TEMPLATE).substitute(
            title=title, class_name=class_name))

    marker = '// This comment marks where new entry points will be added'
    new_entry = "'%s': './site/%s/app.es6'," % (slug, slug)
    code = open('webpack.config.js').read()
    with open('webpack.config.js', 'w') as fp:
        fp.write(code.replace(marker, new_entry + '\n    ' + marker))
Example #8
def create_repo_from_spec(spec_path, top_path, repo_path):
    """
    Invoke the prep phase of rpmbuild to generate a source directory then
    create a git repo from it
    """
    top_dir = top_path.resolve()
    cmd = ['rpmbuild', '-bp', '--nodeps',
           '--define', '_topdir '+str(top_dir), str(spec_path)]
    logging.debug("Running %s", ' '.join(cmd))
    subprocess.check_call(cmd)

    # move the created build directory under the repo directory
    build_path = list(Path(top_path, 'BUILD').glob('*'))[0]
    rename(str(build_path), str(repo_path))

    git_dir = Path(repo_path, '.git')
    if git_dir.exists():
        # setup already created a git repo
        repo = git.Repo(str(repo_path))
    else:
        repo = git.Repo.init(str(repo_path))
        index = repo.index
        index.add(repo.untracked_files)
        index.commit("Repo generated by planex-clone")

    return repo
Example #9
def dump_thermals_to_cup_file(flight, cup_filename_local):
    """Dump flight's thermals to a .cup file (SeeYou).

    Args:
        flight: an igc_lib.Flight, the flight to be written
        cup_filename_local: a string, the name of the file to be written.
    """
    cup_filename = Path(cup_filename_local).expanduser().absolute()
    with cup_filename.open('wt') as wpt:
        wpt.write(u'name,code,country,lat,')
        wpt.write(u'lon,elev,style,rwdir,rwlen,freq,desc,userdata,pics\n')

        def write_fix(name, fix):
            lat = _degrees_float_to_degrees_minutes_seconds(fix.lat, 'lat')
            lon = _degrees_float_to_degrees_minutes_seconds(fix.lon, 'lon')
            wpt.write(u'"%s",,,%02d%02d.%03d%s,' % (
                name, lat.degrees, lat.minutes,
                int(round(lat.seconds/60.0*1000.0)), lat.hemisphere))
            wpt.write(u'%03d%02d.%03d%s,%fm,,,,,,,' % (
                lon.degrees, lon.minutes,
                int(round(lon.seconds/60.0*1000.0)), lon.hemisphere,
                fix.gnss_alt))
            wpt.write(u'\n')

        for i, thermal in enumerate(flight.thermals):
            write_fix(u'%02d' % i, thermal.enter_fix)
            write_fix(u'%02d_END' % i, thermal.exit_fix)
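The conversion helper is defined elsewhere in igc_lib; a rough, hypothetical sketch of what the fields used above imply (integer degrees and minutes, fractional seconds, and a hemisphere letter) could look like this:

from collections import namedtuple

DMS = namedtuple('DMS', 'degrees minutes seconds hemisphere')

def _degrees_to_dms(value, kind):
    # Hypothetical stand-in, not igc_lib's implementation.
    positive, negative = ('N', 'S') if kind == 'lat' else ('E', 'W')
    hemisphere = positive if value >= 0 else negative
    value = abs(value)
    degrees = int(value)
    minutes = int((value - degrees) * 60)
    seconds = (value - degrees - minutes / 60.0) * 3600.0
    return DMS(degrees, minutes, seconds, hemisphere)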
Example #10
def dump_flight_to_kml(flight, kml_filename_local):
    """Dumps the flight to KML format.

    Args:
        flight: an igc_lib.Flight, the flight to be saved
        kml_filename_local: a string, the name of the output file
    """
    assert flight.valid
    kml = simplekml.Kml()

    def add_point(name, fix):
        kml.newpoint(name=name, coords=[(fix.lon, fix.lat)])

    coords = []
    for fix in flight.fixes:
        coords.append((fix.lon, fix.lat))
    kml.newlinestring(coords=coords)

    add_point(name="Takeoff", fix=flight.takeoff_fix)
    add_point(name="Landing", fix=flight.landing_fix)

    for i, thermal in enumerate(flight.thermals):
        add_point(name="thermal_%02d" % i, fix=thermal.enter_fix)
        add_point(name="thermal_%02d_END" % i, fix=thermal.exit_fix)

    kml_filename = Path(kml_filename_local).expanduser().absolute()
    kml.save(kml_filename.as_posix())
Example #11
    def process(self, path):
        path = Path(path)

        if path.is_dir():
            self.process_files_in(path)
        else:
            self.process_one_file(path)
Example #12
def main(src, dest):
    """links configfiles from one folder to another

    if links exists it verifies content
    if files exist at the target side it errors

    Args:
        src: source folder
        dest: target folder
    """
    src = Path(src)
    if not src.exists():
        print("WARNING:", src, "does not exist, skipping linking")
        return

    dest = Path(dest)

    for element in filter(_is_yaml_file, src.iterdir()):
        _warn_on_unknown_encryption(element)
        target = dest.joinpath(element.name)
        # the following is fragile
        if target.is_symlink():
            _warn_on_missmatching_symlink(src=element, target=target)
        elif target.is_file():
            _warn_on_existing_file(target)
        else:
            target.symlink_to(element.resolve())
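The `_is_yaml_file` and `_warn_on_*` helpers live elsewhere in the module; a plausible sketch of the filter (an assumption, not the project's actual code) is a simple suffix check:

def _is_yaml_file(path):
    # Hypothetical helper: keep only *.yml / *.yaml regular files.
    return path.is_file() and path.suffix in ('.yml', '.yaml')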
Example #13
def gmx_mpi_linked(link):
    gmx_exe = distutils.spawn.find_executable('gmx')
    gmx_mpi = Path('~/gmx_mpi').expanduser()
    if not link:
        return ''
    else:
        gmx_mpi.symlink_to(gmx_exe)
        return str(gmx_mpi.expanduser())
Example #14
def vim_plug():
    autoload_dir = Path(VIM_DIR).expanduser() / 'autoload'
    autoload_dir.mkdir(parents=True, exist_ok=True)
    vim_plug_path = autoload_dir / 'plug.vim'
    LOG.info('downloading vim-plug')
    r = requests.get(VIM_PLUG_URL)
    with vim_plug_path.open('wb') as f:
        f.write(r.content)
    LOG.info('done')
Example #15
def get_file(path):
    result = Path('web') / path
    if result.is_file():
        return str(result)
    if result.is_dir() and (result / 'index.html').is_file():
        return str(result / 'index.html')
    # File was not found.
    return None
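A hedged usage sketch, assuming a local 'web' directory: the helper resolves a request path to a file, falls back to a directory's index.html, and returns None otherwise.

print(get_file('css/site.css'))   # 'web/css/site.css' if that file exists
print(get_file('docs'))           # 'web/docs/index.html' if the directory has one
print(get_file('missing.txt'))    # None when nothing matches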
Example #16
def pytest_unconfigure(config):
    if config_existed:
        config_backup.rename(str(path_config))
    else:
        os.remove(str(path_config))
    if config.option.link_gmx_mpi:
        gmx_mpi = Path('~/gmx_mpi').expanduser()
        gmx_mpi.unlink()
Example #17
    def generate_ssh_key_instance(self, path, storage):
        """Generate ssh key from file."""
        private_key_path = Path(path)
        instance = SshKey(
            private_key=private_key_path.read_text(),
            label=private_key_path.name
        )
        self.validate_ssh_key(instance, storage)
        return instance
Example #18
def datadir(original_datadir, tmpdir):
    # Method from: https://github.com/gabrielcnr/pytest-datadir
    # License: MIT
    import shutil
    result = Path(str(tmpdir.join(original_datadir.stem)))
    if original_datadir.is_dir():
        shutil.copytree(str(original_datadir), str(result))
    else:
        result.mkdir()
    return result
Example #19
def _get_data(request, data_type):
    data_dir = Path(DATA_PATH)
    result = None
    for file in data_dir.iterdir():
        parts = file.stem.split('__')
        test_name = request.node.name.split('test_')[-1]
        test_name = test_name[:test_name.index('[')] if '[' in test_name else test_name
        if parts[0] == data_type:
            if parts[1] == test_name or (parts[1] == 'default' and result is None):
                result = yaml.safe_load(file.read_text())
    return result
Example #20
def find_spec(package):
    """
    From a package name locate the spec file
    """
    spec_search = Configuration.get('spec', 'search-path',
                                    default='SPECS').split(':')
    for subdir in spec_search:
        path = Path(subdir, package+'.spec')
        if path.exists():
            return path
    return None
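The search path is a colon-separated list read from configuration; the value below is only a hypothetical illustration of what `Configuration.get('spec', 'search-path')` might return.

# Hypothetical configured value and resulting lookup:
# search-path = SPECS:contrib/SPECS
spec = find_spec('planex')   # Path('SPECS/planex.spec') if it exists, else None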
Example #21
    def _local_url(path):
        """Copy a filepath into the static dir if required
        """
        path = Path(path).resolve()
        # if file is already below static in the hierarchy, don't do anything
        if static in path.parents:
            return path.relative_to(base)
        # otherwise copy the file into static
        static.mkdir(parents=True, exist_ok=True)
        local = static / path.name
        copyfile(str(path), str(local))  # only need str for py<3.6
        return str(local.relative_to(base))
Example #22
def find_link_pin(package):
    """
    From a package name locate the link or pin file
    """
    pin_search = Configuration.get('pin', 'search-path',
                                   default='SPECS').split(':')
    for suffix in ('.pin', '.lnk'):
        for subdir in pin_search:
            path = Path(subdir, package+suffix)
            if path.exists():
                return path
    return None
Example #23
def check_text_files(obtained_fn, expected_fn, fix_callback=lambda x: x, encoding=None):
    """
    Compare two files contents. If the files differ, show the diff and write a nice HTML
    diff file into the data directory.
    :param Path obtained_fn: path to obtained file during current testing.
    :param Path expected_fn: path to the expected file, obtained from previous testing.
    :param str encoding: encoding used to open the files.
    :param callable fix_callback:
        A callback to "fix" the contents of the obtained (first) file.
        This callback receives a list of strings (lines) and must also return a list of lines,
        changed as needed.
        The resulting lines will be used to compare with the contents of expected_fn.
    """
    __tracebackhide__ = True

    obtained_fn = Path(obtained_fn)
    expected_fn = Path(expected_fn)
    obtained_lines = fix_callback(obtained_fn.read_text(encoding=encoding).splitlines())
    expected_lines = expected_fn.read_text(encoding=encoding).splitlines()

    if obtained_lines != expected_lines:
        diff_lines = list(difflib.unified_diff(expected_lines, obtained_lines))
        if len(diff_lines) <= 500:
            html_fn = obtained_fn.with_suffix(".diff.html")
            try:
                differ = difflib.HtmlDiff()
                html_diff = differ.make_file(
                    fromlines=expected_lines,
                    fromdesc=expected_fn,
                    tolines=obtained_lines,
                    todesc=obtained_fn,
                )
            except Exception as e:
                html_fn = "(failed to generate html diff: %s)" % e
            else:
                html_fn.write_text(html_diff, encoding="UTF-8")

            diff = ["FILES DIFFER:", str(expected_fn), str(obtained_fn)]
            diff += ["HTML DIFF: %s" % html_fn]
            diff += diff_lines
            raise AssertionError("\n".join(diff))
        else:
            # difflib has exponential scaling and for thousands of lines it starts to take minutes to render
            # the HTML diff.
            msg = [
                "Files are different, but diff is too big (%s lines)" % (len(diff_lines),),
                "- obtained: %s" % (obtained_fn,),
                "- expected: %s" % (expected_fn,),
            ]
            raise AssertionError("\n".join(msg))
Example #24
def cmd_run(path):
    """
    Runs an application.
    """
    os.chdir(path)
    package = Path("./package.json")
    if not package.is_file():
        raise Exception("Invalid package: no package.json file")

    package = json.load(package.open())

    if "engines" not in package or package["engines"] == {}:
        raise Exception("Invalid package: no engines specified")

    r = requests.get("%s/index.json" % Particle.REPO)
    r.raise_for_status()
    remote_particles = r.json()["particles"]

    variables = {}
    for name, range_ in package["engines"].items():
        p = Particle.get_local(name, range_)
        if not p:
            # if auto_fetch:
            if name in remote_particles:
                v = semver.max_satisfying(remote_particles[name], range_, False)
                if v:
                    print("Downloading %s %s..." % (name, v))
                    p = Particle.fetch(name, v)
                else:
                    print("Cannot satisfy %s (%s), aborting." % (name, range_))
                    sys.exit(1)
            else:
                print("No particle named %s exists, aborting." % name)
                sys.exit(1)
        variables["$" + name.upper().replace("-", "_")] = str(p.main)

    pattern = re.compile('|'.join(map(re.escape, variables.keys())))

    if "lepton" not in package:
        raise Exception("Invalid package: no lepton key in particle.json")
    elif "run" not in package["lepton"]:
        raise Exception("Invalid package: no lepton.run key in particle.json")

    args = package["lepton"]["run"]
    args = pattern.sub(lambda x: variables[x.group()], args)
    args = shlex.split(args)
    print("Resulting command line: %r" % args)
    print("Current dir: %s" % os.getcwd())
    os.execvp(args[0], args)
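The combined pattern built with `re.escape` performs all variable substitutions in a single pass; a minimal standalone illustration of the same trick, with made-up values:

import re

variables = {"$NODE": "/particles/node/bin/node", "$NPM": "/particles/npm/bin/npm"}
pattern = re.compile('|'.join(map(re.escape, variables.keys())))
print(pattern.sub(lambda m: variables[m.group()], "$NODE server.js && $NPM test"))
# -> /particles/node/bin/node server.js && /particles/npm/bin/npm test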
Example #25
    def check(self, path_pairs):
        result = RepoCheckResult()
        for repo_path, system_path in path_pairs:
            LOG.debug('checking "{}" <-> "{}"...'.format(repo_path, system_path))
            repo = Path(self.repo_path).joinpath(repo_path)
            system = Path(system_path)
            pair = RepoPathPair(repo, system)
            repo = repo.expanduser()
            system = system.expanduser()
            pair.update(repo, system)
            status = diffcheck(repo, system)
            LOG.debug('done, status: {}'.format(status))
            pair.status = status
            result.add_pair(pair)
        return result
Example #26
def data_path(relative_path, relative_to=None):
    """Returns data path to test file."""

    if relative_to is None:
        # Use BASE_DIR as default.
        relative_to = BASE_DIR
    elif not isinstance(relative_to, Path):
        # Ensure relative_to is a Path.
        relative_to = Path(relative_to)

    # If relative_to is a file path, use its parent directory.
    if not relative_to.is_dir():
        relative_to = relative_to.parent

    return relative_to / 'data' / relative_path
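A hedged usage sketch; BASE_DIR is whatever the test module defines, and the file names are illustrative.

print(data_path('sample.json'))                        # BASE_DIR/data/sample.json
print(data_path('sample.json', relative_to=__file__))  # <this module's dir>/data/sample.json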
Example #27
def test_update_and_build():
    if quick_tests_only:
        return

    source = Path(__file__).parent / 'Examples'
    destination = Path(tempfile.mkdtemp()) / 'Examples'

    shutil.copytree(source, destination)

    project_paths = [path for path in destination.iterdir() if path.is_dir()]

    for project_path in project_paths:

        with work_directory(project_path):

            output = runner.check_run('punic update')
Example #28
    def del_oldest_tile(self):
        """ Deletes the oldest tile from the cache. """
        arr = self._get_cache_arr()
        oldestAddr = None
        oldestTs = Inf
        for k, v in arr.items():
            if v.get('ts', Inf) < oldestTs:
                oldestTs = v.get('ts', Inf)
                oldestAddr = k

        if oldestAddr is not None:
            tile_path = arr[oldestAddr].get('path', None)
            if tile_path is None:
                raise IOError('Invalid Path!')
            Path(tile_path).unlink()
        else:
            raise IOError('No tiles to delete!')
Example #29
    def __init__(self, path, remote_url=None, branch_name='master'):
        self.path = Path(path)
        self.path_str = str(self.path)
        self.remote_url = remote_url
        self.branch_name = branch_name
        db_latest_key = '%s:%s:%s' % (self.path_str, remote_url or '',
                                      branch_name)
        self.db_latest_key = sha256(db_latest_key).hexdigest()
Example #30
File: git.py Project: pmac/bedrock
    def __init__(self, path, remote_url=None, remote_name=None, branch_name='master'):
        self.path = Path(path)
        self.path_str = str(self.path)
        self.remote_url = remote_url
        self.branch_name = branch_name
        if not remote_name:
            remote_name = 'bedrock-dev' if settings.DEV else 'bedrock-prod'

        self.remote_name = remote_name