Example #1
0
def make_symlink(path, real_path):
    """Create a symlink at *path* pointing at *real_path*.

    Parent directories of both paths are created as needed. If something
    already occupies *path* it is renamed aside with a ``.bak`` suffix
    (note: ``with_suffix`` replaces any existing extension).

    :param path: location of the symlink to create.
    :param real_path: target the symlink should point to.
    """
    real_path = Path(real_path)
    real_path.parent.mkdir(parents=True, exist_ok=True)
    path = Path(path)
    path.parent.mkdir(parents=True, exist_ok=True)
    # Use lexists(), not Path.exists(): exists() follows symlinks and returns
    # False for a dangling link, in which case os.symlink() below would raise
    # FileExistsError instead of backing the old link up.
    if os.path.lexists(str(path)):
        os.rename(str(path), str(path.with_suffix('.bak')))
    os.symlink(str(real_path), str(path))
Example #2
0
 def find_spec(self, name, path, target=None):
     """Locate a C/C++/shared-object source for *name* and wrap it in a
     ModuleSpec using a CintegrateLoader.

     The dotted module name is mapped to a relative filesystem path; the
     first matching candidate wins, in order: .c, .cpp, .so, then a plain
     package directory. Returns None when nothing matches.
     """
     candidate = Path('./' + name.replace('.', r'/'))
     for ext, kind in (('.c', 'c'), ('.cpp', 'c++'), ('.so', 'so')):
         if candidate.with_suffix(ext).exists():
             return ModuleSpec(name,
                               CintegrateLoader(candidate, type=kind),
                               is_package=True)
     if candidate.is_dir():
         return ModuleSpec(name, CintegrateLoader(candidate), is_package=True)
     return None
Example #3
0
def check_text_files(obtained_fn, expected_fn, fix_callback=lambda x: x, encoding=None):
    """
    Compare the contents of two text files. On mismatch, raise AssertionError
    carrying a unified diff, and (for reasonably sized diffs) write an HTML
    diff next to the obtained file.

    :param Path obtained_fn: path to obtained file during current testing.

    :param Path expected_fn: path to the expected file, obtained from previous testing.

    :param str encoding: encoding used to open the files.

    :param callable fix_callback:
        A callback to "fix" the contents of the obtained (first) file.
        This callback receives a list of strings (lines) and must also return a list of lines,
        changed as needed.
        The resulting lines will be used to compare with the contents of expected_fn.
    """
    __tracebackhide__ = True

    obtained_path = Path(obtained_fn)
    expected_path = Path(expected_fn)
    actual_lines = fix_callback(obtained_path.read_text(encoding=encoding).splitlines())
    reference_lines = expected_path.read_text(encoding=encoding).splitlines()

    if actual_lines == reference_lines:
        return

    diff_lines = list(
        difflib.unified_diff(reference_lines, actual_lines, lineterm="")
    )
    if len(diff_lines) > 500:
        # difflib has exponential scaling and for thousands of lines it starts to take minutes to render
        # the HTML diff.
        msg = [
            "Files are different, but diff is too big ({} lines)".format(
                len(diff_lines)
            ),
            "- obtained: {}".format(obtained_path),
            "- expected: {}".format(expected_path),
        ]
        raise AssertionError("\n".join(msg))

    html_fn = obtained_path.with_suffix(".diff.html")
    try:
        html_diff = difflib.HtmlDiff().make_file(
            fromlines=reference_lines,
            fromdesc=expected_path,
            tolines=actual_lines,
            todesc=obtained_path,
        )
    except Exception as e:
        # keep going with a placeholder instead of failing the comparison
        html_fn = "(failed to generate html diff: %s)" % e
    else:
        html_fn.write_text(html_diff, encoding="UTF-8")

    parts = ["FILES DIFFER:", str(expected_path), str(obtained_path)]
    parts.append("HTML DIFF: %s" % html_fn)
    parts.extend(diff_lines)
    raise AssertionError("\n".join(parts))
def check_text_files(obtained_fn, expected_fn, fix_callback=lambda x: x, encoding=None):
    """
    Compare two files contents. If the files differ, show the diff and write a nice HTML
    diff file into the data directory.
    :param Path obtained_fn: path to obtained file during current testing.
    :param Path expected_fn: path to the expected file, obtained from previous testing.
    :param str encoding: encoding used to open the files.
    :param callable fix_callback:
        A callback to "fix" the contents of the obtained (first) file.
        This callback receives a list of strings (lines) and must also return a list of lines,
        changed as needed.
        The resulting lines will be used to compare with the contents of expected_fn.
    """
    __tracebackhide__ = True

    obtained_fn = Path(obtained_fn)
    expected_fn = Path(expected_fn)
    obtained_lines = fix_callback(obtained_fn.read_text(encoding=encoding).splitlines())
    expected_lines = expected_fn.read_text(encoding=encoding).splitlines()

    if obtained_lines != expected_lines:
        # lineterm="" because splitlines() stripped the newlines: with the
        # default lineterm the "---"/"+++"/"@@" header lines carry a trailing
        # "\n", producing spurious blank lines when joined below.
        diff_lines = list(
            difflib.unified_diff(expected_lines, obtained_lines, lineterm="")
        )
        if len(diff_lines) <= 500:
            html_fn = obtained_fn.with_suffix(".diff.html")
            try:
                differ = difflib.HtmlDiff()
                html_diff = differ.make_file(
                    fromlines=expected_lines,
                    fromdesc=expected_fn,
                    tolines=obtained_lines,
                    todesc=obtained_fn,
                )
            except Exception as e:
                # fall back to a placeholder string rather than aborting
                html_fn = "(failed to generate html diff: %s)" % e
            else:
                html_fn.write_text(html_diff, encoding="UTF-8")

            diff = ["FILES DIFFER:", str(expected_fn), str(obtained_fn)]
            diff += ["HTML DIFF: %s" % html_fn]
            diff += diff_lines
            raise AssertionError("\n".join(diff))
        else:
            # difflib has exponential scaling and for thousands of lines it starts to take minutes to render
            # the HTML diff.
            msg = [
                "Files are different, but diff is too big (%s lines)" % (len(diff_lines),),
                "- obtained: %s" % (obtained_fn,),
                "- expected: %s" % (expected_fn,),
            ]
            raise AssertionError("\n".join(msg))
Example #5
0
def which(
        program,
        find_all=False):  # pragma: no cover -- TODO separate module for utils
    """Locate *program* on the PATH, honoring Windows PATHEXT extensions.

    :param program: executable name (may itself contain dots, e.g. "run.me").
    :param find_all: when True return a list of every match; otherwise return
        the first match.
    :return: a Path (first match), a list of Paths (find_all=True), or None
        when nothing executable was found.
    """
    EXECUTABLE_EXT = ['']
    if "PATHEXT" in os.environ:
        EXECUTABLE_EXT += os.environ["PATHEXT"].split(os.pathsep)

    result = []
    # leading '' probes the bare/relative name before searching PATH entries
    for path in [''] + os.environ.get("PATH", "").split(os.pathsep):
        path = path.strip('"')
        exe_file = Path(path) / program
        for ext in EXECUTABLE_EXT:
            # Append the extension to the full name. The previous
            # with_suffix(ext) *replaced* an existing extension, so looking up
            # "run.me" with PATHEXT ".sh" probed "run.sh" instead of
            # "run.me.sh".
            if ext and ext != '.':
                fullpath = exe_file.with_name(exe_file.name + ext)
            else:
                fullpath = exe_file
            if fullpath.is_file() and os.access(str(fullpath), os.X_OK):
                if find_all:
                    result.append(fullpath)
                else:
                    return fullpath
    return result or None
Example #6
0
def modified_config(request):
    """Derive a (tools, append_suffix, Path) triple from pytest options.

    ``tools`` is the expanded ~/gmx_mpi path when --link_gmx_mpi was given,
    else the empty string; ``append_suffix`` is 'yes'/'no' per the
    --append_suffix option. The pathlib Path class rides along as the third
    element.
    """
    tools = ''
    if request.config.getoption('link_gmx_mpi'):
        tools = str(Path('~/gmx_mpi').expanduser())
    if request.config.getoption('append_suffix'):
        append_suffix = 'yes'
    else:
        append_suffix = 'no'
    return tools, append_suffix, Path


# Import-time setup: load the user's GromacsWrapper config if present,
# otherwise fall back to the packaged template; remember which case applied
# so the original file can be restored after the test session.
path_config = Path('~/.gromacswrapper.cfg').expanduser()
gw_config = ConfigParser()
if path_config.exists():
    gw_config.read(str(path_config.resolve()))
    config_existed = True
else:
    gw_config.read('gromacs/templates/gromacswrapper.cfg')
    config_existed = False
# NOTE(review): with_suffix replaces '.cfg', so the backup lands at
# '~/.gromacswrapper.bak' rather than '...cfg.bak' — presumably intended.
config_backup = path_config.with_suffix('.bak')


def pytest_configure(config):
    """Write a session-specific GromacsWrapper config file.

    Backs up any pre-existing user config first, then rewrites the
    [Gromacs] tools/append_suffix entries from the pytest options.
    """
    if config_existed:
        shutil.copy(str(path_config), str(config_backup))
    append_suffix = 'yes' if config.getoption('append_suffix') else 'no'
    tools = gmx_mpi_linked(config.getoption('link_gmx_mpi'))
    gw_config.set('Gromacs', 'tools', tools)
    gw_config.set('Gromacs', 'append_suffix', append_suffix)
    with open(str(path_config), 'w') as fh:
        gw_config.write(fh)


def pytest_unconfigure(config):
Example #7
0
    parser.add_argument("-o",
                        "--out_files",
                        action="store",
                        dest="outFiles",
                        required=True,
                        help="output file or directory")

    args = parser.parse_args()

    inPath = Path(args.inFiles)
    outPath = Path(args.outFiles)

    # Case 1:  Convert single file.
    if (inPath.is_file()):
        inSuff = inPath.suffix
        inStem = inPath.stem
        outDir = outPath.parent
        outDir.mkdir(parents=True, exist_ok=True)
        if (inSuff != tsurfSuffix):
            msg = 'Only tsurf (*.ts) files are allowed as input.'
            raise ValueError(msg)
        outPath = outPath.with_suffix(vtkSuffix)
        convertFile(inPath, outPath)
    # Case 2:  Convert directory.
    elif (inPath.is_dir()):
        convertDir(inPath, outPath)
    # Case 3:  Give up.
    else:
        msg = 'Unable to find %s.' % inPath
        raise ValueError(msg)
Example #8
0
    def _get_jupyter_notebook_filename(cls):
        """Best-effort detection of the .py entry point for the currently
        running Jupyter notebook.

        Tries, in order: VSCode's jupyter integration (which publishes the
        notebook path into the IPython user namespace), then a classic
        notebook server queried over its REST API to map the current kernel
        id back to a notebook session.

        Side effect: installs the post-store hook for notebook monitoring.
        Returns None when not running under a notebook kernel or on any
        unexpected failure.
        """
        # check if we are running in vscode, we have the jupyter notebook defined:
        if 'IPython' in sys.modules:
            # noinspection PyBroadException
            try:
                from IPython import get_ipython  # noqa
                ip = get_ipython()
                # vscode-jupyter PR #8531 added this variable
                local_ipynb_file = ip.__dict__.get('user_ns', {}).get('__vsc_ipynb_file__') if ip else None
                if local_ipynb_file:
                    # now replace the .ipynb with .py
                    # we assume we will have that file available for monitoring
                    local_ipynb_file = Path(local_ipynb_file)
                    script_entry_point = local_ipynb_file.with_suffix('.py').as_posix()

                    # install the post store hook,
                    # notice that if we do not have a local file we serialize/write every time the entire notebook
                    cls._jupyter_install_post_store_hook(local_ipynb_file.as_posix(), log_history=False)

                    return script_entry_point
            except Exception:
                pass

        # Not VSCode: bail out unless argv looks like an ipykernel launch
        # (argv[2] is the kernel connection .json file).
        if not (sys.argv[0].endswith(os.path.sep + 'ipykernel_launcher.py') or
                sys.argv[0].endswith(os.path.join(os.path.sep, 'ipykernel', '__main__.py'))) \
                or len(sys.argv) < 3 or not sys.argv[2].endswith('.json'):
            return None

        server_info = None

        # we can safely assume that we can import the notebook package here
        # noinspection PyBroadException
        try:
            # noinspection PyPackageRequirements
            from notebook.notebookapp import list_running_servers
            import requests
            # kernel id is embedded in the connection file name: kernel-<id>.json
            current_kernel = sys.argv[2].split(os.path.sep)[-1].replace('kernel-', '').replace('.json', '')
            # noinspection PyBroadException
            try:
                server_info = next(list_running_servers())
            except Exception:
                # on some jupyter notebook versions this function can crash on parsing the json file,
                # we will parse it manually here
                # noinspection PyPackageRequirements
                import ipykernel
                from glob import glob
                import json
                for f in glob(os.path.join(os.path.dirname(ipykernel.get_connection_file()), '??server-*.json')):
                    # noinspection PyBroadException
                    try:
                        with open(f, 'r') as json_data:
                            server_info = json.load(json_data)
                    except Exception:
                        server_info = None
                    if server_info:
                        break

            cookies = None
            password = None
            if server_info and server_info.get('password'):
                # we need to get the password
                from ....config import config
                password = config.get('development.jupyter_server_password', '')
                if not password:
                    cls._get_logger().warning(
                        'Password protected Jupyter Notebook server was found! '
                        'Add `sdk.development.jupyter_server_password=<jupyter_password>` to ~/clearml.conf')
                    return os.path.join(os.getcwd(), 'error_notebook_not_found.py')

                # log in so the sessions API call below carries valid cookies
                r = requests.get(url=server_info['url'] + 'login')
                cookies = {'_xsrf': r.cookies.get('_xsrf', '')}
                r = requests.post(server_info['url'] + 'login?next', cookies=cookies,
                                  data={'_xsrf': cookies['_xsrf'], 'password': password})
                cookies.update(r.cookies)

            # NOTE(review): if server_info is still None here, the lookups below
            # raise and the outer `except Exception` returns None — confirm.
            auth_token = server_info.get('token') or os.getenv('JUPYTERHUB_API_TOKEN') or ''
            try:
                r = requests.get(
                    url=server_info['url'] + 'api/sessions', cookies=cookies,
                    headers={'Authorization': 'token {}'.format(auth_token), })
            except requests.exceptions.SSLError:
                # disable SSL check warning
                from urllib3.exceptions import InsecureRequestWarning
                # noinspection PyUnresolvedReferences
                requests.packages.urllib3.disable_warnings(category=InsecureRequestWarning)
                # fire request
                r = requests.get(
                    url=server_info['url'] + 'api/sessions', cookies=cookies,
                    headers={'Authorization': 'token {}'.format(auth_token), }, verify=False)
                # enable SSL check warning
                import warnings
                warnings.simplefilter('default', InsecureRequestWarning)

            # send request to the jupyter server
            try:
                r.raise_for_status()
            except Exception as ex:
                cls._get_logger().warning('Failed accessing the jupyter server{}: {}'.format(
                    ' [password={}]'.format(password) if server_info.get('password') else '', ex))
                return os.path.join(os.getcwd(), 'error_notebook_not_found.py')

            notebooks = r.json()

            # map the current kernel id back to its notebook session
            cur_notebook = None
            for n in notebooks:
                if n['kernel']['id'] == current_kernel:
                    cur_notebook = n
                    break

            # NOTE(review): if no session matched, cur_notebook is None and the
            # next line raises; the outer `except Exception` turns that into a
            # None return — presumably intentional, but verify.
            notebook_path = cur_notebook['notebook'].get('path', '')
            notebook_name = cur_notebook['notebook'].get('name', '')

            is_google_colab = False
            # check if this is google.colab, then there is no local file
            # noinspection PyBroadException
            try:
                # noinspection PyPackageRequirements
                from IPython import get_ipython
                if get_ipython() and 'google.colab' in get_ipython().extension_manager.loaded:
                    is_google_colab = True
            except Exception:
                pass

            if is_google_colab:
                # colab has no local file: build a sanitized synthetic .py name
                script_entry_point = str(notebook_name or 'notebook').replace(
                    '>', '_').replace('<', '_').replace('.ipynb', '.py')
                if not script_entry_point.lower().endswith('.py'):
                    script_entry_point += '.py'
                local_ipynb_file = None
            else:
                # always slash, because this is from uri (so never backslash not even on windows)
                entry_point_filename = notebook_path.split('/')[-1]

                # now we should try to find the actual file
                entry_point = (Path.cwd() / entry_point_filename).absolute()
                if not entry_point.is_file():
                    entry_point = (Path.cwd() / notebook_path).absolute()

                # fix for VSCode pushing uuid at the end of the notebook name.
                if not entry_point.exists():
                    # noinspection PyBroadException
                    try:
                        # strip the trailing dash-separated uuid (5 parts) and re-add .ipynb
                        alternative_entry_point = '-'.join(entry_point_filename.split('-')[:-5])+'.ipynb'
                        # now we should try to find the actual file
                        entry_point_alternative = (Path.cwd() / alternative_entry_point).absolute()
                        if not entry_point_alternative.is_file():
                            # NOTE(review): this recomputes the identical path as the
                            # line above (no-op); likely meant a different fallback — confirm.
                            entry_point_alternative = (Path.cwd() / alternative_entry_point).absolute()

                        # If we found it replace it
                        if entry_point_alternative.exists():
                            entry_point = entry_point_alternative
                    except Exception as ex:
                        cls._get_logger().warning('Failed accessing jupyter notebook {}: {}'.format(notebook_path, ex))

                # get local ipynb for observer
                local_ipynb_file = entry_point.as_posix()

                # now replace the .ipynb with .py
                # we assume we will have that file available with the Jupyter notebook plugin
                entry_point = entry_point.with_suffix('.py')

                script_entry_point = entry_point.as_posix()

            # install the post store hook,
            # notice that if we do not have a local file we serialize/write every time the entire notebook
            cls._jupyter_install_post_store_hook(local_ipynb_file, is_google_colab)

            return script_entry_point
        except Exception:
            return None
Example #9
0
class Video(object):
    """Wrapper around a video file.

    Reads stream/format metadata with ffprobe on construction and offers
    ffmpeg-based transcoding via :meth:`transcode`.

    :param path: path to the video file (str or Path).
    :raises NotAVideo: when the file is a tty "video" or has no video stream.
    """

    def __init__(self, path):
        self.path = Path(path)
        self.get_info()

        # *that's* not a video format we care about
        if self._format['format_name'] == 'tty':
            raise NotAVideo('unable to transcode ansi videos')

        # This checks that there is at least one video stream available
        if not any(s.get('codec_type') == 'video' for s in self._streams):
            raise NotAVideo('no video streams available')

    def get_info(self):
        """Populate ``self._streams`` and ``self._format`` from ffprobe.

        :raises NotAVideo: if ffprobe exits with a non-zero status.
        """
        cmd = ['ffprobe',
               '-show_streams', '-show_format',
               '-hide_banner',
               '-print_format', 'json', str(self.path)]

        p = subprocess.Popen(cmd,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)

        stdout, stderr = p.communicate()
        if p.returncode != 0:
            LOG.debug(stderr)
            raise NotAVideo('failed to read video metadata')

        res = json.loads(stdout)
        self._streams = res.get('streams')
        self._format = res.get('format')

    def __str__(self):
        return '<%s (%s)>' % (
            self.path,
            self._format['format_long_name'])

    def stream_types(self):
        """Return a (codec_type, codec_name) tuple for every stream."""
        return [(s['codec_type'], s['codec_name']) for s in self._streams]

    def audio_codec(self):
        '''Returns the codec of the first audio stream'''
        return [s[1] for s in self.stream_types()
                if s[0] == 'audio'][0]

    def video_codec(self):
        '''Returns the codec of the first video stream'''
        return [s[1] for s in self.stream_types()
                if s[0] == 'video'][0]

    def video_size(self):
        """Return (width, height) of the first video stream."""
        stream = [s for s in self._streams if s['codec_type'] == 'video'][0]
        return (stream['width'], stream['height'])

    def transcode(self,
                  video_codec=None, video_args=None,
                  audio_codec=None, audio_args=None,
                  height=None, width=None, scale=True,
                  profile=None, loglevel='warning',
                  copy_if_same=False, suffix=None,
                  output=None, progress=None):
        """Transcode this video with ffmpeg.

        Explicit keyword arguments override the corresponding *profile*
        entries; when *profile* is None a stream-copy profile with suffix
        '.vid' is used. When *output* is None the result lands next to the
        source with the profile's suffix. The output file is removed on
        failure or user interrupt.

        :raises ValueError: when resizing is requested with the 'copy' codec.
        :raises TranscodingFailed: when ffmpeg exits non-zero.
        """

        cmd = [
            'ffmpeg',
            '-loglevel', loglevel, '-hide_banner', '-nostats',
            '-y',
            '-i', str(self.path),
        ]

        if progress is not None:
            cmd += [
                '-progress', progress,
            ]

        if profile is None:
            profile = {
                'video_codec': 'copy',
                'audio_codec': 'copy',
                'suffix': '.vid',
            }

        # explicit arguments take precedence over the profile
        if video_codec is not None:
            profile['video_codec'] = video_codec

        if video_args is not None:
            profile['video_args'] = video_args

        if audio_codec is not None:
            profile['audio_codec'] = audio_codec

        if audio_args is not None:
            profile['audio_args'] = audio_args

        if height is not None:
            profile['height'] = height

        if suffix is not None:
            profile['suffix'] = suffix

        if width is not None:
            profile['width'] = width

        # no point re-encoding into the codec the stream already uses
        if copy_if_same and profile['video_codec'] == self.video_codec():
            profile['video_codec'] = 'copy'

        if copy_if_same and profile['audio_codec'] == self.audio_codec():
            profile['audio_codec'] = 'copy'

        # NOTE(review): this raises KeyError when scale=True and the profile
        # carries no width/height — callers apparently always provide them.
        if scale and ((profile['width'], profile['height'])
                      != self.video_size()):
            if profile['video_codec'] == 'copy':
                raise ValueError('cannot resize with "copy" codec')

            # why -2? https://trac.ffmpeg.org/ticket/309
            cmd += ['-vf', 'scale=%d:-2' % profile['width']]

        cmd += ['-c:a', profile['audio_codec']]
        if profile.get('audio_args'):
            cmd += shlex.split(profile['audio_args'])

        cmd += ['-c:v', profile['video_codec']]
        if profile.get('video_args'):
            cmd += shlex.split(profile['video_args'])

        if output is None:
            output = self.path.with_suffix(profile['suffix'])

        cmd += [str(output)]

        LOG.debug('running %s', cmd)

        try:
            p = subprocess.Popen(cmd,
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE)

            stdout, stderr = p.communicate()
            if p.returncode != 0:
                LOG.debug(stderr)
                LOG.error('%s: transcoding failed: removing output file "%s"',
                          self.path, output)
                # NOTE(review): unlink() assumes *output* is a Path; a caller
                # passing output as a str would break here — confirm.
                try:
                    output.unlink()
                except OSError:
                    pass

                raise TranscodingFailed('transcoding failed')
        except KeyboardInterrupt:
            # fixed: the original contained a corrupted literal
            # ('...user: '******'removing...') that did not even parse;
            # reconstructed as one implicitly-concatenated message.
            LOG.error('%s: transcoding interrupted by user: '
                      'removing output file "%s"',
                      self.path, output)
            try:
                output.unlink()
            except OSError:
                pass

            raise
Example #10
0
    link_gmx_mpi = request.config.getoption('link_gmx_mpi')
    tools = str(Path('~/gmx_mpi').expanduser()) if link_gmx_mpi else ''
    append_suffix = 'yes' if request.config.getoption(
        'append_suffix') else 'no'
    return tools, append_suffix, Path


# Import-time setup: load the user's GromacsWrapper config if present,
# otherwise fall back to the packaged template; remember which case applied
# so the original file can be restored after the test session.
path_config = Path('~/.gromacswrapper.cfg').expanduser()
gw_config = ConfigParser()
if path_config.exists():
    gw_config.read(str(path_config.resolve()))
    config_existed = True
else:
    gw_config.read('gromacs/templates/gromacswrapper.cfg')
    config_existed = False
# NOTE(review): with_suffix replaces '.cfg', so the backup lands at
# '~/.gromacswrapper.bak' rather than '...cfg.bak' — presumably intended.
config_backup = path_config.with_suffix('.bak')


def pytest_configure(config):
    """Rewrite the GromacsWrapper config for this test session.

    A pre-existing user config is copied aside first; the [Gromacs]
    tools and append_suffix entries are then set from the pytest options.
    """
    use_mpi_link = config.getoption('link_gmx_mpi')
    suffix_flag = 'yes' if config.getoption('append_suffix') else 'no'
    if config_existed:
        shutil.copy(str(path_config), str(config_backup))
    gw_config.set('Gromacs', 'tools', gmx_mpi_linked(use_mpi_link))
    gw_config.set('Gromacs', 'append_suffix', suffix_flag)
    with open(str(path_config), 'w') as config_file:
        gw_config.write(config_file)


def pytest_unconfigure(config):