示例#1
0
 def __init__(self):
     """Initialize idle-manager state and wire the reset-timer signal."""
     super(IdleManager, self).__init__()
     # Tray integration handles; populated later by the tray code.
     self.qaction = None
     self.failed_icon = None
     self._is_running = False
     # Class-named logger for this manager.
     self.log = Logger().get_logger(self.__class__.__name__)
     self.signal_reset_timer.connect(self._reset_time)
示例#2
0
    def __init__(self, logger=None):
        """Store a logger (creating a class-named one when absent) and build
        the printable signature used in log messages."""
        # Any falsy logger argument falls back to a fresh class-named logger.
        self.log = logger or Logger().get_logger(self.__class__.__name__)

        self.signature = "( {} )".format(self.__class__.__name__)
示例#3
0
class ResolvePrelaunch(PypeHook):
    """
    This hook will check if current workfile path has Resolve
    project inside. If not, it initializes it and finally passes
    the path to the project by environment variable to the Resolve
    launcher shell script.
    """

    def __init__(self, logger=None):
        # Use the supplied logger or fall back to a class-named one.
        if not logger:
            self.log = Logger().get_logger(self.__class__.__name__)
        else:
            self.log = logger

        self.signature = "( {} )".format(self.__class__.__name__)

    def execute(self, *args, env: dict = None) -> bool:
        """Validate Resolve-related environment and set up the integration.

        :param env: environment mapping to validate; defaults to ``os.environ``
        :return: True on success (AssertionError raised on invalid setup)
        """
        if not env:
            env = os.environ

        # making sure python 3.6 is installed at provided path
        py36_dir = os.path.normpath(env.get("PYTHON36_RESOLVE", ""))
        # BUG FIX: the messages below contained "environments\resolve.json"
        # where "\r" was interpreted as a carriage-return escape; the
        # backslash is now escaped so the literal path is printed.
        assert os.path.isdir(py36_dir), (
            "Python 3.6 is not installed at the provided folder path. Either "
            "make sure the `environments\\resolve.json` is having correctly "
            "set `PYTHON36_RESOLVE` or make sure Python 3.6 is installed "
            f"in given path. \nPYTHON36_RESOLVE: `{py36_dir}`"
        )
        self.log.info(f"Path to Resolve Python folder: `{py36_dir}`...")
        env["PYTHON36_RESOLVE"] = py36_dir

        # setting utility scripts dir for scripts syncing
        us_dir = os.path.normpath(env.get("RESOLVE_UTILITY_SCRIPTS_DIR", ""))
        assert os.path.isdir(us_dir), (
            "Resolve utility script dir does not exists. Either make sure "
            "the `environments\\resolve.json` is having correctly set "
            "`RESOLVE_UTILITY_SCRIPTS_DIR` or reinstall DaVinci Resolve. \n"
            f"RESOLVE_UTILITY_SCRIPTS_DIR: `{us_dir}`"
        )

        # correctly format path for pre python script
        pre_py_sc = os.path.normpath(env.get("PRE_PYTHON_SCRIPT", ""))
        env["PRE_PYTHON_SCRIPT"] = pre_py_sc

        try:
            # Both must be importable for the integration to be usable.
            __import__("pype.resolve")
            __import__("pyblish")

        except ImportError as e:
            print(traceback.format_exc())
            print("pyblish: Could not load integration: %s " % e)

        else:
            # Resolve Setup integration
            importlib.reload(utils)
            utils.setup(env)

        return True
示例#4
0
    def __init__(self, tray_widget, main_window):
        """Keep references to the tray widgets and preload status icons."""
        self.tray_widget = tray_widget
        self.main_window = main_window
        self.log = Logger().get_logger(self.__class__.__name__)

        # Status icons: green = running, orange = idle, red = failed.
        for attr_name, image_name in (
            ("icon_run", "circle_green.png"),
            ("icon_stay", "circle_orange.png"),
            ("icon_failed", "circle_red.png"),
        ):
            setattr(self, attr_name, QtGui.QIcon(get_resource(image_name)))

        self.services_thread = None
示例#5
0
    def __init__(self):
        """Initialize server state, logging, presets and pick a free port."""
        super(StaticsServer, self).__init__()
        self.log = Logger().get_logger(self.__class__.__name__)

        # Tray integration handles; populated later by the tray code.
        self.qaction = None
        self.failed_icon = None
        self._is_running = False

        # Load service presets; fall back to defaults on any config failure.
        try:
            presets = config.get_presets().get('services',
                                               {}).get('statics_server')
        except Exception:
            presets = {'default_port': 8010, 'exclude_ports': []}
        self.presets = presets

        self.port = self.find_port()
示例#6
0
class AvalonApps:
    """Tray integration exposing the Avalon Launcher and Library Loader."""

    def __init__(self, main_parent=None, parent=None):
        self.log = Logger().get_logger(__name__)
        self.main_parent = main_parent
        self.parent = parent
        self.app_launcher = None
        # Set by process_modules() when the RestApiServer module is loaded;
        # initialized here so the attribute always exists.
        self.rest_api_obj = None

    def process_modules(self, modules):
        """Instantiate the REST api bridge when RestApiServer is available."""
        if "RestApiServer" in modules:
            from .rest_api import AvalonRestApi
            self.rest_api_obj = AvalonRestApi()

    # Definition of Tray menu
    def tray_menu(self, parent_menu=None):
        """Add Launcher and Library actions to the tray menu.

        :param parent_menu: menu to attach to; when None, `self.parent.menu`
            is used if available, otherwise a warning is logged and nothing
            is added.
        """
        # Actions
        if parent_menu is None:
            if self.parent is None:
                self.log.warning('Parent menu is not set')
                return
            # BUG FIX: original called `self.parent.hasattr('menu')`;
            # `hasattr` is a builtin function, not an object method.
            elif hasattr(self.parent, 'menu'):
                parent_menu = self.parent.menu
            else:
                self.log.warning('Parent menu is not set')
                return

        icon = QtGui.QIcon(launcher_lib.resource("icon", "main.png"))
        aShowLauncher = QtWidgets.QAction(icon, "&Launcher", parent_menu)
        aLibraryLoader = QtWidgets.QAction("Library", parent_menu)

        aShowLauncher.triggered.connect(self.show_launcher)
        aLibraryLoader.triggered.connect(self.show_library_loader)

        parent_menu.addAction(aShowLauncher)
        parent_menu.addAction(aLibraryLoader)

    def show_launcher(self):
        # if app_launcher don't exist create it/otherwise only show main window
        if self.app_launcher is None:
            io.install()
            APP_PATH = launcher_lib.resource("qml", "main.qml")
            self.app_launcher = launcher_widget.Launcher(APP_PATH)
        self.app_launcher.window.show()

    def show_library_loader(self):
        libraryloader.show(
            parent=self.main_parent,
            icon=self.parent.icon,
            show_projects=True,
            show_libraries=True
        )
示例#7
0
    def __init__(self, name, port, filepath):
        """Store thread identity and reset socket/subprocess state."""
        super(SocketThread, self).__init__()
        self.log = Logger().get_logger("SocketThread", "Event Thread")
        self.setName(name)

        # Thread identity and subprocess target.
        self.name = name
        self.port = port
        self.filepath = filepath

        # Runtime handles, populated once the thread runs.
        self.sock = None
        self.subproc = None
        self.connection = None

        # Lifecycle flags.
        self._is_running = False
        self.finished = False
        self.mongo_error = False
示例#8
0
    def launch_tray(self, debug=False):
        """Run tray.py.

        :param debug: if True, tray will run in debug mode (not detached)
        :type debug: bool

        .. seealso:: :func:`subprocess.Popen`
        """
        import subprocess
        from pypeapp import Logger
        from pypeapp import execute

        self._initialize()

        if debug:
            # Debug mode: run tray.py attached to this process (unbuffered)
            # and block until it exits.
            pype_setup = os.getenv('PYPE_SETUP_PATH')
            items = [pype_setup, "pypeapp", "tray.py"]
            fname = os.path.sep.join(items)

            execute([
                sys.executable,
                "-u",
                fname
                ])
            return

        # Windows-only Popen creation flag; a harmless constant elsewhere.
        DETACHED_PROCESS = 0x00000008  # noqa: N806

        pype_setup = os.getenv('PYPE_SETUP_PATH')
        items = [pype_setup, "pypeapp", "tray.py"]
        fname = os.path.sep.join(items)

        args = ["python", "-d", fname]
        if sys.platform.startswith('linux'):
            # Detach on linux by placing the child into a new process group;
            # stdout/stderr stay on the inherited streams.
            subprocess.Popen(
                args,
                universal_newlines=True,
                bufsize=1,
                # executable=sys.executable,
                env=os.environ,
                # stdin=None,
                stdout=None,
                stderr=None,
                preexec_fn=os.setpgrp
            )

        if sys.platform == 'win32':
            # On Windows use pythonw (no console window), redirect output to
            # the log file and detach with DETACHED_PROCESS.
            # NOTE(review): the log file handle is deliberately left open for
            # the lifetime of the detached child process.
            args = ["pythonw", "-d", fname]
            subprocess.Popen(
                args,
                universal_newlines=True,
                bufsize=1,
                cwd=None,
                # executable=sys.executable,
                env=os.environ,
                # stdin=None,
                stdout=open(Logger.get_file_path(), 'w+'),
                stderr=subprocess.STDOUT,
                creationflags=DETACHED_PROCESS
            )
示例#9
0
    def test_print_exception(self, capsys, monkeypatch, printer):
        """Logged errors should reach stderr when PYPE_DEBUG is 3."""
        monkeypatch.setitem(os.environ, 'PYPE_DEBUG', '3')
        lf = Logger()
        assert lf.PYPE_DEBUG == 3
        logger = Logger().get_logger('test_output', 'tests')

        printer("DEBUG LEVEL SET: {}".format(os.environ.get('PYPE_DEBUG')))

        test = {}

        try:
            test['nonexistent']
        except KeyError:
            logger.error("test access to undefined key")

        cap = capsys.readouterr()
        # BUG FIX: `cap[1] == 1` compared the captured stderr string with the
        # int 1 and could never succeed; assert on the logged message instead.
        assert "test access to undefined key" in cap[1]
示例#10
0
    def __init__(self, session, plugins_presets=None):
        '''Expects a ftrack_api.Session instance'''
        self.log = Logger().get_logger(self.__class__.__name__)
        # Accept either a plain ftrack session or pype's socket session.
        if not (isinstance(session, ftrack_api.session.Session)
                or isinstance(session, SocketSession)):
            raise Exception(
                ("Session object entered with args is instance of \"{}\""
                 " but expected instances are \"{}\" and \"{}\"").format(
                     str(type(session)), str(ftrack_api.session.Session),
                     str(SocketSession)))

        self._session = session

        # Using decorator
        self.register = self.register_decorator(self.register)
        self.launch = self.launch_log(self.launch)
        # BUG FIX: the default was a shared mutable dict (`plugins_presets={}`)
        # which every instance created without presets would alias.
        self.plugins_presets = (
            plugins_presets if plugins_presets is not None else {}
        )
示例#11
0
    def __init__(self, name, port, filepath, additional_args=None):
        """Initialize socket-thread state.

        :param name: thread name
        :param port: preferred port for the local socket
        :param filepath: python script run as a subprocess
        :param additional_args: extra CLI args for the subprocess
        """
        super(SocketThread, self).__init__()
        self.log = Logger().get_logger(self.__class__.__name__)
        self.setName(name)
        self.name = name
        self.port = port
        self.filepath = filepath
        # BUG FIX: the default was a shared mutable list (`additional_args=[]`)
        # aliased by every instance created without explicit args.
        if additional_args is None:
            additional_args = []
        self.additional_args = additional_args

        self.sock = None
        self.subproc = None
        self.connection = None
        self._is_running = False
        self.finished = False

        self.mongo_error = False

        self._temp_data = {}
示例#12
0
    def __init__(self, main_parent=None, parent=None):
        """Prepare Clockify tray-integration state and API access."""
        self.log = Logger().get_logger(self.__class__.__name__, "PypeTray")

        self.main_parent = main_parent
        self.parent = parent

        # Clockify API client and settings widgets.
        self.clockapi = ClockifyAPI()
        self.message_widget = None
        self.widget_settings = ClockifySettings(main_parent, self)
        self.widget_settings_required = None
        self.thread_timer_check = None

        # State flags.
        self.bool_thread_check_running = False
        self.bool_api_key_set = False
        self.bool_workspace_set = False
        self.bool_timer_run = False

        # Register ourselves with the API, then try to load the stored key.
        self.clockapi.set_master(self)
        self.bool_api_key_set = self.clockapi.set_api()
示例#13
0
class SocketThread(threading.Thread):
    """Thread that checks suprocess of storer of processor of events"""

    # Seconds of socket/connection inactivity before terminating.
    MAX_TIMEOUT = 35

    def __init__(self, name, port, filepath, additional_args=None):
        """Prepare thread state.

        :param name: thread name
        :param port: preferred port (incremented until a free one is found)
        :param filepath: python script launched as the subprocess
        :param additional_args: extra CLI args passed to the subprocess
        """
        super(SocketThread, self).__init__()
        self.log = Logger().get_logger(self.__class__.__name__)
        self.setName(name)
        self.name = name
        self.port = port
        self.filepath = filepath
        # BUG FIX: the default was a shared mutable list (`additional_args=[]`)
        # aliased by every instance created without explicit args.
        if additional_args is None:
            additional_args = []
        self.additional_args = additional_args

        # Runtime handles, populated in run().
        self.sock = None
        self.subproc = None
        self.connection = None
        self._is_running = False
        self.finished = False

        self.mongo_error = False

        self._temp_data = {}

    def stop(self):
        """Ask the run() loop to stop on its next iteration."""
        self._is_running = False

    def run(self):
        self._is_running = True
        time_socket = time.time()
        # Create a TCP/IP socket
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.sock = sock

        # Bind the socket to the port - skip already used ports
        while True:
            try:
                server_address = ("localhost", self.port)
                sock.bind(server_address)
                break
            except OSError:
                self.port += 1

        self.log.debug(
            "Running Socked thread on {}:{}".format(*server_address))

        # Launch the worker script; it receives the chosen port as its
        # final CLI argument so it can connect back.
        self.subproc = subprocess.Popen(
            [sys.executable, self.filepath, *self.additional_args,
             str(self.port)],
            stdin=subprocess.PIPE
        )

        # Listen for incoming connections
        sock.listen(1)
        sock.settimeout(1.0)
        while True:
            if not self._is_running:
                break
            try:
                connection, client_address = sock.accept()
                time_socket = time.time()
                connection.settimeout(1.0)
                self.connection = connection

            except socket.timeout:
                # No client yet; give up entirely after MAX_TIMEOUT seconds.
                if (time.time() - time_socket) > self.MAX_TIMEOUT:
                    self.log.error("Connection timeout passed. Terminating.")
                    self._is_running = False
                    self.subproc.terminate()
                    break
                continue

            try:
                time_con = time.time()
                # Receive the data in small chunks and retransmit it
                while True:
                    try:
                        if not self._is_running:
                            break
                        data = None
                        try:
                            data = self.get_data_from_con(connection)
                            time_con = time.time()

                        except socket.timeout:
                            if (time.time() - time_con) > self.MAX_TIMEOUT:
                                self.log.error(
                                    "Connection timeout passed. Terminating.")
                                self._is_running = False
                                self.subproc.terminate()
                                break
                            continue

                        except ConnectionResetError:
                            self._is_running = False
                            break

                        self._handle_data(connection, data)

                    except Exception:
                        # Log with traceback but keep serving the connection.
                        self.log.error("Event server process failed",
                                       exc_info=True)

            finally:
                # Clean up the connection
                connection.close()
                if self.subproc.poll() is None:
                    self.subproc.terminate()

                self.finished = True

    def get_data_from_con(self, connection):
        """Read up to 16 bytes from the connection."""
        return connection.recv(16)

    def _handle_data(self, connection, data):
        """Flag mongo errors and echo received data back to the client."""
        if not data:
            return

        if data == b"MongoError":
            self.mongo_error = True
        connection.sendall(data)
示例#14
0
class Deployment(object):
    """ Deployment class will load settings from `deploy/deploy.json` or
        if exists, use `deploy/studio/deploy.json`. Then it will process
        information, create `repos` and setup `vendors`
    """

    # Directory (relative to pype root) holding deployment files.
    _deploy_dir = 'deploy'
    # Default deployment description file name.
    _deploy_file = 'deploy.json'
    # JSON schema file used to validate the deployment file.
    _schema_file = 'deploy_schema-1.0.json'
    # Normalized path to the Pype setup root; set in __init__.
    _pype_root = None
    # Class-wide logger shared by all instances.
    _log = Logger().get_logger()

    def __init__(self, pype_root: str):
        """ Init deployment object

        This will initialize object and check if **pype_root** is valid
        location. It will normalize path.

        :param pype_root: Path to Pype setup
        :type pype_root: str
        :raises: :class:`DeployException`

        """
        normalized = os.path.normpath(pype_root)
        if not os.path.exists(normalized):
            raise DeployException(
                "PYPE_ROOT {} doesn't exists or wasn't set".format(normalized),
                100)
        self._pype_root = normalized
        # NOTE: a redundant trailing `pass` statement was removed.

    def _read_deployment_file(self, file: str) -> dict:
        """ Parse a deployment file as JSON and return its content.

            :param file: path to json file
            :type file: str
            :return: parsed json
            :rtype: dict
        """
        with open(file) as deployment_file:
            return json.load(deployment_file)

    def _read_schema(self, file: str) -> dict:
        """ Load the JSON schema used for deployment validation.

            :param file: path to schema json
            :type file: str
            :return: parsed json schema
            :rtype: dict
            :raises: :class:`DeployException` when the schema file is missing
        """
        if not os.path.exists(file):
            raise DeployException(
                "Cannot find schema to validate `{}`".format(
                    self._deploy_file), 110)
        with open(file) as schema:
            return json.load(schema)

    def _determine_deployment_file(self) -> str:
        """ Determine which deployment file to use.

            The default file shipped with **Pype** is used unless a
            subdirectory of *deploy* contains its own *deploy.json*; such an
            override takes priority.

            :return: Path to deployment file
            :rtype: str
            :raises: :class:`DeployException`

            .. note::
                If there are more then one directory, only first found is used.

        """
        deploy_root = os.path.join(self._pype_root, self._deploy_dir)
        file = os.path.join(deploy_root, self._deploy_file)
        if not os.path.exists(file):
            raise DeployException(
                "Directory `{}` doesn't exist".format(self._deploy_file), 120)

        # The first subdirectory carrying a deploy file overrides the default.
        with os.scandir(deploy_root) as entries:
            for entry in entries:
                if not entry.is_dir():
                    continue
                candidate = os.path.join(entry.path, self._deploy_file)
                if os.path.exists(candidate):
                    file = candidate
                    break
        return os.path.normpath(file)

    def _validate_schema(self, settings: dict) -> bool:
        """ Validate json deployment setting against json schema.

            :param settings: Deployment settings from parsed json
            :type settings: dict
            :return: True if validated, False if not
            :rtype: bool

            .. seealso::
                :func:`Deployment._read_schema`
                :func:`Deployment.__read_deployment_file`

        """
        schema_file = os.path.join(self._pype_root, self._deploy_dir,
                                   self._schema_file)
        schema = self._read_schema(schema_file)

        try:
            jsonschema.validate(settings, schema)
        # Both an invalid document and an invalid schema are reported the
        # same way; the two previously-duplicated handlers are merged.
        except (jsonschema.exceptions.ValidationError,
                jsonschema.exceptions.SchemaError) as e:
            self._log.error(e)
            return False
        return True

    def validate(self, skip=False) -> bool:
        """ Do deployment setting validation.

            First, deployment settings is determined. It can be default
            provided *deploy.json* or overrided one in
            *deploy/somedir/deploy.json*. This file is then validated against
            json schema. Then it will check, if stuff defined in settings is
            present and deployed.

            :param skip:    if True skip if directory not exists. Used during
                            installation where some directories will be
                            installed nevertheless.
            :type skip: bool

            :return: True if validated, otherwise throw exception
            :rtype: bool
            :raises: :class:`DeployException` With info on what is wrong

        """
        import git
        settings = self._determine_deployment_file()
        deploy = self._read_deployment_file(settings)
        term = Terminal()
        if (not self._validate_schema(deploy)):
            raise DeployException(
                "Invalid deployment file [ {} ]".format(settings), 200)

        # go throught repositories
        # NOTE(review): assumes `repositories` is always present in the
        # deployment file (schema-enforced?) — a missing key would raise
        # TypeError here; confirm against the schema.
        for ritem in deploy.get('repositories'):
            test_path = os.path.join(self._pype_root, "repos",
                                     ritem.get('name'))

            term.echo("  - validating [ {} ]".format(ritem.get('name')))
            # does repo directory exist?
            if not self._validate_is_directory(test_path):
                if skip:
                    # during installation missing repos are expected
                    continue
                raise DeployException(
                    "Repo path doesn't exist [ {} ]".format(test_path), 130)

            if not self._validate_is_repo(test_path):
                raise DeployException(
                    "Path {} exists but it is not valid repository".format(
                        test_path))

            # bare repo isn't allowed
            if self._validate_is_bare(test_path):
                raise DeployException(
                    "Repo on path [ {} ] is bare".format(test_path), 300)

            # check origin
            if not self._validate_origin(test_path, ritem.get('url')):
                raise DeployException(
                    "Repo {} origin {} should be {}.".format(
                        test_path,
                        git.Repo(test_path).remotes.origin.url,
                        ritem.get('url')), 300)

            # check we are on branch
            if ritem.get('branch'):
                if not self._validate_is_branch(test_path,
                                                ritem.get('branch')):
                    raise DeployException(
                        'repo {0} head is not on {1}(!={2}) branch'.format(
                            ritem.get('name'), ritem.get('branch'),
                            git.Repo(test_path).heads[0].name), 210)

            # check we are on ref
            if ritem.get('ref'):
                if not self._validate_is_ref(test_path, ritem.get('ref')):
                    raise DeployException(
                        'repo {0} head is not on {1}(!={2}) ref'.format(
                            ritem.get('name'), ritem.get('ref'),
                            git.Repo(test_path).heads[0].commit.hexsha), 220)
            # check tag
            if ritem.get('tag'):
                if not self._validate_is_tag(test_path, ritem.get('tag')):
                    raise DeployException(
                        'repo {0} head is not on tag {1}'.format(
                            ritem.get('name'), ritem.get('tag')), 230)

        # Go through archive files.
        if deploy.get('archive_files'):
            for item in deploy.get('archive_files'):
                test_path = os.path.join(self._pype_root,
                                         item.get('extract_path'))
                # does repo directory exist?
                if not self._validate_is_directory(test_path):
                    if skip:
                        continue
                    raise DeployException(
                        "Vendor path doesn't exist [ {} ]".format(test_path),
                        130  # noqa: E501
                    )

        return True

    def _validate_is_directory(self, path: str) -> bool:
        """ Check whether *path* refers to an existing directory.

            :param path: path to check
            :type path: str
            :return: True when path is a directory
            :rtype: bool
        """
        return os.path.isdir(path)

    def _validate_is_empty(self, path: str) -> bool:
        """ Check whether the directory at *path* contains no entries.

            :param path: path to check
            :type path: str
            :return: True when the directory is empty
            :rtype: bool
        """
        return not any(os.scandir(path))

    def _validate_is_repo(self, path: str) -> bool:
        """ Check whether *path* holds a valid git repository.

            :param path: path to check
            :type path: str
            :return: True when a repository can be opened there
            :rtype: bool
        """
        import git
        try:
            git.Repo(path)
            return True
        except git.exc.InvalidGitRepositoryError:
            return False

    def _validate_is_bare(self, path: str) -> bool:
        """ Check whether the repository at *path* is bare.

            :param path: path to check
            :type path: str
            :return: True for a bare repository
            :rtype: bool
        """
        import git
        return git.Repo(path).bare

    def _validate_is_dirty(self, path: str) -> bool:
        """ Check whether the repository at *path* has a dirty worktree.

            :param path: path to check
            :type path: str
            :return: True when the worktree has uncommitted changes
            :rtype: bool
        """
        import git
        return git.Repo(path).is_dirty()

    def _validate_is_branch(self, path: str, branch: str) -> bool:
        """ Check whether the repository at *path* has *branch* active.

            :param path: path to check
            :type path: str
            :param branch: name of branch
            :type branch: str
            :return: True when the active branch matches
            :rtype: bool
        """
        import git
        active = str(git.Repo(path).active_branch)
        return active == str(branch)

    def _validate_is_ref(self, path: str, ref: str) -> bool:
        """ Check whether the repository head is on the given ref.

            :param path: path to check
            :type path: str
            :param ref: (possibly abbreviated) commit hash
            :type ref: str
            :return: True when the head commit hash starts with *ref*
            :rtype: bool
        """
        import git
        repo = git.Repo(path)
        return repo.heads[0].commit.hexsha.startswith(ref)

    def _validate_is_tag(self, path: str, tag: str) -> bool:
        """ Check whether the repository head describes to the given tag.

             :param path: path to check
             :type path: str
             :param tag: tag name
             :type tag: str
             :return: True when `git describe --tags` matches *tag*
             :rtype: bool
        """
        import git
        # `describe --tags` names the tag reachable from the current head.
        described = git.Git(path).describe('--tags')
        return described == tag

    def _validate_origin(self, path: str, origin: str) -> bool:
        """ Check whether the repository remote origin matches *origin*.

            :param path: path to check
            :type path: str
            :param origin: url of remote origin
            :type origin: str
            :return: True when the origin url matches
            :rtype: bool
        """
        import git
        repo = git.Repo(path)

        # A repo without any remotes has no `origin` attribute; treat that
        # as a mismatch since it cannot possibly point at *origin*.
        try:
            return repo.remotes.origin.url == origin
        except AttributeError:
            return False

    def _recreate_repository(self, path: str, repo: dict):
        """ Recreate (remove and clone) repository on specifed path.

            :param path: string path to repository
            :type path: string
            :param repo: dict representing item from deployment file
            :type repo: dict
            :raises: :class:`DeployException`
        """
        import git
        from pypeapp.lib.git_progress import _GitProgress
        try:
            # Remove whatever currently occupies the target path.
            shutil.rmtree(path)
        except (OSError, PermissionError) as e:
            raise DeployException(("Cannot remove existing non"
                                   " git repository.{}".format(e))) from e
        else:
            # clone repo
            try:
                # Branch takes priority over tag when both are provided.
                git.Repo.clone_from(repo.get('url'),
                                    path,
                                    progress=_GitProgress(),
                                    env=None,
                                    b=repo.get('branch') or repo.get('tag'),
                                    recursive=True)
            except git.exc.GitCommandError as e:
                raise DeployException("Git clone failed for {}".format(
                    repo.get("url"))) from e

    def _download_file(self, url, path):
        """ Stream *url* to the file at *path* in 1 KiB chunks.

            :param url: source url
            :param path: destination file path
            :return: True when the file exists after the download
            :rtype: bool
        """
        r = requests.get(url, stream=True)
        try:
            with open(path, "wb") as f:
                for chunk in r.iter_content(chunk_size=1024):
                    if chunk:
                        f.write(chunk)
        finally:
            # BUG FIX: the response was never closed, leaking the connection.
            r.close()
        return os.path.exists(path)

    def deploy(self, force=False):
        """ Do repositories deployment and install python dependencies.

            Go throught deployment file and install repositories specified
            there. Also add additional python dependencies with pip.

            :param force:   overwrite existng repos if it's working tree is
                            dirty.
            :type force: bool
            :raises: :class:`DeployException`

        """
        import git
        from pypeapp.lib.git_progress import _GitProgress
        settings = self._determine_deployment_file()
        deploy = self._read_deployment_file(settings)
        term = Terminal()

        # go throught repositories
        term.echo(">>> Deploying repositories ...")
        for ritem in deploy.get('repositories'):
            path = os.path.join(self._pype_root, "repos", ritem.get('name'))

            term.echo(" -- processing [ {} / {} ]".format(
                ritem.get('name'),
                ritem.get('branch') or ritem.get('tag')))

            if self._validate_is_directory(path):
                # path exists
                # is it repository?
                if not self._validate_is_repo(path):
                    # no, remove dir no matter of content
                    term.echo("  - removing existing directory and cloning...")
                    self._recreate_repository(path, ritem)
                else:
                    # dir is repository
                    repo = git.Repo(path)
                    # is it right one?
                    if not self._validate_origin(path, str(ritem.get('url'))):
                        # repository has different origin then specified
                        term.echo("!!! repository has different origin. ")
                        if (self._validate_is_dirty(path) is True
                                and force is False):
                            raise DeployException(('Invalid repository on '
                                                   'path {}'.format(path)))
                        term.echo(" -> recreating repository. ")
                        self._recreate_repository(path, ritem)
                        pass
                    if (self._validate_is_dirty(path) is True
                            and force is False):
                        raise DeployException(("Repo on path [ {} ] has dirty"
                                               " worktree").format(path), 300)

                    # are we on correct branch?
                    if not ritem.get('tag'):
                        if not self._validate_is_branch(
                                path, ritem.get('branch')):

                            term.echo("  . switching to [ {} ] ...".format(
                                ritem.get('branch')))
                            branch = repo.create_head(ritem.get('branch'),
                                                      'HEAD')

                            branch.checkout(force=force)

                    # update repo
                    term.echo("  . updating ...")
                    repo.remotes.origin.fetch(tags=True, force=True)
                    # build refspec
                    if ritem.get('branch'):
                        refspec = "refs/heads/{}".format(ritem.get('branch'))
                        repo.remotes.origin.pull(refspec)
                    elif ritem.get('tag'):
                        tags = repo.tags
                        if ritem.get('tag') not in tags:
                            raise DeployException(
                                ("Tag {} is missing on remote "
                                 "origin").format(ritem.get('tag')))
                        t = tags[ritem.get('tag')]
                        term.echo("  . tag: [{}, {} / {}]".format(
                            t.name, t.commit, t.commit.committed_date))
                        repo.remotes.origin.pull(ritem.get('tag'))

            else:
                # path doesn't exist, clone
                try:
                    git.Repo.clone_from(ritem.get('url'),
                                        path,
                                        progress=_GitProgress(),
                                        env=None,
                                        b=ritem.get('branch')
                                        or ritem.get('tag'),
                                        recursive=True)
                except git.exc.GitCommandError as e:
                    raise DeployException("Git clone failed for {}".format(
                        ritem.get("url"))) from e

        # Go through zip files.
        term.echo(">>> Deploying archive files ...")
        if deploy.get('archive_files'):
            for item in deploy.get("archive_files"):
                term.echo(" -- processing [ {} ]".format(
                    item.get("extract_path")))
                path = os.path.normpath(
                    os.path.join(self._pype_root, item.get("extract_path")))

                if self._validate_is_directory(path):
                    term.echo("  - removing existing directory.")
                    shutil.rmtree(path)

                # Download archive file.
                archive_type = item.get('archive_type')
                basename = os.path.split(path)[-1]
                filename = '.'.join([basename, archive_type])
                archive_file_path = tempfile.mkdtemp(basename + '_archive')
                archive_file_path = os.path.join(archive_file_path, filename)

                if item.get("vendor"):
                    source = os.path.join(os.environ.get("PYPE_ROOT"),
                                          'vendor', 'packages',
                                          item.get("vendor"))
                    if not os.path.isfile(source):
                        raise DeployException(
                            "Local archive {} doesn't exist".format(source))
                    shutil.copyfile(source, archive_file_path)

                if item.get("url"):
                    term.echo("  - downloading [ {} ]".format(item.get("url")))
                    success = self._download_file(item.get("url"),
                                                  archive_file_path)

                    if not success:
                        raise DeployException(
                            "Failed to download [ {} ]".format(
                                item.get("url")),
                            130  # noqa: E501
                        )

                # checksum
                if item.get('md5_url'):
                    response = urlopen(item.get('md5_url'))
                    md5 = response.read().decode('ascii').split(" ")[0]
                    calc = self.calculate_checksum(archive_file_path)
                    if md5 != calc:
                        raise DeployException(
                            "Checksum failed {} != {} on {}".format(
                                md5, calc, archive_file_path))
                # Extract files from archive
                if archive_type in ['zip']:
                    zip_file = zipfile.ZipFile(archive_file_path)
                    zip_file.extractall(path)

                elif archive_type in [
                        'tar', 'tgz', 'tar.gz', 'tar.xz', 'tar.bz2'
                ]:
                    if archive_type == 'tar':
                        tar_type = 'r:'
                    elif archive_type.endswith('xz'):
                        tar_type = 'r:xz'
                    elif archive_type.endswith('gz'):
                        tar_type = 'r:gz'
                    elif archive_type.endswith('bz2'):
                        tar_type = 'r:bz2'
                    else:
                        tar_type = 'r:*'
                    try:
                        tar_file = tarfile.open(archive_file_path, tar_type)
                    except tarfile.ReadError:
                        raise DeployException(
                            "corrupted archive: also consider to download the "
                            "archive manually, add its path to the url, run "
                            "`./pype deploy`")
                    tar_file.extractall(path)
                    tar_file.close()

                # Move folders/files if skip first subfolder is set
                if item.get('skip_first_subfolder', False):
                    self.move_subfolders_to_main(path)

        # install python dependencies
        term.echo(">>> Adding python dependencies ...")
        for pitem in deploy.get('pip'):
            term.echo(" -- processing [ {} ]".format(pitem))
            try:
                subprocess.check_output(
                    [sys.executable, '-m', 'pip', 'install', pitem])
            except subprocess.CalledProcessError as e:
                raise DeployException('PIP command failed with {}'.format(
                    e.returncode)) from e

        # TODO(antirotor): This should be removed later as no changes
        # in requirements.txt should be made automatically. For that,
        # use `pype update-requirements` command

        # term.echo(">>> Updating requirements ...")
        # try:
        #     out = subprocess.check_output(
        #         [sys.executable,
        #          '-m', 'pip', 'freeze', '--disable-pip-version-check'],
        #         universal_newlines=True)
        # except subprocess.CalledProcessError as e:
        #     raise DeployException(
        #         'PIP command failed with {}'.format(e.returncode)
        #         ) from e
        #
        # r_path = os.path.join(
        #     os.path.abspath("."), 'pypeapp', 'requirements.txt')
        # with open(r_path, 'w') as r_write:
        #     r_write.write(out)
        # pass

    def move_subfolders_to_main(self, path):
        """Move the content of the single subfolder of *path* up into *path*.

        Used when an extracted archive wraps all of its content in one
        extra top-level directory (the ``skip_first_subfolder`` option).

        :param path: directory expected to contain exactly one subfolder
        :type path: str
        :raises: :class:`DeployException` if *path* does not contain
            exactly one entry
        """
        with os.scandir(path) as main_folder:
            sub_folders = [entry.path for entry in main_folder]

        if len(sub_folders) != 1:
            # fixed typo in user-facing message ("more then" -> "more than")
            raise DeployException("Archive file has more than one main folder."
                                  " Please change 'skip_first_subfolder'"
                                  " for '{}'".format(path))
        sub_folder_path = sub_folders[0]
        with os.scandir(sub_folder_path) as sub_folder:
            paths_to_move = [entry.path for entry in sub_folder]

        for path_to_move in paths_to_move:
            shutil.move(path_to_move, path)

        # remove the now-empty wrapper directory
        if len(os.listdir(sub_folder_path)) == 0:
            shutil.rmtree(sub_folder_path)

    def get_deployment_paths(self) -> list:
        """Collect repository directories defined in **deploy.json**.

            :returns: absolute repository paths ``[str, str, ...]``
            :rtype: list
            :raises: :class:`DeployException` when the deployment file
                fails schema validation
        """
        settings = self._determine_deployment_file()
        deploy = self._read_deployment_file(settings)
        if not self._validate_schema(deploy):
            raise DeployException(
                "Invalid deployment file [ {} ]".format(settings), 200)

        return [
            os.path.join(self._pype_root, "repos", repo.get('name'))
            for repo in deploy.get('repositories')
        ]

    def get_environment_data(self):
        """Read default environment file list and PYPE_CONFIG folder path
            from **deploy.json**.

            :returns: list of envs ``[str, str, ...]``, config_path ``str``
            :rtype: list,str
        """
        settings_file = self._determine_deployment_file()
        deploy_data = self._read_deployment_file(settings_file)

        config_template = deploy_data.get('PYPE_CONFIG')
        config_path = os.path.normpath(
            config_template.format(PYPE_ROOT=self._pype_root))

        return deploy_data.get("init_env"), config_path

    def calculate_checksum(self, fname):
        """
        Return md5 hex checksum of file, reading it in 64kB chunks so
        large files do not need to fit into memory.

        :param fname: file name
        :type fname: str
        :returns: md5 checkum hex encoded
        :rtype: str
        """
        chunk_size = 1 << 16  # 64kB
        digest = hashlib.md5()
        with open(fname, 'rb') as stream:
            # iter() with a sentinel keeps reading until EOF (b'')
            for chunk in iter(lambda: stream.read(chunk_size), b''):
                digest.update(chunk)
        return digest.hexdigest()

    def localize_package(self, path):
        """
        Copy package directory to pype environment "localized" folder.
        Useful for storing binaries that are not accessible when calling over
        UNC paths or similar scenarios.

        :param path: source
        :type path: str
        """
        name = path.split(os.path.sep)[-1]
        target = os.path.join(
            os.environ.get("PYPE_ENV"), "localized", name)
        # wipe any previous copy first; copytree requires the target
        # not to exist
        if os.path.isdir(target):
            Terminal().echo("*** destination already exists "
                            "[ {} ], removing".format(target))
            shutil.rmtree(target)
        shutil.copytree(path, target)
示例#15
0
 def __init__(self, message, code=0):
     """Store the error *code* and log *message* as an error.

     :param message: human readable error description
     :param code: optional numeric error code (default 0)
     """
     super().__init__(message)
     self._code = code
     Logger().get_logger('deployment').error(message)
示例#16
0
import sys
import time
import datetime
import signal
import threading

from ftrack_server import FtrackServer
import ftrack_api
from pypeapp import Logger

# Module-wide logger for the legacy event server entry point.
log = Logger().get_logger("Event Server Legacy")


class TimerChecker(threading.Thread):
    max_time_out = 35

    def __init__(self, server, session):
        self.server = server
        self.session = session
        self.is_running = False
        self.failed = False
        super().__init__()

    def stop(self):
        # Signal the run() loop to exit on its next iteration.
        self.is_running = False

    def run(self):
        start = datetime.datetime.now()
        self.is_running = True
        connected = False
示例#17
0
import os
import sys
import json
import logging
import subprocess
from operator import itemgetter
import ftrack_api
from pype.ftrack import BaseAction
from pypeapp import Logger, config

# Module-wide logger, named after this module.
log = Logger().get_logger(__name__)


class DJVViewAction(BaseAction):
    """Launch DJVView action."""
    identifier = "djvview-launch-action"
    label = "DJV View"
    description = "DJV View Launcher"
    icon = '{}/app_icons/djvView.png'.format(
        os.environ.get('PYPE_STATICS_SERVER', ''))
    type = 'Application'

    def __init__(self, session, plugins_presets):
        '''Expects a ftrack_api.Session instance'''
        super().__init__(session, plugins_presets)
        self.djv_path = None

        self.config_data = config.get_presets()['djv_view']['config']
        self.set_djv_path()

        if self.djv_path is None:
示例#18
0
class BaseHandler(object):
    '''Custom Action base class

    <label> - a descriptive string identifying your action.
    <variant>   - To group actions together, give them the same
                  label and specify a unique variant per action.
    <identifier>  - a unique identifier for app.
    <description>   - a verbose descriptive text for your action
    <icon>  - icon in ftrack
    '''
    # Default priority is 100
    priority = 100
    # Type is just for logging purpose (e.g.: Action, Event, Application,...)
    type = 'No-type'
    # When True, register() becomes a no-op for this handler
    # (see register_decorator).
    ignore_me = False
    # Identifiers of actions that must run before this one
    # (see _handle_preactions).
    preactions = []

    # NOTE(review): mutable default argument -- the same dict object is
    # shared by every instantiation that relies on the default.
    def __init__(self, session, plugins_presets={}):
        '''Expects a ftrack_api.Session instance'''
        self.log = Logger().get_logger(self.__class__.__name__)
        if not(
            isinstance(session, ftrack_api.session.Session) or
            isinstance(session, SocketSession)
        ):
            raise Exception((
                "Session object entered with args is instance of \"{}\""
                " but expected instances are \"{}\" and \"{}\""
            ).format(
                str(type(session)),
                str(ftrack_api.session.Session),
                str(SocketSession)
            ))

        self._session = session

        # Using decorator
        # Wrap register/launch so every subclass gets preset application,
        # timing and error logging for free.
        self.register = self.register_decorator(self.register)
        self.launch = self.launch_log(self.launch)
        self.plugins_presets = plugins_presets

    # Decorator
    def register_decorator(self, func):
        """Wrap *func* (a register method): apply presets, honor
        ``ignore_me``, run preregister checks and log the outcome.
        """
        @functools.wraps(func)
        def wrapper_register(*args, **kwargs):

            # Overwrite class attributes from presets keyed by class name.
            presets_data = self.plugins_presets.get(self.__class__.__name__)
            if presets_data:
                for key, value in presets_data.items():
                    if not hasattr(self, key):
                        continue
                    setattr(self, key, value)

            if self.ignore_me:
                return

            label = self.__class__.__name__
            if hasattr(self, 'label'):
                # NOTE(review): assumes `variant` exists whenever `label`
                # does (launch_log guards with hasattr) -- confirm.
                if self.variant is None:
                    label = self.label
                else:
                    label = '{} {}'.format(self.label, self.variant)
            try:
                self._preregister()

                start_time = time.perf_counter()
                func(*args, **kwargs)
                end_time = time.perf_counter()
                run_time = end_time - start_time
                self.log.info((
                    '{} "{}" - Registered successfully ({:.4f}sec)'
                ).format(self.type, label, run_time))
            except MissingPermision as MPE:
                self.log.info((
                    '!{} "{}" - You\'re missing required {} permissions'
                ).format(self.type, label, str(MPE)))
            except AssertionError as ae:
                self.log.warning((
                    '!{} "{}" - {}'
                ).format(self.type, label, str(ae)))
            except NotImplementedError:
                self.log.error((
                    '{} "{}" - Register method is not implemented'
                ).format(self.type, label))
            except PreregisterException as exc:
                self.log.warning((
                    '{} "{}" - {}'
                ).format(self.type, label, str(exc)))
            except Exception as e:
                self.log.error('{} "{}" - Registration failed ({})'.format(
                    self.type, label, str(e))
                )
        return wrapper_register

    # Decorator
    def launch_log(self, func):
        """Wrap *func* (a launch method): log start/finish and convert
        any exception into a failure result after rolling the session back.
        """
        @functools.wraps(func)
        def wrapper_launch(*args, **kwargs):
            label = self.__class__.__name__
            if hasattr(self, 'label'):
                label = self.label
                if hasattr(self, 'variant'):
                    if self.variant is not None:
                        label = '{} {}'.format(self.label, self.variant)

            self.log.info(('{} "{}": Launched').format(self.type, label))
            try:
                return func(*args, **kwargs)
            except Exception as exc:
                # Discard any half-applied session changes before reporting.
                self.session.rollback()
                msg = '{} "{}": Failed ({})'.format(self.type, label, str(exc))
                self.log.error(msg, exc_info=True)
                return {
                    'success': False,
                    'message': msg
                }
            finally:
                self.log.info(('{} "{}": Finished').format(self.type, label))
        return wrapper_launch

    @property
    def session(self):
        '''Return current session.'''
        return self._session

    def reset_session(self):
        # Drop all local changes and cached state of the session.
        self.session.reset()

    def _preregister(self):
        """Run permission and custom preregister checks; raise
        :class:`MissingPermision` or :class:`PreregisterException` on failure.
        """
        # If a role_list is defined, the API user must hold at least one of
        # those security roles (compared case-insensitively).
        if hasattr(self, "role_list") and len(self.role_list) > 0:
            username = self.session.api_user
            user = self.session.query(
                'User where username is "{}"'.format(username)
            ).one()
            available = False
            lowercase_rolelist = [x.lower() for x in self.role_list]
            for role in user['user_security_roles']:
                if role['security_role']['name'].lower() in lowercase_rolelist:
                    available = True
                    break
            if available is False:
                raise MissingPermision

        # Custom validations
        result = self.preregister()
        if result is None:
            self.log.debug((
                "\"{}\" 'preregister' method returned 'None'. Expected it"
                " didn't fail and continue as preregister returned True."
            ).format(self.__class__.__name__))
            return

        if result is True:
            return
        # Anything else is treated as failure; a string becomes the message.
        msg = None
        if isinstance(result, str):
            msg = result
        raise PreregisterException(msg)

    def preregister(self):
        '''
        Preregister conditions.
        Registration continues if returns True.
        '''
        return True

    def register(self):
        '''
        Registers the action, subscribing the discover and launch topics.
        Is decorated by register_log
        '''

        raise NotImplementedError()

    def _translate_event(self, event, session=None):
        '''Return *event* translated structure to be used with the API.'''
        if session is None:
            session = self.session

        # Reuse entities resolved by a previous call when they are usable.
        _entities = event['data'].get('entities_object', None)
        if (
            _entities is None or
            _entities[0].get(
                'link', None
            ) == ftrack_api.symbol.NOT_SET
        ):
            _entities = self._get_entities(event)
            event['data']['entities_object'] = _entities

        return _entities

    def _get_entities(self, event, session=None, ignore=None):
        """Resolve the event's selection into API entity objects.

        :param event: ftrack event carrying ``data.selection``
        :param session: optional session override (default: own session,
            with its local cache cleared first)
        :param ignore: entityType name(s) to exclude from the result
        """
        entities = []
        selection = event['data'].get('selection')
        if not selection:
            return entities

        if ignore is None:
            ignore = []
        elif isinstance(ignore, str):
            ignore = [ignore]

        filtered_selection = []
        for entity in selection:
            if entity['entityType'] not in ignore:
                filtered_selection.append(entity)

        if not filtered_selection:
            return entities

        if session is None:
            session = self.session
            # Clear cache so fresh entity data is fetched.
            session._local_cache.clear()

        for entity in filtered_selection:
            entities.append(session.get(
                self._get_entity_type(entity, session),
                entity.get('entityId')
            ))

        return entities

    def _get_entity_type(self, entity, session=None):
        '''Return translated entity type that can be used with API.'''
        # Get entity type and make sure it is lower cased. Most places except
        # the component tab in the Sidebar will use lower case notation.
        entity_type = entity.get('entityType').replace('_', '').lower()

        if session is None:
            session = self.session

        # Prefer schemas whose alias matches the incoming type name.
        for schema in self.session.schemas:
            alias_for = schema.get('alias_for')

            if (
                alias_for and isinstance(alias_for, str) and
                alias_for.lower() == entity_type
            ):
                return schema['id']

        # Fall back to a direct schema-id match.
        for schema in self.session.schemas:
            if schema['id'].lower() == entity_type:
                return schema['id']

        raise ValueError(
            'Unable to translate entity type: {0}.'.format(entity_type)
        )

    def _launch(self, event):
        # Start from a clean session state before delegating to launch().
        self.session.rollback()
        self.session._local_cache.clear()

        self.launch(self.session, event)

    def launch(self, session, event):
        '''Callback method for the custom action.

        return either a bool ( True if successful or False if the action failed )
        or a dictionary with the keys `message` and `success`, the message should be a
        string and will be displayed as feedback to the user, success should be a bool,
        True if successful or False if the action failed.

        *session* is a `ftrack_api.Session` instance

        *entities* is a list of tuples each containing the entity type and the entity id.
        If the entity is a hierarchical you will always get the entity
        type TypedContext, once retrieved through a get operation you
        will have the "real" entity type ie. example Shot, Sequence
        or Asset Build.

        *event* the unmodified original event

        '''
        raise NotImplementedError()

    def _handle_preactions(self, session, event):
        """Trigger configured preactions, then re-trigger this action.

        Returns True when this action may proceed now, False when it was
        deferred behind its preactions.
        """
        # If preactions are not set
        if len(self.preactions) == 0:
            return True
        # If no selection
        selection = event.get('data', {}).get('selection', None)
        if (selection is None):
            return False
        # If preactions were already started
        if event['data'].get('preactions_launched', None) is True:
            return True

        # Launch preactions
        for preaction in self.preactions:
            self.trigger_action(preaction, event)

        # Relaunch this action
        additional_data = {"preactions_launched": True}
        self.trigger_action(
            self.identifier, event, additional_event_data=additional_data
        )

        return False

    def _handle_result(self, result):
        '''Validate the returned result from the action callback'''
        # Normalize a bare bool into the dict form ftrack expects.
        if isinstance(result, bool):
            if result is True:
                result = {
                    'success': result,
                    'message': (
                        '{0} launched successfully.'.format(self.label)
                    )
                }
            else:
                result = {
                    'success': result,
                    'message': (
                        '{0} launch failed.'.format(self.label)
                    )
                }

        elif isinstance(result, dict):
            # A dict without 'items' must carry both 'success' and 'message'.
            items = 'items' in result
            if items is False:
                for key in ('success', 'message'):
                    if key in result:
                        continue

                    raise KeyError(
                        'Missing required key: {0}.'.format(key)
                    )

        return result

    def show_message(self, event, input_message, result=False):
        """
        Shows message to user who triggered event
        - event - just source of user id
        - input_message - message that is shown to user
        - result - changes color of message (based on ftrack settings)
            - True = Violet
            - False = Red
        """
        if not isinstance(result, bool):
            result = False

        # Silently ignore messages that cannot be stringified.
        try:
            message = str(input_message)
        except Exception:
            return

        user_id = event['source']['user']['id']
        target = (
            'applicationId=ftrack.client.web and user.id="{0}"'
        ).format(user_id)
        self.session.event_hub.publish(
            ftrack_api.event.base.Event(
                topic='ftrack.action.trigger-user-interface',
                data=dict(
                    type='message',
                    success=result,
                    message=message
                ),
                target=target
            ),
            on_error='ignore'
        )

    def show_interface(
        self, items, title='',
        event=None, user=None, username=None, user_id=None
    ):
        """
        Shows interface to user
        - to identify user must be entered one of args:
            event, user, username, user_id
        - 'items' must be list containing Ftrack interface items
        """
        if not any([event, user, username, user_id]):
            raise TypeError((
                'Missing argument `show_interface` requires one of args:'
                ' event (ftrack_api Event object),'
                ' user (ftrack_api User object)'
                ' username (string) or user_id (string)'
            ))

        # Resolve user id from whichever identification was passed.
        if event:
            user_id = event['source']['user']['id']
        elif user:
            user_id = user['id']
        else:
            if user_id:
                key = 'id'
                value = user_id
            else:
                key = 'username'
                value = username

            user = self.session.query(
                'User where {} is "{}"'.format(key, value)
            ).first()

            if not user:
                raise TypeError((
                    'Ftrack user with {} "{}" was not found!'
                ).format(key, value))

            user_id = user['id']

        target = (
            'applicationId=ftrack.client.web and user.id="{0}"'
        ).format(user_id)

        self.session.event_hub.publish(
            ftrack_api.event.base.Event(
                topic='ftrack.action.trigger-user-interface',
                data=dict(
                    type='widget',
                    items=items,
                    title=title
                ),
                target=target
            ),
            on_error='ignore'
        )

    def show_interface_from_dict(
        self, messages, title="", event=None,
        user=None, username=None, user_id=None
    ):
        """Build label items from a {subtitle: message(s)} dict and show
        them via :meth:`show_interface`.
        """
        if not messages:
            self.log.debug("No messages to show! (messages dict is empty)")
            return
        items = []
        splitter = {'type': 'label', 'value': '---'}
        first = True
        for key, value in messages.items():
            # Separate sections with a splitter, except before the first one.
            if not first:
                items.append(splitter)
            else:
                first = False

            subtitle = {'type': 'label', 'value': '<h3>{}</h3>'.format(key)}
            items.append(subtitle)
            if isinstance(value, list):
                for item in value:
                    message = {
                        'type': 'label', 'value': '<p>{}</p>'.format(item)
                    }
                    items.append(message)
            else:
                message = {'type': 'label', 'value': '<p>{}</p>'.format(value)}
                items.append(message)

        self.show_interface(items, title, event, user, username, user_id)

    # NOTE(review): mutable default for `additional_event_data` -- shared
    # across calls relying on the default.
    def trigger_action(
        self, action_name, event=None, session=None,
        selection=None, user_data=None,
        topic="ftrack.action.launch", additional_event_data={},
        on_error="ignore"
    ):
        """Publish an event that triggers *action_name*.

        Selection and user data are taken from explicit arguments first,
        falling back to *event*; without both, nothing is triggered.
        """
        self.log.debug("Triggering action \"{}\" Begins".format(action_name))

        if not session:
            session = self.session

        # Getting selection and user data
        _selection = None
        _user_data = None

        if event:
            _selection = event.get("data", {}).get("selection")
            _user_data = event.get("source", {}).get("user")

        if selection is not None:
            _selection = selection

        if user_data is not None:
            _user_data = user_data

        # Without selection and user data skip triggering
        msg = "Can't trigger \"{}\" action without {}."
        if _selection is None:
            self.log.error(msg.format(action_name, "selection"))
            return

        if _user_data is None:
            self.log.error(msg.format(action_name, "user data"))
            return

        _event_data = {
            "actionIdentifier": action_name,
            "selection": _selection
        }

        # Add additional data
        if additional_event_data:
            _event_data.update(additional_event_data)

        # Create and trigger event
        session.event_hub.publish(
            ftrack_api.event.base.Event(
                topic=topic,
                data=_event_data,
                source=dict(user=_user_data)
            ),
            on_error=on_error
        )
        self.log.debug(
            "Action \"{}\" Triggered successfully".format(action_name)
        )

    # NOTE(review): mutable default for `event_data` -- shared across
    # calls relying on the default.
    def trigger_event(
        self, topic, event_data={}, session=None, source=None,
        event=None, on_error="ignore"
    ):
        """Publish a generic event on *topic*; source falls back to the
        source of *event* when not given explicitly.
        """
        if session is None:
            session = self.session

        if not source and event:
            source = event.get("source")
        # Create and trigger event
        event = ftrack_api.event.base.Event(
            topic=topic,
            data=event_data,
            source=source
        )
        session.event_hub.publish(event, on_error=on_error)

        self.log.debug((
            "Publishing event: {}"
        ).format(str(event.__dict__)))

    def get_project_from_entity(self, entity):
        """Return the Project entity that *entity* belongs to, walking up
        through version/asset relations when necessary.
        """
        low_entity_type = entity.entity_type.lower()
        if low_entity_type == "project":
            return entity

        if "project" in entity:
            # reviewsession, task(Task, Shot, Sequence,...)
            return entity["project"]

        if low_entity_type == "filecomponent":
            entity = entity["version"]
            low_entity_type = entity.entity_type.lower()

        if low_entity_type == "assetversion":
            asset = entity["asset"]
            if asset:
                parent = asset["parent"]
                if parent:
                    return parent["project"]

        # Last resort: first element of the entity's link is the project.
        project_data = entity["link"][0]
        return self.session.query(
            "Project where id is {}".format(project_data["id"])
        ).one()
示例#19
0
    def test_console_output(self, capsys, monkeypatch, printer):
        """Emit one message per log level and match the ANSI-colored
        console output against the expected patterns.
        """
        # Force maximum verbosity so every level reaches the console.
        monkeypatch.setitem(os.environ, 'PYPE_DEBUG', '3')
        lf = Logger()
        assert lf.PYPE_DEBUG == 3
        logger = Logger().get_logger('test_output', 'tests')

        printer("DEBUG LEVEL SET: {}".format(os.environ.get('PYPE_DEBUG')))

        # critical
        # Note: each capsys.readouterr() call consumes the captured output,
        # so the emit/capture/assert order below is significant.
        printer("testing critical level")
        logger.critical("CRITICAL TEST")
        cap = capsys.readouterr()
        cri_regex = re.compile(
            r'\x1b\[1m\x1b\[31m!!! CRI: \x1b\[0m.* \x1b\[1m\x1b\[32m>>> \x1b\[0m\x1b\[92m{ test_output }\x1b\[0m: \x1b\[1m\x1b\[92m\[ \x1b\[0mCRITICAL TEST \x1b\[1m\x1b\[92m]\x1b\[0m \x1b\[0m\n'
        )  # noqa: E501
        assert cri_regex.match(cap[1])

        # error
        printer("testing error level")
        logger.error("ERROR TEST")
        cap = capsys.readouterr()
        err_regex = re.compile(
            r'\x1b\[1m\x1b\[91m!!! ERR: \x1b\[0m.* \x1b\[1m\x1b\[32m>>> \x1b\[0m\x1b\[92m{ test_output }\x1b\[0m: \x1b\[1m\x1b\[92m\[ \x1b\[0m\x1b\[1m\x1b\[91mERROR\x1b\[0m TEST \x1b\[1m\x1b\[92m]\x1b\[0m \x1b\[0m\n'
        )  # noqa: E501
        assert err_regex.match(cap[1])

        # warn
        printer("testing warning level")
        logger.warning("WARNING TEST")
        cap = capsys.readouterr()
        warn_regex = re.compile(
            r'\x1b\[1m\x1b\[93m\*\*\* WRN\x1b\[0m: \x1b\[1m\x1b\[32m>>> \x1b\[0m\x1b\[92m{ test_output }\x1b\[0m: \x1b\[1m\x1b\[92m\[ \x1b\[0mWARNING TEST \x1b\[1m\x1b\[92m]\x1b\[0m \x1b\[0m\n'
        )  # noqa: E501
        assert warn_regex.match(cap[1])

        # info
        printer("testing info level")
        logger.info("INFO TEST")
        cap = capsys.readouterr()
        info_regex = re.compile(
            r'\x1b\[1m\x1b\[32m>>> \x1b\[0m\x1b\[1m\x1b\[92m\[ \x1b\[0mINFO TEST \x1b\[1m\x1b\[92m]\x1b\[0m \x1b\[0m\n'
        )  # noqa: E501
        assert info_regex.match(cap[1])

        # debug
        printer("testing debug level")
        logger.debug("DEBUG TEST")
        cap = capsys.readouterr()
        debug_regex = re.compile(
            r'\x1b\[1m\x1b\[33m  - \x1b\[0m\x1b\[92m{ test_output }\x1b\[0m: \x1b\[1m\x1b\[92m\[ \x1b\[0mDEBUG TEST \x1b\[1m\x1b\[92m]\x1b\[0m \x1b\[0m\n'
        )  # noqa: E501
        assert debug_regex.match(cap[1])
示例#20
0
File: lib.py  Project: tokejepsen/pype
class ProcessEventHub(SocketBaseEventHub):
    """Event hub that takes events from MongoDB instead of the wire.

    A separate storer process writes incoming ftrack events into Mongo;
    this hub loads the not-yet-processed ones, dispatches them to the
    registered handlers and marks them as processed afterwards.
    """

    # Identifier sent by the heartbeat mechanism for this hub type.
    hearbeat_msg = b"processor"
    # Shared Mongo connection info, resolved once at class definition time.
    url, database, table_name = get_ftrack_event_mongo_info()

    is_table_created = False
    pypelog = Logger().get_logger("Session Processor")

    def __init__(self, *args, **kwargs):
        # Connection is only configured here; `install()` happens later in
        # `prepare_dbcon` so failures surface where they can be reported.
        self.dbcon = DbConnector(mongo_url=self.url,
                                 database_name=self.database,
                                 table_name=self.table_name)
        super(ProcessEventHub, self).__init__(*args, **kwargs)

    def prepare_dbcon(self):
        """Connect to Mongo and verify access; exit the process on failure."""
        try:
            self.dbcon.install()
            # Forces a real server round-trip so connection problems are
            # detected here rather than on first use.
            self.dbcon._database.list_collection_names()
        except pymongo.errors.AutoReconnect:
            self.pypelog.error(
                "Mongo server \"{}\" is not responding, exiting.".format(
                    os.environ["AVALON_MONGO"]))
            sys.exit(0)

        except pymongo.errors.OperationFailure:
            self.pypelog.error(
                ("Error with Mongo access, probably permissions."
                 "Check if exist database with name \"{}\""
                 " and collection \"{}\" inside.").format(
                     self.database, self.table_name))
            # Notify the supervising process through the socket before dying.
            self.sock.sendall(b"MongoError")
            sys.exit(0)

    def wait(self, duration=None):
        """Overriden wait

        Event are loaded from Mongo DB when queue is empty. Handled event is
        set as processed in Mongo DB.

        Args:
            duration (float, optional): maximum number of seconds to keep
                running; runs indefinitely when None.
        """
        started = time.time()
        self.prepare_dbcon()
        while True:
            try:
                event = self._event_queue.get(timeout=0.1)
            except queue.Empty:
                # Queue drained — try to refill from Mongo, backing off
                # briefly when nothing new is available.
                if not self.load_events():
                    time.sleep(0.5)
            else:
                try:
                    self._handle(event)
                    # Flag the event as done so it is never loaded again.
                    self.dbcon.update_one(
                        {"id": event["id"]},
                        {"$set": {
                            "pype_data.is_processed": True
                        }})
                except pymongo.errors.AutoReconnect:
                    self.pypelog.error(
                        ("Mongo server \"{}\" is not responding, exiting."
                         ).format(os.environ["AVALON_MONGO"]))
                    sys.exit(0)
                # Additional special processing of events.
                if event['topic'] == 'ftrack.meta.disconnected':
                    break

            if duration is not None:
                if (time.time() - started) > duration:
                    break

    def load_events(self):
        """Load not processed events sorted by stored date

        Returns:
            bool: True when at least one event was put on the queue.
        """
        # Purge processed events older than three days to keep the
        # collection from growing without bound.
        ago_date = datetime.datetime.now() - datetime.timedelta(days=3)
        result = self.dbcon.delete_many({
            "pype_data.stored": {
                "$lte": ago_date
            },
            "pype_data.is_processed": True
        })

        not_processed_events = self.dbcon.find({
            "pype_data.is_processed": False
        }).sort([("pype_data.stored", pymongo.ASCENDING)])

        found = False
        for event_data in not_processed_events:
            # Strip Mongo bookkeeping keys before rebuilding the event.
            new_event_data = {
                k: v
                for k, v in event_data.items()
                if k not in ["_id", "pype_data"]
            }
            try:
                event = ftrack_api.event.base.Event(**new_event_data)
            except Exception:
                # NOTE(review): `self.logger` and `L` appear to come from
                # ftrack_api's event hub base / lazy-log helper — confirm
                # both are actually in scope in this module.
                self.logger.exception(
                    L('Failed to convert payload into event: {0}', event_data))
                continue
            found = True
            self._event_queue.put(event)

        return found

    def _handle_packet(self, code, packet_identifier, path, data):
        """Override `_handle_packet` which skip events and extend heartbeat"""
        code_name = self._code_name_mapping[code]
        if code_name == "event":
            # Events arrive via Mongo (see `load_events`); "event" packets
            # from the wire are intentionally dropped here.
            return

        return super()._handle_packet(code, packet_identifier, path, data)
示例#21
0
文件: rest_api.py 项目: kalisp/pype
import os
import sys
import copy
from pype.services.rest_api import RestApi, route, abort, CallbackResult
from .io_nonsingleton import DbConnector
from pypeapp import config, execute, Logger

# Module-wide logger for the Adobe communicator REST API.
log = Logger().get_logger("AdobeCommunicator")

CURRENT_DIR = os.path.dirname(__file__)
# Publish script living next to this module; presumably run via the
# imported `execute` helper — TODO confirm against the caller.
PUBLISH_SCRIPT_PATH = os.path.join(CURRENT_DIR, "publish.py")

# Paths registered for publishing; filled in elsewhere in the module.
PUBLISH_PATHS = []


class AdobeRestApi(RestApi):
    """REST endpoints used by the Adobe host integration."""

    # Shared database connector, installed once on construction.
    dbcon = DbConnector()

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Open the database connection as soon as the API object exists.
        self.dbcon.install()

    @route("/available", "/adobe")
    def available(self):
        """Health-check endpoint; an empty success result means 'alive'."""
        return CallbackResult()

    @route("/presets/<project_name>", "/adobe")
    def get_presets(self, request):
        """Return configured presets for the project named in the URL."""
        presets = config.get_presets(request.url_data["project_name"])
        return CallbackResult(data=presets)
示例#22
0
import os
import sys
import KnobScripter

from pype.nuke.lib import (writes_version_sync, onScriptLoad,
                           checkInventoryVersions)

import nuke
from pypeapp import Logger

log = Logger().get_logger(__name__, "nuke")

# Register Avalon/pype callbacks on Nuke script load/save events.
# nuke.addOnScriptSave(writes_version_sync)
# NOTE(review): `onScriptLoad` is registered on the *save* event below —
# confirm this is intentional and not a typo for addOnScriptLoad.
nuke.addOnScriptSave(onScriptLoad)
nuke.addOnScriptLoad(checkInventoryVersions)
nuke.addOnScriptSave(checkInventoryVersions)
# nuke.addOnScriptSave(writes_version_sync)

log.info('Automatic syncing of write file knob to script version')
示例#23
0
import re
import os
import hiero

from pypeapp import (config, Logger)
from avalon import io

log = Logger().get_logger(__name__, "nukestudio")


def create_tag(key, value):
    """
    Creating Tag object.

    Args:
        key (str): name of tag
        value (dict): parameters of tag

    Returns:
        object: Tag object
    """
    # Build a fresh tag named after the key, then delegate population of
    # its parameters to update_tag.
    new_tag = hiero.core.Tag(str(key))
    return update_tag(new_tag, value)


def update_tag(tag, value):
    """
    Fixing Tag object.
示例#24
0
import os
import sys
import signal
import socket
import datetime

from ftrack_server import FtrackServer
from pype.ftrack.ftrack_server.lib import (
    SocketSession, ProcessEventHub, TOPIC_STATUS_SERVER
)
import ftrack_api
from pypeapp import Logger

log = Logger().get_logger("Event processor")

subprocess_started = datetime.datetime.now()


class SessionFactory:
    # Process-wide holder for the shared ftrack session (assigned elsewhere).
    session = None


def send_status(event):
    subprocess_id = event["data"].get("subprocess_id")
    if not subprocess_id:
        return

    if subprocess_id != os.environ["FTRACK_EVENT_SUB_ID"]:
        return

    session = SessionFactory.session
示例#25
0
文件: menu.py 项目: kalisp/pype
import nuke
from avalon.api import Session

from pype.nuke import lib
from pypeapp import Logger

log = Logger().get_logger(__name__, "nuke")


def install():
    menubar = nuke.menu("Nuke")
    menu = menubar.findItem(Session["AVALON_LABEL"])
    workfile_settings = lib.WorkfileSettings
    # replace reset resolution from avalon core to pype's
    name = "Reset Resolution"
    new_name = "Set Resolution"
    rm_item = [(i, item) for i, item in enumerate(menu.items())
               if name in item.name()][0]

    log.debug("Changing Item: {}".format(rm_item))
    # rm_item[1].setEnabled(False)
    menu.removeItem(rm_item[1].name())
    menu.addCommand(new_name,
                    lambda: workfile_settings().reset_resolution(),
                    index=(rm_item[0]))

    # replace reset frame range from avalon core to pype's
    name = "Reset Frame Range"
    new_name = "Set Frame Range"
    rm_item = [(i, item) for i, item in enumerate(menu.items())
               if name in item.name()][0]
示例#26
0
import sys
import signal
import socket

import traceback

from ftrack_server import FtrackServer
from pype.ftrack.ftrack_server.lib import SocketSession, SocketBaseEventHub

from pypeapp import Logger

log = Logger().get_logger("FtrackUserServer")


def main(args):
    port = int(args[-1])

    # Create a TCP/IP socket
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)

    # Connect the socket to the port where the server is listening
    server_address = ("localhost", port)
    log.debug(
        "User Ftrack Server connected to {} port {}".format(*server_address))
    sock.connect(server_address)
    sock.sendall(b"CreatedUser")

    try:
        session = SocketSession(auto_connect_event_hub=True,
                                sock=sock,
                                Eventhub=SocketBaseEventHub)
示例#27
0
文件: rest_api.py 项目: jonike/pype
import os
import socket
import socketserver
from Qt import QtCore

from .lib import RestApiFactory, Handler
from .base_class import route, register_statics
from pypeapp import config, Logger

log = Logger().get_logger("RestApiServer")


class RestApiServer:
    """Rest Api allows to access statics or callbacks with http requests.

    To register statics use `register_statics`.

    To register callback use `register_callback` method or use `route` decorator.
    `route` decorator should be used with not-class functions, it is possible
    to use within class when inherits `RestApi` (defined in `base_class.py`)
    or created object, with used decorator, is registered with `register_obj`.

    .. code-block:: python
        @route("/username", url_prefix="/api", methods=["get"], strict_match=False)
        def get_username():
            return {"username": getpass.getuser()}

    In that case response to `localhost:{port}/api/username` will be status
    `200` with body including `{"data": {"username": getpass.getuser()}}`

    Callback may expect one argument which will be filled with request
示例#28
0
import sys
import datetime
import signal
import socket
import pymongo

import ftrack_api
from ftrack_server import FtrackServer
from pype.ftrack.ftrack_server.lib import (SocketSession, StorerEventHub,
                                           get_ftrack_event_mongo_info,
                                           TOPIC_STATUS_SERVER,
                                           TOPIC_STATUS_SERVER_RESULT)
from pype.ftrack.lib.custom_db_connector import DbConnector
from pypeapp import Logger

log = Logger().get_logger("Event storer")
subprocess_started = datetime.datetime.now()


class SessionFactory:
    # Process-wide holder for the shared ftrack session (assigned elsewhere).
    session = None


# Mongo connection details for the ftrack event collection.
url, database, table_name = get_ftrack_event_mongo_info()
dbcon = DbConnector(mongo_url=url,
                    database_name=database,
                    table_name=table_name)

# ignore_topics = ["ftrack.meta.connected"]
# Topics excluded from storing; currently nothing is filtered out.
ignore_topics = []
示例#29
0
import os
from Qt import QtCore
from pypeapp import Logger
from pypeapp.lib.log import _bootstrap_mongo_log

log = Logger().get_logger("LogModel", "LoggingModule")


class LogModel(QtCore.QAbstractItemModel):
    COLUMNS = [
        "user",
        "host",
        "lineNumber",
        "method",
        "module",
        "fileName",
        "loggerName",
        "message",
        "level",
        "timestamp",
    ]

    colums_mapping = {
        "user": "******",
        "host": "Host",
        "lineNumber": "Line n.",
        "method": "Method",
        "module": "Module",
        "fileName": "File name",
        "loggerName": "Logger name",
        "message": "Message",
示例#30
0
文件: lib.py 项目: ldunham1/pype
import sys
from .utils import get_resolve_module
from pypeapp import Logger

log = Logger().get_logger(__name__, "resolve")

# Module-as-singleton pattern: `self` is this module object, so the cached
# project manager survives across repeated imports.
self = sys.modules[__name__]
self.pm = None


def get_project_manager():
    """Return Resolve's ProjectManager, creating and caching it on demand."""
    if not self.pm:
        # First call: resolve the API module and cache the manager on the
        # module-level singleton so later calls are cheap.
        self.pm = get_resolve_module().GetProjectManager()
    return self.pm


def set_project_manager_to_folder_name(folder_name):
    """
    Sets context of Project manager to given folder by name.

    Searching for folder by given name from root folder to nested.
    If no existing folder by name it will create one in root folder.

    Args:
        folder_name (str): name of searched folder

    Returns:
        bool: True if success

    Raises: