def test_add_app(self):
    self.assertRaises(TankError, self.env.create_app_settings, "test_engine", "test_app")
    self.assertRaises(TankError, self.env.create_app_settings, "unknown_engine", "test_app")

    # get raw environment before
    env_file = os.path.join(self.project_config, "env", "test.yml")
    with open(env_file) as fh:
        env_before = yaml.load(fh)

    self.env.create_app_settings("test_engine", "new_app")

    # get raw environment after
    env_file = os.path.join(self.project_config, "env", "test.yml")
    with open(env_file) as fh:
        env_after = yaml.load(fh)

    # ensure that disk was updated
    self.assertNotEqual(env_after, env_before)
    env_before["engines"]["test_engine"]["apps"]["new_app"] = {}
    env_before["engines"]["test_engine"]["apps"]["new_app"]["location"] = {}
    self.assertEqual(env_after, env_before)

    # ensure memory was updated
    cfg_after = self.env.get_app_settings("test_engine", "new_app")
    self.assertEqual(cfg_after, {})
def setUp(self):
    super(TestEnvironment, self).setUp()
    self.setup_fixtures()

    self.test_env = "test"
    self.test_engine = "test_engine"

    # create env object
    self.env = self.tk.pipeline_configuration.get_environment(self.test_env)

    # get raw environment
    env_file = os.path.join(self.project_config, "env", "test.yml")
    fh = open(env_file)
    self.raw_env_data = yaml.load(fh)
    fh.close()

    # get raw app metadata
    app_md = os.path.join(self.project_config, "bundles", "test_app", "info.yml")
    fh = open(app_md)
    self.raw_app_metadata = yaml.load(fh)
    fh.close()

    # get raw engine metadata
    eng_md = os.path.join(self.project_config, "bundles", "test_engine", "info.yml")
    fh = open(eng_md)
    self.raw_engine_metadata = yaml.load(fh)
    fh.close()
def test_update_engine_settings(self):
    self.assertRaises(TankError, self.env.update_engine_settings, "bad_engine", {}, {})

    # get raw environment before
    env_file = os.path.join(self.project_config, "env", "test.yml")
    fh = open(env_file)
    env_before = yaml.load(fh)
    fh.close()
    prev_settings = self.env.get_engine_settings("test_engine")

    self.env.update_engine_settings("test_engine", {"foo": "bar"}, {"type": "dev", "path": "foo"})

    # get raw environment after
    env_file = os.path.join(self.project_config, "env", "test.yml")
    fh = open(env_file)
    env_after = yaml.load(fh)
    fh.close()

    # ensure that disk was updated
    self.assertNotEqual(env_after, env_before)
    env_before["engines"]["test_engine"]["foo"] = "bar"
    env_before["engines"]["test_engine"]["location"] = {"type": "dev", "path": "foo"}
    self.assertEqual(env_after, env_before)

    # ensure memory was updated
    new_settings = self.env.get_engine_settings("test_engine")
    prev_settings.update({"foo": "bar"})
    self.assertEqual(new_settings, prev_settings)

    desc_after = self.env.get_engine_descriptor("test_engine")
    self.assertEqual(desc_after.get_location(), {"type": "dev", "path": "foo"})
def get_location(sgtk, app_bootstrap):
    """
    Returns a location dictionary for the bundled framework.

    :param sgtk: Handle to the Toolkit API
    :param app_bootstrap: Instance of the application bootstrap.

    :returns: A dictionary with keys and values following the Toolkit location
        convention. Read more at
        https://support.shotgunsoftware.com/entries/95442678#Code%20Locations
    """
    dev_descriptor = {
        "type": "dev",
        "path": app_bootstrap.get_startup_location_override()
    }

    # If the startup location has been overridden, the descriptor is
    # automatically a dev descriptor.
    if app_bootstrap.get_startup_location_override():
        return dev_descriptor

    # If we are running the bundled startup, it is possible to override its
    # value on first run in order to trigger the system into pulling from
    # another location than the app_store, namely git. This is done through
    # an environment variable. This is also great for testing app_store
    # updates by pretending you have an older version of the code bundled
    # and need an update.

    # Local import since sgtk is lazily loaded.
    from tank_vendor import yaml
    if app_bootstrap.runs_bundled_startup() and "SGTK_DESKTOP_BUNDLED_DESCRIPTOR" in os.environ:
        try:
            return yaml.load(os.environ["SGTK_DESKTOP_BUNDLED_DESCRIPTOR"])
        except yaml.YAMLError, e:
            raise BundledDescriptorEnvVarError(e)
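# Illustrative sketch: the SGTK_DESKTOP_BUNDLED_DESCRIPTOR variable checked
# above holds a YAML-serialized descriptor dictionary. The git path and
# version below are assumed values for illustration only.
import os

os.environ["SGTK_DESKTOP_BUNDLED_DESCRIPTOR"] = (
    "{type: git, path: 'https://github.com/example/startup.git', version: v1.2.3}"
)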
def get_associated_core_version(self):
    """
    Returns the version string for the core api associated with this config.
    This method is 'forgiving' and in the case no associated core API can be
    found for this pipeline configuration, None will be returned rather than
    an exception raised.
    """
    associated_api_root = self.get_install_location()
    info_yml_path = os.path.join(associated_api_root, "core", "info.yml")

    if os.path.exists(info_yml_path):
        try:
            info_fh = open(info_yml_path, "r")
            try:
                data = yaml.load(info_fh)
            finally:
                info_fh.close()
            data = data.get("version")
        except:
            data = None
    else:
        data = None

    return data
def get_pc_roots_metadata(pipeline_config_root_path):
    """
    Loads and validates the roots metadata file.

    The roots.yml file is a reflection of the local storages setup in Shotgun
    at project setup time and may contain anomalies in the path layout
    structure. The roots data will be prepended to paths and used for
    comparison, so it is critical that the paths are in a correct, normalized
    form once they have been loaded into the system.
    """
    # now read in the roots.yml file
    # this will contain something like
    # {'primary': {'mac_path': '/studio', 'windows_path': None, 'linux_path': '/studio'}}
    roots_yml = os.path.join(pipeline_config_root_path, "config", "core", "roots.yml")

    if not os.path.exists(roots_yml):
        raise TankError("Roots metadata file '%s' missing! Please contact support." % roots_yml)

    fh = open(roots_yml, "rt")
    try:
        data = yaml.load(fh)
    except Exception, e:
        raise TankError("Looks like the roots file is corrupt. Please contact "
                        "support! File: '%s' Error: %s" % (roots_yml, e))
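# Illustrative sketch of a roots.yml of the shape the comment above
# describes (storage paths assumed). yaml.safe_load from stock PyYAML is
# used here since the file holds plain data.
import yaml

SAMPLE_ROOTS_YML = """
primary:
  mac_path: /studio
  windows_path: null
  linux_path: /studio
"""

roots = yaml.safe_load(SAMPLE_ROOTS_YML)
# {'primary': {'mac_path': '/studio', 'windows_path': None, 'linux_path': '/studio'}}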
def _get_metadata(self):
    """
    Loads the pipeline config metadata (the pipeline_configuration.yml) file
    from disk.

    :returns: deserialized content of the file in the form of a dict.
    """
    # now read in the pipeline_configuration.yml file
    cfg_yml = self._get_pipeline_config_file_location()

    if not os.path.exists(cfg_yml):
        raise TankError("Configuration metadata file '%s' missing! "
                        "Please contact support." % cfg_yml)

    fh = open(cfg_yml, "rt")
    try:
        data = yaml.load(fh)
        if data is None:
            raise Exception("File contains no data!")
    except Exception as e:
        raise TankError("Looks like a config file is corrupt. Please contact "
                        "support! File: '%s' Error: %s" % (cfg_yml, e))
    finally:
        fh.close()

    return data
def get_pc_registered_location(pipeline_config_root_path):
    """
    Loads the location metadata file from install_location.yml.
    This contains a reflection of the paths given in the pc entity.

    Returns the path that has been registered for this pipeline configuration
    for the current OS. This is the path that has been defined in Shotgun. It
    is also the path that is being used in the inverse pointer files that
    exist in each storage.

    This is useful when drive letter mappings or symlinks are being used -
    in these cases get_path() may not return the same value as
    get_registered_location_path().

    This may return None if no path has been registered for the current OS.
    """
    # now read in the install_location.yml file
    cfg_yml = os.path.join(pipeline_config_root_path, "config", "core", "install_location.yml")

    if not os.path.exists(cfg_yml):
        raise TankError("Location metadata file '%s' missing! Please contact support." % cfg_yml)

    fh = open(cfg_yml, "rt")
    try:
        data = yaml.load(fh)
    except Exception, e:
        raise TankError("Looks like a config file is corrupt. Please contact "
                        "support! File: '%s' Error: %s" % (cfg_yml, e))
def _get_pc_disk_metadata(pipeline_config_root_path):
    """
    Helper method. Loads the pipeline config metadata
    (the pipeline_configuration.yml) file from disk.

    :returns: deserialized content of the file in the form of a dict.
    """
    # now read in the pipeline_configuration.yml file
    cfg_yml = os.path.join(pipeline_config_root_path, "config", "core", "pipeline_configuration.yml")

    if not os.path.exists(cfg_yml):
        raise TankError("Configuration metadata file '%s' missing! Please contact support." % cfg_yml)

    fh = open(cfg_yml, "rt")
    try:
        data = yaml.load(fh)
        if data is None:
            raise Exception("File contains no data!")
    except Exception, e:
        raise TankError("Looks like a config file is corrupt. Please contact "
                        "support! File: '%s' Error: %s" % (cfg_yml, e))
def get_location(app_bootstrap):
    """
    Returns a location dictionary for the bundled framework.

    :param app_bootstrap: Instance of the application bootstrap.

    :returns: A dictionary with keys and values following the Toolkit location
        convention. Read more at
        https://support.shotgunsoftware.com/entries/95442678#Code%20Locations
    """
    dev_descriptor = {
        "type": "dev",
        "path": app_bootstrap.get_startup_location_override()
    }

    # If the startup location has been overridden, the descriptor is
    # automatically a dev descriptor.
    if app_bootstrap.get_startup_location_override():
        return dev_descriptor

    # If we are running the bundled startup, it is possible to override its
    # value on first run in order to trigger the system into pulling from
    # another location than the app_store, namely git. This is done through
    # an environment variable. This is also great for testing app_store
    # updates by pretending you have an older version of the code bundled
    # and need an update.

    # Local import since sgtk is lazily loaded.
    from tank_vendor import yaml

    location = _get_location_yaml_location(app_bootstrap.get_startup_path())

    # If the file is missing, we're in dev mode.
    if not os.path.exists(location):
        return {"type": "dev", "path": app_bootstrap.get_startup_path()}

    # Read the location.yml file.
    with open(location, "r") as location_file:
        # If the file is empty, we're in dev mode.
        return yaml.load(location_file) or dev_descriptor
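# Illustrative sketch of a location.yml payload of the kind read above: a
# single descriptor dictionary following the Toolkit location convention.
# The bundle name and version below are assumed values.
import yaml

SAMPLE_LOCATION_YML = """
type: app_store
name: tk-framework-desktopstartup
version: v1.0.0
"""

descriptor = yaml.safe_load(SAMPLE_LOCATION_YML)
# {'type': 'app_store', 'name': 'tk-framework-desktopstartup', 'version': 'v1.0.0'}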
def _load_nuke_publish_snapshot_comments(self, snapshot_file_path):
    """
    Load old nuke-style snapshot comments if they exist. These are only
    ever read - all new comments are saved to the new file.
    """
    comments = {}

    try:
        # look for old nuke style path:
        snapshot_dir = os.path.dirname(snapshot_file_path)
        fields = self._snapshot_template.get_fields(snapshot_file_path)
        SNAPSHOT_COMMENTS_FILE = r"%s_comments.yml"
        comments_file_name = SNAPSHOT_COMMENTS_FILE % fields.get("name", "unknown")
        comments_file_path = os.path.join(snapshot_dir, comments_file_name)

        comments = {}
        if os.path.exists(comments_file_path):
            raw_comments = yaml.load(open(comments_file_path, "r"))
            for (name, timestamp), comment in raw_comments.iteritems():
                fields["name"] = name
                fields["timestamp"] = timestamp
                snapshot_path = self._snapshot_template.apply_fields(fields)
                if os.path.exists(snapshot_path):
                    # add comment to dictionary in new style:
                    comments_key = os.path.basename(snapshot_path)
                    comments[comments_key] = {"comment": comment}
    except:
        # it's not critical that this succeeds so just ignore any exceptions
        pass

    return comments
def _get_snapshot_comments(self, snapshot_file_path):
    """
    Return the snapshot comments for the specified file path.
    """
    # first, attempt to load old-nuke-publish-style comments:
    comments = self._load_nuke_publish_snapshot_comments(snapshot_file_path)

    # now load new style comments:
    comments_file_path = self._get_comments_file_path(snapshot_file_path)
    raw_comments = {}
    if os.path.exists(comments_file_path):
        raw_comments = yaml.load(open(comments_file_path, "r"))

    # process raw comments to convert old-style to new if needed:
    for key, value in raw_comments.iteritems():
        if isinstance(value, basestring):
            # old style string
            comments[key] = {"comment": value}
        elif isinstance(value, dict):
            # new style dictionary
            comments[key] = value
        else:
            # value isn't valid!
            pass

    # ensure all comments are returned as utf-8 strings rather than
    # unicode - this is due to a previous bug where the snapshot UI
    # would return the comment as unicode!
    for comment_dict in comments.values():
        comment = comment_dict.get("comment")
        if comment and isinstance(comment, unicode):
            comment_dict["comment"] = comment.encode("utf8")

    return comments
def _get_install_locations(path):
    """
    Given a pipeline configuration OR core location, return paths on all
    platforms.

    :param path: Path to a pipeline configuration on disk.
    :returns: dictionary with keys linux2, darwin and win32
    """
    # basic sanity check
    if not os.path.exists(path):
        raise TankError("The core path '%s' does not exist on disk!" % path)

    # for other platforms, read in install_location
    location_file = os.path.join(path, "config", "core", "install_location.yml")
    if not os.path.exists(location_file):
        raise TankError("Cannot find core config file '%s' - please contact support!" % location_file)

    # load the config file
    try:
        open_file = open(location_file)
        try:
            location_data = yaml.load(open_file)
        finally:
            open_file.close()
    except Exception, error:
        raise TankError("Cannot load core config file '%s'. Error: %s" % (location_file, error))
def get_core_descriptor(pipeline_config_path, shotgun_connection, bundle_cache_fallback_paths=None):
    """
    Returns a descriptor object for the uri/dict defined in the config's
    ``core_api.yml`` file (if it exists).

    If the config does not define a core descriptor file, then ``None`` will
    be returned.

    :param str pipeline_config_path: The path to the pipeline configuration
    :param shotgun_connection: An open connection to shotgun
    :param bundle_cache_fallback_paths: bundle cache search path

    :return: A core descriptor object
    """
    # avoid circular dependencies
    from .descriptor import Descriptor, create_descriptor, is_descriptor_version_missing

    descriptor_file_path = _get_core_descriptor_file(pipeline_config_path)

    if not os.path.exists(descriptor_file_path):
        return None

    # the core_api.yml contains info about the core:
    #
    # | location:
    # |    name: tk-core
    # |    type: app_store
    # |    version: v0.16.34

    logger.debug("Found core descriptor file '%s'" % descriptor_file_path)

    # read the file first
    fh = open(descriptor_file_path, "rt")
    try:
        data = yaml.load(fh, Loader=yaml.FullLoader)
        core_descriptor_dict = data["location"]
    except Exception as e:
        raise TankError("Cannot read invalid core descriptor file '%s': %s" % (descriptor_file_path, e))
    finally:
        fh.close()

    # we have a core descriptor specification. Get a descriptor object for it
    logger.debug("Config has a specific core defined in core/core_api.yml: %s" % core_descriptor_dict)

    # when core is specified, check if it defines a specific version or not
    use_latest = is_descriptor_version_missing(core_descriptor_dict)

    return create_descriptor(
        shotgun_connection,
        Descriptor.CORE,
        core_descriptor_dict,
        fallback_roots=bundle_cache_fallback_paths or [],
        resolve_latest=use_latest,
    )
def _get_metadata(self):
    """
    Returns the info.yml metadata associated with this descriptor.
    Note that this call involves deep introspection; in order to
    access the metadata we normally need to have the code content
    local, so this method may trigger a remote code fetch if necessary.
    """
    if self.__manifest_data is None:
        # make sure payload exists locally
        if not self.exists_local():
            self.download_local()

        # get the metadata
        bundle_root = self.get_path()
        file_path = os.path.join(bundle_root, constants.BUNDLE_METADATA_FILE)

        if not os.path.exists(file_path):
            raise TankError("Toolkit metadata file '%s' missing." % file_path)

        try:
            file_data = open(file_path)
            try:
                metadata = yaml.load(file_data)
            finally:
                file_data.close()
        except Exception, exp:
            raise TankError("Cannot load metadata file '%s'. Error: %s" % (file_path, exp))

        # cache it
        self.__manifest_data = metadata
def _get_metadata(self):
    """
    Loads the pipeline config metadata (the pipeline_configuration.yml) file
    from disk.

    :returns: deserialized content of the file in the form of a dict.
    """
    # now read in the pipeline_configuration.yml file
    cfg_yml = self._get_pipeline_config_file_location()

    if not os.path.exists(cfg_yml):
        raise TankError("Configuration metadata file '%s' missing! "
                        "Please contact support." % cfg_yml)

    fh = open(cfg_yml, "rt")
    try:
        data = yaml.load(fh, Loader=yaml.FullLoader)
        if data is None:
            raise Exception("File contains no data!")
    except Exception as e:
        raise TankError("Looks like a config file is corrupt. Please contact "
                        "support! File: '%s' Error: %s" % (cfg_yml, e))
    finally:
        fh.close()

    return data
def _get_roots_data(self):
    """
    Returns roots.yml data for this config.
    If no root file can be loaded, {} is returned.

    :returns: Roots data yaml content, usually a dictionary
    """
    self._io_descriptor.ensure_local()

    # get the roots definition
    root_file_path = os.path.join(
        self._io_descriptor.get_path(),
        "core",
        constants.STORAGE_ROOTS_FILE)

    roots_data = {}
    if os.path.exists(root_file_path):
        root_file = open(root_file_path, "r")
        try:
            # if the file is empty, initialize with an empty dict...
            roots_data = yaml.load(root_file) or {}
        finally:
            root_file.close()

    return roots_data
def test_project_root_mismatch(self):
    """
    Case that the root name specified in the project's yml file does not
    exist in the roots file.
    """
    # remove root name from the roots file
    self.setup_multi_root_fixtures()

    self.tk = tank.Tank(self.project_root)

    # should be fine
    folder.configuration.FolderConfiguration(self.tk, self.schema_location)

    roots_file = os.path.join(self.tk.pipeline_configuration.get_path(),
                              "config", "core", "schema", "alternate_1.yml")
    fh = open(roots_file, "r")
    data = yaml.load(fh)
    fh.close()
    data["root_name"] = "some_bogus_Data"
    fh = open(roots_file, "w")
    fh.write(yaml.dump(data))
    fh.close()

    self.tk = tank.Tank(self.project_root)

    self.assertRaises(TankError,
                      folder.configuration.FolderConfiguration,
                      self.tk,
                      self.schema_location)
def _get_metadata(self):
    """
    Loads the pipeline config metadata (the pipeline_configuration.yml) file
    from disk.

    :returns: deserialized content of the file in the form of a dict.
    """
    # now read in the pipeline_configuration.yml file
    cfg_yml = os.path.join(self.get_config_location(), "core", constants.PIPELINECONFIG_FILE)

    if not os.path.exists(cfg_yml):
        raise TankError("Configuration metadata file '%s' missing! "
                        "Please contact support." % cfg_yml)

    fh = open(cfg_yml, "rt")
    try:
        data = yaml.load(fh)
        if data is None:
            raise Exception("File contains no data!")
    except Exception, e:
        raise TankError("Looks like a config file is corrupt. Please contact "
                        "support! File: '%s' Error: %s" % (cfg_yml, e))
def _update_deploy_file(self, generation=None, descriptor=None, corrupt=False):
    """
    Updates the deploy file.

    :param generation: If set, will update the generation number of the config.
    :param descriptor: If set, will update the descriptor of the config.
    :param corrupt: If set, will corrupt the configuration file.
    """
    path = self._cached_config._config_writer.get_descriptor_metadata_file()
    if corrupt:
        data = "corrupted"
    else:
        with open(path, "rt") as fh:
            data = yaml.load(fh, Loader=yaml.FullLoader)
        if generation is not None:
            data["deploy_generation"] = generation
        if descriptor is not None:
            data["config_descriptor"] = descriptor

    with open(path, "wt") as fh:
        yaml.dump(data, fh)
def test_project_root_mismatch(self):
    """
    Case that the root name specified in the project's yml file does not
    exist in the roots file.
    """
    # remove root name from the roots file
    self.setup_multi_root_fixtures()

    # should be fine
    folder.configuration.FolderConfiguration(self.tk, self.schema_location)

    roots_file = os.path.join(
        self.tk.pipeline_configuration.get_path(),
        "config",
        "core",
        "schema",
        "alternate_1.yml",
    )
    fh = open(roots_file, "r")
    data = yaml.load(fh, Loader=yaml.FullLoader)
    fh.close()
    data["root_name"] = "some_bogus_Data"
    fh = open(roots_file, "w")
    fh.write(yaml.dump(data))
    fh.close()

    self.assertRaises(
        TankError,
        folder.configuration.FolderConfiguration,
        self.tk,
        self.schema_location,
    )
def associated_core_descriptor(self):
    """
    The descriptor dict or url required for this core or None if not defined.

    :returns: Core descriptor dict or uri or None if not defined
    """
    core_descriptor_dict = None

    self._io_descriptor.ensure_local()

    core_descriptor_path = os.path.join(
        self._io_descriptor.get_path(),
        "core",
        constants.CONFIG_CORE_DESCRIPTOR_FILE)

    if os.path.exists(core_descriptor_path):
        # the core_api.yml contains info about the core config:
        #
        # location:
        #    name: tk-core
        #    type: app_store
        #    version: v0.16.34

        log.debug("Detected core descriptor file '%s'" % core_descriptor_path)

        # read the file first
        fh = open(core_descriptor_path, "rt")
        try:
            data = yaml.load(fh)
            core_descriptor_dict = data["location"]
        except Exception, e:
            raise TankDescriptorError(
                "Cannot read invalid core descriptor file '%s': %s" % (core_descriptor_path, e))
        finally:
            fh.close()
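# Illustrative sketch of the core_api.yml structure shown in the comment
# above; parsing it yields the dict stored under the "location" key.
import yaml

SAMPLE_CORE_API_YML = """
location:
  name: tk-core
  type: app_store
  version: v0.16.34
"""

core_descriptor_dict = yaml.safe_load(SAMPLE_CORE_API_YML)["location"]
# {'name': 'tk-core', 'type': 'app_store', 'version': 'v0.16.34'}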
def test_url_cleanup(self):
    # Make sure that if a file has the url saved incorrectly...
    with patch("sgtk.util.shotgun.connection.sanitize_url", wraps=lambda x: x):
        session_cache.set_current_host("https://host.cleaned.up.on.read/")
        # ... then make sure we indeed disabled cleanup and that the
        # malformed value was written to disk...
        self.assertEquals("https://host.cleaned.up.on.read/", session_cache.get_current_host())

    # ... and finally that the value is filtered when being read back from disk.
    self.assertEquals("https://host.cleaned.up.on.read", session_cache.get_current_host())

    # Make sure we're cleaning up the hostname when saving it.
    session_cache.set_current_host("https://host.cleaned.up.on.write/")

    with open(
        os.path.join(
            LocalFileStorageManager.get_global_root(LocalFileStorageManager.CACHE),
            "authentication.yml"
        ),
        "r"
    ) as fh:
        # Let's read the file directly to see if the data was cleaned up.
        data = yaml.load(fh)
        self.assertEqual(data["current_host"], "https://host.cleaned.up.on.write")
def _get_demo_info(self, demo_class):
    """
    Given a demo class, parse the ``demo.yml`` file for more info.

    :param demo_class: The demo ``QtGui.QWidget`` subclass
    :returns: A dict of info about the demo class.
    """
    # read the demo.yml file from the demo_class's directory
    demo_dir = os.path.dirname(inspect.getfile(demo_class))

    # construct the full path to the manifest
    manifest = os.path.join(demo_dir, "demo.yml")

    if not os.path.exists(manifest):
        # no path to the manifest
        logger.error("No manifest file exists for this demo class: %s." % (demo_class,))
        return None

    # attempt to read the manifest file
    try:
        fh = open(manifest, "r")
    except Exception as e:
        logger.error(
            "Could not open demo manifest file '%s'.\n"
            " Error reported: '%s'" % (manifest, e)
        )
        return None

    # now try to parse it
    try:
        demo_info = yaml.load(fh)
    except Exception as e:
        logger.error(
            "Could not parse demo manifest file '%s'.\n"
            " Error reported: '%s'" % (manifest, e)
        )
        return None
    finally:
        fh.close()

    # make sure the required fields are present
    for field in ["display_name", "description", "documentation_url"]:
        if field not in demo_info:
            logger.error(
                "The `%s` field is missing from the demo "
                "manifest file: %s." % (field, manifest)
            )
            return None

    # add the directory and class in there as well so that we have one stop
    # shopping for all the demo information
    demo_info["widget_class"] = demo_class
    demo_info["directory"] = demo_dir

    return demo_info
def _process_includes_r(file_name, data, context):
    """
    Recursively process includes for an environment file.

    Algorithm (recursive):

    1. Load include data into a big dictionary X
    2. Recursively go through the current file and replace any
       @ref with a dictionary value from X

    (See the illustrative sketch after this function for step 2.)

    :param file_name: The root yml file to process
    :param data: The contents of the root yml file to process
    :param context: The current context

    :returns: A tuple containing the flattened yml data after all includes
              have been recursively processed, together with a lookup for
              frameworks to the file they were loaded from.
    """
    # first build our big fat lookup dict
    include_files = _resolve_includes(file_name, data, context)

    lookup_dict = {}
    fw_lookup = {}
    for include_file in include_files:

        # path exists, so try to read it
        fh = open(include_file, "r")
        try:
            included_data = yaml.load(fh) or {}
        finally:
            fh.close()

        # now resolve this data before proceeding
        included_data, included_fw_lookup = _process_includes_r(include_file, included_data, context)

        # update our big lookup dict with this included data:
        if "frameworks" in included_data and isinstance(included_data["frameworks"], dict):
            # special case handling of frameworks to merge them from the
            # various included files rather than have the frameworks section
            # from one file overwrite the frameworks from previous includes!
            lookup_dict = _resolve_frameworks(included_data, lookup_dict)

            # also, keep track of where the framework has been referenced from:
            for fw_name in included_data["frameworks"].keys():
                fw_lookup[fw_name] = include_file

            del(included_data["frameworks"])

        fw_lookup.update(included_fw_lookup)
        lookup_dict.update(included_data)

    # now go through our own data, recursively, and replace any refs.
    # recurse down in dicts and lists
    try:
        data = _resolve_refs_r(lookup_dict, data)
        data = _resolve_frameworks(lookup_dict, data)
    except TankError, e:
        raise TankError("Include error. Could not resolve references for %s: %s" % (file_name, e))
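# Illustrative sketch (hypothetical data, simplified stand-in for
# _resolve_refs_r) of the @ref substitution step 2 above performs: values
# gathered from include files form a lookup dict, and "@name" references in
# the environment data are replaced from it.
lookup_dict = {"maya_location": {"type": "app_store", "name": "tk-maya", "version": "v1.2.3"}}
env_fragment = {"engines": {"tk-maya": {"location": "@maya_location"}}}

def resolve_refs(lookup, value):
    # replace "@key" strings; recurse into dicts and lists
    if isinstance(value, str) and value.startswith("@"):
        return lookup[value[1:]]
    if isinstance(value, dict):
        return dict((k, resolve_refs(lookup, v)) for k, v in value.items())
    if isinstance(value, list):
        return [resolve_refs(lookup, v) for v in value]
    return value

resolved = resolve_refs(lookup_dict, env_fragment)
# {'engines': {'tk-maya': {'location': {'type': 'app_store', ...}}}}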
def run(self, log, args):

    if len(args) != 4:
        log.info("Syntax: move_studio_install current_path linux_path windows_path mac_path")
        log.info("")
        log.info("This command will move the main location of the Toolkit config.")
        log.info("")
        log.info("Specify the current location of your studio install in the first parameter. "
                 "Specify the new location for each platform in the subsequent parameters.")
        log.info("")
        log.info("You typically need to quote your paths, like this:")
        log.info("")
        log.info('> tank move_studio_install /projects/tank /tank_install/studio "p:\\tank_install\\studio" /tank_install/studio')
        log.info("")
        log.info("If you want to leave a platform blank, just use empty quotes:")
        log.info("")
        log.info('> tank move_studio_install "P:\\projects\\tank" "" "p:\\tank_install\\studio" ""')
        raise TankError("Wrong number of parameters!")

    current_path = args[0]
    linux_path = args[1]
    windows_path = args[2]
    mac_path = args[3]
    new_paths = {"mac_path": mac_path,
                 "windows_path": windows_path,
                 "linux_path": linux_path}
    storage_map = {"linux2": "linux_path",
                   "win32": "windows_path",
                   "darwin": "mac_path"}
    local_target_path = new_paths.get(storage_map[sys.platform])

    # basic checks
    if not os.path.exists(current_path):
        raise TankError("Path '%s' does not exist!" % current_path)

    if os.path.exists(local_target_path):
        raise TankError("The path %s already exists on disk!" % local_target_path)

    # probe for some key file
    api_file = os.path.join(current_path, "install", "core", "_core_upgrader.py")
    if not os.path.exists(api_file):
        raise TankError("Path '%s' does not look like a Toolkit install!" % current_path)

    # make sure this is NOT a PC
    pc_file = os.path.join(current_path, "config", "info.yml")
    if os.path.exists(pc_file):
        raise TankError("Path '%s' looks like a pipeline configuration. Move it "
                        "using the move_configuration command instead!" % current_path)

    # now read in the install_location.yml file
    # to find out the locations for all other platforms
    cfg_yml = os.path.join(current_path, "config", "core", "install_location.yml")
    if not os.path.exists(cfg_yml):
        raise TankError("Location metadata file '%s' missing!" % cfg_yml)

    fh = open(cfg_yml, "rt")
    try:
        data = yaml.load(fh)
    except Exception, e:
        raise TankError("Config file %s is invalid: %s" % (cfg_yml, e))
def run_interactive(self, log, args):

    if len(args) != 4:
        log.info("Syntax: move_studio_install current_path linux_path windows_path mac_path")
        log.info("")
        log.info("This command will move the main location of the Toolkit config.")
        log.info("")
        log.info("Specify the current location of your studio install in the first parameter. "
                 "Specify the new location for each platform in the subsequent parameters.")
        log.info("")
        log.info("You typically need to quote your paths, like this:")
        log.info("")
        log.info('> tank move_studio_install /projects/tank /tank_install/studio "p:\\tank_install\\studio" /tank_install/studio')
        log.info("")
        log.info("If you want to leave a platform blank, just use empty quotes:")
        log.info("")
        log.info('> tank move_studio_install "P:\\projects\\tank" "" "p:\\tank_install\\studio" ""')
        raise TankError("Wrong number of parameters!")

    current_path = args[0]
    linux_path = args[1]
    windows_path = args[2]
    mac_path = args[3]
    new_paths = {"mac_path": mac_path,
                 "windows_path": windows_path,
                 "linux_path": linux_path}
    storage_map = {"linux2": "linux_path",
                   "win32": "windows_path",
                   "darwin": "mac_path"}
    local_target_path = new_paths.get(storage_map[sys.platform])

    # basic checks
    if not os.path.exists(current_path):
        raise TankError("Path '%s' does not exist!" % current_path)

    if os.path.exists(local_target_path):
        raise TankError("The path %s already exists on disk!" % local_target_path)

    # probe for some key file
    if not pipelineconfig_utils.is_localized(current_path):
        raise TankError("Path '%s' does not look like a Toolkit install!" % current_path)

    # make sure this is NOT a PC
    pc_file = os.path.join(current_path, "config", "info.yml")
    if os.path.exists(pc_file):
        raise TankError("Path '%s' looks like a pipeline configuration. Move it "
                        "using the move_configuration command instead!" % current_path)

    # now read in the install_location.yml file
    # to find out the locations for all other platforms
    cfg_yml = os.path.join(current_path, "config", "core", "install_location.yml")
    if not os.path.exists(cfg_yml):
        raise TankError("Location metadata file '%s' missing!" % cfg_yml)

    fh = open(cfg_yml, "rt")
    try:
        data = yaml.load(fh)
    except Exception, e:
        raise TankError("Config file %s is invalid: %s" % (cfg_yml, e))
def __load_data(self, path):
    """
    Loads the main data from disk, in raw form.
    """
    # load the data in
    try:
        env_file = open(path, "r")
        data = yaml.load(env_file)
    except Exception, exp:
        raise TankError("Could not parse file %s. Error reported: %s" % (path, exp))
def __refresh(self):
    """Refreshes the environment data from disk"""

    if not os.path.exists(self.__env_path):
        raise TankError("Attempting to load non-existent environment file: %s" % self.__env_path)

    try:
        env_file = open(self.__env_path, "r")
        data = yaml.load(env_file)
    except Exception, exp:
        raise TankError("Could not parse file %s. Error reported: %s" % (self.__env_path, exp))
def _populate_cache_item_data(self, item):
    """
    Loads the CacheItem's YAML data from disk.
    """
    path = item.path
    try:
        with open(path, "r") as fh:
            raw_data = yaml.load(fh)
    except IOError:
        raise TankFileDoesNotExistError("File does not exist: %s" % path)
    except Exception, e:
        raise TankError("Could not open file '%s'. Error reported: '%s'" % (path, e))
def find_reference(file_name, context, token):
    """
    Non-recursive. Looks at all include files and searches
    for @token. Returns the file in which it is found.
    """
    # load the data in
    try:
        fh = open(file_name, "r")
        data = yaml.load(fh)
    except Exception, exp:
        raise TankError("Could not parse file %s. Error reported: %s" % (file_name, exp))
def _get_shotgun_yml_content(self, cw):
    """
    Retrieves the content of the shotgun.yml file based on the configuration
    writer passed in.

    :param cw: Configuration writer used to write the shotgun.yml file.

    :returns: Parsed content of the shotgun.yml file.
    """
    shotgun_yml_path = os.path.join(cw.path.current_os, "config", "core", "shotgun.yml")
    self.assertTrue(os.path.exists(shotgun_yml_path))
    with open(shotgun_yml_path, "rb") as fh:
        return yaml.load(fh)
def _initialize_manager(plugin_root_path):
    """
    Initializes a ToolkitManager for use in zero-config mode.
    """
    # running in situ as part of zero config. sgtk has already added sgtk
    # to the python path. need to extract the plugin info from info.yml

    # import the yaml parser
    from tank_vendor import yaml

    # build the path to the info.yml file
    plugin_info_yml = os.path.join(plugin_root_path, "info.yml")

    # open the yaml file and read the data
    with open(plugin_info_yml, "r") as plugin_info_fh:
        plugin_info = yaml.load(plugin_info_fh, yaml.SafeLoader)

    base_config = plugin_info["base_configuration"]
    plugin_id = plugin_info["plugin_id"]

    import sgtk
    _initialize_logger(sgtk.LogManager())

    # get a logger for the plugin
    sgtk_logger = sgtk.LogManager.get_logger("plugin")
    sgtk_logger.debug("Booting up toolkit plugin.")
    sgtk_logger.debug("Executable: %s", sys.executable)

    try:
        # Authenticates with Toolkit. If already logged in, this will
        # return the current user.
        user = sgtk.authentication.ShotgunAuthenticator().get_user()
    except sgtk.authentication.AuthenticationCancelled:
        # Show a "Shotgun > Login" menu.
        sgtk_logger.info("Shotgun login was cancelled by the user.")
        return

    # Create a bootstrap manager for the logged in user with the plug-in
    # configuration data.
    toolkit_mgr = sgtk.bootstrap.ToolkitManager(user)

    toolkit_mgr.progress_callback = lambda pct, msg: print(
        "{0} - {1}".format(int(pct * 100), msg))

    # Pulls the latest Unreal configuration from the master branch.
    toolkit_mgr.base_configuration = base_config
    toolkit_mgr.plugin_id = plugin_id

    return toolkit_mgr
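# Illustrative sketch of the two keys the bootstrap code above reads from
# the plugin's info.yml; both values below are assumed for illustration.
import yaml

SAMPLE_PLUGIN_INFO_YML = """
base_configuration: sgtk:descriptor:app_store?name=tk-config-basic
plugin_id: basic.unreal
"""

info = yaml.safe_load(SAMPLE_PLUGIN_INFO_YML)
base_config = info["base_configuration"]  # descriptor uri for the config
plugin_id = info["plugin_id"]             # id the manager bootstraps with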
def _add_snapshot_comment(self, snapshot_file_path, comment):
    """
    Add a comment to the comment file for a snapshot file. The comments are
    stored in the following format:

    {<snapshot file name> : {
        comment: String - comment to store
        sg_user: Shotgun entity dictionary representing the user that
                 created the snapshot
        }
     ...
    }

    :param str snapshot_file_path: path to the snapshot file.
    :param str comment: comment string to save.
    """
    # validate to make sure path is sane
    if not self._snapshot_template.validate(snapshot_file_path):
        self._app.log_warning("Could not add comment to "
                              "invalid snapshot path %s!" % snapshot_file_path)
        return

    # get comments file path:
    comments_file_path = self._get_comments_file_path(snapshot_file_path)
    self._app.log_debug("Snapshot: Adding comment to file %s" % comments_file_path)

    # load yml file
    comments = {}
    if os.path.exists(comments_file_path):
        with open(comments_file_path, "r") as fp:
            comments = yaml.load(fp) or {}

    # comment is now a dictionary so that we can also include the user:
    comments_value = {
        "comment": comment,
        "sg_user": self._app.context.user
    }

    # add entry for snapshot file:
    comments_key = os.path.basename(snapshot_file_path)
    comments[comments_key] = comments_value

    # and save yml file
    old_umask = os.umask(0)
    try:
        with open(comments_file_path, "w") as fp:
            yaml.dump(comments, fp)
    finally:
        os.umask(old_umask)
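# Illustrative sketch (file name and user entity assumed) of the on-disk
# structure documented in the docstring above, serialized the same way
# _add_snapshot_comment writes it:
import yaml

comments = {
    "scene_v001_snapshot.nk": {
        "comment": "first lighting pass",
        "sg_user": {"type": "HumanUser", "id": 42},
    }
}
print(yaml.dump(comments))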
class WritableEnvironment(Environment):
    """
    Represents a mutable environment.

    If you need to make changes to the environment, this class should be
    used rather than the Environment class. Additional methods are added
    to support modification and updates, and handling of writing yaml
    content back to disk.
    """

    def __init__(self, env_path, pipeline_config, context=None):
        """
        Constructor
        """
        self.set_yaml_preserve_mode(True)
        Environment.__init__(self, env_path, pipeline_config, context)

    def __load_writable_yaml(self, path):
        """
        Loads yaml data from disk.

        :param path: Path to yaml file
        :returns: yaml object representing the data structure
        """
        try:
            fh = open(path, "r")
        except Exception, e:
            raise TankError("Could not open file '%s'. Error reported: '%s'" % (path, e))

        try:
            # the ruamel parser doesn't have 2.5 support so
            # only use it on 2.6+
            if self._use_ruamel_yaml_parser and not (sys.version_info < (2, 6)):
                # note that we use the RoundTripLoader loader here. This ensures
                # that structure and comments are preserved when the yaml is
                # written back to disk.
                #
                # the object returned back is a dictionary-like object
                # which also holds the additional contextual metadata
                # required by the parser to maintain the lexical integrity
                # of the content.
                from tank_vendor import ruamel_yaml
                yaml_data = ruamel_yaml.load(fh, ruamel_yaml.RoundTripLoader)
            else:
                # use pyyaml parser
                yaml_data = yaml.load(fh)
        except Exception, e:
            raise TankError("Could not parse file '%s'. Error reported: '%s'" % (path, e))
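# Illustrative sketch of what the RoundTripLoader branch above buys you: a
# round trip through ruamel preserves comments and key order, which the
# plain pyyaml branch discards. This uses the stock ruamel.yaml package and
# its legacy top-level API rather than the vendored tank_vendor copy.
import ruamel.yaml

TEXT = "engines:\n  # comments survive the round trip\n  tk-maya: {}\n"
data = ruamel.yaml.load(TEXT, ruamel.yaml.RoundTripLoader)
print(ruamel.yaml.dump(data, Dumper=ruamel.yaml.RoundTripDumper))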
def __get_nuke_path(self):
    # Get the shotgun paths.yml file
    current_engine = sgtk.platform.current_engine()
    context = current_engine.context
    tk = tank.tank_from_path(context.tank.roots["primary"])
    config_path = tk.pipeline_configuration.get_path() + "\\config\\env\\includes\\paths.yml"

    # use yaml to extract the path location
    with open(config_path, 'r') as yml_config_file:
        config_file = yaml.load(yml_config_file)

    nuke_path = config_file["nuke_windows"]
    return nuke_path
def __get_sg_config_data(shotgun_cfg_path, user="******"):
    """
    Returns the shotgun configuration yml parameters given a config file.

    The shotgun.yml may look like:

        host: str
        api_script: str
        api_key: str
        http_proxy: str

    or may now look like:

        <User>:
            host: str
            api_script: str
            api_key: str
            http_proxy: str

        <User>:
            host: str
            api_script: str
            api_key: str
            http_proxy: str

    The optional user param refers to the <User> in the shotgun.yml.
    If a user is not found the old style is attempted.

    :param shotgun_cfg_path: path to config file
    :param user: Optional user to pass when a multi-user config is being read
    :returns: dictionary with key host and optional keys api_script, api_key
              and http_proxy
    """
    # read in settings from shotgun.yml
    if not os.path.exists(shotgun_cfg_path):
        raise TankError("Could not find shotgun configuration file '%s'!" % shotgun_cfg_path)

    # load the config file
    try:
        open_file = open(shotgun_cfg_path)
        try:
            file_data = yaml.load(open_file)
        finally:
            open_file.close()
    except Exception, error:
        raise TankError("Cannot load config file '%s'. Error: %s" % (shotgun_cfg_path, error))
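# Illustrative sketch of the two shotgun.yml layouts described in the
# docstring above; hosts, keys and the "studio_user" section name are all
# assumed values.
import yaml

OLD_STYLE = """
host: https://mystudio.shotgunstudio.com
api_script: Toolkit
api_key: 0000000000000000
"""

NEW_STYLE = """
studio_user:
  host: https://mystudio.shotgunstudio.com
  api_script: Toolkit
  api_key: 0000000000000000
"""

data = yaml.safe_load(NEW_STYLE)
# a multi-user file is keyed by <User>; fall back to the old flat layout
config = data.get("studio_user", data)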
def get_pipeline_configs(self):
    """
    Returns a list of current os paths to pipeline configs
    """
    data = []

    if os.path.exists(self._config_file):
        # we have a config already - so read it in
        fh = open(self._config_file, "rt")
        try:
            data = yaml.load(fh)
        except Exception, e:
            raise TankError("Looks like the config lookup file is corrupt. Please contact "
                            "support! File: '%s' Error: %s" % (self._config_file, e))
        finally:
            fh.close()
def _populate_cache_item_data(self, item):
    """
    Loads the CacheItem's YAML data from disk.
    """
    path = item.path
    try:
        fh = open(path, "r")
        raw_data = yaml.load(fh)
    except IOError:
        raise TankFileDoesNotExistError("File does not exist: %s" % path)
    except Exception, e:
        raise TankError("Could not open file '%s'. Error reported: '%s'" % (path, e))

    # Since it wasn't an IOError it means we have an open
    # filehandle to close.
    fh.close()
def __tank_startup_node_callback():
    """
    Callback that fires every time a node gets created.

    Carefully manage exceptions here so that a bug in Tank never
    interrupts the normal workflows in Nuke.
    """
    try:
        # look for the root node - this is created only when a new or
        # existing file is opened.
        tn = nuke.thisNode()
        if tn != nuke.root():
            return

        if nuke.root().name() == "Root":
            # file->new
            # base it on the context we 'inherited' from the prev session
            # get the context from the previous session - this is helpful
            # if the user does file->new
            project_root = os.environ.get("TANK_NUKE_ENGINE_INIT_PROJECT_ROOT")
            tk = tank.Tank(project_root)

            ctx_yaml = os.environ.get("TANK_NUKE_ENGINE_INIT_CONTEXT")
            if ctx_yaml:
                try:
                    new_ctx = yaml.load(ctx_yaml)
                except:
                    new_ctx = tk.context_empty()
            else:
                new_ctx = tk.context_empty()

        else:
            # file->open
            file_name = nuke.root().name()
            try:
                tk = tank.tank_from_path(file_name)
            except tank.TankError, e:
                __create_tank_disabled_menu(e)
                return

            # try to get the current ctx and inherit its values if possible
            curr_ctx = None
            if tank.platform.current_engine():
                curr_ctx = tank.platform.current_engine().context

            new_ctx = tk.context_from_path(file_name, curr_ctx)

        # now restart the engine with the new context
        __engine_refresh(tk, new_ctx)

    except Exception, e:
        # the original except clause is truncated in this snippet; per the
        # docstring above, errors are swallowed so Nuke keeps working
        # (reusing the disabled-menu helper here is an assumption)
        __create_tank_disabled_menu(e)
def save_comments(self, file_path, comment):
    """
    Add a comment to the comment file for the saved file. The comments are
    stored in the following format:

    {<file name> : {
        comment: String - comment to store
        sg_user: Shotgun entity dictionary representing the user that
                 created the snapshot
        }
     ...
    }

    :param str file_path: path to the snapshot file.
    :param str comment: comment string to save.
    """
    # cleanse the comment string
    orig_comment = comment
    comment = ""
    for c in orig_comment:
        if c in ['\n', ';', '\'', '}', '{', '`', '~', ':', '@', '<', '>', '\\']:
            comment += '_'
        else:
            comment += c

    # get comments file path:
    comments_file_path = self._get_comments_file_path(file_path)
    self._app.log_debug("Save_As: Adding comment to file %s" % comments_file_path)

    # load yml file
    comments = {}
    if os.path.exists(comments_file_path):
        comments = yaml.load(open(comments_file_path, "r"))

    # comment is now a dictionary so that we can also include the user:
    comments_value = {"comment": comment, "sg_user": self._app.context.user}

    # add entry for snapshot file:
    comments_key = os.path.basename(file_path)
    comments[comments_key] = comments_value

    # and save yml file
    old_umask = os.umask(0)
    try:
        yaml.dump(comments, open(comments_file_path, "w"))
    finally:
        os.umask(old_umask)
def get_manifest(self, file_location):
    """
    Returns the info.yml metadata associated with this descriptor.
    Note that this call involves deep introspection; in order to
    access the metadata we normally need to have the code content
    local, so this method may trigger a remote code fetch if necessary.

    :param file_location: Path relative to the root of the bundle where
        info.yml can be found.

    :returns: dictionary with the contents of info.yml
    """
    if self.__manifest_data is None:
        # make sure payload exists locally
        if not self.exists_local():
            # @todo - at this point add to a metadata cache for performance.
            # we can either just store it in a pickle, in order to avoid
            # yaml parsing, which is expensive, or if we want to be more
            # fancy, we can maintain a single "registry" file which holds
            # the metadata for all known bundles in a single place. given
            # that all descriptors are immutable (except the ones where the
            # immutable property returns false), we can keep adding to this
            # global cache file over time.
            self.download_local()

        # get the metadata
        bundle_root = self.get_path()
        file_path = os.path.join(bundle_root, file_location)

        if not os.path.exists(file_path):
            # at this point we have downloaded the bundle, but it may have
            # an invalid internal structure.
            raise TankMissingManifestError("Toolkit metadata file '%s' missing." % file_path)

        try:
            file_data = open(file_path)
            try:
                metadata = yaml.load(file_data)
            finally:
                file_data.close()
        except Exception as exp:
            raise TankDescriptorError("Cannot load metadata file '%s'. Error: %s" % (file_path, exp))

        # cache it
        self.__manifest_data = metadata

    return self.__manifest_data
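# A note on the bare yaml.load calls in these snippets: on PyYAML >= 5.1
# they warn, because the default loader can construct arbitrary Python
# objects from tagged yaml. For plain-data manifests such as info.yml an
# equivalent, safer read is a minimal sketch like this:
import yaml

def load_manifest(path):
    # SafeLoader restricts parsing to standard YAML tags only
    with open(path, "r") as fh:
        return yaml.load(fh, Loader=yaml.SafeLoader)  # same as yaml.safe_load(fh)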
def get_core_api_version_for_pc(pc_root):
    """
    Returns the version number string for the core API, based on a given path
    """
    # read this from info.yml
    info_yml_path = os.path.join(pc_root, "install", "core", "info.yml")
    try:
        info_fh = open(info_yml_path, "r")
        try:
            data = yaml.load(info_fh)
        finally:
            info_fh.close()
        data = str(data.get("version", "unknown"))
    except:
        data = "unknown"
    return data