def _keys_from_definition(cls, definition, template_name, keys):
    """Extracts Template Keys from a definition.

    :param definition: Template definition as string
    :param template_name: Name of template.
    :param keys: Mapping of key names to keys as dict

    :returns: Mapping of key names to keys and a list of keys ordered as they
              appear in the definition.
    :rtype: Tuple of (dict, list)
    """
    names_keys = {}
    ordered_keys = []
    # regular expression to find key names
    regex = r"(?<={)%s(?=})" % dsk_constants.TEMPLATE_KEY_NAME_REGEX
    key_names = re.findall(regex, definition)
    for key_name in key_names:
        key = keys.get(key_name)
        if key is None:
            msg = "Template definition for template %s refers to key {%s}, which does not appear in supplied keys."
            raise DevError(msg % (template_name, key_name))
        else:
            if names_keys.get(key.name, key) != key:
                # Different keys using same name
                msg = ("Template definition for template %s uses two keys" +
                       " which use the name '%s'.")
                raise DevError(msg % (template_name, key.name))
            names_keys[key.name] = key
            ordered_keys.append(key)
    return names_keys, ordered_keys
def _init_format_spec(self, name, format_spec):
    """
    Asserts that the format_spec parameter is a valid value.

    :param name: Name of this template key.
    :param format_spec: Parameter to be validated.

    :raises DevError: Raised when the parameter is not a string matching a %d format option.
    """
    # No format spec means no formatting options.
    if format_spec is None:
        return

    if not isinstance(format_spec, six.string_types):
        msg = "format_spec for IntegerKey %s is not of type string: %s"
        raise DevError(msg % (name, format_spec))

    if len(format_spec) == 0:
        raise DevError("format_spec can't be empty.")

    matches = self._FORMAT_SPEC_RE.match(format_spec)
    if not matches:
        raise DevError(
            "format_spec for <Sgtk IntegerKey %s> has to either be a number (e.g. '3') or "
            "a 0 followed by a number (e.g. '03'), not '%s'" % (name, format_spec))

    groups = matches.groups()
    # groups[0] is either '0' (zero padding) or '', in which case the padding is ' '
    self._zero_padded = groups[0] == "0"
    # groups[1] is the minimum width of the number.
    self._minimum_width = int(groups[1])
    self._format_spec = format_spec
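# Illustrative sketch (not part of the original module), following the parsing
# described above: a format_spec of "03" splits into groups ("0", "3"), i.e.
# zero padded with a minimum width of 3 ("%03d" style formatting), while "3"
# splits into ("", "3"), i.e. space padded with a minimum width of 3.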
def _resolve_frame_spec(self, format_string, format_spec):
    """
    Turns a format_string such as "%d" and a format_spec such as "03"
    into a sequence identifier (e.g. "%03d").
    """
    error_msg = "Illegal format pattern for framespec: '%s'. " % format_string
    error_msg += "Legal patterns are: %s" % ", ".join(
        self.VALID_FORMAT_STRINGS)

    if format_string not in self.VALID_FORMAT_STRINGS:
        raise DevError(error_msg)

    if format_string in ("<UDIM>", "$UDIM"):
        # UDIM's aren't padded!
        return format_string

    # "01" is treated as unpadded - a single digit never needs padding.
    use_zero_padding = format_spec.startswith("0") and format_spec != "01"
    places = int(format_spec) if format_spec.isdigit() else 1

    if use_zero_padding:
        if format_string == "%d":
            frame_spec = "%%0%dd" % places
        elif format_string == "#":
            frame_spec = "#" * places
        elif format_string == "@":
            frame_spec = "@" * places
        elif format_string == "$F":
            frame_spec = "$F%d" % places
        else:
            raise DevError(error_msg)
    else:
        # non-zero-padded rules: a single token regardless of places
        if format_string in ("%d", "#", "@", "$F"):
            frame_spec = format_string
        else:
            raise DevError(error_msg)

    return frame_spec
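# Illustrative sketch of the mapping performed above (hypothetical inputs):
#   _resolve_frame_spec("%d", "03")      -> "%03d"
#   _resolve_frame_spec("#", "04")       -> "####"
#   _resolve_frame_spec("$F", "04")      -> "$F4"
#   _resolve_frame_spec("@", "1")        -> "@"       (a width of 1 is never padded)
#   _resolve_frame_spec("<UDIM>", "04")  -> "<UDIM>"  (UDIMs are never padded)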
def get_roots_metadata(pipeline_config_path):
    """
    Loads and validates the roots metadata file.

    The roots.yml file is a reflection of the local storages setup in Shotgun
    at project setup time and may contain anomalies in the path layout
    structure.

    The roots data will be prepended to paths and used for comparison so it is
    critical that the paths are in a correct normalized form once they have
    been loaded into the system.

    :param pipeline_config_path: Path to the root of a pipeline configuration,
                                 (excluding the "config" folder).

    :returns: A dictionary structure with an entry for each storage defined.
              Each storage maps to a ShotgunPath object wrapping its mac_path,
              windows_path and linux_path values, for example

              { "primary"  : <ShotgunPath>,
                "textures" : <ShotgunPath> }
    """
    # now read in the roots.yml file
    # this will contain something like
    # {'primary': {'mac_path': '/studio', 'windows_path': None, 'linux_path': '/studio'}}
    roots_yml = os.path.join(pipeline_config_path,
                             dsk_constants.ENVI_TEMPLATE_NAME,
                             dsk_constants.ENVI_NAMING_CORE,
                             dsk_constants.STORAGE_ROOTS_FILE)

    try:
        # if file is empty, initialize with empty dict...
        data = yaml_cache.g_yaml_cache.get(roots_yml, deepcopy_data=False) or {}
    except Exception as e:
        raise DevError("Looks like the roots file is corrupt. Please contact "
                       "support! File: '%s' Error: %s" % (roots_yml, e))

    # if there are more than zero storages defined, ensure one of them is the primary storage
    if len(data) > 0 and dsk_constants.PRIMARY_STORAGE_NAME not in data:
        raise DevError("Could not find a primary storage in roots file "
                       "for configuration %s!" % pipeline_config_path)

    # sanitize path data by passing it through the ShotgunPath
    shotgun_paths = {}
    for storage_name in data:
        shotgun_paths[storage_name] = ShotgunPath.from_shotgun_dict(
            data[storage_name])

    return shotgun_paths
def __init__(self, name, default=None, format_spec="%Y-%m-%d-%H-%M-%S"):
    """
    :param str name: Name by which the key will be referred.
    :param default: Default value for this field. Acceptable values are:

        - ``None``
        - a string formatted according to the format_spec, e.g.
          ``2015-06-24-21-20-30`` for the default format_spec
        - ``utc_now``, which means the current time in the UTC timezone will be used
          as the default value.
        - ``now``, which means the current time in the local timezone will be used
          as the default value.
    :param str format_spec: Specification for formatting when casting to/from a string.
        The format follows the convention of :meth:`time.strftime`. The default value
        is ``%Y-%m-%d-%H-%M-%S``. Given June 24th, 2015 at 9:20:30 PM, this will yield
        ``2015-06-24-21-20-30``.
    """
    # Can't use __repr__ because of a chicken and egg problem. The base class validates the
    # default value, so format_spec needs to be set first. But if I am testing format_spec
    # before calling the base class, then repr will crash since self.name won't have been set
    # yet.
    if isinstance(format_spec, six.string_types) is False:
        raise DevError(
            "format_spec for <Sgtk TimestampKey %s> is not of type string: %s" %
            (name, format_spec.__class__.__name__))
    self._format_spec = format_spec

    if isinstance(default, six.string_types):
        # if the user passes in now or utc, we'll generate the current time as the default time.
        if default.lower() == "now":
            default = self.__get_current_time
        elif default.lower() == "utc_now":
            default = self.__get_current_utc_time
        else:
            # Normally the base class is the one to validate, but in this case we need to
            # convert the string value into an actual value because the default is expected to
            # be a value and not a string, so we'll validate right away.
            if not self.validate(default):
                raise DevError(self._last_error)
            # If we are here everything went well, so convert the string to an actual value.
            default = datetime.datetime.strptime(default, self.format_spec)
    # Base class will validate other values using the format specifier.
    elif default is not None:
        raise DevError(
            "default for <Sgtk TimestampKey %s> is not of type string or None: %s" %
            (name, default.__class__.__name__))

    super(TimestampKey, self).__init__(name, default=default)
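# Illustrative sketch (hypothetical key name, not part of the original module):
#   TimestampKey("cache_time", default="now") stores a callable that returns the
#   current local time whenever a default is needed, while
#   TimestampKey("cache_time", default="2015-06-24-21-20-30") is validated and
#   converted to a datetime via strptime using the default format_spec.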
def _conform_template_data(template_data, template_name):
    """
    Takes data for a single template and conforms it to the expected data structure.
    """
    if isinstance(template_data, six.string_types):
        template_data = {"definition": template_data}
    elif not isinstance(template_data, dict):
        raise DevError(
            "template %s has data which is not a string or dictionary." % template_name)

    if "definition" not in template_data:
        raise DevError("Template %s missing definition." % template_name)

    return template_data
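# Illustrative sketch (hypothetical template entry): a bare string in templates.yml
# such as
#   shot_comp: "shots/{Shot}/comp/{name}.nk"
# is conformed to {"definition": "shots/{Shot}/comp/{name}.nk"}, whereas a
# dictionary entry must already carry a "definition" field of its own.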
def _get_storage_roots_metadata(storage_roots_file):
    """
    Parse the supplied storage roots file

    :param storage_roots_file: Path to the roots file.

    :return: The parsed metadata as a dictionary.
    """
    log.debug("Reading storage roots file from disk: %s" % (storage_roots_file,))

    try:
        # keep a handle on the raw metadata read from the roots file
        roots_metadata = (
            yaml_cache.g_yaml_cache.get(storage_roots_file, deepcopy_data=False)
            or {}
        )  # if file is empty, initialize with empty dict
    except Exception as e:
        raise DevError(
            "Looks like the roots file is corrupt. "
            "Please contact support! "
            "File: '%s'. "
            "Error: %s" % (storage_roots_file, e)
        )

    log.debug("Read metadata: %s" % (roots_metadata,))

    return roots_metadata
def make_keys(data):
    """
    Factory method for instantiating template keys.

    :param data: Key data.
    :type data: Dictionary of the form: {<key name>: {'type': <key type>, <option>: <option value>}}

    :returns: Dictionary of the form: {<key name>: <TemplateKey object>}
    """
    keys = {}
    names_classes = {
        "str": StringKey,
        "int": IntegerKey,
        "sequence": SequenceKey,
        "timestamp": TimestampKey,
    }
    for initial_key_name, key_data in data.items():
        # We need to remove data before passing in as arguments, so copy it.
        prepped_data = key_data.copy()
        class_name = prepped_data.pop("type")
        KeyClass = names_classes.get(class_name)
        if not KeyClass:
            raise DevError("Invalid type: '%s'. Valid types are: %s" %
                           (class_name, list(names_classes.keys())))

        if "alias" in prepped_data:
            # The alias becomes the key's name and is used internally by Templates;
            # the original name remains the lookup key in the returned dictionary.
            key_name = prepped_data.pop("alias")
        else:
            key_name = initial_key_name

        key = KeyClass(key_name, **prepped_data)
        keys[initial_key_name] = key

    return keys
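# Illustrative sketch (hypothetical key definitions, not part of the original module):
#
#   keys = make_keys({
#       "Shot":    {"type": "str"},
#       "version": {"type": "int", "format_spec": "03"},
#       "SEQ":     {"type": "sequence", "format_spec": "04"},
#   })
#
# keys["version"] would be an IntegerKey named "version"; if an "alias" entry had
# been supplied it would become the key's internal name, while "version" stayed
# the lookup key in the returned dictionary.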
def make_template_strings(data, keys, template_paths): """ Factory function which creates TemplateStrings. :param data: Data from which to construct the template strings. :type data: Dictionary of form: {<template name>: {<option>: <option value>}} :param keys: Available keys. :type keys: Dictionary of form: {<key name> : <TemplateKey object>} :param template_paths: TemplatePaths available for optional validation. :type template_paths: Dictionary of form: {<template name>: <TemplatePath object>} :returns: Dictionary of form {<template name> : <TemplateString object>} """ template_strings = {} templates_data = _process_templates_data(data, "path") for template_name, template_data in templates_data.items(): definition = template_data["definition"] validator_name = template_data.get("validate_with") validator = template_paths.get(validator_name) if validator_name and not validator: msg = "Template %s validate_with is set to undefined template %s." raise DevError(msg % (template_name, validator_name)) template_string = TemplateString(definition, keys, template_name, validate_with=validator) template_strings[template_name] = template_string return template_strings
def _definition_variations(self, definition):
    """
    Determines all possible definitions based on combinations of optional sections.

    "{foo}"               ==> ['{foo}']
    "{foo}_{bar}"         ==> ['{foo}_{bar}']
    "{foo}[_{bar}]"       ==> ['{foo}', '{foo}_{bar}']
    "{foo}_[{bar}_{baz}]" ==> ['{foo}_', '{foo}_{bar}_{baz}']
    """
    # split definition by optional sections
    tokens = re.split(r"(\[[^]]*\])", definition)

    # seed with empty string
    definitions = [""]
    for token in tokens:
        temp_definitions = []
        # the regex returns some blank strings, skip them
        if token == "":
            continue
        if token.startswith("["):
            # check that the optional section contains a key
            if not re.search(
                    "{*%s}" % dsk_constants.TEMPLATE_KEY_NAME_REGEX, token):
                raise DevError(
                    'Optional sections must include a key definition. Token: "%s" Template: %s'
                    % (token, self))

            # Add definitions skipping this optional value
            temp_definitions = definitions[:]
            # strip brackets from token
            token = re.sub(r"[\[\]]", "", token)

        # check that the remaining token contains no dangling brackets
        if re.search(r"[\[\]]", token):
            raise DevError(
                "Square brackets are not allowed outside of optional section definitions."
            )

        # make definitions with the token appended
        for definition in definitions:
            temp_definitions.append(definition + token)

        definitions = temp_definitions

    return definitions
def _apply_fields(self, fields, ignore_types=None, platform=None,
                  skip_defaults=False):
    """
    Creates path using fields.

    :param fields: Mapping of keys to fields. Keys must match those in template
                   definition.
    :param ignore_types: Keys for whom the defined type is ignored as list of strings.
                         This allows setting a Key whose type is int with a string value.
    :param platform: Optional operating system platform. If you leave it at the
                     default value of None, paths will be created to match the
                     current operating system. If you pass in a sys.platform-style string
                     (e.g. 'win32', 'linux2' or 'darwin'), paths will be generated to
                     match that platform.
    :param skip_defaults: Optional. If set to False (the default), a key that has a
                          default value but no corresponding value in the fields
                          argument will resolve to its default value. If set to True,
                          default values are not used: keys missing from the fields
                          argument are treated as missing whether they have a default
                          value or not. Defaults to False.

    :returns: Full path, matching the template with the given fields inserted.
    """
    ignore_types = ignore_types or []

    # find largest key mapping without missing values
    keys = None
    # index of matching keys will be used to find cleaned_definition
    index = -1
    for index, cur_keys in enumerate(self._keys):
        # We are iterating through all possible key combinations from the longest to the
        # shortest and using the first one that doesn't have any missing keys.
        # skip_defaults=True on _apply_fields means we don't want to fall back on a key's
        # default when it is not specified in the fields parameter, so in that case we want
        # the _missing_keys function to flag even the default keys that are missing.
        # Therefore we need to negate the skip_defaults parameter for the _missing_keys
        # argument.
        missing_keys = self._missing_keys(fields,
                                          cur_keys,
                                          skip_defaults=not skip_defaults)
        if not missing_keys:
            keys = cur_keys
            break

    if keys is None:
        raise DevError(
            "Tried to resolve a path from the template %s and a set "
            "of input fields '%s' but the following required fields were missing "
            "from the input: %s" % (self, fields, missing_keys))

    # Process all field values through template keys
    processed_fields = {}
    for key_name, key in keys.items():
        value = fields.get(key_name)
        ignore_type = key_name in ignore_types
        processed_fields[key_name] = key.str_from_value(
            value, ignore_type=ignore_type)

    return self._cleaned_definitions[index] % processed_fields
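# Illustrative sketch (hypothetical template and keys, not part of the original
# module): given a definition such as "shots/{Shot}/comp/{Shot}_v{version}[_{name}]"
# where "version" is an IntegerKey with format_spec "03",
#   _apply_fields({"Shot": "sh010", "version": 3})
# would select the variation without the optional [_{name}] section (since "name"
# is missing from the fields) and return "shots/sh010/comp/sh010_v003" once every
# value has been pushed through its key's str_from_value().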
def value_from_str(self, str_value): """ Validates and translates a string into an appropriate value for this key. :param str_value: The string to translate. :returns: The translated value. """ if self.validate(str_value): value = self._as_value(str_value) else: raise DevError(self._last_error) return value
def _init_strict_matching(self, name, strict_matching):
    """
    Asserts that the strict_matching parameter is a valid value.

    :param name: Name of this template key.
    :param strict_matching: Parameter to be validated.

    :raises DevError: Raised when the parameter is not a boolean.
    """
    # make sure that strict_matching is not set or that it is a boolean
    if not (strict_matching is None or isinstance(strict_matching, bool)):
        msg = "strict_matching for <Sgtk IntegerKey %s> is not of type boolean: %s"
        raise DevError(msg % (name, str(strict_matching)))

    # If there is no format spec but strict_matching is set, that's an error, since
    # there is no format to enforce (or not enforce).
    if self._format_spec is None and strict_matching is not None:
        raise DevError(
            "strict_matching can't be set if there is no format_spec")

    # By default, if strict_matching is not set but there is a format spec, we'll
    # strictly match.
    if strict_matching is None and self.format_spec is not None:
        strict_matching = True

    if strict_matching:
        # This regular expression is blind to the actual length of the string for performance
        # reasons. Code that uses it should test the string's length against
        # self._minimum_width first. It first matches up to n-1 padding characters. It then
        # matches either a single 0, or an actual multiple digit number that doesn't start with
        # 0.
        self._strict_validation_re = re.compile("^%s{0,%d}((%s)|0)$" % (
            "0" if self._zero_padded else " ",
            self._minimum_width - 1,
            self._NON_ZERO_POSITIVE_INTEGER_EXP,
        ))
    else:
        self._strict_validation_re = None

    self._strict_matching = strict_matching
def str_from_value(self, value=None, ignore_type=False): """ Returns a string version of a value as appropriate for the key's setting. :param value: Value to process. If None, the key's default will be used. :param ignore_type: If true, no validation will be carried out prior to casting. :returns: String version of value as processed by the key. :raises: :class:`DevError` if value is not valid for the key. """ if value is None: if self.default is None: raise DevError( "No value provided and no default available for %s" % self) else: value = self.default elif ignore_type: return value if isinstance(value, six.string_types) else str(value) if self.validate(value): return self._as_string(value) else: raise DevError(self._last_error)
def _populate_cache_item_data(self, item): """ Loads the CacheItem's YAML data from disk. """ path = item.path try: with open(path, "r") as fh: raw_data = yaml.load(fh, Loader=yaml.FullLoader) except IOError: raise DevFileDoesNotExistError("File does not exist: %s" % path) except Exception as e: raise DevError( "Could not open file '%s'. Error reported: '%s'" % (path, e)) # Populate the item's data before adding it to the cache. item.data = raw_data
def resolve_include(file_name, include):
    """
    Resolve an include.

    If the path has a ~ or an environment variable, it will be resolved first.
    If the path is relative, it will be considered relative to the file that
    included it and it will be considered for any OS. If the path is absolute,
    it will only be considered to be a valid include if it is an absolute path
    for the current platform. Finally, the path will be sanitized to remove any
    extraneous slashes or slashes in the wrong direction.

    :param str file_name: Name of the file containing the include.
    :param str include: Include to resolve.

    :returns str: An absolute path to the resolved include or None if the file
        wasn't specified for the current platform.

    :raises DevError: Raised when the path doesn't exist.
    """
    # First resolve all environment variables and ~
    path = os.path.expanduser(os.path.expandvars(include))

    # If the path is not absolute, make it so!
    if not _is_abs(path):
        # Append it to the current file's directory.
        path = os.path.join(os.path.dirname(file_name), path)
    # We have an absolute path, so check if it is meant for this platform.
    elif not _is_current_platform_abspath(path):
        # It wasn't meant for this platform, return nothing.
        return None

    # ShotgunPath cleans up paths so that slashes are all
    # in the same direction and no doubles exist.
    path = ShotgunPath.normalize(path)

    # make sure that the resolved path exists
    if not os.path.exists(path):
        raise DevError(
            "Include resolve error in '%s': '%s' resolved to '%s' which does not exist!" % (
                file_name, include, path
            )
        )

    return path
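# Illustrative sketch (hypothetical paths, not part of the original module):
#   resolve_include("/configs/env/shot.yml", "includes/app_settings.yml")
# would join the relative include onto "/configs/env", normalize the slashes and
# return "/configs/env/includes/app_settings.yml", provided that file exists
# (otherwise a DevError is raised). An absolute include written for another
# platform, e.g. a "C:\..." path evaluated on Linux, would return None instead.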
def _process_templates_data(data, template_type):
    """
    Conforms templates data and checks for duplicate definitions.

    :param data: Dictionary in form { <template name> : <data> }
    :param template_type: path or string

    :returns: Processed data.
    """
    templates_data = {}
    # Track definitions to detect duplicates
    definitions = {}

    for template_name, template_data in data.items():
        cur_data = _conform_template_data(template_data, template_name)
        definition = cur_data["definition"]
        if template_type == "path":
            root_name = cur_data.get("root_name")
        else:
            root_name = None

        # Record this template's definition
        cur_key = (root_name, definition)
        definitions[cur_key] = definitions.get(cur_key, []) + [template_name]

        templates_data[template_name] = cur_data

    dups_msg = ""
    for (root_name, definition), template_names in definitions.items():
        if len(template_names) > 1:
            # We have a duplicate
            dups_msg += "%s: %s\n" % (", ".join(template_names), definition)

    if dups_msg:
        raise DevError(
            "It looks like you have one or more "
            "duplicate entries in your templates.yml file. Each template path that you "
            "define in the templates.yml file needs to be unique, otherwise toolkit "
            "will not be able to resolve which template a particular path on disk "
            "corresponds to. The following duplicate "
            "templates were detected:\n %s" % dups_msg)

    return templates_data
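# Illustrative sketch (hypothetical template names): two path templates resolving
# to the same (root_name, definition) pair are reported as duplicates, e.g.
#   shot_comp:        shots/{Shot}/comp/{name}.nk
#   shot_comp_backup: shots/{Shot}/comp/{name}.nk
# both conform to the same definition on the same root, so a DevError listing
# both template names is raised.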
def read_templates(pipeline_configuration):
    """
    Creates templates and keys based on contents of templates file.

    :param pipeline_configuration: pipeline config object

    :returns: Dictionary of form {template name: template object}
    """
    per_platform_roots = pipeline_configuration.get_all_platform_data_roots()
    data = pipeline_configuration.get_templates_config()

    # get dictionaries from the templates config file:
    def get_data_section(section_name):
        # support both the case where the section
        # name exists and is set to None and the case where it doesn't exist
        d = data.get(section_name)
        if d is None:
            d = {}
        return d

    keys = templatekey.make_keys(get_data_section("keys"))

    template_paths = make_template_paths(
        get_data_section("paths"),
        keys,
        per_platform_roots,
        default_root=pipeline_configuration.get_primary_data_root_name())

    template_strings = make_template_strings(get_data_section("strings"), keys,
                                             template_paths)

    # Detect duplicate names across paths and strings
    dup_names = set(template_paths).intersection(set(template_strings))
    if dup_names:
        raise DevError("Detected paths and strings with the same name: %s" %
                       str(list(dup_names)))

    # Put paths and strings together
    templates = template_paths
    templates.update(template_strings)
    return templates
def value_from_str(self, str_value):
    """
    Validates and translates a string into an appropriate value for this key.

    :param str_value: The string to translate.

    :returns: The translated value.
    """
    # this is used by the parser when transforming
    # a path or string into an actual value.
    # in this case, we don't want to validate transforms
    # such as the substring regex transform, since these
    # may not be valid in both directions.
    #
    # for example, a regex that extracts the initials from
    # a "Firstname Lastname" string will result in a value
    # which will not match the regex that is used to
    # extract it.
    #
    if self.__validate(str_value, validate_transforms=False):
        value = self._as_value(str_value)
    else:
        raise DevError(self._last_error)
    return value
def get_fields(self, input_path, skip_keys=None):
    """
    Extracts key name, value pairs from a string.

    Example::

        >>> input_path = '/studio_root/sgtk/demo_project_1/sequences/seq_1/shot_2/comp/publish/henry.v003.ma'
        >>> template_path.get_fields(input_path)
        {'Sequence': 'seq_1', 'Shot': 'shot_2', 'Step': 'comp', 'name': 'henry', 'version': 3}

    :param input_path: Source path for values
    :type input_path: String
    :param skip_keys: Optional keys to skip
    :type skip_keys: List

    :returns: Values found in the path based on keys in template
    :rtype: Dictionary
    """
    path_parser = None
    fields = None

    for ordered_keys, static_tokens in zip(self._ordered_keys,
                                           self._static_tokens):
        path_parser = TemplatePathParser(ordered_keys, static_tokens)
        fields = path_parser.parse_path(input_path, skip_keys)
        if fields is not None:
            break

    if fields is None:
        raise DevError("Template %s: %s" %
                       (str(self), path_parser.last_error))

    return fields
def __init__(self, pipeline_configuration_path, project_name, descriptor=None):
    """
    Constructor. Do not call this directly, use the factory methods
    in pipelineconfig_factory.

    NOTE ABOUT SYMLINKS!

    The pipeline_configuration_path is always populated by the paths
    that were registered in shotgun, regardless of how the symlink setup
    is handled on the OS level.

    :param str pipeline_configuration_path: Path to the pipeline configuration on disk.
    :param str project_name: Name of the project this pipeline configuration is associated with.
    :param descriptor: Descriptor that was used to create this pipeline configuration.
        Defaults to ``None`` for backwards compatibility with Bootstrapper that only
        pass down one argument. Also this argument was passed down by cores from
        v0.18.72 to 0.18.94. The descriptor is now read from the disk inside
        pipeline_configuration.yml.
    :type descriptor: :class:`sgtk.descriptor.ConfigDescriptor`
    """
    self._project_name = project_name
    self._pc_root = pipeline_configuration_path
    self._roots = get_roots_metadata(self._pc_root)

    # keep a storage roots object interface instance in order to query roots
    # info as needed
    config_folder = os.path.join(self._pc_root,
                                 dsk_constants.ENVI_TEMPLATE_NAME)
    self._storage_roots = StorageRoots.from_config(config_folder)
    if self._storage_roots.required_roots and not self._storage_roots.default_path:
        raise DevError(
            "Could not identify a default storage root for this pipeline "
            "configuration! File: '%s'" % (self._storage_roots.roots_file, ))
def write(cls, sg_connection, config_folder, storage_roots):
    """
    Given a ``StorageRoots`` object, write its metadata to the standard
    roots location within the supplied config folder. The method will write the
    corresponding local storage paths to the file as defined in Shotgun. This
    action will overwrite any existing storage roots file defined by the
    configuration.

    :param sg_connection: An existing SG connection, used to query local
        storage entities to ensure paths are up-to-date when the file is
        written.
    :param config_folder: The configuration folder under which the required
        roots file is written.
    :param storage_roots: A ``StorageRoots`` object instance that defines the
        required roots.
    """
    (local_storage_lookup, unmapped_roots) = storage_roots.get_local_storages(
        sg_connection
    )

    roots_file = os.path.join(config_folder, cls.STORAGE_ROOTS_FILE_PATH)

    log.debug("Writing storage roots to: %s" % (roots_file,))

    # raise an error if there are any roots that cannot be mapped to SG
    # local storage entries
    if unmapped_roots:
        raise DevError(
            "The following storages are defined by %s but cannot be "
            "mapped to a local storage in Shotgun: %s"
            % (roots_file, ", ".join(unmapped_roots))
        )

    if os.path.exists(roots_file):
        # warn if this file already exists
        log.warning(
            "The file '%s' exists in the configuration "
            "but will be overwritten with an auto generated file." % (roots_file,)
        )

    # build up a new metadata dict
    roots_metadata = storage_roots.metadata

    for root_name, root_info in storage_roots:

        # get the cached SG storage dict
        sg_local_storage = local_storage_lookup[root_name]

        # get the local storage as a ShotgunPath object
        storage_sg_path = ShotgunPath.from_shotgun_dict(sg_local_storage)

        # update the root's metadata with the dictionary of all
        # sys.platform-style paths
        root_info.update(storage_sg_path.as_shotgun_dict())

    log.debug("Writing storage roots metadata: %s" % (roots_metadata,))

    # write the new metadata to disk
    with filesystem.auto_created_yml(roots_file) as fh:
        yaml.safe_dump(roots_metadata, fh, default_flow_style=False)

    log.debug("Finished writing storage roots file: %s" % (roots_file,))
def __init__(
    self,
    name,
    default=None,
    choices=None,
    filter_by=None,
    shotgun_entity_type=None,
    shotgun_field_name=None,
    exclusions=None,
    abstract=False,
    length=None,
    subset=None,
    subset_format=None,
):
    """
    :param str name: Name by which the key will be referred.
    :param str default: Default value for the key.
    :param choices: List of possible values for this key. Can be either a list or a
                    dictionary of choice:label pairs.
    :param str filter_by: Name of filter type to limit values for string. Currently
                          only accepted values are 'alphanumeric', 'alpha', None and a regex string.
    :param str shotgun_entity_type: For keys directly linked to a shotgun field, the entity type.
    :param str shotgun_field_name: For keys directly linked to a shotgun field, the field name.
    :param list exclusions: List of forbidden values.
    :param bool abstract: Flagging that this should be treated as an abstract key.
    :param int length: If non-None, indicating that the value should be of a fixed length.
    :param str subset: Regular expression defining a subset of the value to use.
    :param str subset_format: String to express the formatting of subset tokens.
    """
    self._filter_by = filter_by

    # Build regexes for alpha and alphanumeric filter_by clauses
    #
    # Note that we cannot use a traditional [^a-zA-Z0-9] regex since we want
    # to support unicode and not just ascii. \W covers "Non-word characters",
    # which is basically the international equivalent of 7-bit ascii
    #
    self._filter_regex_u = None
    self._custom_regex_u = None

    if self._filter_by == "alphanumeric":
        self._filter_regex_u = re.compile(r"[\W_]", re.UNICODE)

    elif self._filter_by == "alpha":
        self._filter_regex_u = re.compile(r"[\W_0-9]", re.UNICODE)

    elif self._filter_by is not None:
        # filter_by is a regex
        self._custom_regex_u = re.compile(self._filter_by, re.UNICODE)

    self._subset_str = subset
    self._subset_format = subset_format

    if self._subset_format and sys.version_info < (2, 6):
        raise DevError(
            "Subset formatting in template keys requires python 2.6+!")

    if subset:
        try:
            self._subset_regex = re.compile(subset, re.UNICODE)
        except Exception as e:
            raise DevError(
                "Template key %s: Invalid subset regex '%s': %s" %
                (name, subset, e))
    else:
        self._subset_regex = None

    super(StringKey, self).__init__(
        name,
        default=default,
        choices=choices,
        shotgun_entity_type=shotgun_entity_type,
        shotgun_field_name=shotgun_field_name,
        exclusions=exclusions,
        abstract=abstract,
        length=length,
    )

    if self._subset_format and not self._subset_str:
        raise DevError(
            "%s: Cannot specify subset_format parameter without a subset parameter."
            % self)
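# Illustrative sketch (hypothetical keys, not part of the original module):
#   StringKey("name", filter_by="alphanumeric") rejects values containing
#   non-word characters or underscores (the [\W_] regex above), while something like
#   StringKey("user_initials",
#             subset=r"([A-Z])[a-z]* ([A-Z])[a-z]*",
#             subset_format="{0}{1}")
#   could reduce a "Firstname Lastname" value to "FL" when building strings,
#   assuming subset_format uses str.format-style group substitution.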
def make_template_paths(data, keys, all_per_platform_roots, default_root=None):
    """
    Factory function which creates TemplatePaths.

    :param data: Data from which to construct the template paths.
                 Dictionary of form: {<template name>: {<option>: <option value>}}
    :param keys: Available keys. Dictionary of form: {<key name> : <TemplateKey object>}
    :param all_per_platform_roots: Root paths for all platforms. nested dictionary
                                   first keyed by storage root name and then by
                                   sys.platform-style os name.
    :param default_root: Optional name of the storage root to fall back on for
                         templates that do not explicitly specify a root_name.

    :returns: Dictionary of form {<template name> : <TemplatePath object>}
    """
    if data and not all_per_platform_roots:
        raise DevError(
            "At least one root must be defined when using 'path' templates.")

    template_paths = {}
    templates_data = _process_templates_data(data, "path")

    for template_name, template_data in templates_data.items():
        definition = template_data["definition"]
        root_name = template_data.get("root_name")
        if not root_name:
            # If the root name is not explicitly set we use the default arg
            # provided
            if default_root:
                root_name = default_root
            else:
                raise DevError(
                    "The template %s (%s) can not be evaluated. No root_name "
                    "is specified, and no root name can be determined from "
                    "the configuration. Update the template definition to "
                    "include a root_name or update your configuration's "
                    "roots.yml file to mark one of the storage roots as the "
                    "default: `default: true`." % (template_name, definition))
        # to avoid confusion between strings and paths, validate to check
        # that each item contains at least a "/" (#19098)
        if "/" not in definition:
            raise DevError(
                "The template %s (%s) does not seem to be a valid path. A valid "
                "path needs to contain at least one '/' character. Perhaps this "
                "template should be in the strings section "
                "instead?" % (template_name, definition))

        root_path = all_per_platform_roots.get(root_name,
                                               {}).get(sgsix.platform)
        if root_path is None:
            raise DevError(
                "Undefined Shotgun storage! The local file storage '%s' is not defined for this "
                "operating system." % root_name)

        template_path = TemplatePath(
            definition,
            keys,
            root_path,
            template_name,
            all_per_platform_roots[root_name],
        )
        template_paths[template_name] = template_path

    return template_paths
def _apply_fields(self, fields, ignore_types=None, platform=None,
                  skip_defaults=False):
    """
    Creates path using fields.

    :param fields: Mapping of keys to fields. Keys must match those in template
                   definition.
    :param ignore_types: Keys for whom the defined type is ignored as list of strings.
                         This allows setting a Key whose type is int with a string value.
    :param platform: Optional operating system platform. If you leave it at the
                     default value of None, paths will be created to match the
                     current operating system. If you pass in a sys.platform-style string
                     (e.g. 'win32', 'linux2' or 'darwin'), paths will be generated to
                     match that platform.
    :param skip_defaults: Optional. If set to False (the default), a key that has a
                          default value but no corresponding value in the fields
                          argument will resolve to its default value. If set to True,
                          default values are not used: keys missing from the fields
                          argument are treated as missing whether they have a default
                          value or not. Defaults to False.

    :returns: Full path, matching the template with the given fields inserted.
    """
    relative_path = super(TemplatePath,
                          self)._apply_fields(fields,
                                              ignore_types,
                                              platform,
                                              skip_defaults=skip_defaults)

    if platform is None:
        # return the current OS platform's path
        return (os.path.join(self.root_path, relative_path)
                if relative_path else self.root_path)
    else:
        platform = sgsix.normalize_platform(platform)
        # caller has requested a path for another OS
        if self._per_platform_roots is None:
            # it's possible that the additional os paths are not set for a template
            # object (mainly because of backwards compatibility reasons) and in this case
            # we cannot compute the path.
            raise DevError(
                "Template %s cannot resolve path for operating system '%s' - "
                "it was instantiated in a mode which only supports the resolving "
                "of current operating system paths." % (self, platform))

        platform_root_path = self._per_platform_roots.get(platform)

        if platform_root_path is None:
            # either the platform is undefined or unknown
            raise DevError(
                "Cannot resolve path for operating system '%s'! Please ensure "
                "that you have a valid storage set up for this platform." %
                platform)

        elif is_windows(platform):
            # use backslashes for windows
            if relative_path:
                return "%s\\%s" % (
                    platform_root_path,
                    relative_path.replace(os.sep, "\\"),
                )
            else:
                # no path generated - just return the root path
                return platform_root_path

        elif is_macos(platform) or is_linux(platform):
            # unix-like platforms - use slashes
            if relative_path:
                return "%s/%s" % (
                    platform_root_path,
                    relative_path.replace(os.sep, "/"),
                )
            else:
                # no path generated - just return the root path
                return platform_root_path
        else:
            raise DevError(
                "Cannot evaluate path. Unsupported platform '%s'." % platform)
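# Illustrative sketch (hypothetical roots, not part of the original module): with
# per-platform roots of {"win32": "P:\\projects", "linux2": "/mnt/projects"} and a
# relative path of os.path.join("shots", "sh010") produced by the base class,
# requesting platform="win32" yields P:\projects\shots\sh010 while
# platform="linux2" yields /mnt/projects/shots/sh010.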
def __init__(
    self,
    name,
    default=None,
    choices=None,
    shotgun_entity_type=None,
    shotgun_field_name=None,
    exclusions=None,
    abstract=False,
    length=None,
):
    """
    :param str name: Name by which the key will be referred.
    :param default: Default value for this key. If the default is a callable, it will be
                    invoked without any parameters whenever a default value is required.
    :param choices: List of possible values for this key. Can be either a list or a
                    dictionary of choice:label pairs.
    :param str shotgun_entity_type: For keys directly linked to a shotgun field, the entity type.
    :param str shotgun_field_name: For keys directly linked to a shotgun field, the field name.
    :param list exclusions: List of forbidden values.
    :param bool abstract: Flagging that this should be treated as an abstract key.
    :param int length: If non-None, indicating that the value should be of a fixed length.
    """
    self._name = name
    self._default = default

    # special handling for choices:
    if isinstance(choices, dict):
        # new style choices dictionary containing choice:label pairs:
        self._choices = choices
    elif isinstance(choices, list) or isinstance(choices, set):
        # old style choices - labels and choices are the same:
        self._choices = dict(list(zip(choices, choices)))
    else:
        self._choices = {}

    self._exclusions = exclusions or []
    self._shotgun_entity_type = shotgun_entity_type
    self._shotgun_field_name = shotgun_field_name
    self._is_abstract = abstract
    self._length = length
    self._last_error = ""

    # check that the key name doesn't contain invalid characters
    if not re.match(r"^%s$" % constants.TEMPLATE_KEY_NAME_REGEX, name):
        raise DevError("%s: Name contains invalid characters. "
                       "Valid characters are %s." %
                       (self, constants.VALID_TEMPLATE_KEY_NAME_DESC))

    # Validation
    if self.shotgun_field_name and not self.shotgun_entity_type:
        raise DevError(
            "%s: Shotgun field requires a shotgun entity type to be set." % self)

    if self.is_abstract and self.default is None:
        raise DevError(
            "%s: Fields marked as abstract need to have a default value!" %
            self)

    if not ((self.default is None) or self.validate(self.default)):
        raise DevError(self._last_error)

    if not all(self.validate(choice) for choice in self.choices):
        raise DevError(self._last_error)