def run(entity_type, selected_ids, **kwargs):

    sgfs = SGFS()

    paths = []
    for id_ in selected_ids:
        entity = sgfs.session.merge(dict(type=entity_type, id=id_))
        path = sgfs.path_for_entity(entity)
        if path:
            print entity, '->', repr(path)
            paths.append(path)

    if not paths:
        notify('No paths for %s %s' % (entity_type, selected_ids))
        return

    notify('Opened ' + ', '.join(sorted(paths)))

    path = paths[0]
    if sys.platform.startswith('darwin'):
        call(['open', '-a', 'Terminal', path])
    else:
        terminal_cmd = 'gnome-terminal'
        if 'MATE_DESKTOP_SESSION_ID' in os.environ:
            terminal_cmd = 'mate-terminal'
        call([terminal_cmd, '--working-directory', path])
def update(self, entity):

    # TODO: Do this async.
    by, at, desc = entity.fetch(('created_by.HumanUser.name', 'created_at', 'description'), force=True)
    self._created_by_label.setText(str(by))
    self._created_at_label.setText(str(at.strftime('%y-%m-%d %I:%M %p')))
    self._description_label.setText(str(desc))

    sgfs = SGFS(session=entity.session)
    path = sgfs.path_for_entity(entity)
    tags = sgfs.get_directory_entity_tags(path)
    tags = [t for t in tags if t['entity'] is entity]
    tag = tags[0] if tags else {}  # Guard against untagged directories.

    maya_data = tag.get('maya', {})
    time_range = '%s - %s' % (maya_data.get('min_time'), maya_data.get('max_time'))
    self._timeRangeLabel.setText(time_range)

    if entity not in self._pixmaps:
        thumbnail_path = tag.get('sgpublish', {}).get('thumbnail') if tags else None
        thumbnail_path = thumbnail_path or os.path.join(path, '.sgfs.thumbnail.jpg')
        if os.path.exists(thumbnail_path):
            pixmap = QtGui.QPixmap(thumbnail_path)
        else:
            path = os.path.abspath(os.path.join(
                __file__, '..', '..', '..', '..', 'art', 'no-thumbnail.png'
            ))
            pixmap = QtGui.QPixmap(path)
        self._pixmaps[entity] = pixmap.scaledToWidth(165, Qt.SmoothTransformation)

    self._thumbnail.setPixmap(self._pixmaps[entity])
    self._thumbnail.setFixedSize(self._pixmaps[entity].size())
def run(entity_type, selected_ids, **kwargs):

    sgfs = SGFS()

    paths = []
    for id_ in selected_ids:
        entity = sgfs.session.merge(dict(type=entity_type, id=id_))
        path = sgfs.path_for_entity(entity)
        if path:
            print entity, "->", repr(path)
            paths.append(path)

    if not paths:
        notify("No paths for %s %s" % (entity_type, selected_ids))
        return

    notify("Opened " + ", ".join(sorted(paths)))

    path = paths[0]
    if sys.platform.startswith("darwin"):
        call(["open", "-a", "Terminal", path])
    else:
        terminal_cmd = "gnome-terminal"
        if "MATE_DESKTOP_SESSION_ID" in os.environ:
            terminal_cmd = "mate-terminal"
        call([terminal_cmd, "--working-directory", path])
def assert_workspace():

    scene_path = cmds.file(q=True, sceneName=True)
    if not scene_path:
        raise ValueError("Scene is not saved.")

    sgfs = SGFS()
    tasks = sgfs.entities_from_path(scene_path, ['Task'])
    if not tasks:
        raise ValueError("Scene is not in a task.")

    path = sgfs.path_for_entity(tasks[0])
    workspace_path(path)
def open_workspace():

    path = workspace_path()

    sgfs = SGFS()
    entities = sgfs.entities_from_path(workspace_path(), ['Task', 'PublishEvent'])
    if entities:
        path = sgfs.path_for_entity(entities[0]) or path

    if sys.platform == 'darwin':
        call(['open', path])
    else:
        call(['xdg-open', path])
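# Hypothetical sketch of wiring the two helpers above into a Maya shelf or
# menu command; the wrapper itself is illustrative, only assert_workspace()
# and open_workspace() are defined above.
def open_workspace_shelf_command():
    assert_workspace()   # raises ValueError if the scene is unsaved or not in a Task
    open_workspace()     # reveals the task/publish folder in the file browser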
def update(self, entity):

    # TODO: Do this async.
    # We're also priming the sg_default_reference_namespace (assuming
    # it exists).
    by, at, desc, _ = entity.fetch((
        'created_by.HumanUser.name',
        'created_at',
        'description',
        'sg_link.Task.entity.Asset.sg_default_reference_namespace',
    ), force=True)
    self._created_by_label.setText(str(by))
    self._created_at_label.setText(str(at.strftime('%y-%m-%d %I:%M %p')))
    self._description_label.setText(str(desc))

    sgfs = SGFS(session=entity.session)
    path = sgfs.path_for_entity(entity)
    tags = sgfs.get_directory_entity_tags(path)
    tags = [t for t in tags if t['entity'] is entity]
    tag = tags[0] if tags else {}  # Guard against untagged directories.

    maya_data = tag.get('maya', {})
    time_range = '%s - %s' % (maya_data.get('min_time'), maya_data.get('max_time'))
    self._timeRangeLabel.setText(time_range)

    if entity not in self._pixmaps:
        thumbnail_path = tag.get('sgpublish', {}).get('thumbnail') if tags else None
        thumbnail_path = thumbnail_path or os.path.join(path, '.sgfs.thumbnail.jpg')
        if os.path.exists(thumbnail_path):
            pixmap = Q.Pixmap(thumbnail_path)
        else:
            path = os.path.abspath(os.path.join(
                __file__, '..', '..', '..', '..', 'art', 'no-thumbnail.png'))
            pixmap = Q.Pixmap(path)
        self._pixmaps[entity] = pixmap.scaledToWidth(165, Q.SmoothTransformation)

    self._thumbnail.setPixmap(self._pixmaps[entity])
    self._thumbnail.setFixedSize(self._pixmaps[entity].size())
def run(entity_type, selected_ids, **kwargs):

    sgfs = SGFS()
    entity = sgfs.session.merge(dict(type=entity_type, id=selected_ids[0]))
    path = sgfs.path_for_entity(entity)
    if not path:
        notify('No folders for %s %s' % (entity['type'], entity['id']))
        return

    print entity, '->', repr(path)

    # Signal to maya what entity this is.
    env = dict(os.environ)
    env['SGFS_ENTITY_TYPE'] = entity['type']
    env['SGFS_ENTITY_ID'] = str(entity['id'])

    call(['maya_launcher'], cwd=path, env=env)
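# Hypothetical sketch of the Maya side of this handshake (e.g. in a
# userSetup.py): the SGFS_ENTITY_TYPE / SGFS_ENTITY_ID variable names come
# from run() above; the helper itself is illustrative.
import os

def entity_from_environ(sgfs):
    entity_type = os.environ.get('SGFS_ENTITY_TYPE')
    entity_id = os.environ.get('SGFS_ENTITY_ID')
    if not (entity_type and entity_id):
        return None
    return sgfs.session.merge(dict(type=entity_type, id=int(entity_id)))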
def run(entity_type, selected_ids, **kwargs):

    sgfs = SGFS()

    paths = []
    for id_ in selected_ids:
        entity = sgfs.session.merge(dict(type=entity_type, id=id_))
        path = sgfs.path_for_entity(entity)
        if path:
            print entity, '->', repr(path)
            paths.append(path)

    if not paths:
        notify('No paths for %s %s' % (entity_type, selected_ids))
        return

    notify('Opening:\n' + '\n'.join(sorted(paths)))

    for path in set(paths):
        if sys.platform.startswith('darwin'):
            call(['open', path])
        else:
            call(['xdg-open', path])
def run(entity_type, selected_ids, **kwargs):

    sgfs = SGFS()

    paths = []
    for id_ in selected_ids:
        entity = sgfs.session.merge(dict(type=entity_type, id=id_))
        path = sgfs.path_for_entity(entity)
        if path:
            print entity, '->', repr(path)
            paths.append(path)

    if not paths:
        notify('No paths for %s %s' % (entity_type, selected_ids))
        return

    notify('Opened ' + ', '.join(sorted(paths)))

    for path in set(paths):
        if sys.platform.startswith('darwin'):
            call(['open', path])
        else:
            call(['xdg-open', path])
def promote_publish(publish, **kwargs):

    publish.fetch((
        'code',
        'sg_version',
        'created_by',
        'description',
        'sg_link',
        'sg_link.Task.entity',
        'sg_path_to_frames',
        'sg_path_to_movie',
        'sg_qt',
        'project',
    ))

    fields = {
        'code': '%s_v%04d' % (publish['code'], publish['sg_version']),
        'description': publish['description'],
        'entity': publish['sg_link']['entity'],
        'project': publish['project'],
        'sg_path_to_frames': publish['sg_path_to_frames'],
        'sg_path_to_movie': publish['sg_path_to_movie'],
        'sg_publish': publish,
        'sg_qt': publish['sg_qt'],
        'sg_task': publish['sg_link'],
        'user': publish['created_by'],  # Artist.

        # Just because the old "Submit Version" tool had these.
        'sg_frames_aspect_ratio': 1.0,
        'sg_movie_aspect_ratio': 1.0,

        # I should be able to do this as a very deep fetch.
        'sg_department': publish['sg_link']['step'].fetch('code') or 'Daily',
    }

    # Look up Maya frame information from the tag.
    sgfs = SGFS(session=publish.session)
    tags = sgfs.get_directory_entity_tags(sgfs.path_for_entity(publish))
    if tags and 'maya' in tags[0]:
        min_time = tags[0]['maya']['min_time']
        max_time = tags[0]['maya']['max_time']
        fields.update({
            'sg_first_frame': int(min_time),
            'sg_last_frame': int(max_time),
            'frame_count': int(max_time - min_time + 1),
        })

    # Create/update the version.
    version = kwargs.pop('version_entity', None)
    fields.update(kwargs)
    if version is not None:
        sgfs.session.update('Version', version['id'], fields)
    else:
        fields['created_by'] = publish['created_by']
        version = sgfs.session.create('Version', fields)

    futures = []
    with ThreadPoolExecutor(4) as executor:

        # Share thumbnails.
        futures.append(executor.submit(sgfs.session.share_thumbnail,
            entities=[version.minimal],
            source_entity=publish.minimal,
        ))

        # Set the status/version on the task.
        futures.append(executor.submit(sgfs.session.update,
            'Task',
            publish['sg_link']['id'],
            {
                'sg_status_list': 'rev',
                'sg_latest_version': version,
            },
        ))

        # Set the latest version on the entity.
        entity = publish['sg_link'].fetch('entity')
        if entity['type'] in ('Asset', 'Shot'):
            futures.append(executor.submit(sgfs.session.update,
                entity['type'],
                entity['id'],
                {'sg_latest_version': version},
            ))

    # Allow them to raise if they must.
    for future in futures:
        future.result()

    return version
class SceneName(object):

    def __init__(self, **kwargs):

        # Reasonable defaults.
        self.detail = ''
        self.entity_name = ''
        self.entity_type = None
        self.extension = ''
        self.revision = 1
        self.step_name = kwargs.get('step_name')
        self.sub_directory = ''
        self.directory = 'scenes'
        self.version = 0

        self.sep = ','
        self._all_seps_class = '[%s]' % re.escape('-_,.')
        self._strip_seps_re = re.compile(r'(^%s+)|(%s+$)' % (self._all_seps_class, self._all_seps_class))

        self._sgfs = SGFS()

        # Callbacks.
        self.warning = kwargs.pop('warning', self.warning)
        self.error = kwargs.pop('error', self.error)
        if self.error is False:
            self.error = self.warning

        self._step_names = []

        # Parse given paths.
        self.workspace = kwargs.pop('workspace', None)
        if self.workspace is not None:
            self._parse_workspace(self.workspace)
        self.filename = kwargs.pop('filename', None)
        if self.filename is not None:
            self._parse_filename(self.filename)

        # Set kwargs.
        self.detail = kwargs.pop('detail', self.detail)
        self.entity_name = kwargs.pop('entity_name', self.entity_name)
        self.entity_type = kwargs.pop('entity_type', self.entity_type)
        self.extension = kwargs.pop('extension', self.extension)
        self.revision = int(kwargs.pop('revision', self.revision))
        self.step_name = kwargs.pop('step_name', self.step_name)
        # The "scenes_name" one is for backwards compatibility.
        self.directory = kwargs.pop('directory', kwargs.pop('scenes_name', self.directory))
        self.sub_directory = kwargs.pop('sub_directory', self.sub_directory)
        self.version = int(kwargs.pop('version', self.version))

        if kwargs:
            raise TypeError(('%s received too many kwargs: ' % self.__class__.__name__) + ', '.join(kwargs))

    def __repr__(self):
        return '<%s at 0x%x>' % (self.__class__.__name__, id(self))

    def __str__(self):
        return self.get_path()

    def warning(self, message):
        print '# Warning:', message

    def error(self, message):
        raise ValueError(message)

    def _strip_seps(self, x):
        return self._strip_seps_re.sub('', x)

    def _split_workspace(self, workspace):

        tasks = self._sgfs.entities_from_path(workspace, ['Task'])
        if not tasks:
            self.error('No Tasks in current workspace')
            return  # In case error is not an exception.

        if len(tasks) > 1:
            warning_parts = ['%s Tasks in current workspace; picking first of:' % len(tasks)]
            for task in tasks:
                warning_parts.append(str(task))
            self.warning('\n'.join(warning_parts))

        task = tasks[0]

        try:
            task_workspace = self._sgfs.path_from_template(task, 'maya_workspace')
        except ValueError as e:
            self.warning('No maya_workspace template: %s' % e)
            task_workspace = os.path.join(self._sgfs.path_for_entity(task), 'maya')

        remaining = os.path.relpath(workspace, task_workspace)
        if remaining == os.path.curdir:
            remaining = ''

        return task, task_workspace, remaining

    def _parse_workspace(self, workspace, warn_on_remaining=True):

        task, task_workspace, remaining = self._split_workspace(workspace)

        if remaining.startswith(os.path.pardir):
            self.error('Entity not in workspace; SGFS seems broken! %s not in %s' % (task, workspace))
            return

        if remaining and warn_on_remaining:
            self.warning('workspace may be too specific; %r remains' % remaining)

        entity = task.fetch('entity')
        self.entity_type = entity['type']
        self.entity_name = entity.name
        self.step_name = task.fetch('step.Step.short_name')
        self.workspace = task_workspace
        self._step_names = []

    def _parse_filename(self, filename):

        if os.path.isabs(filename):
            rel_filename = os.path.relpath(filename, self.workspace)
            if rel_filename.startswith('.'):
                self.warning('file not in workspace; %r not in %r' % (filename, self.workspace))
                _, _, rel_filename = self._split_workspace(filename)
        else:
            rel_filename = filename

        # Extension.
        filename, self.extension = os.path.splitext(rel_filename)

        directory = os.path.dirname(filename)
        filename = os.path.basename(filename)

        # Versions and revisions come out of the basename, and then the dirname.
        m = re.search(r'v(\d+)', filename) or re.search(r'v(\d+)', directory)
        if m:
            self.version = int(m.group(1))
        else:
            self.warning('Could not match version.')
        m = re.search(r'r(\d+)', filename) or re.search(r'r(\d+)', directory)
        if m:
            self.revision = int(m.group(1))
        else:
            self.revision = 0

        # Completely strip versioning out of the basename.
        filename = re.sub(r'[_]?[rv]\d+[_/]?', '', filename)
        filename = self._strip_seps(filename)

        # Assign (sub)directory around versioning.
        directory_parts = re.split(r'v\d+(?:/revisions?)?(?:/|$)', directory)
        if len(directory_parts) > 1:
            self.directory, self.sub_directory = directory_parts
        else:
            self.directory = directory

        # Strip entity name.
        if self.entity_name and filename.lower().startswith(self.entity_name.lower()):
            filename = filename[len(self.entity_name):]
            filename = self._strip_seps(filename)
        else:
            self.warning('Could not find shot/asset name prefix.')

        # Strip step name.
        if self.step_name and filename.lower().startswith(self.step_name.lower()):
            filename = filename[len(self.step_name):]
            filename = self._strip_seps(filename)
        else:
            self.warning('Could not find task/step prefix.')

        self.detail = filename

    def get_step_names(self):

        if self._step_names:
            return self._step_names

        step_dir = os.path.dirname(os.path.dirname(self.workspace))
        try:
            for name in os.listdir(step_dir):
                # XXX: Hardcoded SGFS tag name?!
                if os.path.exists(os.path.join(step_dir, name, '.sgfs.yml')):
                    self._step_names.append(name)
        except OSError:
            pass

        # Make sure we have a step name.
        if self.step_name is None:
            if not self._step_names:
                self.error('Could not identify pipeline step.')
                self._step_names = ['']
            self.step_name = self._step_names[0]

        # Make sure the step name is in step_names.
        self._step_names.append(self.step_name)
        self._step_names = sorted(set(self._step_names), key=lambda x: x.lower())

        return self._step_names

    def get_basename(self):
        parts = [
            self.entity_name,
            self.step_name,
            self.detail,
            'v%04d' % self.version,
            'r%04d' % self.revision if self.revision else None,
        ]
        parts = [x for x in parts if x]
        parts = [re.sub(r'[^\w-]+', '_', x) for x in parts]
        parts = [self._strip_seps(x) for x in parts]
        basename = self.sep.join(parts)
        return basename + self.extension

    def get_directory(self):

        path = os.path.join(self.workspace, self.directory)

        # Add '/v0001/revisions' if in an Asset and this is a maya scene.
        # Because the artists said so. That's why.
        if self.entity_type == 'Asset' and self.directory.startswith('scenes'):
            path = os.path.join(path, 'v' + '%04d' % self.version)
            if self.revision:
                path = os.path.join(path, 'revisions')

        path = os.path.join(path, self.sub_directory)

        return path

    def get_path(self):
        return os.path.join(self.get_directory(), self.get_basename())
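# Hypothetical usage sketch: parse an existing scene path, bump the version,
# and build the next save path. The workspace and filename are illustrative;
# only the SceneName API above is assumed.
scene = SceneName(
    workspace='/projects/example/assets/character/hero/model/maya',
    filename='scenes/hero_model_v0002.mb',
)
scene.version += 1
next_path = scene.get_path()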
def promote_publish(publish, **kwargs):

    publish.fetch((
        'code',
        'sg_version',
        'created_by',
        'description',
        'sg_link',
        'sg_link.Task.entity',
        'sg_path_to_frames',
        'sg_path_to_movie',
        'sg_qt',
        'project',
    ))

    fields = {
        'code': '%s_v%04d' % (publish['code'], publish['sg_version']),
        'created_by': publish['created_by'],
        'description': publish['description'],
        'entity': publish['sg_link']['entity'],
        'project': publish['project'],
        'sg_path_to_frames': publish['sg_path_to_frames'],
        'sg_path_to_movie': publish['sg_path_to_movie'],
        'sg_publish': publish,
        'sg_qt': publish['sg_qt'],
        'sg_task': publish['sg_link'],
        'user': publish['created_by'],  # Artist.

        # Just because the old "Submit Version" tool had these.
        'sg_frames_aspect_ratio': 1.0,
        'sg_movie_aspect_ratio': 1.0,

        # I should be able to do this as a very deep fetch.
        'sg_department': publish['sg_link']['step'].fetch('code') or 'Daily',
    }

    # Look up Maya frame information from the tag.
    sgfs = SGFS(session=publish.session)
    tags = sgfs.get_directory_entity_tags(sgfs.path_for_entity(publish))
    if tags and 'maya' in tags[0]:
        min_time = tags[0]['maya']['min_time']
        max_time = tags[0]['maya']['max_time']
        fields.update({
            'sg_first_frame': int(min_time),
            'sg_last_frame': int(max_time),
            'frame_count': int(max_time - min_time + 1),
        })

    fields.update(kwargs)

    # Create the new version.
    version = sgfs.session.create('Version', fields)

    with ThreadPoolExecutor(4) as executor:

        # Share thumbnails.
        executor.submit(sgfs.session.share_thumbnail,
            entities=[version.minimal],
            source_entity=publish.minimal,
        )

        # Set the status/version on the task.
        executor.submit(sgfs.session.update,
            'Task',
            publish['sg_link']['id'],
            {
                'sg_status_list': 'rev',
                'sg_latest_version': version,
            },
        )

        # Set the latest version on the entity.
        entity = publish['sg_link'].fetch('entity')
        if entity['type'] in ('Asset', 'Shot'):
            executor.submit(sgfs.session.update,
                entity['type'],
                entity['id'],
                {'sg_latest_version': version},
            )

    return version
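# Hypothetical usage sketch: promote an existing PublishEvent to a reviewable
# Version. The entity id and description are illustrative; only
# promote_publish() itself is defined above.
if __name__ == '__main__':
    sgfs = SGFS()
    publish = sgfs.session.merge(dict(type='PublishEvent', id=1234))
    version = promote_publish(publish, description='Promoted for review.')
    print version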
def __init__(self, link=None, type=None, name=None, version=None, parent=None,
             directory=None, sgfs=None, template=None, **kwargs):

    if not sgfs:
        if isinstance(template, Entity):
            sgfs = SGFS(session=template.session)
        elif isinstance(link, Entity):
            sgfs = SGFS(session=link.session)
        else:
            sgfs = SGFS()
    self.sgfs = sgfs

    if template:

        template = sgfs.session.merge(template)

        to_fetch = ['sg_link', 'sg_type', 'code', 'sg_version']
        to_fetch.extend(_kwarg_to_field.itervalues())
        template.fetch(to_fetch)

        tpl_link, tpl_type, tpl_name, tpl_version = template.get(('sg_link', 'sg_type', 'code', 'sg_version'))
        link = link or tpl_link
        type = type or tpl_type
        name = name or tpl_name
        version = version or tpl_version

        kwargs.setdefault('source_publish', template)
        kwargs.setdefault('source_publishes', [template])
        for key, field in _kwarg_to_field.iteritems():
            kwargs.setdefault(key, template.get(field))

        if not kwargs.get('thumbnail_path'):
            # We certainly jump through a lot of hoops to do this...
            # Perhaps this should be sgfs.get_entity_tags(entity)
            publish_path = sgfs.path_for_entity(template)
            if publish_path:
                tags = sgfs.get_directory_entity_tags(publish_path)
                tags = [tag for tag in tags if tag['entity'] == template]
                if tags:
                    meta = tags[0].get('sgpublish', {})
                    thumbnail = meta.get('thumbnail')
                    if thumbnail:
                        kwargs['thumbnail_path'] = os.path.join(publish_path, thumbnail)

    if not (link and type and name):
        raise ValueError('requires link, type, and name')

    self._type = str(type)
    self._link = self.sgfs.session.merge(link)
    self._name = str(name)
    self._parent = parent

    if re.search(r'[^\w-]', self._name):
        raise ValueError('name cannot have spaces or special characters', self._name)

    # Get information about the promotion for review.
    self._review_version_entity = None
    self._review_version_fields = kwargs.pop('review_version_fields', None)

    # To only allow us to commit once.
    self._committed = False

    # Will be set into the tag.
    self.metadata = {}

    # Files to copy on commit; (src_path, dst_path)
    self._files = []

    # Set attributes from kwargs.
    for name in (
        'created_by',
        'description',
        'frames_path',
        'movie_path',
        'movie_url',
        'path',
        'source_publish',
        'source_publishes',
        'thumbnail_path',
        'trigger_event',
        'extra_fields',
    ):
        setattr(self, name, kwargs.pop(name, None))

    if kwargs:
        raise TypeError('too many kwargs: %r' % sorted(kwargs))

    # Required for normalizing.
    self._directory = None

    # Get everything into the right type before sending it to Shotgun.
    self._normalize_attributes()

    # Prep for async processes. We can do a lot of "frivolous" Shotgun
    # queries at the same time since we must do at least one.
    executor = concurrent.futures.ThreadPoolExecutor(8)
    futures = []

    # Figure out the version number (async).
    if version is None:
        futures.append(executor.submit(self._set_automatic_version))
    else:
        self._version = int(version)

    # Grab all data on the link (assuming that is all that is used when
    # creating publish templates).
    futures.append(executor.submit(self.link.fetch_core))

    # Create the review version stub (async).
    if self._review_version_fields is not None:
        futures.append(executor.submit(self._get_review_version))

    # First stage of the publish: create an "empty" PublishEvent.
    initial_data = {
        'code': self.name,
        'created_by': self.created_by,
        'description': self.description,
        'project': self.link.project(),
        'sg_link': self.link,
        'sg_path_to_frames': self.frames_path,
        'sg_path_to_movie': self.movie_path,
        'sg_qt': self.movie_url,
        'sg_source_publish': self.source_publish or None,  # singular
        'sg_source_publishes': self.source_publishes or [],  # multiple
        'sg_trigger_event_id': self.trigger_event['id'] if self.trigger_event else None,
        'sg_type': self.type,
        'sg_version': 0,  # Signifies that this is "empty".
    }
    initial_data.update(self.extra_fields)
    try:
        self.entity = self.sgfs.session.create('PublishEvent', initial_data)
    except ShotgunFault:
        if not self.link.exists():
            raise RuntimeError('%s %d (%r) has been retired' % (link['type'], link['id'], link.get('name')))
        else:
            raise

    # Let's have our async processes catch up.
    for future in futures:
        future.result()

    # Manually forced directory.
    if directory is not None:
        self._directory_supplied = True
        # Make it if it doesn't already exist, but don't care if it does.
        self._directory = os.path.abspath(directory)
    else:
        self._directory_supplied = False

        # Find a unique name using the template result as a base.
        base_path = self.sgfs.path_from_template(link, '%s_publish' % type, dict(
            publish=self,  # For b/c.
            publisher=self,
            PublishEvent=self.entity,
            self=self.entity,  # To mimic Shotgun templates.
        ))
        unique_iter = ('%s_%d' % (base_path, i) for i in itertools.count(1))
        for path in itertools.chain([base_path], unique_iter):
            try:
                os.makedirs(path)
            except OSError as e:
                if e.errno != 17:  # File exists
                    raise
            else:
                self._directory = path
                break

    # Make the directory so that tools which want to manually copy files
    # don't have to.
    utils.makedirs(self._directory)

    # If the directory is tagged with existing entities, then we cannot
    # proceed. This allows one to retire a publish and then overwrite it.
    tags = self.sgfs.get_directory_entity_tags(self._directory)
    if any(tag['entity'].exists() for tag in tags):
        raise ValueError('directory is already tagged: %r' % self._directory)
def __init__(self, link=None, type=None, name=None, version=None, parent=None,
             directory=None, sgfs=None, template=None, makedirs=True,
             defer_entities=False, **kwargs):

    if not sgfs:
        if isinstance(template, Entity):
            sgfs = SGFS(session=template.session)
        elif isinstance(link, Entity):
            sgfs = SGFS(session=link.session)
        else:
            sgfs = SGFS()
    self.sgfs = sgfs

    if template:

        template = sgfs.session.merge(template)

        to_fetch = ['sg_link', 'sg_type', 'code', 'sg_version']
        to_fetch.extend(_kwarg_to_field.itervalues())
        template.fetch(to_fetch)

        tpl_link, tpl_type, tpl_name, tpl_version = template.get(('sg_link', 'sg_type', 'code', 'sg_version'))
        link = link or tpl_link
        type = type or tpl_type
        name = name or tpl_name
        version = version or tpl_version

        kwargs.setdefault('source_publish', template)
        kwargs.setdefault('source_publishes', [template])
        for key, field in _kwarg_to_field.iteritems():
            kwargs.setdefault(key, template.get(field))

        if not kwargs.get('thumbnail_path'):
            # We certainly jump through a lot of hoops to do this...
            # Perhaps this should be sgfs.get_entity_tags(entity)
            publish_path = sgfs.path_for_entity(template)
            if publish_path:
                tags = sgfs.get_directory_entity_tags(publish_path)
                tags = [tag for tag in tags if tag['entity'] == template]
                if tags:
                    meta = tags[0].get('sgpublish', {})
                    thumbnail = meta.get('thumbnail')
                    if thumbnail:
                        kwargs['thumbnail_path'] = os.path.join(publish_path, thumbnail)

    if not (link and type and name):
        raise ValueError('requires link, type, and name')

    self._type = str(type)
    self._link = self.sgfs.session.merge(link)
    self._name = str(name)
    self._parent = parent

    invalid = re.sub(r'[\w\.,:;?&%=/-]', '', self._name)
    if invalid:
        raise ValueError('Publish name has invalid characters {!r}.'.format(''.join(sorted(set(invalid)))))

    # Get information about the promotion for review.
    self._review_version_entity = None
    self._review_version_fields = kwargs.pop('review_version_fields', None)

    # To only allow us to commit once.
    self._committed = False

    # Will be set into the tag.
    self.metadata = {}

    # Files to copy on commit; (src_path, dst_path)
    self._files = []

    self.lock_permissions = True

    # Set attributes from kwargs.
    for name in (
        'created_by',
        'description',
        'extra_fields',
        'frames_path',
        'lock_permissions',
        'movie_path',
        'movie_url',
        'path',
        'source_publish',
        'source_publishes',
        'thumbnail_path',
        'trigger_event',
    ):
        setattr(self, name, kwargs.pop(name, None))

    if kwargs:
        raise TypeError('too many kwargs: %r' % sorted(kwargs))

    # Required for normalizing.
    self._directory = None

    # Get everything into the right type before sending it to Shotgun.
    self._normalize_attributes()

    # Prep for async processes. We can do a lot of "frivolous" Shotgun
    # queries at the same time since we must do at least one.
    executor = concurrent.futures.ThreadPoolExecutor(8)
    futures = []

    # Figure out the version number (async).
    if version is None:
        futures.append(executor.submit(self._set_automatic_version))
    else:
        self._version = int(version)

    # Grab all data on the link (assuming that is all that is used when
    # creating publish templates).
    futures.append(executor.submit(self.link.fetch_core))

    # First stage of the publish: create an "empty" PublishEvent.
    initial_data = {
        'code': self.name,
        'created_by': self.created_by,
        'description': self.description,
        'project': self.link.project(),
        'sg_link': self.link,
        'sg_path_to_frames': self.frames_path,
        'sg_path_to_movie': self.movie_path,
        'sg_qt': self.movie_url,
        'sg_source_publish': self.source_publish or None,  # singular
        'sg_source_publishes': self.source_publishes or [],  # multiple
        'sg_trigger_event_id': self.trigger_event['id'] if self.trigger_event else None,
        'sg_type': self.type,
        'sg_version': 0,  # Signifies that this is "empty".
    }
    initial_data.update(self.extra_fields)

    initial_data['type'] = 'PublishEvent'
    initial_data['id'] = 0  # HACK! Although sgsession supports this.
    self.entity = self.sgfs.session.merge(initial_data)

    if not defer_entities:
        future = self.assert_entities(_executor=executor)
        if future is not None:
            futures.append(future)

    # Let's have our async processes catch up.
    for future in futures:
        future.result()

    # Manually forced directory.
    if directory is not None:
        self._directory_supplied = True
        self._directory = os.path.abspath(directory)
        # Make the directory so that tools which want to manually copy files
        # don't have to.
        if makedirs:
            utils.makedirs(self._directory)
    else:
        self._directory_supplied = False
        self._directory = self.pick_unique_directory(makedirs=makedirs)

    # If the directory is tagged with existing entities, then we cannot
    # proceed. This allows one to retire a publish and then overwrite it.
    tags = self.sgfs.get_directory_entity_tags(self._directory)
    if any(tag['entity'].exists() for tag in tags):
        raise ValueError('directory is already tagged: %r' % self._directory)
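# Hypothetical usage sketch, assuming the __init__ variants above belong to
# sgpublish's Publisher class (the class statement itself is not shown here).
# The task id, type, and name are illustrative; constructing the object
# creates the "empty" PublishEvent stub and picks the publish directory as
# implemented above.
task = SGFS().session.merge(dict(type='Task', id=1234))
publisher = Publisher(
    link=task,
    type='maya_scene',
    name='hero_model',
    description='Hypothetical publish for illustration.',
)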