def update(self, entity):
    """Refresh the detail widgets from the given publish entity.

    :param entity: A publish-like sgsession ``Entity`` to display.
    """
    # TODO: Do this async.
    by, at, desc = entity.fetch(
        ('created_by.HumanUser.name', 'created_at', 'description'),
        force=True,
    )
    self._created_by_label.setText(str(by))
    # NOTE(review): assumes created_at is always set; strftime would raise on
    # None — confirm against the Shotgun schema.
    self._created_at_label.setText(str(at.strftime('%y-%m-%d %I:%M %p')))
    self._description_label.setText(str(desc))

    sgfs = SGFS(session=entity.session)
    path = sgfs.path_for_entity(entity)
    tags = sgfs.get_directory_entity_tags(path)
    tags = [t for t in tags if t['entity'] is entity]
    # BUGFIX: the directory may carry no tag for this entity (the later
    # thumbnail lookup already anticipated the empty case); fall back to an
    # empty mapping instead of raising IndexError on tags[0].
    tag = tags[0] if tags else {}

    maya_data = tag.get('maya', {})
    time_range = '%s - %s' % (maya_data.get('min_time'), maya_data.get('max_time'))
    self._timeRangeLabel.setText(time_range)

    # Thumbnails are cached per-entity so repeated updates don't reload/rescale.
    if entity not in self._pixmaps:
        # With `tag` guarded above, the `if tags else None` conditional is no
        # longer needed; an empty tag simply yields None here.
        thumbnail_path = tag.get('sgpublish', {}).get('thumbnail')
        thumbnail_path = thumbnail_path or os.path.join(path, '.sgfs.thumbnail.jpg')
        if os.path.exists(thumbnail_path):
            pixmap = QtGui.QPixmap(thumbnail_path)
        else:
            # Fall back onto the bundled "no thumbnail" placeholder art.
            path = os.path.abspath(os.path.join(
                __file__, '..', '..', '..', '..', 'art', 'no-thumbnail.png'
            ))
            pixmap = QtGui.QPixmap(path)
        self._pixmaps[entity] = pixmap.scaledToWidth(165, Qt.SmoothTransformation)

    self._thumbnail.setPixmap(self._pixmaps[entity])
    self._thumbnail.setFixedSize(self._pixmaps[entity].size())
def update(self, entity):
    """Refresh the detail widgets from the given publish entity.

    :param entity: A publish-like sgsession ``Entity`` to display.
    """
    # TODO: Do this async.
    # We're also priming the sg_default_reference_namespace (assuming
    # it exists).
    by, at, desc, _ = entity.fetch((
        'created_by.HumanUser.name',
        'created_at',
        'description',
        'sg_link.Task.entity.Asset.sg_default_reference_namespace',
    ), force=True)
    self._created_by_label.setText(str(by))
    # NOTE(review): assumes created_at is always set; strftime would raise on
    # None — confirm against the Shotgun schema.
    self._created_at_label.setText(str(at.strftime('%y-%m-%d %I:%M %p')))
    self._description_label.setText(str(desc))

    sgfs = SGFS(session=entity.session)
    path = sgfs.path_for_entity(entity)
    tags = sgfs.get_directory_entity_tags(path)
    tags = [t for t in tags if t['entity'] is entity]
    # BUGFIX: the directory may carry no tag for this entity (the later
    # thumbnail lookup already anticipated the empty case); fall back to an
    # empty mapping instead of raising IndexError on tags[0].
    tag = tags[0] if tags else {}

    maya_data = tag.get('maya', {})
    time_range = '%s - %s' % (maya_data.get('min_time'), maya_data.get('max_time'))
    self._timeRangeLabel.setText(time_range)

    # Thumbnails are cached per-entity so repeated updates don't reload/rescale.
    if entity not in self._pixmaps:
        # With `tag` guarded above, the `if tags else None` conditional is no
        # longer needed; an empty tag simply yields None here.
        thumbnail_path = tag.get('sgpublish', {}).get('thumbnail')
        thumbnail_path = thumbnail_path or os.path.join(
            path, '.sgfs.thumbnail.jpg')
        if os.path.exists(thumbnail_path):
            pixmap = Q.Pixmap(thumbnail_path)
        else:
            # Fall back onto the bundled "no thumbnail" placeholder art.
            path = os.path.abspath(
                os.path.join(__file__, '..', '..', '..', '..', 'art',
                             'no-thumbnail.png'))
            pixmap = Q.Pixmap(path)
        self._pixmaps[entity] = pixmap.scaledToWidth(
            165, Q.SmoothTransformation)

    self._thumbnail.setPixmap(self._pixmaps[entity])
    self._thumbnail.setFixedSize(self._pixmaps[entity].size())
def promote_publish(publish, **kwargs):
    """Promote a PublishEvent into a Version entity for review.

    :param publish: The ``PublishEvent`` sgsession entity to promote.
    :param kwargs: Overrides merged into the Version fields. The special
        key ``version_entity`` (popped out) names an existing Version to
        update in place instead of creating a new one.
    :return: The created (or updated) ``Version`` entity.
    """
    # Prime everything we read below in one round-trip.
    publish.fetch((
        'code', 'sg_version', 'created_by', 'description', 'sg_link',
        'sg_link.Task.entity', 'sg_path_to_frames', 'sg_path_to_movie',
        'sg_qt', 'project',
    ))
    fields = {
        'code': '%s_v%04d' % (publish['code'], publish['sg_version']),
        'description': publish['description'],
        'entity': publish['sg_link']['entity'],
        'project': publish['project'],
        'sg_path_to_frames': publish['sg_path_to_frames'],
        'sg_path_to_movie': publish['sg_path_to_movie'],
        'sg_publish': publish,
        'sg_qt': publish['sg_qt'],
        'sg_task': publish['sg_link'],
        'user': publish['created_by'],  # Artist.

        # Just because the old "Submit Version" tool had these.
        'sg_frames_aspect_ratio': 1.0,
        'sg_movie_aspect_ratio': 1.0,

        # I should be able to do this as a very deep fetch.
        'sg_department': publish['sg_link']['step'].fetch('code') or 'Daily',
    }

    # Look up Maya frame information from the tag.
    sgfs = SGFS(session=publish.session)
    tags = sgfs.get_directory_entity_tags(sgfs.path_for_entity(publish))
    if tags and 'maya' in tags[0]:
        min_time = tags[0]['maya']['min_time']
        max_time = tags[0]['maya']['max_time']
        fields.update({
            'sg_first_frame': int(min_time),
            'sg_last_frame': int(max_time),
            'frame_count': int(max_time - min_time + 1),
        })

    # Create/update the version.
    version = kwargs.pop('version_entity', None)
    # Caller overrides win over everything computed above.
    fields.update(kwargs)
    if version is not None:
        sgfs.session.update('Version', version['id'], fields)
    else:
        # Only set created_by on creation; updating it later would rewrite
        # history on an existing Version.
        fields['created_by'] = publish['created_by']
        version = sgfs.session.create('Version', fields)

    # The remaining Shotgun writes are independent; fan them out.
    futures = []
    with ThreadPoolExecutor(4) as executor:

        # Share thumbnails.
        futures.append(executor.submit(
            sgfs.session.share_thumbnail,
            entities=[version.minimal],
            source_entity=publish.minimal,
        ))

        # Set the status/version on the task.
        futures.append(executor.submit(
            sgfs.session.update,
            'Task',
            publish['sg_link']['id'],
            {
                'sg_status_list': 'rev',
                'sg_latest_version': version,
            },
        ))

        # Set the latest version on the entity.
        entity = publish['sg_link'].fetch('entity')
        if entity['type'] in ('Asset', 'Shot'):
            futures.append(executor.submit(
                sgfs.session.update,
                entity['type'],
                entity['id'],
                {'sg_latest_version': version},
            ))

        # Allow them to raise if they must.
        for future in futures:
            future.result()

    return version
def promote_publish(publish, **kwargs):
    """Promote a PublishEvent into a new Version entity for review.

    :param publish: The ``PublishEvent`` sgsession entity to promote.
    :param kwargs: Overrides merged into the Version fields before creation.
    :return: The created ``Version`` entity.
    """
    # Prime everything we read below in one round-trip.
    publish.fetch((
        'code', 'sg_version', 'created_by', 'description', 'sg_link',
        'sg_link.Task.entity', 'sg_path_to_frames', 'sg_path_to_movie',
        'sg_qt', 'project',
    ))
    fields = {
        'code': '%s_v%04d' % (publish['code'], publish['sg_version']),
        'created_by': publish['created_by'],
        'description': publish['description'],
        'entity': publish['sg_link']['entity'],
        'project': publish['project'],
        'sg_path_to_frames': publish['sg_path_to_frames'],
        'sg_path_to_movie': publish['sg_path_to_movie'],
        'sg_publish': publish,
        'sg_qt': publish['sg_qt'],
        'sg_task': publish['sg_link'],
        'user': publish['created_by'],  # Artist.

        # Just because the old "Submit Version" tool had these.
        'sg_frames_aspect_ratio': 1.0,
        'sg_movie_aspect_ratio': 1.0,

        # I should be able to do this as a very deep fetch.
        'sg_department': publish['sg_link']['step'].fetch('code') or 'Daily',
    }

    # Look up Maya frame information from the tag.
    sgfs = SGFS(session=publish.session)
    tags = sgfs.get_directory_entity_tags(sgfs.path_for_entity(publish))
    if tags and 'maya' in tags[0]:
        min_time = tags[0]['maya']['min_time']
        max_time = tags[0]['maya']['max_time']
        fields.update({
            'sg_first_frame': int(min_time),
            'sg_last_frame': int(max_time),
            'frame_count': int(max_time - min_time + 1),
        })

    # Caller overrides win over everything computed above.
    fields.update(kwargs)

    # Create the new version.
    version = sgfs.session.create('Version', fields)

    # The remaining Shotgun writes are independent; fan them out.
    futures = []
    with ThreadPoolExecutor(4) as executor:

        # Share thumbnails.
        futures.append(executor.submit(
            sgfs.session.share_thumbnail,
            entities=[version.minimal],
            source_entity=publish.minimal,
        ))

        # Set the status/version on the task.
        futures.append(executor.submit(
            sgfs.session.update,
            'Task',
            publish['sg_link']['id'],
            {
                'sg_status_list': 'rev',
                'sg_latest_version': version,
            },
        ))

        # Set the latest version on the entity.
        entity = publish['sg_link'].fetch('entity')
        if entity['type'] in ('Asset', 'Shot'):
            futures.append(executor.submit(
                sgfs.session.update,
                entity['type'],
                entity['id'],
                {'sg_latest_version': version},
            ))

        # BUGFIX: the Future objects were previously discarded, so any
        # exception raised inside the submitted Shotgun calls was silently
        # swallowed. Resolve them so failures propagate to the caller.
        for future in futures:
            future.result()

    return version
def __init__(self, link=None, type=None, name=None, version=None,
    parent=None, directory=None, sgfs=None, template=None, **kwargs
):
    """Create a new publish and register its stub PublishEvent in Shotgun.

    :param link: Entity the publish attaches to (``sg_link``); may come
        from ``template`` instead.
    :param type: The publish type (``sg_type``); may come from ``template``.
    :param name: The publish code; no spaces or special characters.
    :param version: Explicit version number; auto-detected (async) when None.
    :param parent: Stored as-is on the publisher.
    :param directory: Force the publish directory instead of deriving a
        unique one from the ``<type>_publish`` template.
    :param sgfs: An existing SGFS; otherwise built from the template/link
        session, or fresh.
    :param template: Existing publish entity to copy defaults from.
    :param kwargs: Extra publish attributes (``description``,
        ``thumbnail_path``, ``review_version_fields``, etc.).
    :raises ValueError: If link/type/name are missing, the name contains
        invalid characters, or the chosen directory is already tagged.
    :raises RuntimeError: If the link entity has been retired in Shotgun.
    """
    # Bind an SGFS to whichever session we already have at hand.
    if not sgfs:
        if isinstance(template, Entity):
            sgfs = SGFS(session=template.session)
        elif isinstance(link, Entity):
            sgfs = SGFS(session=link.session)
        else:
            sgfs = SGFS()
    self.sgfs = sgfs

    # Pull defaults for anything not explicitly given from the template.
    if template:
        template = sgfs.session.merge(template)
        to_fetch = ['sg_link', 'sg_type', 'code', 'sg_version']
        to_fetch.extend(_kwarg_to_field.itervalues())
        template.fetch(to_fetch)
        tpl_link, tpl_type, tpl_name, tpl_version = template.get(('sg_link', 'sg_type', 'code', 'sg_version'))
        link = link or tpl_link
        type = type or tpl_type
        name = name or tpl_name
        version = version or tpl_version
        kwargs.setdefault('source_publish', template)
        kwargs.setdefault('source_publishes', [template])
        for key, field in _kwarg_to_field.iteritems():
            kwargs.setdefault(key, template.get(field))
        if not kwargs.get('thumbnail_path'):
            # We certainly jump through a lot of hoops to do this...
            # Perhaps this should be sgfs.get_entity_tags(entity)
            publish_path = sgfs.path_for_entity(template)
            if publish_path:
                tags = sgfs.get_directory_entity_tags(publish_path)
                tags = [tag for tag in tags if tag['entity'] == template]
                if tags:
                    meta = tags[0].get('sgpublish', {})
                    thumbnail = meta.get('thumbnail')
                    if thumbnail:
                        # Tagged thumbnail paths are relative to the publish
                        # directory.
                        kwargs['thumbnail_path'] = os.path.join(publish_path, thumbnail)

    if not (link and type and name):
        raise ValueError('requires link, type, and name')

    self._type = str(type)
    self._link = self.sgfs.session.merge(link)
    self._name = str(name)
    self._parent = parent

    if re.search(r'[^\w-]', self._name):
        raise ValueError('name cannot have spaces or special characters', self._name)

    # Get information about the promotion for review.
    self._review_version_entity = None
    self._review_version_fields = kwargs.pop('review_version_fields', None)

    # To only allow us to commit once.
    self._committed = False

    # Will be set into the tag.
    self.metadata = {}

    # Files to copy on commit; (src_path, dst_path)
    self._files = []

    # Set attributes from kwargs. NOTE: the loop variable shadows the `name`
    # parameter, which is harmless because self._name was captured above and
    # `name` is not read again afterwards.
    for name in (
        'created_by', 'description', 'frames_path', 'movie_path',
        'movie_url', 'path', 'source_publish', 'source_publishes',
        'thumbnail_path', 'trigger_event', 'extra_fields',
    ):
        setattr(self, name, kwargs.pop(name, None))

    if kwargs:
        raise TypeError('too many kwargs: %r' % sorted(kwargs))

    # Required for normalizing.
    self._directory = None

    # Get everything into the right type before sending it to Shotgun.
    self._normalize_attributes()

    # Prep for async processes. We can do a lot of "frivolous" Shotgun
    # queries at the same time since we must do at least one.
    executor = concurrent.futures.ThreadPoolExecutor(8)
    futures = []

    # Figure out the version number (async).
    if version is None:
        futures.append(executor.submit(self._set_automatic_version))
    else:
        self._version = int(version)

    # Grab all data on the link (assuming that is all that is used when
    # creating publish templates).
    futures.append(executor.submit(self.link.fetch_core))

    # Create the review version stub (async).
    if self._review_version_fields is not None:
        futures.append(executor.submit(self._get_review_version))

    # First stage of the publish: create an "empty" PublishEvent.
    initial_data = {
        'code': self.name,
        'created_by': self.created_by,
        'description': self.description,
        'project': self.link.project(),
        'sg_link': self.link,
        'sg_path_to_frames': self.frames_path,
        'sg_path_to_movie': self.movie_path,
        'sg_qt': self.movie_url,
        'sg_source_publish': self.source_publish or None,  # singular
        'sg_source_publishes': self.source_publishes or [],  # multiple
        'sg_trigger_event_id': self.trigger_event['id'] if self.trigger_event else None,
        'sg_type': self.type,
        'sg_version': 0,  # Signifies that this is "empty".
    }
    # NOTE(review): presumably _normalize_attributes coerces extra_fields to
    # a dict; update(None) would raise here — confirm.
    initial_data.update(self.extra_fields)
    try:
        self.entity = self.sgfs.session.create('PublishEvent', initial_data)
    except ShotgunFault:
        # A retired link is the common cause of a create failure; give a
        # clearer error in that case, otherwise re-raise untouched.
        if not self.link.exists():
            raise RuntimeError('%s %d (%r) has been retired' % (link['type'], link['id'], link.get('name')))
        else:
            raise

    # Lets have our async processes catch up.
    for future in futures:
        future.result()

    # Manually forced directory.
    if directory is not None:
        self._directory_supplied = True
        # Make it if it doesn't already exist, but don't care if it does.
        self._directory = os.path.abspath(directory)
    else:
        self._directory_supplied = False
        # Find a unique name using the template result as a base.
        base_path = self.sgfs.path_from_template(link, '%s_publish' % type, dict(
            publish=self,  # For b/c.
            publisher=self,
            PublishEvent=self.entity,
            self=self.entity,  # To mimick Shotgun templates.
        ))
        # Probe base_path, base_path_1, base_path_2, ... until a makedirs
        # succeeds; EAFP avoids a check/create race between publishers.
        unique_iter = ('%s_%d' % (base_path, i) for i in itertools.count(1))
        for path in itertools.chain([base_path], unique_iter):
            try:
                os.makedirs(path)
            except OSError as e:
                if e.errno != 17:  # File exists (errno.EEXIST)
                    raise
            else:
                self._directory = path
                break

    # Make the directory so that tools which want to manually copy files
    # don't have to.
    utils.makedirs(self._directory)

    # If the directory is tagged with existing entities, then we cannot
    # proceed. This allows one to retire a publish and then overwrite it.
    tags = self.sgfs.get_directory_entity_tags(self._directory)
    if any(tag['entity'].exists() for tag in tags):
        raise ValueError('directory is already tagged: %r' % self._directory)
def __init__(self, link=None, type=None, name=None, version=None,
             parent=None, directory=None, sgfs=None, template=None,
             makedirs=True, defer_entities=False, **kwargs):
    """Create a new publish and merge its stub PublishEvent locally.

    :param link: Entity the publish attaches to (``sg_link``); may come
        from ``template`` instead.
    :param type: The publish type (``sg_type``); may come from ``template``.
    :param name: The publish code; restricted character set.
    :param version: Explicit version number; auto-detected (async) when None.
    :param parent: Stored as-is on the publisher.
    :param directory: Force the publish directory instead of picking a
        unique one.
    :param sgfs: An existing SGFS; otherwise built from the template/link
        session, or fresh.
    :param template: Existing publish entity to copy defaults from.
    :param makedirs: Create the publish directory on disk.
    :param defer_entities: Skip the ``assert_entities`` round-trip for now.
    :param kwargs: Extra publish attributes (``description``,
        ``thumbnail_path``, ``lock_permissions``, etc.).
    :raises ValueError: If link/type/name are missing, the name contains
        invalid characters, or the chosen directory is already tagged.
    """
    # Bind an SGFS to whichever session we already have at hand.
    if not sgfs:
        if isinstance(template, Entity):
            sgfs = SGFS(session=template.session)
        elif isinstance(link, Entity):
            sgfs = SGFS(session=link.session)
        else:
            sgfs = SGFS()
    self.sgfs = sgfs

    # Pull defaults for anything not explicitly given from the template.
    if template:
        template = sgfs.session.merge(template)
        to_fetch = ['sg_link', 'sg_type', 'code', 'sg_version']
        to_fetch.extend(_kwarg_to_field.itervalues())
        template.fetch(to_fetch)
        tpl_link, tpl_type, tpl_name, tpl_version = template.get(
            ('sg_link', 'sg_type', 'code', 'sg_version'))
        link = link or tpl_link
        type = type or tpl_type
        name = name or tpl_name
        version = version or tpl_version
        kwargs.setdefault('source_publish', template)
        kwargs.setdefault('source_publishes', [template])
        for key, field in _kwarg_to_field.iteritems():
            kwargs.setdefault(key, template.get(field))
        if not kwargs.get('thumbnail_path'):
            # We certainly jump through a lot of hoops to do this...
            # Perhaps this should be sgfs.get_entity_tags(entity)
            publish_path = sgfs.path_for_entity(template)
            if publish_path:
                tags = sgfs.get_directory_entity_tags(publish_path)
                tags = [tag for tag in tags if tag['entity'] == template]
                if tags:
                    meta = tags[0].get('sgpublish', {})
                    thumbnail = meta.get('thumbnail')
                    if thumbnail:
                        # Tagged thumbnail paths are relative to the
                        # publish directory.
                        kwargs['thumbnail_path'] = os.path.join(
                            publish_path, thumbnail)

    if not (link and type and name):
        raise ValueError('requires link, type, and name')

    self._type = str(type)
    self._link = self.sgfs.session.merge(link)
    self._name = str(name)
    self._parent = parent

    invalid = re.sub(r'[\w\.,:;?&%=/-]', '', self._name)
    if invalid:
        raise ValueError(
            'Publish name has invalid characters {!r}.'.format(''.join(
                sorted(set(invalid)))))

    # Get information about the promotion for review.
    self._review_version_entity = None
    self._review_version_fields = kwargs.pop('review_version_fields', None)

    # To only allow us to commit once.
    self._committed = False

    # Will be set into the tag.
    self.metadata = {}

    # Files to copy on commit; (src_path, dst_path)
    self._files = []

    # Default; may be overridden by the kwargs loop below.
    self.lock_permissions = True

    # Set attributes from kwargs. NOTE: the loop variable shadows the `name`
    # parameter, which is harmless because self._name was captured above and
    # `name` is not read again afterwards.
    # BUGFIX: previously this popped with a bare None default, which
    # clobbered the self.lock_permissions = True default whenever the caller
    # did not pass lock_permissions. Fall back to any already-set attribute
    # instead; identical behaviour for every other (unset) attribute.
    for name in (
        'created_by', 'description', 'extra_fields', 'frames_path',
        'lock_permissions', 'movie_path', 'movie_url', 'path',
        'source_publish', 'source_publishes', 'thumbnail_path',
        'trigger_event',
    ):
        setattr(self, name, kwargs.pop(name, getattr(self, name, None)))

    if kwargs:
        raise TypeError('too many kwargs: %r' % sorted(kwargs))

    # Required for normalizing.
    self._directory = None

    # Get everything into the right type before sending it to Shotgun.
    self._normalize_attributes()

    # Prep for async processes. We can do a lot of "frivolous" Shotgun
    # queries at the same time since we must do at least one.
    executor = concurrent.futures.ThreadPoolExecutor(8)
    futures = []

    # Figure out the version number (async).
    if version is None:
        futures.append(executor.submit(self._set_automatic_version))
    else:
        self._version = int(version)

    # Grab all data on the link (assuming that is all that is used when
    # creating publish templates).
    futures.append(executor.submit(self.link.fetch_core))

    # First stage of the publish: create an "empty" PublishEvent.
    initial_data = {
        'code': self.name,
        'created_by': self.created_by,
        'description': self.description,
        'project': self.link.project(),
        'sg_link': self.link,
        'sg_path_to_frames': self.frames_path,
        'sg_path_to_movie': self.movie_path,
        'sg_qt': self.movie_url,
        'sg_source_publish': self.source_publish or None,  # singular
        'sg_source_publishes': self.source_publishes or [],  # multiple
        'sg_trigger_event_id': self.trigger_event['id'] if self.trigger_event else None,
        'sg_type': self.type,
        'sg_version': 0,  # Signifies that this is "empty".
    }
    initial_data.update(self.extra_fields)
    initial_data['type'] = 'PublishEvent'
    initial_data['id'] = 0  # HACK! Although sgsession supports this.
    # Merge locally only; the real Shotgun entity is asserted below (or
    # later, when defer_entities is set).
    self.entity = self.sgfs.session.merge(initial_data)

    if not defer_entities:
        future = self.assert_entities(_executor=executor)
        if future is not None:
            futures.append(future)

    # Lets have our async processes catch up.
    for future in futures:
        future.result()

    # Manually forced directory.
    if directory is not None:
        self._directory_supplied = True
        self._directory = os.path.abspath(directory)
        # Make the directory so that tools which want to manually copy files
        # don't have to.
        if makedirs:
            utils.makedirs(self._directory)
    else:
        self._directory_supplied = False
        self._directory = self.pick_unique_directory(makedirs=makedirs)

    # If the directory is tagged with existing entities, then we cannot
    # proceed. This allows one to retire a publish and then overwrite it.
    tags = self.sgfs.get_directory_entity_tags(self._directory)
    if any(tag['entity'].exists() for tag in tags):
        raise ValueError('directory is already tagged: %r' % self._directory)