def __init__(self, **kwargs):
    """Initialize scene-name state from keyword arguments.

    Accepts workspace/filename paths to parse, explicit field overrides
    (detail, entity_name, version, ...), and warning/error callbacks.
    Raises TypeError on unrecognized kwargs.
    """
    # Reasonable defaults.
    self.detail = ''
    self.entity_name = ''
    self.entity_type = None
    self.extension = ''
    self.revision = 1
    self.step_name = kwargs.get('step_name')
    self.sub_directory = ''
    self.directory = 'scenes'
    self.version = 0

    self.sep = ','
    self._all_seps_class = '[%s]' % re.escape('-_,.')
    # Strips leading/trailing separator characters from a token.
    self._strip_seps_re = re.compile(
        r'(^%s+)|(%s+$)' % (self._all_seps_class, self._all_seps_class))

    self._sgfs = SGFS()

    # Callbacks. Passing error=False routes errors through the warning
    # callback instead.
    self.warning = kwargs.pop('warning', self.warning)
    self.error = kwargs.pop('error', self.error)
    if self.error is False:
        self.error = self.warning

    self._step_names = []

    # Parse given paths.
    self.workspace = kwargs.pop('workspace', None)
    if self.workspace is not None:
        self._parse_workspace(self.workspace)

    self.filename = kwargs.pop('filename', None)
    if self.filename is not None:
        self._parse_filename(self.filename)

    # Set kwargs; explicit values override anything parsed from paths.
    self.detail = kwargs.pop('detail', self.detail)
    self.entity_name = kwargs.pop('entity_name', self.entity_name)
    self.entity_type = kwargs.pop('entity_type', self.entity_type)
    self.extension = kwargs.pop('extension', self.extension)
    self.revision = int(kwargs.pop('revision', self.revision))
    self.step_name = kwargs.pop('step_name', self.step_name)
    # "scenes_name" one is for backwards compatibility.
    self.directory = kwargs.pop('directory', kwargs.pop('scenes_name', self.directory))
    self.sub_directory = kwargs.pop('sub_directory', self.sub_directory)
    self.version = int(kwargs.pop('version', self.version))

    if kwargs:
        # Fixed typo: "recieved" -> "received".
        raise TypeError(('%s received too many kwargs: ' % self.__class__.__name__)
                        + ', '.join(kwargs))
def assert_workspace():
    """Verify the open Maya scene lives under a Shotgun Task, and point the
    workspace at that task's directory."""
    current_scene = cmds.file(q=True, sceneName=True)
    if not current_scene:
        raise ValueError("Scene is not saved.")

    sgfs = SGFS()
    found_tasks = sgfs.entities_from_path(current_scene, ['Task'])
    if not found_tasks:
        raise ValueError("Scene is not in a task.")

    task_dir = sgfs.path_for_entity(found_tasks[0])
    # presumably sets the Maya workspace to the task directory — confirm
    workspace_path(task_dir)
def open_workspace():
    """Reveal the current workspace in the OS file browser, preferring the
    directory of any SGFS-tagged Task/PublishEvent found there."""
    target = workspace_path()

    sgfs = SGFS()
    tagged = sgfs.entities_from_path(workspace_path(), ['Task', 'PublishEvent'])
    if tagged:
        target = sgfs.path_for_entity(tagged[0]) or target

    opener = 'open' if sys.platform == 'darwin' else 'xdg-open'
    call([opener, target])
def _run(dry_run, entity_type, selected_ids, **kwargs):
    """Preview or create the SGFS folder structure for the selected entities."""
    sgfs = SGFS()
    entities = sgfs.session.merge(
        [dict(type=entity_type, id=eid) for eid in selected_ids])

    # (sic) "heirarchy" is the API's own spelling.
    hierarchy = sgfs.session.fetch_heirarchy(entities)
    sgfs.session.fetch_core(hierarchy)

    commands = sgfs.create_structure(entities, dry_run=dry_run)
    notify(
        title='Preview Folders' if dry_run else 'Creating Folders',
        message='\n'.join(commands) or 'Everything is up to date.',
    )
def _export(self, kwargs):
    """Run safety checks, perform the publish, and log time against the task.

    :param dict kwargs: forwarded to the exporter as ``export_kwargs``.
    :return: the publisher object produced by the exporter.
    :raises PublishSafetyError: if :meth:`safety_check` fails.
    :raises ValueError: if no task is selected and none is tagged on the
        exporter's workspace.
    """
    if not self.safety_check(**kwargs):
        raise PublishSafetyError()

    # Prefer the task selected in the UI; fall back to the SGFS-tagged
    # Task of the exporter's workspace.
    task_data = self._task_combo.currentData()
    task = task_data.get('task')
    if not task:
        sgfs = SGFS()
        tasks = sgfs.entities_from_path(self._exporter.workspace, 'Task')
        if not tasks:
            raise ValueError('Could not find SGFS tagged entities')
        task = tasks[0]

    # Existing publish stream (if any) to parent this publish under.
    stream_data = self._name_combo.currentData()
    parent = stream_data.get('publish')

    # Do the promotion.
    if self._promote_checkbox.isChecked():
        review_version_fields = self._exporter.fields_for_review_version(**kwargs)
    else:
        review_version_fields = None

    publisher = self._exporter.publish(task,
        name=self.name(),
        description=self.description(),
        version=self.version(),
        parent=parent,
        thumbnail_path=self.thumbnail_path(),
        frames_path=self.frames_path(),
        movie_path=self.movie_path(),
        review_version_fields=review_version_fields,
        export_kwargs=kwargs,
    )

    # Create the timelog.
    minutes = self._timelog_spinbox.value()
    if minutes:
        publisher.sgfs.session.create('TimeLog', {
            'project': publisher.entity.project(),
            'entity': publisher.link,
            'user': publisher.sgfs.session.guess_user(),
            'duration': minutes,  # assumed to be minutes — TODO confirm units
            'description': '%s_v%04d' % (publisher.name, publisher.version),
            'date': datetime.datetime.utcnow().date(),
        })

    return publisher
def __init__(self, root_state=None, sgfs=None, shotgun=None, session=None):
    """Set up the model: SGFS connection, timeouts, and worker pool."""
    super(Model, self).__init__()

    self._root_state = root_state or {}
    self._root = None

    self.sgfs = sgfs or SGFS(shotgun=shotgun, session=session)

    # Force a more reasonable timeout. Note that this does change the
    # global parameters on the shotgun object.
    sg = self.sgfs.session.shotgun
    sg.close()
    sg.config.timeout_secs = 5
    sg.config.max_rpc_attempts = 1

    self.threadpool = ThreadPool(8, lifo=True)
    self._node_types = []
def extract_publisher_kwargs(args, sgfs=None, delete=True):
    """Pull ``publisher_*`` attributes off an argparse namespace.

    :param args: an argparse ``Namespace`` (or any object with ``__dict__``).
    :param sgfs: an SGFS to resolve specs with; one is constructed if needed.
    :param bool delete: remove the consumed attributes from ``args``.
    :return: dict of kwargs with the ``publisher_`` prefix stripped; ``link``
        and ``template`` values are resolved via :func:`parse_spec`.
    """
    prefix = 'publisher_'
    kwargs = {}
    # Snapshot the items: delattr mutates args.__dict__, which would break
    # iteration over a live view (RuntimeError on Python 3).
    for key, value in list(args.__dict__.items()):
        if key.startswith(prefix):
            if value is not None:
                kwargs[key[len(prefix):]] = value
            if delete:
                delattr(args, key)

    sgfs = sgfs or SGFS()
    if 'link' in kwargs:
        kwargs['link'] = parse_spec(sgfs, kwargs['link'])
    if 'template' in kwargs:
        kwargs['template'] = parse_spec(sgfs, kwargs['template'],
                                        entity_types=['PublishEvent'])
    return kwargs
def run(entity_type, selected_ids, **kwargs):
    """Open the processing dialog for the first selected maya_scene publish."""
    qt_app = QtGui.QApplication([])
    QtGui.QApplication.setWindowIcon(ui_utils.icon('fatcow/brick_go', as_icon=True))

    sgfs = SGFS()
    publish = sgfs.session.merge({'type': entity_type, 'id': selected_ids[0]})
    task, type_, _, _ = publish.fetch(
        ('sg_link', 'sg_type', 'sg_path', 'sg_version'), force=True)

    if type_ != 'maya_scene':
        QtGui.QMessageBox.critical(None, 'Unknown Publish Type',
                                   'Cannot process publishes of type %r.' % type_)
        exit(1)

    # For the picker.
    task['step'].fetch_core()

    win = Dialog(publish)
    win.show()
    win.raise_()

    exit(qt_app.exec_())
def update(self, entity):
    """Refresh the detail labels and thumbnail for the given publish entity."""
    # TODO: Do this async.

    # We're also priming the sg_default_reference_namespace (assuming
    # it exists).
    by, at, desc, _ = entity.fetch((
        'created_by.HumanUser.name',
        'created_at',
        'description',
        'sg_link.Task.entity.Asset.sg_default_reference_namespace',
    ), force=True)

    self._created_by_label.setText(str(by))
    self._created_at_label.setText(str(at.strftime('%y-%m-%d %I:%M %p')))
    self._description_label.setText(str(desc))

    sgfs = SGFS(session=entity.session)
    path = sgfs.path_for_entity(entity)
    tags = sgfs.get_directory_entity_tags(path)
    tags = [t for t in tags if t['entity'] is entity]
    # The directory may not be tagged for this entity; fall back to an
    # empty tag rather than crashing on tags[0] (the `if tags` guard
    # below shows the empty case was anticipated).
    tag = tags[0] if tags else {}

    maya_data = tag.get('maya', {})
    time_range = '%s - %s' % (maya_data.get('min_time'), maya_data.get('max_time'))
    self._timeRangeLabel.setText(time_range)

    if entity not in self._pixmaps:
        thumbnail_path = tag.get('sgpublish', {}).get('thumbnail') if tags else None
        thumbnail_path = thumbnail_path or os.path.join(path, '.sgfs.thumbnail.jpg')
        if os.path.exists(thumbnail_path):
            pixmap = Q.Pixmap(thumbnail_path)
        else:
            # Fall back to the bundled placeholder art.
            path = os.path.abspath(os.path.join(
                __file__, '..', '..', '..', '..', 'art', 'no-thumbnail.png'))
            pixmap = Q.Pixmap(path)
        self._pixmaps[entity] = pixmap.scaledToWidth(165, Q.SmoothTransformation)

    self._thumbnail.setPixmap(self._pixmaps[entity])
    self._thumbnail.setFixedSize(self._pixmaps[entity].size())
def __init__(self, exporter):
    """Build the widget, deriving a default stream basename from the exporter.

    :param exporter: exporter whose ``filename_hint`` and ``workspace``
        seed the default name.
    """
    super(Widget, self).__init__()

    self._exporter = exporter
    self._existing_streams = set()

    # Normalize the scene file name into a bare token: drop the extension,
    # collapse punctuation to underscores, strip revision/version suffixes
    # (e.g. "_v003"/"_r02"), and trim separator characters from the ends.
    basename = os.path.basename(exporter.filename_hint)
    basename = os.path.splitext(basename)[0]
    basename = re.sub(r'[^\w-]+', '_', basename)
    basename = re.sub(r'_*[rv]\d+', '', basename)
    basename = _strip_seps(basename)

    # TODO: Strip entity_name and step_name with SGFS scene_name functions.
    if self._exporter.workspace:
        sgfs = SGFS()
        tasks = sgfs.entities_from_path(self._exporter.workspace, ['Task'])
        if tasks:
            task = tasks[0]
            task_name, entity, step = task.fetch(('content', 'entity', 'step'))
            # Remove leading entity-code / step-name prefixes, if present.
            if entity:
                basename = _strip_prefix(basename, entity.fetch('code'))
            if step:
                basename = _strip_prefix(basename, step.fetch('short_name'))

            # Default to something reasonable.
            basename = (
                basename
                or task_name
                or (step.get('short_name') if step else None)
                or (entity.get('code') if entity else None)
                or ''
            )

    self._basename = basename
    self._setup_ui()

    # First screenshot.
    self.take_full_screenshot()
def _populate_references(self):
    """Fill the tree with the publish status of scene references and geocaches."""
    sgfs = SGFS()

    ref_paths = cmds.file(q=True, reference=True)
    for status in check.check_paths(ref_paths, only_published=False):
        row = ReferenceItem(sgfs, status)
        self._tree.addTopLevelItem(row)
        row.attach_to_tree(self._tree)

    cache_paths = geocache_utils.get_existing_cache_mappings().keys()
    for status in check.check_paths(cache_paths, only_published=True):
        row = GeocacheItem(sgfs, status)
        self._tree.addTopLevelItem(row)
        row.attach_to_tree(self._tree)

    # Size each column to its contents, plus a little padding.
    for col in range(7):
        self._tree.resizeColumnToContents(col)
        self._tree.setColumnWidth(col, self._tree.columnWidth(col) + 10)
def run(entity_type, selected_ids, **kwargs): sgfs = SGFS() paths = [] for id_ in selected_ids: entity = sgfs.session.merge(dict(type=entity_type, id=id_)) path = sgfs.path_for_entity(entity) if path: print entity, '->', repr(path) paths.append(path) if not paths: notify('No paths for %s %s' % (entity_type, selected_ids)) return notify('Opening:\n' + '\n'.join(sorted(paths))) for path in set(paths): if sys.platform.startswith('darwin'): call(['open', path]) else: call(['xdg-open', path])
def __init__(self, root_state=None, sgfs=None, shotgun=None, session=None):
    """Initialize the model.

    :param dict root_state: restored state for the root node.
    :param sgfs: an existing SGFS to reuse; otherwise one is built from
        ``shotgun``/``session``.
    """
    super(Model, self).__init__()

    self._root_state = root_state or {}
    self._root = None

    self.sgfs = sgfs or SGFS(shotgun=shotgun, session=session)

    # Force a more reasonable timeout. Note that this does change the
    # global parameters on the shotgun object.
    self.sgfs.session.shotgun.close()
    self.sgfs.session.shotgun.config.timeout_secs = 5
    self.sgfs.session.shotgun.config.max_rpc_attempts = 1

    self.threadpool = ThreadPool(8, lifo=True)

    self._node_types = []

    # See comment above.
    # Rebind layoutChanged/layoutAboutToBeChanged to our "_legacy_"
    # signals, forwarding their emissions to the originals.
    for name in 'layoutChanged', 'layoutAboutToBeChanged':
        old = getattr(self, name)
        new = getattr(self, '_legacy_' + name)
        setattr(self, name, new)
        new.connect(old.emit)
def check_paths(paths, only_published=True):
    """Look up the publish status of each path.

    Returns a list of ReferenceStatus tuples describing, per path, which
    publish it maps to and whether that publish is the latest in its stream.
    """
    sgfs = SGFS()
    statuses = []

    for path in paths:

        publishes = sgfs.entities_from_path(path, 'PublishEvent')
        if only_published and not publishes:
            continue
        publish = publishes[0] if publishes else None

        if publish:
            # Every publish in the same stream (same link, name, and type).
            siblings = sgfs.session.find('PublishEvent', [
                ('sg_link', 'is', publish['sg_link']),
                ('code', 'is', publish['code']),
                ('sg_type', 'is', publish['sg_type']),
            ], ['sg_path'])
            # The sorted order of `siblings` is visible to callers via `all`.
            siblings.sort(key=lambda sib: sib['sg_version'])
            latest = max(siblings, key=lambda sib: sib['sg_version'])
        else:
            siblings = []
            latest = None

        statuses.append(ReferenceStatus(
            path=path,
            used=publish,
            latest=latest,
            is_latest=publish is latest if publish else False,
            all=siblings,
        ))

    return statuses
def SGFS(self, **kwargs):
    """Construct an SGFS rooted in this sandbox, reusing our session by default."""
    for key, value in (('root', self.sandbox), ('session', self.session)):
        kwargs.setdefault(key, value)
    return SGFS(**kwargs)
def main(argv=None):
    """Command-line camera exporter.

    Loads a Maya scene in standalone mode, selects a camera (explicitly or
    the first non-default one), then either publishes the camera export to
    Shotgun (``--publish-link``) or writes it to a directory.
    """
    import argparse
    import logging

    log = logging.getLogger(__name__)

    parser = argparse.ArgumentParser()
    parser.add_argument('--world', action='store_true')
    parser.add_argument('--no-nuke', action='store_true')
    parser.add_argument('-s', '--start', type=int)
    parser.add_argument('-e', '--end', type=int)
    parser.add_argument('-d', '--out-dir')
    parser.add_argument('--publish-link')
    parser.add_argument('--publish-name')
    parser.add_argument('--publish-thumbnail')
    parser.add_argument('-l', '--list-cameras', action='store_true')
    parser.add_argument('scene')
    parser.add_argument('camera', nargs='?')
    args = parser.parse_args(argv)

    log.info('initializing Maya')
    import maya.standalone
    maya.standalone.initialize()

    log.info('loading file')
    cmds.file(args.scene, open=True)
    log.info('done loading file')

    cameras = cmds.ls(args.camera or '*', type='camera', long=True) or ()
    if args.list_cameras:
        print '\n'.join(cameras)
        return

    if args.camera:
        if not cameras:
            log.error('no cameras matching %s' % args.camera)
            exit(1)
        camera = cameras[0]
        if len(cameras) > 1:
            log.warning('more than one camera matching %s; taking %s'
                        % (args.camera, camera))
    else:
        # No explicit camera: skip Maya's built-in default cameras.
        cameras = [
            c for c in cameras
            if c.split('|')[1] not in ('top', 'side', 'persp', 'front')
        ]
        if not cameras:
            log.error('no non-default cameras')
            exit(1)
        camera = cameras[0]
        if len(cameras) > 1:
            log.warning('more than one non-default camera; taking %s' % camera)

    log.info('will export %s' % camera)

    name = args.publish_name or os.path.splitext(os.path.basename(args.scene))[0]
    exporter = CameraExporter()

    if args.publish_link:
        link = parse_spec(SGFS(), args.publish_link)
        print link
        # TODO: take a screenshot (on OS X) via screenshot
        thumbnail_path = args.publish_thumbnail
        exporter.publish(link, name,
                         dict(camera=camera, bake_to_world_space=args.world),
                         thumbnail_path=thumbnail_path)
    else:
        directory = args.out_dir or os.path.join(
            args.scene, '..', 'data', 'camera', name)
        exporter.export(directory=directory, path=directory,
                        camera=camera, bake_to_world_space=args.world)

    log.info('DONE')
def __init__(self, publish, sgfs=None):
    """Create the dialog for a given publish entity, reusing its session
    when no SGFS is supplied."""
    super(Dialog, self).__init__()
    self._sgfs = sgfs if sgfs else SGFS(session=publish.session)
    self._publish = publish
    self._setup_ui()
def republish(entity_type, selected_ids, type_name, type_code):
    """Submit Qube jobs that republish the selected publishes as another type.

    :param str entity_type: must be ``'PublishEvent'``.
    :param selected_ids: ids of the publishes to republish.
    :param str type_name: human-readable name of the target type (for UI).
    :param str type_code: target ``sg_type`` (``maya_scene``,
        ``maya_geocache``, or ``maya_camera``).
    """
    # no fancy UI needed here
    assert entity_type == 'PublishEvent'
    assert selected_ids

    title = 'Republish as %s' % type_name
    progress(title=title, message='Fetching entities...')

    sgfs = SGFS()
    entities = [
        sgfs.session.merge(dict(type='PublishEvent', id=id_))
        for id_ in selected_ids
    ]
    sgfs.session.fetch(
        entities,
        ('code', 'sg_link', 'sg_link.Task.entity', 'sg_type', 'sg_path'))

    futures = []
    errors = []
    executor = Executor()

    for i, publish in enumerate(entities):

        # Nothing to do if it is already the target type.
        if publish['sg_type'] == type_code:
            errors.append('Publish %d is already a %s.'
                          % (publish['id'], type_code))
            continue

        link = publish['sg_link']
        owner = publish.get('sg_link.Task.entity')
        owner_name = owner.name if owner else str(link)
        future_name = 'Republish %s as %s - %s:%s' % (
            publish['sg_type'], type_code, owner_name, link.name)

        progress(title=title,
                 message='Submitting %s/%s to Qube:\n<em>"%s"</em>'
                 % (i + 1, len(entities), future_name))

        maya_scene = get_maya_scene(publish)

        # Reuse a thumbnail sitting next to the scene, if present.
        thumbnail = os.path.join(os.path.dirname(maya_scene), 'thumbnail.jpg')
        thumbnail = thumbnail if os.path.exists(thumbnail) else ''

        if type_code == 'maya_scene':
            future = executor.submit_ext(
                'sgpublish.commands.publish:main',
                args=[('--link', '%(type)s:%(id)d' % publish['sg_link'],
                       '--code', publish['code'],
                       '--type', type_code,
                       '--thumbnail', thumbnail,
                       maya_scene)],
                name=future_name,
                priority=8000,
            )
            futures.append(future)

        elif type_code == 'maya_geocache':
            future = executor.submit_ext(
                'mayatools.geocache.exporter:main',
                args=[(
                    '--publish-link', '%(type)s:%(id)d' % publish['sg_link'],
                    '--publish-name', publish['code'],
                    '--publish-thumbnail', thumbnail,
                    maya_scene,
                )],
                name=future_name,
                interpreter='maya2014_python',
                priority=8000,
            )
            futures.append(future)

        elif type_code == 'maya_camera':
            future = executor.submit_ext(
                'mayatools.camera.exporter:main',
                args=[(
                    '--publish-link', '%(type)s:%(id)d' % publish['sg_link'],
                    '--publish-name', publish['code'],
                    '--publish-thumbnail', thumbnail,
                    maya_scene,
                )],
                name=future_name,
                interpreter='maya2014_python',
                priority=8000,
            )
            futures.append(future)

        else:
            errors.append('Unknown publish type %r.' % type_code)

    # Summarize submissions and errors in one notification.
    messages = []
    if futures:
        messages.append('Submitted to Qube as %s'
                        % ', '.join(str(f.job_id) for f in futures))
    if errors:
        messages.extend('<span style="color:red">%s</span>' % e for e in errors)

    notify(title=title, message='\n'.join(messages))
def run_play(entity_type, selected_ids, **kwargs):
    """Open the selected PublishEvents/Versions in RV, with audio if found.

    For each entity, the first playable path among ``path_to_frames``,
    ``path_to_movie``, and ``path`` is used; if none is playable, the user
    is alerted and nothing opens.
    """
    sgfs = SGFS()
    entities = sgfs.session.get(entity_type, selected_ids)

    if entity_type == 'PublishEvent':
        sgfs.session.fetch(entities, ('code', 'sg_type', 'path_to_frames',
                                      'path_to_movie', 'path', 'link.Task.entity'))
    elif entity_type == 'Version':
        sgfs.session.fetch(
            entities, ('code', 'path_to_frames', 'path_to_movie', 'entity'))
    else:
        alert('Cannot play %s entities in RV.' % entity_type)
        return

    chunks = []
    for entity in entities:
        skipped_exts = []
        for path_key in ('path_to_frames', 'path_to_movie', 'path'):
            path = (entity.get(path_key) or '').strip()
            if not path:
                continue
            ext = os.path.splitext(path)[1]
            if ext not in PLAYABLE_EXTS:
                skipped_exts.append(ext)
                continue

            notify('Opening %s in RV...' % path)

            # Convert any %04d into ####
            rv_path = re.sub(r'(?:%0?(\d)[sd])',
                             lambda m: '#' * int(m.group(1)), path)

            # Go looking for audio.
            if entity_type == 'PublishEvent':
                shot = entity['link.Task.entity']
            else:
                shot = entity['entity']
            if shot:
                audio = sgfs.session.find_one('PublishEvent', [
                    ('sg_type', 'is', 'audio'),
                    ('link.Task.entity', 'is', shot),
                ], ['path'])
            else:
                audio = None

            # Open it (optionally with audio).
            if audio:
                chunks.extend(('[', rv_path, audio['path'], ']'))
            else:
                chunks.append(rv_path)
            break

        else:
            # No playable path was found for this entity; explain and bail.
            if skipped_exts:
                alert("""We don't know how to play %s %d ("%s") with %s extensions.""" % (
                    entity['sg_type'] + ' Publish' if entity_type == 'PublishEvent' else entity_type,
                    entity['id'],
                    entity['code'],
                    '/'.join(sorted(skipped_exts))))
            else:
                alert("""%s %d ("%s") has nothing to play.""" % (
                    entity['sg_type'].title() + ' Publish' if entity_type == 'PublishEvent' else entity_type,
                    entity['id'],
                    entity['code'],
                ))
            return

    if chunks:
        # -l -> use lookahead cache
        # -play -> play immediately
        rvlink(['-l'] + chunks)
    else:
        alert("We don't know how to play %s entities." % entity_type)
import os
import re
import difflib
import sys

from maya import cmds, mel

from sgfs import SGFS
from sgfs.ui import product_select
from uitools.qt import Qt, QtCore, QtGui

import sgfs.ui.scene_name.widget as scene_name
import sgpublish.check.maya

from . import utils
from ..attributes import copy_attributes

# Module-level SGFS shared by this tool.
sgfs = SGFS()


def silk(name):
    """Return the absolute path to the named silk icon PNG.

    Fix: ``os`` was used here but never imported, so the first call raised
    NameError; ``import os`` is now in the header above.
    """
    # __file__ = "$KS_TOOLS/key_base/3d/maya/python/geocache/importer.py"
    # icons = "$KS_TOOLS/key_base/art/icons"
    icons = os.path.abspath(
        os.path.join(__file__, '..', '..', 'art', 'icons', 'silk'))
    return os.path.join(icons, name + '.png')


def silk_icon(name, size=16):
    """Return a QIcon for the named silk icon, scaled to ``size`` pixels."""
    icon = QtGui.QIcon(silk(name))
    if size != 16:
        icon = QtGui.QIcon(icon.pixmap(size, size))
    return icon
def republish(entity_type, selected_ids, type_name, type_code):
    """Submit Qube jobs that republish the selected publishes as another type,
    using ``--publish-template`` and running the job as the original user.

    :param str entity_type: must be ``'PublishEvent'``.
    :param selected_ids: ids of the publishes to republish.
    :param str type_name: human-readable name of the target type.
    :param str type_code: target ``sg_type`` code.
    """
    # no fancy UI needed here
    assert entity_type == 'PublishEvent'
    assert selected_ids

    progress('Fetching entities...')
    sgfs = SGFS()
    entities = [
        sgfs.session.merge(dict(type=entity_type, id=id_))
        for id_ in selected_ids
    ]
    sgfs.session.fetch(entities, ('code', 'sg_link', 'sg_link.Task.entity',
                                  'sg_type', 'sg_path',
                                  'created_by.HumanUser.login'))

    futures = []
    errors = []
    executor = Executor()

    for i, publish in enumerate(entities):

        if publish['sg_type'] == type_code:
            errors.append('Publish %d is already a %s.'
                          % (publish['id'], type_code))
            continue

        link = publish['sg_link']
        owner = publish.get('sg_link.Task.entity')
        owner_name = owner.name if owner else str(link)
        future_name = 'Republish %s as %s - %s:%s' % (
            publish['sg_type'], type_code, owner_name, link.name)

        progress('Submitting %s/%s to Qube:\n<em>"%s"</em>'
                 % (i + 1, len(entities), future_name))

        maya_scene = get_maya_scene(publish)

        # Run the job as the original user.
        qb_extra = {}
        login = publish.get('created_by.HumanUser.login')
        if login:
            qb_extra['user'] = login.split('@')[0]

        if type_code == 'maya_scene':
            future = executor.submit_ext(
                'sgpublish.commands.create:main',
                args=[('--template', str(publish['id']),
                       '--type', type_code,
                       maya_scene)],
                name=future_name,
                priority=8000,
                **qb_extra)
            futures.append(future)

        elif type_code == 'maya_geocache':
            future = executor.submit_ext(
                'mayatools.geocache.exporter:main',
                args=[(
                    '--publish-template', str(publish['id']),
                    maya_scene,
                )],
                name=future_name,
                interpreter='maya2014_python',
                priority=8000,
                **qb_extra)
            futures.append(future)

        elif type_code == 'maya_camera':
            future = executor.submit_ext(
                'mayatools.camera.exporter:main',
                args=[(
                    '--publish-template', str(publish['id']),
                    maya_scene,
                )],
                name=future_name,
                interpreter='maya2014_python',
                priority=8000,
                **qb_extra)
            futures.append(future)

        else:
            errors.append('Unknown publish type %r.' % type_code)

    # Summarize submissions and errors in one notification.
    messages = []
    if futures:
        messages.append('Submitted to Qube as %s'
                        % ', '.join(str(f.job_id) for f in futures))
    if errors:
        messages.extend('<span style="color:red">%s</span>' % e for e in errors)

    notify('; '.join(messages))
def __init__(self, link=None, type=None, name=None, version=None,
             parent=None, directory=None, sgfs=None, template=None,
             makedirs=True, defer_entities=False, **kwargs):
    """Create a publisher, registering an (initially empty) PublishEvent.

    :param link: the entity the publish is attached to (required unless
        inherited from ``template``).
    :param str type: ``sg_type`` of the publish.
    :param str name: publish stream name (``code``).
    :param int version: explicit version; auto-detected when ``None``.
    :param parent: parent publish, if any.
    :param str directory: force the output directory instead of picking one.
    :param sgfs: an SGFS; constructed from the template/link session if not given.
    :param template: existing PublishEvent to inherit fields from.
    :param bool makedirs: create the output directory.
    :param bool defer_entities: skip asserting related entities now.
    :raises ValueError: missing link/type/name, invalid name characters, or
        an already-tagged output directory.
    :raises TypeError: unrecognized kwargs.
    """
    # Build an SGFS sharing a session with whatever entity we were given.
    if not sgfs:
        if isinstance(template, Entity):
            sgfs = SGFS(session=template.session)
        elif isinstance(link, Entity):
            sgfs = SGFS(session=link.session)
        else:
            sgfs = SGFS()
    self.sgfs = sgfs

    # Inherit any unspecified fields from the template publish.
    if template:

        template = sgfs.session.merge(template)

        to_fetch = ['sg_link', 'sg_type', 'code', 'sg_version']
        to_fetch.extend(_kwarg_to_field.itervalues())
        template.fetch(to_fetch)

        tpl_link, tpl_type, tpl_name, tpl_version = template.get(
            ('sg_link', 'sg_type', 'code', 'sg_version'))
        link = link or tpl_link
        type = type or tpl_type
        name = name or tpl_name
        version = version or tpl_version

        kwargs.setdefault('source_publish', template)
        kwargs.setdefault('source_publishes', [template])
        for key, field in _kwarg_to_field.iteritems():
            kwargs.setdefault(key, template.get(field))

        if not kwargs.get('thumbnail_path'):
            # We certainly jump through a lot of hoops to do this...
            # Perhaps this should be sgfs.get_entity_tags(entity)
            publish_path = sgfs.path_for_entity(template)
            if publish_path:
                tags = sgfs.get_directory_entity_tags(publish_path)
                tags = [tag for tag in tags if tag['entity'] == template]
                if tags:
                    meta = tags[0].get('sgpublish', {})
                    thumbnail = meta.get('thumbnail')
                    if thumbnail:
                        kwargs['thumbnail_path'] = os.path.join(
                            publish_path, thumbnail)

    if not (link and type and name):
        raise ValueError('requires link, type, and name')

    self._type = str(type)
    self._link = self.sgfs.session.merge(link)
    self._name = str(name)
    self._parent = parent

    # Reject any characters outside the allowed set.
    invalid = re.sub(r'[\w\.,:;?&%=/-]', '', self._name)
    if invalid:
        raise ValueError(
            'Publish name has invalid characters {!r}.'.format(''.join(
                sorted(set(invalid)))))

    # Get information about the promotion for review.
    self._review_version_entity = None
    self._review_version_fields = kwargs.pop('review_version_fields', None)

    # To only allow us to commit once.
    self._committed = False

    # Will be set into the tag.
    self.metadata = {}

    # Files to copy on commit; (src_path, dst_path)
    self._files = []

    self.lock_permissions = True

    # Set attributes from kwargs.
    # NOTE(review): this loop shadows the `name` parameter, and it also
    # overwrites `lock_permissions` above with None when not passed —
    # presumably _normalize_attributes compensates; confirm.
    for name in (
            'created_by',
            'description',
            'extra_fields',
            'frames_path',
            'lock_permissions',
            'movie_path',
            'movie_url',
            'path',
            'source_publish',
            'source_publishes',
            'thumbnail_path',
            'trigger_event',
    ):
        setattr(self, name, kwargs.pop(name, None))

    if kwargs:
        raise TypeError('too many kwargs: %r' % sorted(kwargs))

    # Required for normalizing.
    self._directory = None

    # Get everything into the right type before sending it to Shotgun.
    self._normalize_attributes()

    # Prep for async processes. We can do a lot of "frivolous" Shotgun
    # queries at the same time since we must do at least one.
    executor = concurrent.futures.ThreadPoolExecutor(8)
    futures = []

    # Figure out the version number (async).
    if version is None:
        futures.append(executor.submit(self._set_automatic_version))
    else:
        self._version = int(version)

    # Grab all data on the link (assuming that is all that is used when
    # creating publish templates).
    futures.append(executor.submit(self.link.fetch_core))

    # First stage of the publish: create an "empty" PublishEvent.
    initial_data = {
        'code': self.name,
        'created_by': self.created_by,
        'description': self.description,
        'project': self.link.project(),
        'sg_link': self.link,
        'sg_path_to_frames': self.frames_path,
        'sg_path_to_movie': self.movie_path,
        'sg_qt': self.movie_url,
        'sg_source_publish': self.source_publish or None,  # singular
        'sg_source_publishes': self.source_publishes or [],  # multiple
        'sg_trigger_event_id': self.trigger_event['id'] if self.trigger_event else None,
        'sg_type': self.type,
        'sg_version': 0,  # Signifies that this is "empty".
    }
    initial_data.update(self.extra_fields)
    initial_data['type'] = 'PublishEvent'
    initial_data['id'] = 0  # HACK! Although sgsession supports this.
    self.entity = self.sgfs.session.merge(initial_data)

    if not defer_entities:
        future = self.assert_entities(_executor=executor)
        if future is not None:
            futures.append(future)

    # Lets have our async processes catch up.
    for future in futures:
        future.result()

    # Manually forced directory.
    if directory is not None:
        self._directory_supplied = True
        self._directory = os.path.abspath(directory)
        # Make the directory so that tools which want to manually copy files
        # don't have to.
        if makedirs:
            utils.makedirs(self._directory)
    else:
        self._directory_supplied = False
        self._directory = self.pick_unique_directory(makedirs=makedirs)

    # If the directory is tagged with existing entities, then we cannot
    # proceed. This allows one to retire a publish and then overwrite it.
    tags = self.sgfs.get_directory_entity_tags(self._directory)
    if any(tag['entity'].exists() for tag in tags):
        raise ValueError('directory is already tagged: %r' % self._directory)
def create_versions_for_publish(publish, version_fields, sgfs=None):
    """Create (or update) Version entities for a publish and wire up review state.

    :param publish: the PublishEvent the versions are promoted from.
    :param version_fields: list of field dicts, one per Version; a
        ``'__version_entity__'`` key updates an existing Version instead of
        creating one.
    :param sgfs: optional SGFS; defaults to one sharing the publish's session.
    :return: list of the created/updated Version entities.
    """
    sgfs = sgfs or SGFS(session=publish.session)
    generic_data = generic_version_from_publish(publish, sgfs=sgfs)
    versions = []

    # N.B. This used to be 4 threads, but it was causing collisions in
    # Shotgun's servers.
    with ThreadPoolExecutor(1) as executor:

        creation_futures = []
        for fields in version_fields:
            # Fill in any field not explicitly provided.
            for key, value in generic_data.iteritems():
                fields.setdefault(key, value)

            # Create/update the Version entity.
            # We allow the user to pass through their own entity for rare cases
            # when they need to modify existing ones.
            version_entity = fields.pop('__version_entity__', None)
            if version_entity is not None:
                future = executor.submit(sgfs.session.update, 'Version',
                                         version_entity['id'], fields)
                creation_futures.append((fields, version_entity, future))
            else:
                # Can't put this in the generic fields cause we are only
                # allowed to do it when creating an entity.
                fields['created_by'] = publish['created_by']
                future = executor.submit(sgfs.session.create, 'Version', fields)
                creation_futures.append((fields, None, future))

        final_futures = []
        for fields, version_entity, future in creation_futures:
            version_entity = version_entity or future.result()
            versions.append(version_entity)

            # Share thumbnails if the user didn't provide them.
            if not fields.get('image'):
                final_futures.append(
                    executor.submit(
                        sgfs.session.share_thumbnail,
                        entities=[version_entity.minimal],
                        source_entity=publish.minimal,
                    ))

            # Set the status/version on the task.
            # TODO: Make this optional when we revise the review process.
            final_futures.append(
                executor.submit(
                    sgfs.session.update,
                    'Task',
                    publish['sg_link']['id'],
                    {
                        'sg_status_list': 'rev',
                        'sg_latest_version': version_entity,
                    },
                ))

            # Set the latest version on the entity.
            # TODO: Make this optional when we revise the review process.
            entity = publish['sg_link'].fetch('entity')
            if entity['type'] in ('Asset', 'Shot'):
                final_futures.append(
                    executor.submit(
                        sgfs.session.update,
                        entity['type'],
                        entity['id'],
                        {'sg_latest_version': version_entity},
                    ))

        # Allow them to raise if they must.
        for future in final_futures:
            future.result()

    return versions
def __call__(self, *call_args, **kwargs):
    """Parse CLI options, build an SGFS for the requested root, and delegate
    to :meth:`run`."""
    opts, args = self.opt_parser.parse_args()
    return self.run(SGFS(root=opts.root), opts, args, *call_args, **kwargs)
def submit(self):
    """Build per-camera/per-layer ``Render`` commands and submit them.

    When ``self.driver`` is ``'farmsoup'`` the commands are submitted as a
    farmsoup job group (returned); otherwise each command is printed.
    When ``self.location_method`` is ``'publish'``, output goes into a new
    ``maya_render`` publish directory.
    """
    scene_path = cmds.file(q=True, sceneName=True)
    scene_name = os.path.splitext(os.path.basename(scene_path))[0]

    if self.location_method == 'publish':
        sgfs = SGFS()
        tasks = sgfs.entities_from_path(scene_path, ['Task'])
        if not tasks:
            raise ValueError("Scene is not saved under a Shotgun Task.")
        task = tasks[0]
        # TODO: Set a status.
        # TODO: Pull code, link, description, etc, from user.
        # TODO: Add metadata about the layers rendered.
        with sgpublish.Publisher(
                link=task,
                type='maya_render',
                name='Render',
                lock_permissions=False,
        ) as publisher:
            self.output_directory = publisher.directory

    maya_version = cmds.about(version=True)

    is_farmsoup = self.driver == 'farmsoup'
    if is_farmsoup:
        client = farmsoup.client.Client()
        group = client.group(name=self.name, )
        # Base software reservations shared by every job.
        # NOTE(review): base_resv (and group/client) are only defined on the
        # farmsoup path, but base_resv.copy() below runs unconditionally —
        # looks like a NameError for other drivers; confirm.
        base_resv = {
            'maya{}.install'.format(maya_version): 1,
            'maya{}.license'.format(maya_version): 1,
        }

    for camera, include_camera in sorted(self.cameras.items()):
        if not include_camera:
            continue
        for layer, include_layer in sorted(self.layers.items()):
            if not include_layer:
                continue

            renderer = self.renderers.get(layer) or self.renderers['masterLayer']

            args = [
                'Render',
                '-V', maya_version,
            ]
            reservations = base_resv.copy()

            if self.reserve_renderer and renderer not in (
                    # Don't bother reserving the built-in ones.
                    'mayaSoftware',
                    'mayaHardware2',
            ):
                # These look like "arnold" and "redshift".
                reservations['maya{}-{}.install'.format(maya_version, renderer)] = 1
                reservations['{}.install'.format(renderer)] = 1
                reservations['{}.license'.format(renderer)] = 1

            if renderer == 'redshift':
                args.extend((
                    '-r', 'redshift',
                    # This must not be escaped!
                    '-gpu', '{$FARMSOUP_RESERVED_GPUS_TOKENS}'))
                reservations['cpus'] = 1
                reservations['gpus'] = 1
            else:
                args.extend((
                    # Redshift doesn't understand -fnc.
                    # Nothing really depends on this, so it isn't a big deal.
                    '-fnc', 'name.#.ext',
                ))

            args.extend((
                '-s', '$F' if is_farmsoup else str(self.start_frame),
                '-e', '$F_end' if is_farmsoup else str(self.end_frame),
                '-x', str(int(self.width)),
                '-y', str(int(self.height)),
                '-pad', '4',
            ))

            if self.skip_existing:
                args.extend(('-skipExistingFrames', 'true'))

            # We need to ask Maya to do the templating for us, because
            # otherwise it will decide that because there are multiple
            # render layers that there will be a naming collision, and so
            # it automatically adds directories for us.
            template_name = self.filename_pattern.format(
                scene=scene_name,
                layer='<RenderLayer>',
                camera='<Camera>',
            )
            display_name = self.filename_pattern.format(
                scene=scene_name,
                layer=layer,
                camera=camera,
            ).replace(':', '_')

            args.extend((
                '-cam', camera,
                '-rl', layer,
                # We're only escaping the ones that we need to, because
                # Redshift relies upon the envvars to pick GPUs at
                # render time.
                '-rd', sh_quote(self.output_directory),
                '-im', sh_quote(template_name),
                sh_quote(scene_path)))

            command = ' '.join(args)

            if is_farmsoup:
                job = group.job(
                    name=display_name,
                    reservations=reservations,
                ).setup_as_subprocess(command, shell=True)
                # One subjob per frame chunk.
                job.expand_via_range('F={}-{}/{}'.format(
                    self.start_frame, self.end_frame, self.frame_chunk))
            else:
                print ' '.join(args)

    # TODO: Add a job to set the Shotgun status on each once they are done.

    if is_farmsoup:
        client.submit(group)
        return group
def check_ref_namespaces():
    """Check and repair Maya reference namespaces against Shotgun.

    Runs through all the references within the Maya scene, checks they
    match the Shotgun-defined Maya namespace
    (``sg_default_reference_namespace`` on the Asset), and renames the
    incorrect namespaces.

    Raises ``ValueError`` if a reference has no Asset entity, or if the
    namespaces could not all be fixed after several passes.
    """
    sgfs = SGFS()
    # Finds all the reference paths in the scene.
    scene_references = cmds.file(
        query=True, reference=True)
    # Sorts them in reverse.
    scene_references.sort(reverse=True)

    paths_and_assets = []
    all_assets = []

    # Collect all the assets so we can fetch them all at once.
    for path in scene_references:
        # Info to query the Maya namespace from the Shotgun webpage.
        assets = sgfs.entities_from_path(path, ['Asset'])
        if not assets:
            raise ValueError("No Asset entities for {}".format(path))
        asset = assets[0]
        paths_and_assets.append((path, asset))
        all_assets.append(asset)

    # Fetch them all in one call.
    sgfs.session.fetch(all_assets, ['sg_default_reference_namespace'])

    # Now that we have loaded all the asset namespaces, calculate what the
    # correct namespaces are.
    correct_namespaces = []  # (path, correct_namespace) tuples.
    for path, asset in paths_and_assets:
        # Split is to find the duplicate number; Maya marks duplicate
        # references with a "{N}" suffix on the path.
        duplicate_number = path.split("{")
        duplicate_number = duplicate_number[-1].split("}")
        # If statement is to separate the first reference from the
        # duplicates, because the first namespace will respect the Maya
        # namespace totally; the duplicates will have a suffix "_#".
        if path == duplicate_number[0]:
            # Query Shotgun-defined namespace.
            correct_namespace = asset['sg_default_reference_namespace']
        else:
            # Query Shotgun-defined namespace + "_#".
            correct_namespace = asset[
                'sg_default_reference_namespace'] + "_" + duplicate_number[0]
        correct_namespaces.append((path, correct_namespace))

    # Make a few passes at changing namespaces until they are all fixed.
    # This is to deal with situations in which two references have each other's
    # namespace. Maya will let us attempt to set duplicate namespaces, but will
    # silently change it on us. So we just ask nicely a few times.
    for round_i in xrange(10):
        num_fixed = 0
        for path, correct_namespace in correct_namespaces:
            # Query current namespace.
            current_namespace = cmds.file(path, query=1, namespace=True)
            # Renames namespace if it is incorrect.
            if current_namespace != correct_namespace:
                print ' {} should be {} for {}'.format(
                    current_namespace, correct_namespace, path)
                cmds.file(path, edit=1, namespace=correct_namespace)
                num_fixed += 1
                # Check again (just for debugging).
                new_namespace = cmds.file(path, query=1, namespace=True)
                if new_namespace != correct_namespace:
                    print ' Missed! Now', new_namespace
        print "Changed {} in round {}.".format(num_fixed, round_i)
        # Everything is fixed; bail!
        if not num_fixed:
            break
    else:
        # for/else: we never broke out, so some namespace kept reverting.
        raise ValueError("Could not fix all references after many attempts.")
def promote_publish(publish, **kwargs):
    """Create (or update) a Shotgun Version from a PublishEvent.

    Copies code/paths/links from the publish into Version fields, pulls
    Maya frame-range info from the publish directory's SGFS tags when
    present, then concurrently shares the thumbnail and updates the Task
    (status "rev" + latest version) and, for Asset/Shot links, the entity's
    ``sg_latest_version``.

    kwargs: any extra Version fields; the special ``version_entity`` kwarg
    supplies an existing Version to update instead of creating a new one.

    Returns the created or updated Version entity.
    """
    publish.fetch((
        'code',
        'sg_version',
        'created_by',
        'description',
        'sg_link',
        'sg_link.Task.entity',
        'sg_path_to_frames',
        'sg_path_to_movie',
        'sg_qt',
        'project',
    ))
    fields = {
        'code': '%s_v%04d' % (publish['code'], publish['sg_version']),
        'description': publish['description'],
        'entity': publish['sg_link']['entity'],
        'project': publish['project'],
        'sg_path_to_frames': publish['sg_path_to_frames'],
        'sg_path_to_movie': publish['sg_path_to_movie'],
        'sg_publish': publish,
        'sg_qt': publish['sg_qt'],
        'sg_task': publish['sg_link'],
        'user': publish['created_by'],  # Artist.

        # Just because the old "Submit Version" tool had these.
        'sg_frames_aspect_ratio': 1.0,
        'sg_movie_aspect_ratio': 1.0,

        # I should be able to do this as a very deep fetch.
        'sg_department': publish['sg_link']['step'].fetch('code') or 'Daily',
    }

    # Look up Maya frame information from the tag.
    sgfs = SGFS(session=publish.session)
    tags = sgfs.get_directory_entity_tags(sgfs.path_for_entity(publish))
    if tags and 'maya' in tags[0]:
        min_time = tags[0]['maya']['min_time']
        max_time = tags[0]['maya']['max_time']
        fields.update({
            'sg_first_frame': int(min_time),
            'sg_last_frame': int(max_time),
            'frame_count': int(max_time - min_time + 1),
        })

    # Create/update the version.
    version = kwargs.pop('version_entity', None)
    # Caller-supplied fields win over the derived ones.
    fields.update(kwargs)
    if version is not None:
        sgfs.session.update('Version', version['id'], fields)
    else:
        # Only set created_by on creation; it shouldn't change on update.
        fields['created_by'] = publish['created_by']
        version = sgfs.session.create('Version', fields)

    futures = []
    with ThreadPoolExecutor(4) as executor:

        # Share thumbnails.
        futures.append(
            executor.submit(
                sgfs.session.share_thumbnail,
                entities=[version.minimal],
                source_entity=publish.minimal,
            ))

        # Set the status/version on the task.
        futures.append(
            executor.submit(
                sgfs.session.update,
                'Task',
                publish['sg_link']['id'],
                {
                    'sg_status_list': 'rev',
                    'sg_latest_version': version,
                },
            ))

        # Set the latest version on the entity.
        entity = publish['sg_link'].fetch('entity')
        if entity['type'] in ('Asset', 'Shot'):
            futures.append(
                executor.submit(
                    sgfs.session.update,
                    entity['type'],
                    entity['id'],
                    {'sg_latest_version': version},
                ))

        # Allow them to raise if they must.
        for future in futures:
            future.result()

    return version
def __init__(self):
    """Create and hold a fresh SGFS instance on this object."""
    self.sgfs = SGFS()
def _main():
    """Command-line entry point for the entity-picker dialog.

    Parses ``-c/--combobox`` (use a combo-box view instead of columns) and
    ``-r/--root`` (an entity spec to root the model at), builds the model
    with its node types, optionally selects an initial entity from the
    positional args, and shows the dialog.

    Stores the widgets in module-level globals so Qt (and interactive use)
    keeps them alive after this function returns.
    """
    import sgfs.commands.utils as command_utils

    optparser = optparse.OptionParser()
    optparser.add_option('-c', '--combobox', action="store_true", dest="combobox")
    optparser.add_option('-r', '--root', dest='root')
    opts, args = optparser.parse_args()

    global model, view, dialog

    sgfs = SGFS()

    if opts.root:
        # Root the model at the entity named by the -r spec.
        root = command_utils.parse_spec(sgfs, opts.root.split())
        model = Model(state_from_entity(
            sgfs.session.get(root['type'], root['id'])), sgfs=sgfs)
    else:
        model = Model(sgfs=sgfs)

    # entity = model.sgfs.session.get('Task', 43897)
    # entities = []
    # while entity:
    #     entities.append(entity)
    #     entity = entity.parent()
    # print 'ENTITY', entities
    # model.register_node_type(functools.partial(ShotgunEntities, entities=entities))
    # model.register_node_type(SGFSRoots)
    model.register_node_type(functools.partial(ShotgunPublishStream))
    # model.register_node_type(functools.partial(ShotgunQuery, entity_types=('EventLogEntry', 'ActionMenuItem', 'Step', 'PublishEvent', 'Asset', 'Sequence', 'Shot', 'Task', 'Version', 'Tool', 'Ticket', 'Project', 'HumanUser')))
    model.register_node_type(
        functools.partial(ShotgunQuery,
                          entity_types=('Asset', 'Sequence', 'Shot', 'Project', 'Task')))
    model.register_node_type(
        functools.partial(TemplateGlobPicker,
                          entity_types=('Task', ),
                          template='nuke_scripts_dir',
                          glob='*.nk'))
    # model.register_node_type(functools.partial(DirectoryPicker, entity_types=('Task', ),))

    if opts.combobox:
        view = ComboBoxView()
    else:
        view = ColumnView()
    view.setModel(model)

    # Optionally pre-select an entity given as positional args.
    type_ = None
    id_ = None
    if args:
        init = command_utils.parse_spec(model.sgfs, args)
        type_ = init.get('type')
        id_ = init.get('id')
        print type_, id_

    if type_ and id_:
        entity = model.sgfs.session.get(type_, id_)
        init_state = state_from_entity(entity)
        index = model.index_from_state(init_state)
        if index:
            view.setCurrentIndex(index)
        else:
            print 'Could not get index for initial state.'

    view.setMinimumWidth(800)
    if not opts.combobox:
        view.setMaximumHeight(400)
        view.setPreviewVisible(False)
        view.setColumnWidths([1] * 10)

    dialog = QtGui.QDialog()
    dialog.setWindowTitle(sys.argv[0])
    dialog.setLayout(QtGui.QHBoxLayout())
    dialog.layout().addWidget(view)
    dialog.layout().addStretch()

    dialog.show()
    dialog.raise_()
def __init__(self):
    """Initialize an SGFS handle and empty log storage."""
    self.sgfs = SGFS()
    # Log cache; presumably populated by other methods on this class —
    # TODO confirm key/value types against callers.
    self._logs = {}