def main():
    """CLI entry point: tag a directory with a Shotgun entity.

    Usage: %prog [options] entity_type id path
    """
    parser = optparse.OptionParser(usage="%prog [options] entity_type id path")
    parser.add_option('--nocache',
        dest='cache', action='store_false', default=True,
        help='Do not add this path to the project level cache.')
    opts, args = parser.parse_args()

    if len(args) != 3:
        parser.print_usage()
        exit(1)

    entity_type, entity_id, path = args
    try:
        entity_id = int(entity_id)
    except ValueError:
        parser.error('entity_id must be an integer')

    sgfs = SGFS()
    entity = sgfs.session.merge({'type': entity_type, 'id': entity_id})

    # Cache a rich and full heirarchy.
    entities = entity.fetch_heirarchy()
    sgfs.session.fetch_core(entities)

    sgfs.tag_directory_with_entity(path, entity, cache=opts.cache)
def main():
    """Tag *path* with the given Shotgun entity (command-line tool)."""
    optparser = optparse.OptionParser(usage="%prog [options] entity_type id path")
    optparser.add_option(
        "--nocache",
        dest="cache",
        default=True,
        action="store_false",
        help="Do not add this path to the project level cache.",
    )
    options, positional = optparser.parse_args()

    # Exactly three positional arguments are required.
    if len(positional) != 3:
        optparser.print_usage()
        exit(1)
    entity_type, entity_id, path = positional

    try:
        entity_id = int(entity_id)
    except ValueError:
        optparser.error("entity_id must be an integer")

    sgfs = SGFS()
    entity = sgfs.session.merge(dict(type=entity_type, id=entity_id))

    # Cache a rich and full heirarchy.
    heirarchy = entity.fetch_heirarchy()
    sgfs.session.fetch_core(heirarchy)

    sgfs.tag_directory_with_entity(path, entity, cache=options.cache)
def _run(dry_run, entity_type, selected_ids, **kwargs):
    """Create (or, with ``dry_run``, preview) folder structures.

    :param bool dry_run: Preview only; no folders are created.
    :param str entity_type: Shotgun entity type of the selection.
    :param selected_ids: IDs of the selected entities.
    """
    # NOTE: removed unused locals ``title`` and ``verb`` that were assigned
    # but never read.
    progress(message=('Previewing' if dry_run else 'Creating') +
        ' folders for %s %ss; please wait...' % (len(selected_ids), entity_type))

    sgfs = SGFS()
    entities = sgfs.session.merge([dict(type=entity_type, id=id_) for id_ in selected_ids])

    # Pre-fetch the full heirarchy so create_structure does not need to
    # issue piecemeal Shotgun queries.
    heirarchy = sgfs.session.fetch_heirarchy(entities)
    sgfs.session.fetch_core(heirarchy)

    command_log = sgfs.create_structure(entities, dry_run=dry_run)

    if command_log:
        details = '\n'.join(command_log)
        if dry_run:
            alert(title='Folder Preview', message=details)
        else:
            notify(
                message='Created folders for %s %ss.' % (len(selected_ids), entity_type),
                details=details,
            )
    else:
        notify(message='Folders are already up to date.')
def _fetch_existing_data(self):
    """Load the workspace's Tasks and their existing publishes.

    Emits ``loaded_publishes(tasks, publishes)`` on success. On any
    failure the task combo is replaced with an error entry and the
    exception is re-raised.
    """
    try:
        sgfs = SGFS()
        workspace = self._exporter.workspace
        tasks = sgfs.entities_from_path(workspace)
        if not tasks:
            raise ValueError('No entities in workspace %r' % workspace)
        if any(x['type'] != 'Task' for x in tasks):
            raise ValueError('Non-Task entity in workspace %r' % workspace)
        filters = [
            ('sg_link.Task.id', 'in') + tuple(x['id'] for x in tasks),
            ('sg_type', 'is', self._exporter.publish_type),
            ('sg_version', 'greater_than', 0),  # Skipped failures.
        ]
        publishes = sgfs.session.find('PublishEvent', filters, ['code', 'sg_version'])
    except Exception as e:
        self._task_combo.clear()
        self._task_combo.addItem('Loading Error! %s' % e, {})
        raise
    else:
        self.loaded_publishes.emit(tasks, publishes)
def run(entity_type, selected_ids, **kwargs): sgfs = SGFS() paths = [] for id_ in selected_ids: entity = sgfs.session.merge(dict(type=entity_type, id=id_)) path = sgfs.path_for_entity(entity) if path: print entity, "->", repr(path) paths.append(path) if not paths: notify("No paths for %s %s" % (entity_type, selected_ids)) return notify("Opened " + ", ".join(sorted(paths))) path = paths[0] if sys.platform.startswith("darwin"): call(["open", "-a", "Terminal", path]) else: terminal_cmd = "gnome-terminal" if "MATE_DESKTOP_SESSION_ID" in os.environ: terminal_cmd = "mate-terminal" call([terminal_cmd, "--working-directory", path])
def _run(dry_run, entity_type, selected_ids, **kwargs):
    """Preview or create SGFS folder structures for a selection.

    :param bool dry_run: When true, only report what would be done.
    :param str entity_type: Shotgun entity type of the selection.
    :param selected_ids: IDs of the selected entities.
    """
    # Removed ``title``/``verb`` locals: they were assigned but never used.
    progress(message=('Previewing' if dry_run else 'Creating') +
        ' folders for %s %ss; please wait...' % (len(selected_ids), entity_type))

    sgfs = SGFS()
    entities = sgfs.session.merge(
        [dict(type=entity_type, id=id_) for id_ in selected_ids])

    # Warm the session cache with the entire heirarchy up front.
    heirarchy = sgfs.session.fetch_heirarchy(entities)
    sgfs.session.fetch_core(heirarchy)

    command_log = sgfs.create_structure(entities, dry_run=dry_run)

    if command_log:
        details = '\n'.join(command_log)
        if dry_run:
            alert(title='Folder Preview', message=details)
        else:
            notify(
                message='Created folders for %s %ss.' % (len(selected_ids), entity_type),
                details=details,
            )
    else:
        notify(message='Folders are already up to date.')
def check_paths(paths, only_published=True):
    """Check file paths against their PublishEvents in Shotgun.

    :param paths: Iterable of file paths to inspect.
    :param bool only_published: Skip paths with no tagged publish.
    :return: List of :class:`ReferenceStatus`.
    """
    sgfs = SGFS()
    res = []
    for path in paths:
        publishes = sgfs.entities_from_path(path, 'PublishEvent')
        if only_published and not publishes:
            continue
        publish = publishes[0] if publishes else None
        if publish:
            # All publishes in the same stream (same link, name, and type).
            # BUGFIX: also request 'sg_version', which the sort/max below
            # rely upon; previously only 'sg_path' was fetched.
            siblings = sgfs.session.find('PublishEvent', [
                ('sg_link', 'is', publish['sg_link']),
                ('code', 'is', publish['code']),
                ('sg_type', 'is', publish['sg_type']),
            ], ['sg_path', 'sg_version'])
            siblings.sort(key=lambda x: x['sg_version'])
            latest = max(siblings, key=lambda pub: pub['sg_version'])
        else:
            siblings = []
            latest = None
        res.append(ReferenceStatus(
            path=path,
            used=publish,
            latest=latest,
            is_latest=publish is latest if publish else False,
            all=siblings,
        ))
    return res
def run(entity_type, selected_ids, **kwargs): sgfs = SGFS() paths = [] for id_ in selected_ids: entity = sgfs.session.merge(dict(type=entity_type, id=id_)) path = sgfs.path_for_entity(entity) if path: print entity, '->', repr(path) paths.append(path) if not paths: notify('No paths for %s %s' % (entity_type, selected_ids)) return notify('Opened ' + ', '.join(sorted(paths))) path = paths[0] if sys.platform.startswith('darwin'): call(['open', '-a', 'Terminal', path]) else: terminal_cmd = 'gnome-terminal' if 'MATE_DESKTOP_SESSION_ID' in os.environ: terminal_cmd = 'mate-terminal' call([terminal_cmd, '--working-directory', path])
def update(self, entity):
    """Refresh the detail pane with *entity*'s metadata and thumbnail."""
    # TODO: Do this async.
    by, at, desc = entity.fetch(('created_by.HumanUser.name', 'created_at', 'description'), force=True)
    self._created_by_label.setText(str(by))
    self._created_at_label.setText(str(at.strftime('%y-%m-%d %I:%M %p')))
    self._description_label.setText(str(desc))

    sgfs = SGFS(session=entity.session)
    path = sgfs.path_for_entity(entity)
    tags = sgfs.get_directory_entity_tags(path)
    # Identity comparison on purpose: we want the tag for this exact
    # entity object within the shared session.
    tags = [t for t in tags if t['entity'] is entity]
    tag = tags[0]  # NOTE(review): assumes the directory is tagged — IndexError otherwise; confirm.

    maya_data = tag.get('maya', {})
    time_range = '%s - %s' % (maya_data.get('min_time'), maya_data.get('max_time'))
    self._timeRangeLabel.setText(time_range)

    # Scaled thumbnails are cached per entity in self._pixmaps.
    if entity not in self._pixmaps:
        thumbnail_path = tag.get('sgpublish', {}).get('thumbnail') if tags else None
        thumbnail_path = thumbnail_path or os.path.join(path, '.sgfs.thumbnail.jpg')
        if os.path.exists(thumbnail_path):
            pixmap = QtGui.QPixmap(thumbnail_path)
        else:
            # Fall back onto the bundled placeholder image.
            path = os.path.abspath(os.path.join(
                __file__, '..', '..', '..', '..', 'art', 'no-thumbnail.png'
            ))
            pixmap = QtGui.QPixmap(path)
        self._pixmaps[entity] = pixmap.scaledToWidth(165, Qt.SmoothTransformation)

    self._thumbnail.setPixmap(self._pixmaps[entity])
    self._thumbnail.setFixedSize(self._pixmaps[entity].size())
def _export(self, kwargs):
    """Run the export/publish flow for the current UI state.

    :param dict kwargs: Exporter-specific export options.
    :raises PublishSafetyError: If the pre-flight safety check fails.
    :return: The publisher used for the export.
    """
    if not self.safety_check(**kwargs):
        raise PublishSafetyError()

    task_data = self._task_combo.currentData()
    task = task_data.get('task')
    if not task:
        # Fall back onto the Task tagged in the workspace directory.
        sgfs = SGFS()
        tasks = sgfs.entities_from_path(self._exporter.workspace, 'Task')
        if not tasks:
            raise ValueError('Could not find SGFS tagged entities')
        task = tasks[0]

    stream_data = self._name_combo.currentData()
    # The previous publish in this stream (if any) becomes the parent.
    parent = stream_data.get('publish')

    publisher = self._exporter.publish(
        task,
        name=self.name(),
        description=self.description(),
        version=self.version(),
        parent=parent,
        thumbnail_path=self.thumbnail_path(),
        frames_path=self.frames_path(),
        movie_path=self.movie_path(),
        export_kwargs=kwargs,
    )

    if self._promote_checkbox.isChecked():
        # progress.setLabelText('Creating Version for Review...')
        promotion_fields = self._exporter.fields_for_review_version(
            publisher, **kwargs)
        print "PROMOTE", promotion_fields
        publisher.promote_for_review(**promotion_fields)

    # Create the timelog.
    minutes = self._timelog_spinbox.value()
    if minutes:
        # progress.setLabelText('Logging time...')
        publisher.sgfs.session.create(
            'TimeLog', {
                'project': publisher.entity.project(),
                'entity': publisher.link,
                'user': publisher.sgfs.session.guess_user(),
                'duration': minutes,
                'description': '%s_v%04d' % (publisher.name, publisher.version),
                'date': datetime.datetime.utcnow().date(),
            })

    # progress.hide()

    return publisher
def __init__(self, **kwargs):
    """Initialize scene-name state from kwargs and/or parsed paths.

    Recognized kwargs: ``workspace``, ``filename``, ``detail``,
    ``entity_name``, ``entity_type``, ``extension``, ``revision``,
    ``step_name``, ``directory`` (or legacy ``scenes_name``),
    ``sub_directory``, ``version``, plus ``warning``/``error`` callbacks.

    :raises TypeError: If any unrecognized kwargs remain.
    """
    # Reasonable defaults.
    self.detail = ''
    self.entity_name = ''
    self.entity_type = None
    self.extension = ''
    self.revision = 1
    # Peeked (not popped) here; popped for real further below so an
    # explicit kwarg still wins over path parsing.
    self.step_name = kwargs.get('step_name')
    self.sub_directory = ''
    self.directory = 'scenes'
    self.version = 0

    self.sep = ','
    self._all_seps_class = '[%s]' % re.escape('-_,.')
    # Matches runs of separator characters at either end of a name.
    self._strip_seps_re = re.compile(r'(^%s+)|(%s+$)' % (self._all_seps_class, self._all_seps_class))

    self._sgfs = SGFS()

    # Callbacks.
    self.warning = kwargs.pop('warning', self.warning)
    self.error = kwargs.pop('error', self.error)
    if self.error is False:
        self.error = self.warning

    self._step_names = []

    # Parse given paths; these seed defaults that explicit kwargs below
    # may still override.
    self.workspace = kwargs.pop('workspace', None)
    if self.workspace is not None:
        self._parse_workspace(self.workspace)

    self.filename = kwargs.pop('filename', None)
    if self.filename is not None:
        self._parse_filename(self.filename)

    # Set kwargs.
    self.detail = kwargs.pop('detail', self.detail)
    self.entity_name = kwargs.pop('entity_name', self.entity_name)
    self.entity_type = kwargs.pop('entity_type', self.entity_type)
    self.extension = kwargs.pop('extension', self.extension)
    self.revision = int(kwargs.pop('revision', self.revision))
    self.step_name = kwargs.pop('step_name', self.step_name)
    # "scenes_name" one is for backwards compatibility.
    self.directory = kwargs.pop('directory', kwargs.pop('scenes_name', self.directory))
    self.sub_directory = kwargs.pop('sub_directory', self.sub_directory)
    self.version = int(kwargs.pop('version', self.version))

    if kwargs:
        raise TypeError(('%s recieved too many kwargs: ' % self.__class__.__name__) + ', '.join(kwargs))
def assert_workspace():
    """Point the Maya workspace at the Task that owns the current scene.

    :raises ValueError: If the scene is unsaved or not under a tagged Task.
    """
    scene_path = cmds.file(q=True, sceneName=True)
    if not scene_path:
        raise ValueError("Scene is not saved.")

    sgfs = SGFS()
    tasks = sgfs.entities_from_path(scene_path, ['Task'])
    if not tasks:
        raise ValueError("Scene is not in a task.")

    workspace_path(sgfs.path_for_entity(tasks[0]))
def _export(self, kwargs):
    """Publish the export described by the current widget state.

    :param dict kwargs: Exporter-specific export options.
    :raises PublishSafetyError: When the safety check vetoes the export.
    :return: The publisher that performed the export.
    """
    if not self.safety_check(**kwargs):
        raise PublishSafetyError()

    task_data = self._task_combo.currentData()
    task = task_data.get('task')
    if not task:
        # No Task picked in the UI; use the one tagged on the workspace.
        sgfs = SGFS()
        tasks = sgfs.entities_from_path(self._exporter.workspace, 'Task')
        if not tasks:
            raise ValueError('Could not find SGFS tagged entities')
        task = tasks[0]

    stream_data = self._name_combo.currentData()
    # Parent publish of the selected stream, if one exists.
    parent = stream_data.get('publish')

    publisher = self._exporter.publish(task,
        name=self.name(),
        description=self.description(),
        version=self.version(),
        parent=parent,
        thumbnail_path=self.thumbnail_path(),
        frames_path=self.frames_path(),
        movie_path=self.movie_path(),
        export_kwargs=kwargs,
    )

    if self._promote_checkbox.isChecked():
        # progress.setLabelText('Creating Version for Review...')
        promotion_fields = self._exporter.fields_for_review_version(publisher, **kwargs)
        print "PROMOTE", promotion_fields
        publisher.promote_for_review(**promotion_fields)

    # Create the timelog.
    minutes = self._timelog_spinbox.value()
    if minutes:
        # progress.setLabelText('Logging time...')
        publisher.sgfs.session.create('TimeLog', {
            'project': publisher.entity.project(),
            'entity': publisher.link,
            'user': publisher.sgfs.session.guess_user(),
            'duration': minutes,
            'description': '%s_v%04d' % (publisher.name, publisher.version),
            'date': datetime.datetime.utcnow().date(),
        })

    # progress.hide()

    return publisher
def _run(dry_run, entity_type, selected_ids, **kwargs):
    """Create (or preview) folders for the selection, then notify."""
    sgfs = SGFS()
    entities = sgfs.session.merge(
        [{'type': entity_type, 'id': id_} for id_ in selected_ids])

    # Warm the cache with the full heirarchy before creating structures.
    heirarchy = sgfs.session.fetch_heirarchy(entities)
    sgfs.session.fetch_core(heirarchy)

    commands = sgfs.create_structure(entities, dry_run=dry_run)

    notify(
        title='Preview Folders' if dry_run else 'Creating Folders',
        message='\n'.join(commands) or 'Everything is up to date.',
    )
def open_workspace():
    """Open the current workspace folder in the OS file browser.

    Prefers the folder of the entity that owns the workspace, when the
    workspace is tagged with a Task or PublishEvent.
    """
    path = workspace_path()
    sgfs = SGFS()
    # BUGFIX: reuse ``path`` instead of calling workspace_path() a second
    # time (redundant, and could disagree if the workspace changed).
    entities = sgfs.entities_from_path(path, ['Task', 'PublishEvent'])
    if entities:
        path = sgfs.path_for_entity(entities[0]) or path

    if sys.platform == 'darwin':
        call(['open', path])
    else:
        call(['xdg-open', path])
def generic_version_from_publish(publish, sgfs=None):
    """Get the generic fields for a Version that is derived from a Publish.

    Only the fields that would be shared by multiple Versions derived
    from the same Publish.
    """
    publish.fetch(GENERIC_FIELDS)
    fields = {
        'entity': publish['sg_link']['entity'],
        'project': publish['project'],
        'sg_publish': publish,
        'sg_task': publish['sg_link'],
        'user': publish['created_by'],  # Artist.
    }

    # Look up Maya frame information from the tag.
    sgfs = sgfs or SGFS(session=publish.session)
    publish_path = sgfs.path_for_entity(publish)
    tags = sgfs.get_directory_entity_tags(publish_path)
    if tags and 'maya' in tags[0]:
        maya_data = tags[0]['maya']
        min_time = maya_data['min_time']
        max_time = maya_data['max_time']
        fields['sg_first_frame'] = int(min_time)
        fields['sg_last_frame'] = int(max_time)
        fields['frame_count'] = int(max_time - min_time + 1)

    return fields
class Importer(object):
    """Base class for tools that import a published path into a scene."""

    def __init__(self):
        self.sgfs = SGFS()

    @property
    def workspace(self):
        # Where to look for an existing import.
        return os.getcwd()

    @property
    def existing_publish(self):
        """The PublishEvent tagged at :attr:`existing_path`, or None."""
        path = self.existing_path
        if path is None:
            return
        entities = self.sgfs.entities_for_path(path, 'PublishEvent')
        if not entities:
            return None
        if len(entities) > 1:
            raise RuntimeError('multiple publishes tagged in %r' % path)
        return entities[0]

    @property
    def existing_path(self):
        # For the UI to repopulate.
        return None

    def import_publish(self, publish, **kwargs):
        """Passthrough to the :meth:`.import_`."""
        path, directory = publish.fetch(('sg_path', 'sg_directory'))
        return self.import_(path or directory, **kwargs)

    def import_(self, path, **kwargs):
        # Subclasses must implement the actual import.
        raise NotImplementedError()
def run(entity_type, selected_ids, **kwargs):
    """Promote each selected PublishEvent into a reviewable Version."""
    sgfs = SGFS()
    for id_ in selected_ids:
        entity = sgfs.session.merge({'type': entity_type, 'id': id_})
        task, code, version, frames_path, movie_path = entity.fetch((
            'sg_link', 'code', 'sg_version', 'sg_path_to_frames', 'sg_path_to_movie'))

        # Can't promote it without a movie.
        if not frames_path and not movie_path:
            notify('Version "%s_v%04d" does not have frames or a movie' % (code, version), sticky=True)
            continue

        # Make sure it doesn't already exist.
        existing = sgfs.session.find('Version', [
            ('sg_task', 'is', task),
            ('code', 'is', '%s_v%04d' % (code, version)),
        ])
        if existing:
            notify('Version "%s_v%04d" already exists' % (code, version), sticky=True)
            continue

        versions.promote_publish(entity)
        notify('Promoted to version "%s_v%04d"' % (entity['code'], entity['sg_version']))
class Importer(object):
    """Base importer; subclasses implement :meth:`import_`."""

    def __init__(self):
        self.sgfs = SGFS()

    @property
    def workspace(self):
        """Directory searched for an existing import."""
        return os.getcwd()

    @property
    def existing_publish(self):
        """PublishEvent tagged at :attr:`existing_path`, if any."""
        path = self.existing_path
        if path is None:
            return
        entities = self.sgfs.entities_for_path(path, "PublishEvent")
        count = len(entities)
        if count > 1:
            raise RuntimeError("multiple publishes tagged in %r" % path)
        if count:
            return entities[0]
        return None

    @property
    def existing_path(self):
        # For the UI to repopulate.
        return None

    def import_publish(self, publish, **kwargs):
        """Passthrough to the :meth:`.import_`."""
        path, directory = publish.fetch(("sg_path", "sg_directory"))
        return self.import_(path or directory, **kwargs)

    def import_(self, path, **kwargs):
        raise NotImplementedError()
def run():
    """Merge every chosen geocache into the scene, collecting warnings."""
    warnings = []
    sgfs = SGFS()

    available = discover_caches(sgfs=sgfs)
    print()

    todo = pick_todo(available, sgfs=sgfs, warnings=warnings)
    print()

    # Best-effort: an individual failed merge becomes a warning rather
    # than aborting the whole build.
    for item in sorted(todo):
        try:
            merge(*item)
        except Exception as e:
            warnings.append(e)
    print()

    if warnings:
        print("{} WARNINGS:".format(len(warnings)))
        for i, warning in enumerate(warnings):
            print(" #{}: {}".format(i + 1, warning))
        print()
        pm.warning("{} warnings while building scene. Open script editor for details.".format(len(warnings)))
def __init__(self):
    """Build a small example project/sequence/shot/task fixture."""
    sg = Shotgun()
    self.sg = self.fix = fix = Fixture(sg)

    proj = fix.Project('Example Project')
    seqs = [proj.Sequence(code, project=proj) for code in ('AA', 'BB')]

    # Two shots per sequence: AA_001, AA_002, BB_001, BB_002.
    shots = []
    for seq in seqs:
        for i in range(1, 3):
            shots.append(seq.Shot('%s_%03d' % (seq['code'], i), project=proj))

    steps = [fix.find_or_create('Step', code=code, short_name=code)
             for code in ('Anm', 'Comp', 'Model')]

    # One task per step per shot.
    tasks = []
    for step in steps:
        for shot in shots:
            tasks.append(shot.Task(step['code'] + ' something', step=step, entity=shot, project=proj))

    self.proj = minimal(proj)
    self.seqs = map(minimal, seqs)
    self.shots = map(minimal, shots)
    self.steps = map(minimal, steps)
    self.tasks = map(minimal, tasks)

    self.session = Session(self.sg)
    self.sgfs = SGFS(root=sandbox, session=self.session)
def update(self, entity):
    """Refresh the detail pane with *entity*'s metadata and thumbnail."""
    # TODO: Do this async.
    # We're also priming the sg_default_reference_namespace (assuming
    # it exists).
    by, at, desc, _ = entity.fetch((
        'created_by.HumanUser.name',
        'created_at',
        'description',
        'sg_link.Task.entity.Asset.sg_default_reference_namespace',
    ), force=True)
    self._created_by_label.setText(str(by))
    self._created_at_label.setText(str(at.strftime('%y-%m-%d %I:%M %p')))
    self._description_label.setText(str(desc))

    sgfs = SGFS(session=entity.session)
    path = sgfs.path_for_entity(entity)
    tags = sgfs.get_directory_entity_tags(path)
    # Identity comparison on purpose: match the tag for this exact entity
    # object from the shared session.
    tags = [t for t in tags if t['entity'] is entity]
    tag = tags[0]  # NOTE(review): assumes the directory is tagged — IndexError otherwise; confirm.

    maya_data = tag.get('maya', {})
    time_range = '%s - %s' % (maya_data.get('min_time'), maya_data.get('max_time'))
    self._timeRangeLabel.setText(time_range)

    # Scaled thumbnails are cached per entity in self._pixmaps.
    if entity not in self._pixmaps:
        thumbnail_path = tag.get('sgpublish', {}).get('thumbnail') if tags else None
        thumbnail_path = thumbnail_path or os.path.join(
            path, '.sgfs.thumbnail.jpg')
        if os.path.exists(thumbnail_path):
            pixmap = Q.Pixmap(thumbnail_path)
        else:
            # Fall back onto the bundled placeholder image.
            path = os.path.abspath(
                os.path.join(__file__, '..', '..', '..', '..', 'art',
                             'no-thumbnail.png'))
            pixmap = Q.Pixmap(path)
        self._pixmaps[entity] = pixmap.scaledToWidth(
            165, Q.SmoothTransformation)

    self._thumbnail.setPixmap(self._pixmaps[entity])
    self._thumbnail.setFixedSize(self._pixmaps[entity].size())
def run(entity_type, selected_ids, **kwargs): sgfs = SGFS() entity = sgfs.session.merge(dict(type=entity_type, id=selected_ids[0])) path = sgfs.path_for_entity(entity) if not path: notify('No folders for %s %s' % (entity['type'], entity['id'])) return print entity, '->', repr(path) # Signal to maya what entity this is. env = dict(os.environ) env['SGFS_ENTITY_TYPE'] = entity['type'] env['SGFS_ENTITY_ID'] = str(entity['id']) call(['maya_launcher'], cwd=path, env=env)
def open_parent_in_shotgun():
    """Shelf tool to open Asset or Shot for current workspace."""
    entities = SGFS().entities_from_path(workspace_path(), ['Asset', 'Shot'])
    if not entities:
        cmds.error('No entities for workspace.')
        return
    opener = 'open' if sys.platform == 'darwin' else 'xdg-open'
    call([opener, entities[0].url])
def __init__(self, exporter):
    """Build the exporter widget and derive a default basename.

    The basename comes from the exporter's filename hint, cleaned of
    separators and version/revision markers, then (when a workspace Task
    can be found) stripped of entity and step-name prefixes.
    """
    super(Widget, self).__init__()
    self._exporter = exporter

    self._existing_streams = set()

    basename = os.path.basename(exporter.filename_hint)
    basename = os.path.splitext(basename)[0]
    # Collapse non-word runs, drop "v###"/"r###" markers, trim separators.
    basename = re.sub(r'[^\w-]+', '_', basename)
    basename = re.sub(r'_*[rv]\d+', '', basename)
    basename = _strip_seps(basename)

    # TODO: Strip entity_name and step_name with SGFS scene_name functions.
    if self._exporter.workspace:
        sgfs = SGFS()
        tasks = sgfs.entities_from_path(self._exporter.workspace, ['Task'])
        if tasks:
            task = tasks[0]
            task_name, entity, step = task.fetch(('content', 'entity', 'step'))
            if entity:
                basename = _strip_prefix(basename, entity.fetch('code'))
            if step:
                basename = _strip_prefix(basename, step.fetch('short_name'))
            # Default to something reasonable.
            basename = (
                basename or
                task_name or
                (step.get('short_name') if step else None) or
                (entity.get('code') if entity else None) or
                ''
            )

    self._basename = basename
    self._setup_ui()

    # First screenshot.
    self.take_full_screenshot()
def run(entity_type, selected_ids, **kwargs): sgfs = SGFS() paths = [] for id_ in selected_ids: entity = sgfs.session.merge(dict(type=entity_type, id=id_)) path = sgfs.path_for_entity(entity) if path: print entity, '->', repr(path) paths.append(path) if not paths: notify('No paths for %s %s' % (entity_type, selected_ids)) return notify('Opening:\n' + '\n'.join(sorted(paths))) for path in set(paths): if sys.platform.startswith('darwin'): call(['open', path]) else: call(['xdg-open', path])
def run(entity_type, selected_ids, **kwargs): sgfs = SGFS() paths = [] for id_ in selected_ids: entity = sgfs.session.merge(dict(type=entity_type, id=id_)) path = sgfs.path_for_entity(entity) if path: print entity, '->', repr(path) paths.append(path) if not paths: notify('No paths for %s %s' % (entity_type, selected_ids)) return notify('Opened ' + ', '.join(sorted(paths))) for path in set(paths): if sys.platform.startswith('darwin'): call(['open', path]) else: call(['xdg-open', path])
def check_paths(paths, only_published=True):
    """Build a :class:`ReferenceStatus` for each path's publish state.

    :param paths: Iterable of file paths to inspect.
    :param bool only_published: Skip paths that have no tagged publish.
    :return: List of :class:`ReferenceStatus`.
    """
    sgfs = SGFS()
    res = []
    for path in paths:
        publishes = sgfs.entities_from_path(path, 'PublishEvent')
        if only_published and not publishes:
            continue
        publish = publishes[0] if publishes else None
        if publish:
            # All publishes in the same stream (same link, name, type).
            # BUGFIX: 'sg_version' is needed by the sort and max below,
            # but only 'sg_path' was being requested.
            siblings = sgfs.session.find('PublishEvent', [
                ('sg_link', 'is', publish['sg_link']),
                ('code', 'is', publish['code']),
                ('sg_type', 'is', publish['sg_type']),
            ], ['sg_path', 'sg_version'])
            siblings.sort(key=lambda x: x['sg_version'])
            latest = max(siblings, key=lambda pub: pub['sg_version'])
        else:
            siblings = []
            latest = None
        res.append(
            ReferenceStatus(
                path=path,
                used=publish,
                latest=latest,
                is_latest=publish is latest if publish else False,
                all=siblings,
            ))
    return res
def __init__(self, root_state=None, sgfs=None, shotgun=None, session=None):
    """Set up the model's SGFS connection, thread pool, and node types."""
    super(Model, self).__init__()
    self._root_state = root_state or {}
    self._root = None
    self.sgfs = sgfs or SGFS(shotgun=shotgun, session=session)

    # Force a more reasonable timeout. Note that this does change the
    # global parameters on the shotgun object.
    shotgun_obj = self.sgfs.session.shotgun
    shotgun_obj.close()
    shotgun_obj.config.timeout_secs = 5
    shotgun_obj.config.max_rpc_attempts = 1

    self.threadpool = ThreadPool(8, lifo=True)
    self._node_types = []
def extract_publisher_kwargs(args, sgfs=None, delete=True):
    """Pull ``publisher_*`` attributes off an argparse/optparse namespace.

    :param args: Namespace whose ``publisher_*`` attributes to extract.
    :param sgfs: Optional SGFS instance; one is created when needed.
    :param bool delete: Remove the ``publisher_*`` attributes from *args*.
    :return: Dict of publisher kwargs (prefix stripped), with 'link' and
        'template' values resolved through :func:`parse_spec`.
    """
    prefix = 'publisher_'
    kwargs = {}
    # BUGFIX: materialize the items before iterating, since delattr
    # mutates args.__dict__ mid-iteration (RuntimeError on Python 3).
    for key, value in list(args.__dict__.items()):
        if not key.startswith(prefix):
            continue
        if value is not None:
            kwargs[key[len(prefix):]] = value
        if delete:
            delattr(args, key)

    sgfs = sgfs or SGFS()
    if 'link' in kwargs:
        kwargs['link'] = parse_spec(sgfs, kwargs['link'])
    if 'template' in kwargs:
        kwargs['template'] = parse_spec(sgfs, kwargs['template'], entity_types=['PublishEvent'])

    return kwargs
def run(entity_type, selected_ids, **kwargs):
    """Show the scene picker dialog for the selected maya_scene publish."""
    app = QtGui.QApplication([])
    QtGui.QApplication.setWindowIcon(ui_utils.icon('fatcow/brick_go', as_icon=True))

    sgfs = SGFS()
    publish = sgfs.session.merge({'type': entity_type, 'id': selected_ids[0]})
    task, type_, _, _ = publish.fetch(
        ('sg_link', 'sg_type', 'sg_path', 'sg_version'), force=True)

    # Only maya_scene publishes can be handled here.
    if type_ != 'maya_scene':
        QtGui.QMessageBox.critical(None, 'Unknown Publish Type',
            'Cannot process publishes of type %r.' % type_)
        exit(1)

    task['step'].fetch_core()  # For the picker.

    dialog = Dialog(publish)
    dialog.show()
    dialog.raise_()

    exit(app.exec_())
def _populate_references(self):
    """Fill the tree widget with reference and geocache status rows."""
    sgfs = SGFS()

    # Scene references (published or not).
    for status in check.check_paths(cmds.file(q=True, reference=True), only_published=False):
        item = ReferenceItem(sgfs, status)
        self._tree.addTopLevelItem(item)
        item.attach_to_tree(self._tree)

    # Geocaches (published only).
    cache_paths = geocache_utils.get_existing_cache_mappings().keys()
    for status in check.check_paths(cache_paths, only_published=True):
        item = GeocacheItem(sgfs, status)
        self._tree.addTopLevelItem(item)
        item.attach_to_tree(self._tree)

    # Size columns to content, with a little padding.
    for column in range(7):
        self._tree.resizeColumnToContents(column)
        self._tree.setColumnWidth(column, self._tree.columnWidth(column) + 10)
def __init__(self, root_state=None, sgfs=None, shotgun=None, session=None):
    """Set up SGFS, thread pool, node types, and legacy signal shims."""
    super(Model, self).__init__()
    self._root_state = root_state or {}
    self._root = None
    self.sgfs = sgfs or SGFS(shotgun=shotgun, session=session)

    # Force a more reasonable timeout. Note that this does change the
    # global parameters on the shotgun object.
    self.sgfs.session.shotgun.close()
    self.sgfs.session.shotgun.config.timeout_secs = 5
    self.sgfs.session.shotgun.config.max_rpc_attempts = 1

    self.threadpool = ThreadPool(8, lifo=True)

    self._node_types = []

    # See comment above.
    # Replace each layout signal with its "_legacy_" counterpart and
    # forward emissions to the original, so older consumers keep working.
    for name in 'layoutChanged', 'layoutAboutToBeChanged':
        old = getattr(self, name)
        new = getattr(self, '_legacy_' + name)
        setattr(self, name, new)
        new.connect(old.emit)
def _main():
    """Ad-hoc demo: build a picker Model/View and show it in a dialog."""
    import sgfs.commands.utils as command_utils

    optparser = optparse.OptionParser()
    optparser.add_option('-c', '--combobox', action="store_true", dest="combobox")
    optparser.add_option('-r', '--root', dest='root')
    opts, args = optparser.parse_args()

    # Kept at module scope so interactive sessions can poke at them.
    global model, view, dialog

    sgfs = SGFS()

    if opts.root:
        # Root the model at the entity given by the -r spec.
        root = command_utils.parse_spec(sgfs, opts.root.split())
        model = Model(state_from_entity(sgfs.session.get(root['type'], root['id'])), sgfs=sgfs)
    else:
        model = Model(sgfs=sgfs)

    # entity = model.sgfs.session.get('Task', 43897)
    # entities = []
    # while entity:
    #     entities.append(entity)
    #     entity = entity.parent()
    # print 'ENTITY', entities
    # model.register_node_type(functools.partial(ShotgunEntities, entities=entities))
    # model.register_node_type(SGFSRoots)
    model.register_node_type(functools.partial(ShotgunPublishStream))
    # model.register_node_type(functools.partial(ShotgunQuery, entity_types=('EventLogEntry', 'ActionMenuItem', 'Step', 'PublishEvent', 'Asset', 'Sequence', 'Shot', 'Task', 'Version', 'Tool', 'Ticket', 'Project', 'HumanUser')))
    model.register_node_type(functools.partial(ShotgunQuery, entity_types=('Asset', 'Sequence', 'Shot', 'Project', 'Task')))
    model.register_node_type(functools.partial(TemplateGlobPicker, entity_types=('Task', ), template='nuke_scripts_dir', glob='*.nk'))
    # model.register_node_type(functools.partial(DirectoryPicker, entity_types=('Task', ),))

    if opts.combobox:
        view = ComboBoxView()
    else:
        view = ColumnView()
    view.setModel(model)

    # Optional positional args select an initial entity.
    type_ = None
    id_ = None
    if args:
        init = command_utils.parse_spec(model.sgfs, args)
        type_ = init.get('type')
        id_ = init.get('id')
    print type_, id_

    if type_ and id_:
        entity = model.sgfs.session.get(type_, id_)
        init_state = state_from_entity(entity)
        index = model.index_from_state(init_state)
        if index:
            view.setCurrentIndex(index)
        else:
            print 'Could not get index for initial state.'

    view.setMinimumWidth(800)
    if not opts.combobox:
        view.setMaximumHeight(400)
        view.setPreviewVisible(False)
        view.setColumnWidths([1] * 10)

    dialog = QtGui.QDialog()
    dialog.setWindowTitle(sys.argv[0])
    dialog.setLayout(QtGui.QHBoxLayout())
    dialog.layout().addWidget(view)
    dialog.layout().addStretch()

    dialog.show()
    dialog.raise_()
import os
import re
import difflib
import sys

from maya import cmds, mel

from sgfs import SGFS
from sgfs.ui import product_select
from uitools.qt import Qt, QtCore, QtGui

import sgfs.ui.scene_name.widget as scene_name
import sgpublish.check.maya

from . import utils
from ..attributes import copy_attributes

sgfs = SGFS()


def silk(name):
    """Return the absolute path of the named silk icon PNG.

    BUGFIX: ``os`` was used here but missing from the import block above;
    it is now imported.
    """
    # __file__ = "$KS_TOOLS/key_base/3d/maya/python/geocache/importer.py"
    # icons = "$KS_TOOLS/key_base/art/icons"
    icons = os.path.abspath(os.path.join(__file__, '..', '..', 'art', 'icons', 'silk'))
    return os.path.join(icons, name + '.png')


def silk_icon(name, size=16):
    """Return a QIcon for the named silk icon, scaled to *size* pixels."""
    icon = QtGui.QIcon(silk(name))
    if size != 16:
        icon = QtGui.QIcon(icon.pixmap(size, size))
    return icon
def promote_publish(publish, **kwargs):
    """Create (or update) a reviewable Version from a PublishEvent.

    :param publish: The PublishEvent entity to promote.
    :param kwargs: Extra Version fields; ``version_entity`` selects an
        existing Version to update instead of creating one.
    :return: The created or updated Version entity.
    """
    publish.fetch((
        'code', 'sg_version', 'created_by', 'description', 'sg_link',
        'sg_link.Task.entity', 'sg_path_to_frames', 'sg_path_to_movie',
        'sg_qt', 'project',
    ))
    fields = {
        'code': '%s_v%04d' % (publish['code'], publish['sg_version']),
        'description': publish['description'],
        'entity': publish['sg_link']['entity'],
        'project': publish['project'],
        'sg_path_to_frames': publish['sg_path_to_frames'],
        'sg_path_to_movie': publish['sg_path_to_movie'],
        'sg_publish': publish,
        'sg_qt': publish['sg_qt'],
        'sg_task': publish['sg_link'],
        'user': publish['created_by'],  # Artist.
        # Just because the old "Submit Version" tool had these.
        'sg_frames_aspect_ratio': 1.0,
        'sg_movie_aspect_ratio': 1.0,
        # I should be able to do this as a very deep fetch.
        'sg_department': publish['sg_link']['step'].fetch('code') or 'Daily',
    }

    # Look up Maya frame information from the tag.
    sgfs = SGFS(session=publish.session)
    tags = sgfs.get_directory_entity_tags(sgfs.path_for_entity(publish))
    if tags and 'maya' in tags[0]:
        min_time = tags[0]['maya']['min_time']
        max_time = tags[0]['maya']['max_time']
        fields.update({
            'sg_first_frame': int(min_time),
            'sg_last_frame': int(max_time),
            'frame_count': int(max_time - min_time + 1),
        })

    # Create/update the version.
    version = kwargs.pop('version_entity', None)
    fields.update(kwargs)  # Caller-supplied fields win.
    if version is not None:
        sgfs.session.update('Version', version['id'], fields)
    else:
        fields['created_by'] = publish['created_by']
        version = sgfs.session.create('Version', fields)

    # Fan the follow-up Shotgun writes out over a small thread pool.
    futures = []
    with ThreadPoolExecutor(4) as executor:

        # Share thumbnails.
        futures.append(executor.submit(sgfs.session.share_thumbnail,
            entities=[version.minimal],
            source_entity=publish.minimal,
        ))

        # Set the status/version on the task.
        futures.append(executor.submit(sgfs.session.update,
            'Task',
            publish['sg_link']['id'],
            {
                'sg_status_list': 'rev',
                'sg_latest_version': version,
            },
        ))

        # Set the latest version on the entity.
        entity = publish['sg_link'].fetch('entity')
        if entity['type'] in ('Asset', 'Shot'):
            futures.append(executor.submit(sgfs.session.update,
                entity['type'],
                entity['id'],
                {'sg_latest_version': version},
            ))

    # Allow them to raise if they must.
    for future in futures:
        future.result()

    return version
def submit(self):
    """Build and submit render jobs for the checked cameras and layers.

    When ``location_method`` is 'publish', the output directory comes
    from a new sgpublish publish; when the driver is 'farmsoup' the jobs
    are submitted to the farm, otherwise the Render commands are printed.

    :return: The farmsoup group when submitted, else None.
    """
    scene_path = cmds.file(q=True, sceneName=True)
    scene_name = os.path.splitext(os.path.basename(scene_path))[0]

    if self.location_method == 'publish':
        sgfs = SGFS()
        tasks = sgfs.entities_from_path(scene_path, ['Task'])
        if not tasks:
            raise ValueError("Scene is not saved under a Shotgun Task.")
        task = tasks[0]

        # TODO: Set a status.
        # TODO: Pull code, link, description, etc, from user.
        # TODO: Add metadata about the layers rendered.
        with sgpublish.Publisher(
            link=task,
            type='maya_render',
            name='Render',
            lock_permissions=False,
        ) as publisher:
            self.output_directory = publisher.directory

    maya_version = cmds.about(version=True)

    is_farmsoup = self.driver == 'farmsoup'
    if is_farmsoup:
        client = farmsoup.client.Client()
        group = client.group(name=self.name, )

    # Reservations shared by every job: a Maya install and license.
    base_resv = {
        'maya{}.install'.format(maya_version): 1,
        'maya{}.license'.format(maya_version): 1,
    }

    # One job per checked (camera, layer) pair.
    for camera, include_camera in sorted(self.cameras.items()):
        if not include_camera:
            continue
        for layer, include_layer in sorted(self.layers.items()):
            if not include_layer:
                continue

            renderer = self.renderers.get(layer) or self.renderers['masterLayer']

            args = [
                'Render',
                '-V', maya_version,
            ]

            reservations = base_resv.copy()

            if self.reserve_renderer and renderer not in (
                # Don't bother reserving the built-in ones.
                'mayaSoftware',
                'mayaHardware2',
            ):
                # These look like "arnold" and "redshift".
                reservations['maya{}-{}.install'.format(maya_version, renderer)] = 1
                reservations['{}.install'.format(renderer)] = 1
                reservations['{}.license'.format(renderer)] = 1

            if renderer == 'redshift':
                args.extend((
                    '-r', 'redshift',
                    # This must not be escaped!
                    '-gpu', '{$FARMSOUP_RESERVED_GPUS_TOKENS}'))
                reservations['cpus'] = 1
                reservations['gpus'] = 1
            else:
                args.extend((
                    # Redshift doesn't understand -fnc.
                    # Nothing really depends on this, so it isn't a big deal.
                    '-fnc', 'name.#.ext',
                ))

            args.extend((
                '-s', '$F' if is_farmsoup else str(self.start_frame),
                '-e', '$F_end' if is_farmsoup else str(self.end_frame),
                '-x', str(int(self.width)),
                '-y', str(int(self.height)),
                '-pad', '4',
            ))

            if self.skip_existing:
                args.extend(('-skipExistingFrames', 'true'))

            # We need to ask Maya to do the templating for us, because
            # otherwise it will decide that because there are multiple
            # render layers that there will be a naming collision, and so
            # it automatically adds directories for us.
            template_name = self.filename_pattern.format(
                scene=scene_name,
                layer='<RenderLayer>',
                camera='<Camera>',
            )
            display_name = self.filename_pattern.format(
                scene=scene_name,
                layer=layer,
                camera=camera,
            ).replace(':', '_')

            args.extend((
                '-cam', camera,
                '-rl', layer,
                # We're only escaping the ones that we need to, because
                # Redshift relies upon the envvars to pick GPUs at
                # render time.
                '-rd', sh_quote(self.output_directory),
                '-im', sh_quote(template_name),
                sh_quote(scene_path)))

            command = ' '.join(args)

            if is_farmsoup:
                # One farm job per pair, expanded into per-chunk tasks.
                job = group.job(
                    name=display_name,
                    reservations=reservations,
                ).setup_as_subprocess(command, shell=True)
                job.expand_via_range('F={}-{}/{}'.format(self.start_frame, self.end_frame, self.frame_chunk))
            else:
                print ' '.join(args)

    # TODO: Add a job to set the Shotgun status on each once they are done.

    if is_farmsoup:
        client.submit(group)
        return group
def __init__(self, link=None, type=None, name=None, version=None, parent=None,
    directory=None, sgfs=None, template=None, **kwargs
):
    """Create a publisher, immediately registering the publish in Shotgun.

    Creates an "empty" ``PublishEvent`` (``sg_version`` of 0) and claims a
    unique directory for it on disk; the caller is expected to fill the
    directory and later commit.

    :param link: Entity the publish hangs off of (e.g. a Task); required
        unless supplied via ``template``.
    :param type: The publish type string; required unless from ``template``.
    :param name: The publish code; required unless from ``template``. May
        only contain word characters and dashes.
    :param version: Explicit version number; automatically determined when
        omitted.
    :param parent: Stored as-is on the instance.
    :param directory: Manually forced publish directory; created if missing.
    :param sgfs: An ``SGFS`` instance; derived from ``template``/``link``
        sessions, or freshly constructed, when omitted.
    :param template: An existing publish entity to copy link/type/name/
        version and assorted fields from.
    :param kwargs: Remaining publish fields (``description``,
        ``thumbnail_path``, etc.); unknown keys raise ``TypeError``.
    """
    # Prefer reusing the session already attached to template/link so all
    # entities merge into the same session.
    if not sgfs:
        if isinstance(template, Entity):
            sgfs = SGFS(session=template.session)
        elif isinstance(link, Entity):
            sgfs = SGFS(session=link.session)
        else:
            sgfs = SGFS()
    self.sgfs = sgfs

    # Fill in anything not explicitly given from the template publish.
    if template:
        template = sgfs.session.merge(template)
        to_fetch = ['sg_link', 'sg_type', 'code', 'sg_version']
        to_fetch.extend(_kwarg_to_field.itervalues())
        template.fetch(to_fetch)
        tpl_link, tpl_type, tpl_name, tpl_version = template.get(('sg_link', 'sg_type', 'code', 'sg_version'))
        link = link or tpl_link
        type = type or tpl_type
        name = name or tpl_name
        version = version or tpl_version

        # Record where these defaults came from.
        kwargs.setdefault('source_publish', template)
        kwargs.setdefault('source_publishes', [template])
        for key, field in _kwarg_to_field.iteritems():
            kwargs.setdefault(key, template.get(field))

        if not kwargs.get('thumbnail_path'):
            # We certainly jump through a lot of hoops to do this...
            # Perhaps this should be sgfs.get_entity_tags(entity)
            publish_path = sgfs.path_for_entity(template)
            if publish_path:
                tags = sgfs.get_directory_entity_tags(publish_path)
                tags = [tag for tag in tags if tag['entity'] == template]
                if tags:
                    meta = tags[0].get('sgpublish', {})
                    thumbnail = meta.get('thumbnail')
                    if thumbnail:
                        kwargs['thumbnail_path'] = os.path.join(publish_path, thumbnail)

    if not (link and type and name):
        raise ValueError('requires link, type, and name')

    self._type = str(type)
    self._link = self.sgfs.session.merge(link)
    self._name = str(name)
    self._parent = parent

    # The name ends up on disk, so restrict it to word chars and dashes.
    if re.search(r'[^\w-]', self._name):
        raise ValueError('name cannot have spaces or special characters', self._name)

    # Get information about the promotion for review.
    self._review_version_entity = None
    self._review_version_fields = kwargs.pop('review_version_fields', None)

    # To only allow us to commit once.
    self._committed = False

    # Will be set into the tag.
    self.metadata = {}

    # Files to copy on commit; (src_path, dst_path)
    self._files = []

    # Set attributes from kwargs.
    # NOTE: this loop reuses (and shadows) the `name` parameter, which is
    # no longer needed; self._name was captured above.
    for name in (
        'created_by',
        'description',
        'frames_path',
        'movie_path',
        'movie_url',
        'path',
        'source_publish',
        'source_publishes',
        'thumbnail_path',
        'trigger_event',
        'extra_fields',
    ):
        setattr(self, name, kwargs.pop(name, None))
    if kwargs:
        raise TypeError('too many kwargs: %r' % sorted(kwargs))

    # Required for normalizing.
    self._directory = None

    # Get everything into the right type before sending it to Shotgun.
    self._normalize_attributes()

    # Prep for async processes. We can do a lot of "frivolous" Shotgun
    # queries at the same time since we must do at least one.
    executor = concurrent.futures.ThreadPoolExecutor(8)
    futures = []

    # Figure out the version number (async).
    if version is None:
        futures.append(executor.submit(self._set_automatic_version))
    else:
        self._version = int(version)

    # Grab all data on the link (assuming that is all that is used when
    # creating publish templates).
    futures.append(executor.submit(self.link.fetch_core))

    # Create the review version stub (async).
    if self._review_version_fields is not None:
        futures.append(executor.submit(self._get_review_version))

    # First stage of the publish: create an "empty" PublishEvent.
    initial_data = {
        'code': self.name,
        'created_by': self.created_by,
        'description': self.description,
        'project': self.link.project(),
        'sg_link': self.link,
        'sg_path_to_frames': self.frames_path,
        'sg_path_to_movie': self.movie_path,
        'sg_qt': self.movie_url,
        'sg_source_publish': self.source_publish or None, # singular
        'sg_source_publishes': self.source_publishes or [], # multiple
        'sg_trigger_event_id': self.trigger_event['id'] if self.trigger_event else None,
        'sg_type': self.type,
        'sg_version': 0, # Signifies that this is "empty".
    }
    initial_data.update(self.extra_fields)
    try:
        self.entity = self.sgfs.session.create('PublishEvent', initial_data)
    except ShotgunFault:
        # Give a clearer error when the link itself was retired out from
        # under us; otherwise re-raise untouched.
        if not self.link.exists():
            raise RuntimeError('%s %d (%r) has been retired' % (link['type'], link['id'], link.get('name')))
        else:
            raise

    # Lets have our async processes catch up.
    for future in futures:
        future.result()

    # Manually forced directory.
    if directory is not None:
        self._directory_supplied = True

        # Make it if it doesn't already exist, but don't care if it does.
        self._directory = os.path.abspath(directory)

    else:
        self._directory_supplied = False

        # Find a unique name using the template result as a base.
        base_path = self.sgfs.path_from_template(link, '%s_publish' % type, dict(

            publish=self, # For b/c.
            publisher=self,
            PublishEvent=self.entity,
            self=self.entity, # To mimick Shotgun templates.
        ))
        unique_iter = ('%s_%d' % (base_path, i) for i in itertools.count(1))
        for path in itertools.chain([base_path], unique_iter):
            try:
                os.makedirs(path)
            except OSError as e:
                if e.errno != 17: # File exists
                    raise
            else:
                # Successfully claimed a directory nobody else owns.
                self._directory = path
                break

    # Make the directory so that tools which want to manually copy files
    # don't have to.
    utils.makedirs(self._directory)

    # If the directory is tagged with existing entities, then we cannot
    # proceed. This allows one to retire a publish and then overwrite it.
    tags = self.sgfs.get_directory_entity_tags(self._directory)
    if any(tag['entity'].exists() for tag in tags):
        raise ValueError('directory is already tagged: %r' % self._directory)
def SGFS(self, **kwargs): kwargs.setdefault('root', self.sandbox) kwargs.setdefault('session', self.session) return SGFS(**kwargs)
def promote_publish(publish, **kwargs): publish.fetch(( 'code', 'sg_version', 'created_by', 'description', 'sg_link', 'sg_link.Task.entity', 'sg_path_to_frames', 'sg_path_to_movie', 'sg_qt', 'project', )) fields = { 'code': '%s_v%04d' % (publish['code'], publish['sg_version']), 'created_by': publish['created_by'], 'description': publish['description'], 'entity': publish['sg_link']['entity'], 'project': publish['project'], 'sg_path_to_frames': publish['sg_path_to_frames'], 'sg_path_to_movie': publish['sg_path_to_movie'], 'sg_publish': publish, 'sg_qt': publish['sg_qt'], 'sg_task': publish['sg_link'], 'user': publish['created_by'], # Artist. # Just because the old "Submit Version" tool had these. 'sg_frames_aspect_ratio': 1.0, 'sg_movie_aspect_ratio': 1.0, # I should be able to do this as a very deep fetch. 'sg_department': publish['sg_link']['step'].fetch('code') or 'Daily', } # Look up Maya frame information from the tag. sgfs = SGFS(session=publish.session) tags = sgfs.get_directory_entity_tags(sgfs.path_for_entity(publish)) if tags and 'maya' in tags[0]: min_time = tags[0]['maya']['min_time'] max_time = tags[0]['maya']['max_time'] fields.update({ 'sg_first_frame': int(min_time), 'sg_last_frame': int(max_time), 'frame_count': int(max_time - min_time + 1), }) fields.update(kwargs) # Create the new version. version = sgfs.session.create('Version', fields) with ThreadPoolExecutor(4) as executor: # Share thumbnails. executor.submit(sgfs.session.share_thumbnail, entities=[version.minimal], source_entity=publish.minimal, ) # Set the status/version on the task. executor.submit(sgfs.session.update, 'Task', publish['sg_link']['id'], { 'sg_status_list': 'rev', 'sg_latest_version': version, }, ) # Set the latest version on the entity. entity = publish['sg_link'].fetch('entity') if entity['type'] in ('Asset', 'Shot'): executor.submit(sgfs.session.update, entity['type'], entity['id'], {'sg_latest_version': version}, ) return version
def __init__(self): self.sgfs = SGFS()
def __init__(self, publish, sgfs=None): super(Dialog, self).__init__() self._sgfs = sgfs or SGFS(session=publish.session) self._publish = publish self._setup_ui()