def callback(event):
    """Handle Shotgun Version events.

    On a new Version: pin it as the Shot's ``sg_latest_version`` and refresh
    the per-sequence playlist. On a Version change: re-share its (filmstrip)
    thumbnail to the linked Shot.
    """
    sg = Session()
    version = sg.merge(event.entity)

    if event.event_type == 'Shotgun_Version_New':
        shot_name, project = version.fetch(('entity.Shot.name', 'project'))
        sg.update('Shot', version['entity']['id'], dict(sg_latest_version=version))

        # Expect shot names like "AB123"; the two leading letters are the
        # sequence code. Guard against a missing name, which `re.match`
        # would otherwise choke on with a TypeError.
        m = re.match(r'([A-Z]{2})\d+', shot_name or '')
        if not m:
            # Parenthesized form works under both Python 2 and 3.
            print('Shot name does not match specs.')
            return
        seq_code = m.group(1)
        update_playlist(sg, project, seq_code)

    elif event.event_type == 'Shotgun_Version_Change':
        shot = version.fetch('entity')
        # Mirror the Version's thumbnail onto the Shot; use the filmstrip
        # variant when that is the attribute that changed.
        sg.share_thumbnail([shot], source_entity=version,
            filmstrip_thumbnail=event['attribute_name'] == 'filmstrip_image')
def callback(event):
    """Schedule a republish when a publish's version number is first set.

    Fires on publish-entity change events; delegates the actual republish to
    ``sgactions`` in a subprocess so this handler stays fast.
    """
    # Must be setting it to a non-zero version.
    # NOTE: We MUST check the meta for this, otherwise we are liable to
    # schedule this job multiple times as the `entity` field is always
    # up to date.
    version = event.meta.get('new_value')
    if not version:
        log.info('Publish is still being created; skipping')
        return

    sg = Session(connect())
    entity = sg.merge(event)['entity']
    if not entity:
        log.info('Publish appears to have been retired; skipping')
        return

    entity.fetch(('sg_link', 'sg_link.Task.step.Step.short_name', 'sg_type'))

    # For now, we only run for the Testing Sandbox.
    #if event['project']['id'] != 66:
    #    log.info('Project %r is not Testing Sandbox; skipping' % (event['project'].get('name') or event['project']['id']))
    #    return

    # Our first job is to create camera and geocache publishes from generic
    # maya scenes.
    pub_type = entity.get('sg_type')
    if pub_type != 'maya_scene':
        log.info('sg_type %r is not maya_scene; skipping' % pub_type)
        return

    step_code = entity.get('sg_link.Task.step.Step.short_name')
    if step_code not in ('Anim', 'Roto', 'Rotomation'):
        log.info('sg_link.step.short_code %s is not Anim, Roto, or Rotomation; skipping' % step_code)
        return

    # TODO: Make sure they don't already exist.

    log.info('Delegating to sgactions')
    call_in_subprocess('%s:republish' % __file__, [entity['id']])
def callback(event):
    """Schedule a republish when a publish's version number is first set.

    Fires on publish-entity change events; delegates the actual republish to
    ``sgactions`` in a subprocess so this handler stays fast.
    """
    # Must be setting it to a non-zero version.
    # NOTE: We MUST check the meta for this, otherwise we are liable to
    # schedule this job multiple times as the `entity` field is always
    # up to date.
    version = event.meta.get('new_value')
    if not version:
        log.info('Publish is still being created; skipping')
        return

    sg = Session(connect())
    entity = sg.merge(event)['entity']
    if not entity:
        log.info('Publish appears to have been retired; skipping')
        return

    entity.fetch(('sg_link', 'sg_link.Task.step.Step.short_name', 'sg_type'))

    # For now, we only run for the Testing Sandbox.
    #if event['project']['id'] != 66:
    #    log.info('Project %r is not Testing Sandbox; skipping' % (event['project'].get('name') or event['project']['id']))
    #    return

    # Our first job is to create camera and geocache publishes from generic
    # maya scenes.
    pub_type = entity.get('sg_type')
    if pub_type != 'maya_scene':
        log.info('sg_type %r is not maya_scene; skipping' % pub_type)
        return

    step_code = entity.get('sg_link.Task.step.Step.short_name')
    if step_code not in ('Anim', 'Roto', 'Rotomation'):
        log.info('sg_link.step.short_code %s is not Anim, Roto, or Rotomation; skipping' % step_code)
        return

    # TODO: Make sure they don't already exist.

    log.info('Delegating to sgactions')
    call_in_subprocess('%s:republish' % __file__, [entity['id']])
def handle_event(self, event):
    """Derive downstream publishes from a newly-versioned publish.

    Walks the registered derivations in ``self._funcs`` (tuples of
    ``(src_types, dst_types, src_steps, func, args, kwargs)``), finds the
    first one whose source type/step filters match this publish and whose
    derived publish does not already exist, then either submits it to Qube
    (when ``func`` is a string job name) or calls it directly. At most one
    derivation runs per event.
    """
    # Must be setting it to a non-zero version.
    # NOTE: We MUST check the meta for this, otherwise we are liable to
    # schedule this job multiple times as the `entity` field is always
    # up to date.
    version = event.meta.get('new_value')
    if not version:
        self.log.debug('Publish is still being created; skipping')
        return

    # Make a clean one every time so that we don't slowly fill up memory.
    sg = Session()
    publish = sg.merge(event)['entity']
    if not publish:
        self.log.warning('Publish appears to have been deleted; skipping')
        return

    _, login, step_code, step_name, publish_type = publish.fetch((
        'code',
        'created_by.HumanUser.login',
        'sg_link.Task.step.Step.code',
        'sg_link.Task.step.Step.short_name',
        'sg_type',
    ))

    # Title-case both the step code and name so step matching below is
    # forgiving about capitalization. Skip unset fields so a publish with
    # no task link doesn't blow up on None.title().
    steps = set(s.title() for s in (step_code, step_name) if s)

    # Related publishes are fetched lazily, only once a derivation passes
    # the cheap type/step filters.
    related = None

    for src_types, dst_types, src_steps, func, args, kwargs in self._funcs:

        # Make sure it is the right type.
        if publish_type not in src_types:
            self.log.debug('sg_type %r is not %s; skipping' % (publish_type, '/'.join(sorted(src_types))))
            continue

        # Make sure it is from the correct step.
        # We've title-cased all step names at this point, and are comparing
        # against both the step code and name, so this should be forgiving.
        if src_steps and not src_steps.intersection(steps):
            self.log.debug('step %s is not %s; skipping' % ('/'.join(sorted(steps)), '/'.join(sorted(src_steps))))
            continue

        # Make sure we haven't already derived it, or are in progress of
        # deriving it.
        if related is None:
            related = get_related_publishes(publish, fields=['code', 'sg_type'])
        skip = False
        for x in related:
            if x['sg_type'] in dst_types:
                self.log.warning('Derived %s publish %d "%s", already exists; skipping' % (
                    x['sg_type'],
                    x['id'],
                    x['code'],
                ))
                skip = True
        if skip:
            continue

        # If it is a string, dispatch it to Qube.
        if isinstance(func, basestring):

            # Run it as the correct user; assume their Shotgun login matches.
            # NOTE: `login` was already retrieved in the fetch above; the
            # original re-queried it here redundantly.
            user = login.split('@')[0] if login else None

            qube_args = [publish.minimal]
            qube_args.extend(args or ())
            qube_name = 'Republish %s %s "%s" as %s' % (
                publish['sg_type'],
                publish['id'],
                publish['code'],
                '/'.join(sorted(dst_types)),
            )

            import qbfutures
            future = qbfutures.submit_ext(func,
                args=qube_args,
                kwargs=kwargs or {},
                name=qube_name,
                user=user,
                priority=8000,
            )
            self.log.info('Qube job %d: %s' % (future.job_id, qube_name))

        else:
            func(publish, *(args or ()), **(kwargs or {}))

        # Only run the first one!
        return