def launch(self, event):
    '''Create folder structures for each selected entity.

    Runs as an ftrack job so the user can follow progress in the web UI.
    Marks the job failed and re-raises if folder creation errors.
    '''
    selected = event['data'].get('selection', [])

    job = ftrack.createJob(description="Creating Folders", status="running")
    try:
        # Reply immediately so the action UI does not block on the work.
        ftrack.EVENT_HUB.publishReply(
            event,
            data={
                'success': True,
                'message': 'Folder Creation Job Started!'
            }
        )

        for item in selected:
            if item['entityType'] == 'task':
                resolved = ftrack.Task(item['entityId'])
            else:
                resolved = ftrack.Project(item['entityId'])
            self.createFoldersFromEntity(resolved)

        job.setStatus('done')
    except:
        # Flag the job, then propagate so the error is not silently lost.
        job.setStatus('failed')
        raise

    return {'success': True, 'message': 'Created Folders Successfully!'}
def createNotes(self, selection, text, category):
    '''Create a note with *text* and *category* on each entity in *selection*.

    Supported entity types are 'show', 'task' and 'assetversion'; anything
    else is skipped with a warning. Progress is reported through an ftrack
    job, which is marked 'failed' if note creation raises and 'done' only
    on full success.
    '''
    entityCount = len(selection)
    logging.info('Creating notes on {0} entities'.format(entityCount))
    job = ftrack.createJob(
        'Creating notes ({0} of {1})'.format(1, entityCount), 'running'
    )
    try:
        for index, item in enumerate(selection, start=1):
            entityType = item['entityType']
            entityId = item['entityId']
            entity = None
            if index != 1:
                job.setDescription(
                    'Creating notes ({0} of {1})'.format(index, entityCount)
                )
            if entityType == 'show':
                entity = ftrack.Project(entityId)
            elif entityType == 'task':
                entity = ftrack.Task(entityId)
            elif entityType == 'assetversion':
                entity = ftrack.AssetVersion(entityId)
            if not entity:
                logging.warning(
                    u'Entity ({0}, {1}) not a valid type, skipping..'
                    .format(entityId, entityType)
                )
                # BUGFIX: previously fell through and called createNote on
                # None, raising AttributeError instead of skipping.
                continue
            entity.createNote(text, category)
    except Exception:
        # BUGFIX: previously execution continued past this handler and
        # overwrote the status with 'done'; now the failure sticks.
        logging.exception('Note creation failed.')
        job.setStatus('failed')
    else:
        logging.info('Note creation completed.')
        job.setStatus('done')
def create_job(event):
    '''Generate slated title images for each selected asset version.

    Reads form *values* from the event, locates each version's component on
    disk, and renders a titled image via ImageMagick (``generate_title``).
    Per-item failures are collected and written to an ``errors.txt`` report
    in the output directory rather than aborting the whole job.
    '''
    values = event['data']['values']
    job = ftrack.createJob('Generating Titles', 'queued',
                           ftrack.User(id=event['source']['user']['id']))
    job.setStatus('running')
    image_magick_dir = r'K:\development\tools\image-magick'
    errors = ''

    # Generating sources and destinations.
    for item in event['data']['selection']:
        try:
            entity = ftrack.AssetVersion(item['entityId'])

            # Build a readable "Parent/.../vNNN" path for error reporting.
            path = ''
            parents = entity.getParents()
            parents.reverse()
            for p in parents:
                path += p.getName() + '/'
            path += 'v' + str(entity.getVersion()).zfill(3)

            input_file = entity.getComponent().getFilesystemPath()
            output_file = ntpath.basename(input_file)

            # Get version string (e.g. '.v003') from the file name.
            version_string = '.v' + version_get(input_file, 'v')[1]
            if values['strip_version'] == 'True':
                output_file = output_file.replace(version_string, '')

            # Title texts for the image corners.
            south_west_text = 'Task: "%s"' % path[:-5]
            south_east_text = 'Version: "%s"' % version_string[1:]
            north_west_text = 'Status: "%s"' % entity.getStatus().getName()

            if entity.getAsset().getType().getShort() == 'img':
                # Image sequence: use the first frame as the source and
                # strip the frame-padding token from the output name.
                src = os.listdir(os.path.dirname(input_file))[0]
                input_file = os.path.join(os.path.dirname(input_file), src)
                output_file = re.sub(r'.%04d', '', output_file)

            output_file = os.path.splitext(output_file)[0] + '.png'
            output_file = os.path.join(values['output_directory'],
                                       output_file)
            generate_title(image_magick_dir, output_file, input_file, '25',
                           south_west_text, south_east_text, north_west_text)
        except:
            errors += path + '\n'
            errors += traceback.format_exc() + '\n'

    # Generate error report.
    if errors:
        temp_txt = os.path.join(values['output_directory'], 'errors.txt')
        # BUGFIX: use a context manager so the handle is closed even if
        # the write fails (was a bare open/write/close).
        with open(temp_txt, 'w') as f:
            f.write(errors)

    job.setStatus('done')
def transferComponents(self, selection, sourceLocation, targetLocation,
                       userId=None, ignoreComponentNotInLocation=False,
                       ignoreLocationErrors=False):
    '''Transfer components in *selection* from *sourceLocation* to
    *targetLocation*.

    if *ignoreComponentNotInLocation*, ignore components missing in
    source location.

    If *ignoreLocationErrors* is specified, ignore all locations-
    related errors.

    Reports progress back to *userId* using a job.
    '''
    job = ftrack.createJob(
        'Transfer components (Gathering...)', 'running', user=userId
    )
    try:
        toTransfer = self.getComponentsInLocation(selection, sourceLocation)
        total = len(toTransfer)
        self.logger.info('Transferring {0} components'.format(total))

        for position, component in enumerate(toTransfer, start=1):
            self.logger.info('Transferring component ({0} of {1})'.format(
                position, total))
            job.setDescription('Transfer components ({0} of {1})'.format(
                position, total))
            try:
                targetLocation.addComponent(component, manageData=True)
            except ftrack.ComponentInLocationError:
                # Already there: nothing to do for this component.
                self.logger.info(
                    'Component ({}) already in target location'.format(
                        component))
            except ftrack.ComponentNotInLocationError:
                if not (ignoreComponentNotInLocation or ignoreLocationErrors):
                    raise
                self.logger.exception(
                    'Failed to add component to location')
            except ftrack.LocationError:
                if not ignoreLocationErrors:
                    raise
                self.logger.exception(
                    'Failed to add component to location')

        job.setStatus('done')
        self.logger.info(
            'Transfer complete ({0} components)'.format(total))
    except Exception:
        self.logger.exception('Transfer failed')
        job.setStatus('failed')
def buildLightingScene(mayapy, task, folder, user, filename, shotName):
    '''Build a lighting scene file for *shotName* via an external mayapy run.

    Spawns ``lt_build_scene.py`` under *mayapy* for the given task id and
    directory, then marks the ftrack job 'done' if *filename* exists on
    disk afterwards, 'failed' otherwise.
    '''
    job = ftrack.createJob('Building Lighting scene file for shot '
                           '{0}'.format(shotName), 'queued', user)
    job.setStatus('running')
    cmd = '%s /home/natasha/dev/LVFX-pipeline/scripts/lt_build_scene.py ' \
          '-taskid %s -taskDir %s' % (mayapy, task, folder)
    process = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE, shell=True)
    # BUGFIX: wait() with PIPE'd stdout/stderr can deadlock once the OS
    # pipe buffer fills; communicate() drains both streams while waiting.
    process.communicate()
    if os.path.exists(filename):
        job.setStatus('done')
    else:
        job.setStatus('failed')
def create_job(self, event):
    """Generate a feedback file and attach it to a new ftrack job."""
    requesting_user = ftrack.User(id=event["source"]["user"]["id"])
    job = ftrack.createJob("Generating Feedback", "queued", requesting_user)
    job.setStatus("running")
    try:
        feedback_path = self.generate_feedback(event)
        job.createAttachment(feedback_path, fileName=feedback_path)
        # Attachment is uploaded; the local copy is no longer needed.
        os.remove(feedback_path)
    except:
        self.logger.error(traceback.format_exc())
        job.setStatus("failed")
    else:
        job.setStatus("done")
def create_job(event): job = ftrack.createJob("Create Structure", "queued", ftrack.User(id=event["source"]["user"]["id"])) job.setStatus("running") try: for item in event["data"]["selection"]: # Geting any object types entity_id = item["entityId"] entity = session.get("TypedContext", entity_id) if not entity: entity_type = item["entityType"].lower() if entity_type == "show": entity = session.get("Project", entity_id) if entity_type == "assetversion": entity = session.get("AssetVersion", entity_id) if entity_type == "component": entity = session.get("Component", entity_id) templates = ftrack_template.discover_templates() valid_templates = ftrack_template.format( {}, templates, entity, return_mode="all" ) print "Creating Directories/Files:" for path, template in valid_templates: if template.isfile: if not os.path.exists(os.path.dirname(path)): print os.path.dirname(path) os.makedirs(os.path.dirname(path)) if not os.path.exists(path): print path shutil.copy(template.source, path) else: if not os.path.exists(path): print path os.makedirs(path) except: print traceback.format_exc() job.setStatus("failed") else: job.setStatus("done")
def launch(self, event):
    '''Copy each selected entity's thumbnail onto its parent.'''
    selection = event['data'].get('selection', [])
    self.logger.info(
        u'Launching action with selection {0}'.format(selection))

    job = ftrack.createJob(description="Push thumbnails to parents",
                           status="running")
    try:
        # Acknowledge immediately so the UI shows the job has started.
        ftrack.EVENT_HUB.publishReply(
            event,
            data={
                'success': True,
                'message': 'Created job for updating thumbnails!'
            }
        )

        for selected in selection:
            if selected['entityType'] == 'assetversion':
                entity = ftrack.AssetVersion(selected['entityId'])
                try:
                    parent = entity.getTask()
                except:
                    # Versions without a task: climb up via the asset.
                    parent = entity.getParent().getParent()
            elif selected['entityType'] == 'task':
                entity = ftrack.Task(selected['entityId'])
                parent = entity.getParent()

            thumbid = entity.get('thumbid')
            if thumbid:
                parent.set('thumbid', value=thumbid)

        job.setStatus('done')
    except:
        # Fail the job, then propagate the error.
        job.setStatus('failed')
        raise

    return {
        'success': True,
        'message': 'Created job for updating thumbnails!'
    }
def run(self): path = '' for p in reversed(self.version.getParents()[:-2]): path += p.getName() + '/' path += 'v' + str(self.version.getVersion()).zfill(3) job = ftrack.createJob('Uploading %s' % path, 'running', self.user) path = self.version.getComponent().getFilesystemPath() self.delete_file('ftrackreview-mp4') self.delete_component('ftrackreview-mp4') self.delete_file('ftrackreview-webm') self.delete_component('ftrackreview-webm') try: component = self.version.createComponent( 'ftrackreview-mp4', path=path, location=ftrack.Location('ftrack.server')) # Meta data needs to contain *frameIn*, *frameOut* and *frameRate*. shot = self.version.getAsset().getParent() meta_data = json.dumps({ 'frameIn': int(shot.get('fstart')), 'frameOut': int(shot.get('fend')), 'frameRate': int(shot.get('fps')) }) component.setMeta(key='ftr_meta', value=meta_data) print 'uploaded: %s' % path except: job.setStatus('failed') print traceback.format_exc() else: job.setStatus('done') finally: self.version.publish()
def upload_files_as_thumbnails(self, files): '''Upload the files as thumbnails.''' job = ftrack.createJob('Creating thumbnails.', 'running') try: for ftrack_path, file_path in files: try: entity = ftrack.getFromPath(ftrack_path) except ftrack.FTrackError: print 'Could not find entity with path "{}"'.format( ftrack_path) continue entity.createThumbnail(file_path) except Exception: # Except anything and fail the job. job.setStatus('failed') job.setDescription('Creating thumbnails failed.') job.setStatus('done') job.setDescription('Creating thumbnails done.')
def create_job(event):
    """Create and publish a new 'scene' asset version for each selected task."""
    acting_user = ftrack.User(id=event["source"]["user"]["id"])
    job = ftrack.createJob("Version Up Tasks", "queued", acting_user)
    job.setStatus("running")
    try:
        for item in event["data"]["selection"]:
            task = ftrack.Task(item["entityId"])
            parent = task.getParent()
            asset = parent.createAsset(task.getName(), "scene", task=task)
            asset.createVersion(taskid=task.getId())
            asset.publish()
    except:
        job.setStatus("failed")
    else:
        job.setStatus("done")
def run(self):
    '''Upload this version's media as an 'ftrackreview-mp4' component.

    Rebuilds a human-readable path for the job description, removes any
    stale review components, then creates the mp4 component on the ftrack
    server location with the frame-range metadata the web player requires.
    The version is published regardless of upload outcome.
    '''
    # Build "Parent/.../vNNN" label from the hierarchy, skipping the two
    # top-most parents.
    path = ''
    for p in reversed(self.version.getParents()[:-2]):
        path += p.getName() + '/'
    path += 'v' + str(self.version.getVersion()).zfill(3)
    job = ftrack.createJob('Uploading %s' % path, 'running', self.user)
    # *path* is reused: from here on it is the media file on disk.
    path = self.version.getComponent().getFilesystemPath()
    # Drop previously-encoded review media so the new upload replaces it.
    self.delete_file('ftrackreview-mp4')
    self.delete_component('ftrackreview-mp4')
    self.delete_file('ftrackreview-webm')
    self.delete_component('ftrackreview-webm')
    try:
        component = self.version.createComponent(
            'ftrackreview-mp4', path=path,
            location=ftrack.Location('ftrack.server'))
        # Meta data needs to contain *frameIn*, *frameOut* and *frameRate*.
        shot = self.version.getAsset().getParent()
        meta_data = json.dumps({'frameIn': int(shot.get('fstart')),
                                'frameOut': int(shot.get('fend')),
                                'frameRate': int(shot.get('fps'))})
        component.setMeta(key='ftr_meta', value=meta_data)
        print 'uploaded: %s' % path
    except:
        job.setStatus('failed')
        print traceback.format_exc()
    else:
        job.setStatus('done')
    finally:
        # Publish even when the upload failed.
        self.version.publish()
def callback(event):
    '''Event callback: version up a task when its status becomes
    'Pending Changes'.

    Inspects each updated task entity in the event; when the new status is
    'pending changes', creates and publishes a new 'scene' asset version
    on the task, reporting via an ftrack job.
    '''
    for entity in event['data'].get('entities', []):
        # Filter non-assetversions
        if entity.get('entityType') == 'task' and entity['action'] == 'update':
            if 'statusid' not in entity.get('keys'):
                # BUGFIX: was `return`, which aborted processing of every
                # remaining entity in the event.
                continue

            # Find task if it exists
            try:
                task = ftrack.Task(id=entity.get('entityId'))
            except Exception:
                # BUGFIX: was `return` after a bare except; skip just this
                # entity instead of dropping the rest of the event.
                continue

            new_status = ftrack.Status(entity["changes"]["statusid"]["new"])

            # To Pending Changes
            if new_status.getName().lower() == "pending changes":
                user = ftrack.User(id=event["source"]["user"]["id"])
                job = ftrack.createJob("Version Up Tasks", "queued", user)
                job.setStatus("running")
                try:
                    asset = task.getParent().createAsset(
                        task.getName(), "scene", task=task)
                    asset.createVersion(taskid=task.getId())
                    asset.publish()
                except Exception:
                    job.setStatus("failed")
                else:
                    job.setStatus("done")
def launch(self, event):
    '''Write a review-session usage breakdown to a temp file and attach it
    to a 'Breakdown' job for the triggering user.
    '''
    msg = 'Breakdown successfull. Click Job for details.'
    ftrack.EVENT_HUB.publishReply(event, data={
        'success': True,
        'message': msg
    })

    file_path = os.path.join(tempfile.gettempdir(),
                             'ftrack_version_breakdown.txt')

    with open(file_path, 'w') as report:
        # NOTE(review): the version id is hard-coded — presumably a debug
        # leftover; confirm whether it should come from the selection.
        version = ftrack.AssetVersion('6dec6756-8f94-11e5-929c-42010af00048')

        report_text = '/'.join([
            version.getParent().getParent().getName(),
            version.getParent().getName(),
            str(version.getVersion()).zfill(3)
        ])
        report_text += ':\n'

        usage_count = 0
        session_names = []
        project_id = version.getParents()[-1].getId()
        for review_session in ftrack.getReviewSessions(project_id):
            for obj in review_session.getObjects():
                if obj.get('version_id') == version.getId():
                    usage_count += 1
                    session_names.append(review_session.get('name'))

        report_text += '\tSession Usage:\t{0}\n'.format(usage_count)
        report_text += '\tSessions:\t\t{0}\n'.format(
            list(set(session_names)))
        report.write(report_text)

    user = ftrack.User(id=event['source']['user']['id'])
    job = ftrack.createJob('Breakdown', 'done', user)
    job.createAttachment(file_path)
def launch(self, event):
    '''Build a notes-by-category breakdown for the selected version's asset.

    Writes a text report (review-session usage plus all notes across the
    asset's versions, grouped by note category) to a temp file and attaches
    it to a 'Breakdown' job for the triggering user.
    '''
    msg = "Breakdown successfull. Click Job for details."
    ftrack.EVENT_HUB.publishReply(event, data={
        "success": True,
        "message": msg
    })
    selection = event["data"]["selection"]
    temp_dir = tempfile.gettempdir()
    file_path = os.path.join(temp_dir, "ftrack_version_breakdown.txt")
    with open(file_path, "w") as f:
        output = ""
        v = ftrack.AssetVersion(selection[0]["entityId"])
        versions = v.getAsset().getVersions()
        ids = []
        # Notes/versions bucketed by note category id; the "None" bucket
        # collects notes without a category.
        data = {
            "None": {
                "name": "Uncategorized",
                "notes": [],
                "versions": []
            }
        }
        for cate in ftrack.getNoteCategories():
            data[cate.get("entityId")] = {
                "name": cate.get("name"),
                "notes": [],
                "versions": []
            }
        for v in versions:
            ids.append(v.getId())
            for note in v.getNotes():
                if note.get("categoryid"):
                    data[note.get("categoryid")]["notes"].append(note)
                    data[note.get("categoryid")]["versions"].append(v)
                else:
                    data["None"]["versions"].append(v)
                    data["None"]["notes"].append(note)
        # NOTE: *v* is rebound by the loop above; from here on it refers to
        # the last version iterated.
        output += "/".join(
            [v.getParent().getParent().getName(), v.getParent().getName()])
        output += ":\n"
        count = 0
        sessions = []
        project_id = v.getParents()[-1].getId()
        for session in ftrack.getReviewSessions(project_id):
            for obj in session.getObjects():
                if obj.get("version_id") in ids:
                    count += 1
                    sessions.append(session.get("name"))
        output += "\tReview Session Usage:\t{0}\n".format(count)
        output += "\tReview Sessions:\t{0}\n".format(list(set(sessions)))
        output += "\tNotes:\n"
        for entry in data:
            output += "\t\t" + data[entry]["name"] + ":\n"
            amount = len(data[entry]["notes"])
            output += "\t\t\tNotes Amount:{0}\n".format(amount)
            output += "\t\t\tVersions "
            amount = len(set(data[entry]["versions"]))
            output += "Amount:{0}\n".format(amount)
            for note in data[entry]["notes"]:
                # Notes and versions were appended in lockstep, so the
                # version at the same index belongs to this note.
                index = data[entry]["notes"].index(note)
                version_string = data[entry]["versions"][index]
                version_string = version_string.getVersion()
                version_string = "v" + str(version_string).zfill(3)
                output += "\t\t\t{0}:\n".format(version_string)
                text = note.getText().replace("\n", " ")
                output += "\t\t\t\t{0}\n".format(text)
        f.write(output)
    user = ftrack.User(id=event["source"]["user"]["id"])
    job = ftrack.createJob("Breakdown", "done", user)
    try:
        job.createAttachment(file_path)
    except:
        self.logger.info(traceback.format_exc())
def create_job(event):
    '''Collect component files for the selected asset versions into a
    user-chosen directory.

    Form *values* supply the component name, the collection directory, a
    file-name formatting rule and how many parent names to prefix. Image
    sequences are copied into a per-asset sub-directory; single files are
    copied straight into the collection directory. Per-item failures are
    written to an ``errors.txt`` report.
    '''
    user_id = event["source"]["user"]["id"]
    ftrack_user = ftrack.User(id=user_id)
    job = ftrack.createJob("Collecting Assets", "queued", ftrack_user)
    job.setStatus("running")
    values = event["data"]["values"]
    errors = ""

    # collecting sources and destinations
    for item in event["data"]["selection"]:
        try:
            entity = ftrack.AssetVersion(item["entityId"])

            # adding path to errors
            parent_path = ""
            parents = entity.getParents()
            parents.reverse()
            for p in parents:
                parent_path += p.getName() + "/"

            # Prefix built from the *parent_number* closest parents.
            parent_number = int(values["parent_number"])
            parent_prefix = ""
            for p in reversed(list(reversed(parents))[:parent_number]):
                parent_prefix += p.getName() + "."

            component_name = values["component_name"]
            try:
                component = entity.getComponent(name=component_name)
            except:
                # Version has no such component: skip it silently.
                continue
            src = get_file_for_component(component)

            # copying sources to destinations
            if entity.getAsset().getType().getShort() == "img":
                dir_name = entity.getParent().getParent().getName()
                if parent_prefix:
                    dir_name = parent_prefix
                asset_dir = os.path.join(values["collection_directory"],
                                         dir_name)
                if os.path.exists(asset_dir):
                    # delete existing files
                    shutil.rmtree(asset_dir)
                os.makedirs(asset_dir)
                for f in os.listdir(os.path.dirname(src)):
                    path = os.path.join(os.path.dirname(src), f)
                    # NOTE(review): the destination name is derived from
                    # *src* for every file in the sequence, so all frames
                    # share one dst — confirm format_basename handles this.
                    basename = format_basename(src,
                                               values["file_formatting"])
                    basename = parent_prefix + re.sub(r".%04d", "", basename)
                    dst = os.path.join(asset_dir, basename)
                    shutil.copy(path, dst)
            else:
                basename = format_basename(src, values['file_formatting'])
                basename = parent_prefix + basename
                dst = os.path.join(values["collection_directory"], basename)
                shutil.copy(src, dst)
        except:
            errors += parent_path + "\n"
            errors += traceback.format_exc() + "\n"

    # generate error report
    if errors:
        temp_txt = os.path.join(values["collection_directory"], "errors.txt")
        # BUGFIX: use a context manager so the report handle is closed
        # even if the write raises (was open/write/close).
        with open(temp_txt, "w") as f:
            f.write(errors)

    job.setStatus("done")
def launch(self, event):
    '''Build a notes-by-category breakdown for the selected version's asset.

    Writes a text report (review-session usage plus all notes across the
    asset's versions, grouped by note category) to a temp file and attaches
    it to a 'Breakdown' job for the triggering user.
    '''
    msg = "Breakdown successfull. Click Job for details."
    ftrack.EVENT_HUB.publishReply(event,
                                  data={"success": True, "message": msg})
    selection = event["data"]["selection"]
    temp_dir = tempfile.gettempdir()
    file_path = os.path.join(temp_dir, "ftrack_version_breakdown.txt")
    with open(file_path, "w") as f:
        output = ""
        v = ftrack.AssetVersion(selection[0]["entityId"])
        versions = v.getAsset().getVersions()
        ids = []
        # Notes/versions bucketed by note category id; the "None" bucket
        # collects notes without a category.
        data = {"None": {"name": "Uncategorized",
                         "notes": [],
                         "versions": []}}
        for cate in ftrack.getNoteCategories():
            data[cate.get("entityId")] = {"name": cate.get("name"),
                                          "notes": [],
                                          "versions": []}
        for v in versions:
            ids.append(v.getId())
            for note in v.getNotes():
                if note.get("categoryid"):
                    data[note.get("categoryid")]["notes"].append(note)
                    data[note.get("categoryid")]["versions"].append(v)
                else:
                    data["None"]["versions"].append(v)
                    data["None"]["notes"].append(note)
        # NOTE: *v* is rebound by the loop above; from here on it refers to
        # the last version iterated.
        output += "/".join([v.getParent().getParent().getName(),
                            v.getParent().getName()])
        output += ":\n"
        count = 0
        sessions = []
        project_id = v.getParents()[-1].getId()
        for session in ftrack.getReviewSessions(project_id):
            for obj in session.getObjects():
                if obj.get("version_id") in ids:
                    count += 1
                    sessions.append(session.get("name"))
        output += "\tReview Session Usage:\t{0}\n".format(count)
        output += "\tReview Sessions:\t{0}\n".format(list(set(sessions)))
        output += "\tNotes:\n"
        for entry in data:
            output += "\t\t" + data[entry]["name"] + ":\n"
            amount = len(data[entry]["notes"])
            output += "\t\t\tNotes Amount:{0}\n".format(amount)
            output += "\t\t\tVersions "
            amount = len(set(data[entry]["versions"]))
            output += "Amount:{0}\n".format(amount)
            for note in data[entry]["notes"]:
                # Notes and versions were appended in lockstep, so the
                # version at the same index belongs to this note.
                index = data[entry]["notes"].index(note)
                version_string = data[entry]["versions"][index]
                version_string = version_string.getVersion()
                version_string = "v" + str(version_string).zfill(3)
                output += "\t\t\t{0}:\n".format(version_string)
                text = note.getText().replace("\n", " ")
                output += "\t\t\t\t{0}\n".format(text)
        f.write(output)
    user = ftrack.User(id=event["source"]["user"]["id"])
    job = ftrack.createJob("Breakdown", "done", user)
    try:
        job.createAttachment(file_path)
    except:
        self.logger.info(traceback.format_exc())
def cptSync(self, xferFile, xferValue, user, queue):
    '''Sync *xferFile* between the CPT and JHB sites using rsync.

    *xferValue* selects the direction (0: CPT -> JHB, 1: JHB -> CPT).
    When *queue* is truthy the rsync command is written to a uniquely
    named file under the temp dir for a file-server queue runner instead
    of executing immediately. Progress and outcome are reported through
    an ftrack job; *xferMsg* accumulates a human-readable summary.

    NOTE(review): the rsync command is built by string interpolation and
    run with shell=True — paths containing shell metacharacters beyond
    spaces are not escaped; confirm inputs are trusted.
    '''
    # Remove trailing '/'
    xferFile = xferFile.rstrip('/')
    rsyncCmd = ''
    xferMsg = 'User: %s \n\n' \
              'File(s): %s \n\n' % (user.getUsername(), xferFile)
    direction = 'CPT -> JHB'
    tmpDir = '/data/production/tmp_files'
    if not os.path.exists(tmpDir):
        os.makedirs(tmpDir)
    job = ftrack.createJob('Syncing {0}'.format(xferFile), 'queued', user)
    job.setStatus('running')
    if xferValue == 0:
        # CPT -> JHB
        jhbDir = os.path.dirname(xferFile)
        if not os.path.exists(jhbDir):
            os.makedirs(jhbDir)
        # replace mount name as queue runs on file server
        if queue:
            jhbDir = jhbDir.replace('/data/production', '/mnt/production')
        # Escape spaces for the remote shell side of the transfer.
        xferFile2 = xferFile.replace(' ', '\\ ')
        rsyncCmd = 'rsync -avuzrh --exclude=incrementalSave ' \
                   '[email protected]:"%s" "%s/"' % (xferFile2, jhbDir)
        xferMsg += 'Direction: CPT -> JHB \n\n'
        direction = 'CPT -> JHB'
    elif xferValue == 1:
        # JHB -> CPT
        cptDir = os.path.dirname(xferFile)
        cptDir2 = cptDir.replace(' ', '\\ ')
        # replace mount name as queue runs on file server
        if queue:
            xferFile = xferFile.replace('/data/production',
                                        '/mnt/production')
        # --rsync-path creates the destination directory remotely first.
        rsyncCmd = 'rsync -avuzrh --exclude=incrementalSave ' \
                   '--rsync-path="mkdir -p \\"%s\\" && rsync" "%s" [email protected]:"%s/"' % (
                       cptDir, xferFile, cptDir2)
        xferMsg += 'Direction: JHB -> CPT \n\n'
        direction = 'JHB -> CPT'
    print '\n' + rsyncCmd
    filebase = os.path.basename(xferFile)
    if queue:
        # Queue mode: drop the command into a uniquely named file for the
        # queue runner to pick up later.
        file = os.path.join(tmpDir, str(uuid.uuid4()))
        with open(file, 'w') as f:
            f.write(rsyncCmd)
        job.setDescription('Sync queued {0} from {1}'.format(
            filebase, direction))
        job.setStatus('done')
    else:
        # Immediate mode: run rsync now and report success/failure.
        process = subprocess.Popen(rsyncCmd, stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE, shell=True)
        out, err = process.communicate()
        logging.info(out)
        exitcode = process.returncode
        if str(exitcode) != '0':
            logging.info(err)
            job.setDescription('Sync Failed for {0} from {1}'.format(
                filebase, direction))
            job.setStatus('failed')
            xferMsg += 'Status: Failed. Please re-try. \n\n'
        else:
            job.setDescription('Sync Complete for {0} from {1}'.format(
                filebase, direction))
            job.setStatus('done')
            xferMsg += 'Status: Success. Transfer Complete \n'
def launch(self, event):
    """Callback method for DJVView action.

    First call (no form values yet): scans for viewable files via the
    'djvview.launch' event and returns an enumerator form of the results.
    Second call (values submitted): launches the DJV application detached
    with the chosen path appended to its command line.
    """
    # Launching application
    if "values" in event["data"]:
        applicationIdentifier = event["data"]["applicationIdentifier"]
        application = self.applicationStore.getApplication(
            applicationIdentifier
        )
        context = event["data"].copy()
        context["source"] = event["source"]
        command = self.launcher._getApplicationLaunchCommand(
            application, context
        )
        success = True
        message = '{0} application started.'.format(application['label'])
        # The user-selected file path becomes the application argument.
        command.append(event["data"]["values"]["path"])
        try:
            options = dict(
                env={},
                close_fds=True
            )
            # Ensure subprocess is detached so closing connect will not
            # also close launched applications.
            if sys.platform == 'win32':
                options['creationflags'] = subprocess.CREATE_NEW_CONSOLE
            else:
                options['preexec_fn'] = os.setsid
            self.logger.debug(
                'Launching {0} with options {1}'.format(command, options)
            )
            process = subprocess.Popen(command, **options)
        except (OSError, TypeError):
            self.logger.exception(
                '{0} application could not be started with command "{1}".'
                .format(applicationIdentifier, command)
            )
            success = False
            message = '{0} application could not be started.'.format(
                application['label']
            )
        else:
            self.logger.debug(
                '{0} application started. (pid={1})'.format(
                    applicationIdentifier, process.pid
                )
            )
        return {
            'success': success,
            'message': message
        }
    data = event["data"]
    data["items"] = []
    # Starting a job to show user the progress of scanning for files.
    job = ftrack.createJob("DJV: Scanning for files.", "queued",
                           ftrack.User(id=event["source"]["user"]["id"]))
    job.setStatus("running")
    try:
        # NOTE(review): the same topic is published on both the legacy
        # EVENT_HUB and the new API session hub — presumably a migration
        # transition; confirm both listener generations are still needed.
        ftrack.EVENT_HUB.publish(
            ftrack.Event(
                topic='djvview.launch',
                data=data
            ),
            synchronous=True
        )
        session = get_shared_session()
        session.event_hub.publish(
            ftrack_api.event.base.Event(
                topic='djvview.launch',
                data=data
            ),
            synchronous=True
        )
    except:
        job.setStatus("failed")
    else:
        job.setStatus("done")
    # Listeners appended results to data["items"]; present them newest
    # first in an enumerator.
    return {
        "items": [
            {
                "label": "Items to view",
                "type": "enumerator",
                "name": "path",
                "data": sorted(
                    data["items"],
                    key=itemgetter("label"),
                    reverse=True
                )
            }
        ]
    }
def launch(self, event):
    '''Show the rename form; when values are submitted, rename components.

    The form is returned again after a rename so the action can be run
    repeatedly on the same selection.
    '''
    selection = event['data'].get('selection', [])
    userId = event['source']['user']['id']
    self.logger.info(
        u'Launching action with selection: {0}'.format(selection))

    if 'values' in event['data']:
        job = ftrack.createJob(description="Rename Components",
                               status="running")
        try:
            # Acknowledge the submission before doing the slow work.
            ftrack.EVENT_HUB.publishReply(event, data={
                'success': True,
                'message': 'Renaming components...'
            })
            form = event['data']['values']
            self.renameSelection(
                selection, form.get('old_name'), form.get('new_name'))
            job.setStatus('done')
        except:
            job.setStatus('failed')
            raise

    # NOTE: an enumerator pre-populated from existing component names was
    # disabled here for speed; plain text fields are used instead.
    return {
        'items': [{
            'value': 'Rename components',
            'type': 'label'
        }, {
            'value': '---',
            'type': 'label'
        }, {
            'label': 'Old Name',
            'type': 'text',
            'name': 'old_name',
            'value': 'Old Name',
        }, {
            'label': 'New name',
            'type': 'text',
            'name': 'new_name',
            'value': 'New Name',
        }]
    }
def launch(self, event):
    '''Slate-submission action: build the slate form, then submit to Deadline.

    First pass (no form values): gathers task outputs for the selection and
    returns a form listing slates, overlay fields and per-shot version /
    frame-range inputs. Second pass (values present): resolves the chosen
    Slate entities, fills each shot item with slate attributes, writes a
    Nuke slate script per shot and submits it to Deadline.
    '''
    data = event['data']
    selection = data.get('selection', [])
    session = ftrack_api.Session()
    job = None
    # Only report progress via a legacy-API job for multi-item selections.
    if ('ftrack' in sys.modules) and (len(selection) > 1):
        userId = event['source']['user']['id']
        job = ftrack.createJob(description='Gathering Shot Information',
                               status='running', user=userId)
    item = session.query(
        'select name, object_type.name, descendants, ancestors, custom_attributes, status.name from TypedContext where id is {}'
        .format(selection[0]['entityId'])).first()
    project_id = item['project_id']
    slates = get_slates(session, selection, project_id)
    try:
        master_dict = get_task_outputs(session, selection, oneitem=item,
                                       job=job)
    except TypeError:
        retFail = {
            'success': False,
            'message':
                'Try Again, CIS succesfully failed for no reason! @ master dict'
        }
        return retFail
    template_files_dir = "L:/HAL/LUX_SLATE/nuke_templates"
    template_files = get_nk_files_from_folder(template_files_dir)
    template_files_selection = {
        'label': 'Slate Template',
        'type': 'enumerator',
        'name': 'my_enumerator',
        'data': [{
            'label': i,
            'value': i
        } for i in template_files]
    }
    # Static header widgets of the form.
    slate_settings = [
        {
            'value': '<hr>',
            'type': 'label'
        },
        {
            'value': '<i><b>CHOOSE YOUR SLATES</b></i>',
            'type': 'label'
        },
        {
            'value': '<hr>',
            'type': 'label'
        },
        {
            'label': '<font color="green">Slates</font>',
            'type': 'enumerator',
            'value': '',
            'data': slates,
            'multi_select': True,
            'name': 'slates'
        },
        {
            'value': '<hr>',
            'type': 'label'
        },
        {
            'value': '<hr>',
            'type': 'label'
        },
        {
            'value': '<i><b>OVERLAYS & LABELS</b></i>',
            'type': 'label'
        },
        {
            'value': '<hr>',
            'type': 'label'
        },
        {
            'label': 'Date',
            'type': 'date',
            'name': 'date',
            'value': datetime.date.today().isoformat()
        },
        {
            'label': 'Description Text',
            'type': 'text',
            'value': '',
            'name': 'description_text'
        },
        {
            'label': 'Custom Text',
            'type': 'text',
            'value': '',
            'name': 'custom_text'
        },
        {
            'value': '<hr>',
            'type': 'label'
        },
        {
            'value': '<hr>',
            'type': 'label'
        },
        {
            'value': '<i><b>SUBMISSION RESULTS</b></i>',
            'type': 'label'
        },
        {
            'value': '<hr>',
            'type': 'label'
        }
    ]
    shot_items = slate_settings
    # Append one title/version/frame-range/message group per shot.
    for shot in master_dict:
        asset_name = shot['asset_name']
        index = shot['index']
        version_items = []
        frame_range = '{0}-{1}'.format(str(shot['first_frame']),
                                       str(shot['last_frame']))
        text = 'Could not locate rendered files.....\r'
        success_msg = 'FAILED'
        if shot['success']:
            success_msg = 'SUCCESSFUL'
            text = '''
            Offline sequence : {0}\r
            Online sequence : {1}\r
            Slate Location : {2}\r
            '''.format(shot['image_file'], shot['metad_file'],
                       shot['slate_output'])
            if shot['matte_file'] != '':
                text = '''
                Offline sequence : {0}\r
                Online sequence : {1}\r
                Matte sequence : {3}\r
                Slate Location : {2}\r
                '''.format(shot['image_file'], shot['metad_file'],
                           shot['slate_output'], shot['matte_file'])
        text = text + '<hr>'
        vers_ext = ''
        if asset_name != '':
            vers_ext = '_' + asset_name.split('_')[-1]
        for i in shot['versions']:
            new_entry = {}
            new_entry['label'] = i
            new_entry['value'] = i
            version_items.append(new_entry)
        title = {
            'value':
                '<b>{0}{1}</b> : {3}<i>{2}</i>'.format(
                    shot['item_title'], vers_ext, success_msg, ' ' * 2),
            'type': 'label'
        }
        message = {'value': text, 'type': 'label'}
        framerange = {
            'label': 'Frame Range',
            'type': 'text',
            'value': frame_range,
            'name': 'framerange_{0}'.format(index)
        }
        if len(version_items) == 0:
            version_items.append({'label': 'None Found..', 'value': None})
        versions = {
            'label': 'Version',
            'type': 'enumerator',
            'value': version_items[0].get('label'),
            'data': version_items,
            'name': 'version_{0}'.format(index)
        }
        shot_items.append(title)
        shot_items.append(versions)
        shot_items.append(framerange)
        shot_items.append(message)
    totalSlatedItems = 0
    retUI = {'items': shot_items}
    if 'values' in data:
        # Do something with the values or return a new form.
        values = data['values']
        commit = False
        self.logger.info(u'Got values: {0}'.format(values))
        slates = [str(i) for i in values['slates']]
        if slates == 'Empty':
            slates = []
            ret_fail = {'success': False, 'message': 'No slates chosen.'}
            return ret_fail
        # Query the chosen Slate entities (tuple syntax needs >1 element).
        if len(slates) > 1:
            ftrack_slates = session.query(
                'select name, custom_attributes, project.id from Slate where name in {} and project.id is {}'
                .format(tuple(slates), project_id)).all()
        if len(slates) == 1:
            ftrack_slates = session.query(
                'select name, custom_attributes, project.id from Slate where name is "{}" and project.id is {}'
                .format(slates[0], project_id)).all()
        for fslate in ftrack_slates:
            # iterate through the ftrack slate items that are chosen
            slate_custom_attributes = fslate['custom_attributes']
            if not values['description_text']:
                values['description_text'] = slate_custom_attributes[
                    'slate_overlays_description_text']
            if not values['custom_text']:
                values['custom_text'] = slate_custom_attributes[
                    'slate_overlays_custom_text']
            totalSlatedItems = 0
            # Form fields named like 'version_3' / 'framerange_3' carry a
            # per-shot index suffix.
            shot_related_params = []
            for value in values:
                if ('_' in value):
                    if (value.split('_')[-1]).isdigit():
                        shot_related_params.append(value)
            for item in master_dict:
                #print item
                for p in shot_related_params:
                    p_index = int(p.split('_')[-1])
                    if item['index'] == p_index:
                        if p.split('_')[0] == 'version':
                            item['version'] = values[p]
                        if p.split('_')[0] == 'framerange':
                            first_last = values[p].split('-')
                            item['first_frame'] = str(first_last[0])
                            item['last_frame'] = str(first_last[-1])
                            item['duration'] = str(
                                int(first_last[-1]) -
                                int(first_last[0]) + 1)
                if item['success']:
                    totalSlatedItems += 1
                    asset_name = item['asset_name']
                    # Copy the slate's attribute set onto the shot item.
                    item['slate_type'] = slate_custom_attributes[
                        'slate_type']
                    item['slate_name'] = fslate['name']
                    item['asset_name'] = asset_name
                    item['codec'] = slate_custom_attributes['slate_codec']
                    item['format'] = slate_custom_attributes[
                        'slate_format']
                    item['slate_frame'] = slate_custom_attributes[
                        'slate_slateframe']
                    item['overlays'] = slate_custom_attributes[
                        'slate_overlays']
                    item['colorspace_in'] = slate_custom_attributes[
                        'slate_colorspace_in']
                    item['colorspace_out'] = slate_custom_attributes[
                        'slate_colorspace_out']
                    item['data_type'] = slate_custom_attributes[
                        'slate_data_type']
                    # Point the file paths at the user-picked version.
                    item['image_file'] = item['image_file'].replace(
                        item['asset_name'], item['version'])
                    item['metad_file'] = item['metad_file'].replace(
                        item['asset_name'], item['version'])
                    item['slate_output'] = item['slate_output'].replace(
                        item['asset_name'], item['version'])
                    if item['slate_type'] == 'mattes':
                        item['image_file'] = item['matte_file'].replace(
                            item['asset_name'], item['version'])
                    item['asset_name'] = item['version']
                    firstframe = item['first_frame']
                    lastframe = item['last_frame']
                    inputImage = item['image_file']
                    inputTCImage = item['metad_file']
                    outputImage = item['slate_output']
                    user = getpass.getuser()
                    temp_dir = "L:\\tmp\\deadline_submission_scripts\\slates"
                    if not os.path.isdir(temp_dir):
                        os.makedirs(temp_dir)
                    template = slate_custom_attributes['slate_template']
                    slate_descriptor = template.split('.')[0]
                    slate_descriptor = slate_descriptor.replace(
                        'Generic_Slate_', '')
                    slate_descriptor = slate_descriptor.replace(
                        'Manual_Slate_', '')
                    template_path = os.path.join(template_files_dir,
                                                 template).replace(
                                                     '\\', '/')
                    item['slate_template_name'] = slate_descriptor
                    if slate_custom_attributes['slate_rename']:
                        item['asset_name'] = translate(
                            slate_custom_attributes[
                                'slate_assetname_rename'],
                            [values, item])
                    # Resolve every overlay token against the form values
                    # and the shot item.
                    slate_overlays = {
                        'slate_top_left':
                            translate(
                                slate_custom_attributes[
                                    'slate_overlays_top_left'],
                                [values, item]),
                        'slate_top_right':
                            translate(
                                slate_custom_attributes[
                                    'slate_overlays_top_right'],
                                [values, item]),
                        'slate_top_center':
                            translate(
                                slate_custom_attributes[
                                    'slate_overlays_top_center'],
                                [values, item]),
                        'slate_bottom_left':
                            translate(
                                slate_custom_attributes[
                                    'slate_overlays_bottom_left'],
                                [values, item]),
                        'slate_bottom_right':
                            translate(
                                slate_custom_attributes[
                                    'slate_overlays_bottom_right'],
                                [values, item]),
                        'slate_bottom_center':
                            translate(
                                slate_custom_attributes[
                                    'slate_overlays_bottom_center'],
                                [values, item]),
                        'slate_frame_project_title':
                            translate(
                                slate_custom_attributes[
                                    'slate_slateframe_project_title'],
                                [values, item]),
                        'slate_frame_description':
                            translate(
                                slate_custom_attributes[
                                    'slate_slateframe_description'],
                                [values, item]),
                        'slate_frame_shot_title':
                            translate(
                                slate_custom_attributes[
                                    'slate_slateframe_shot_title'],
                                [values, item]),
                        'slate_frame_range_info':
                            translate(
                                slate_custom_attributes[
                                    'slate_slateframe_range_info'],
                                [values, item]),
                        'slate_frame_timecode_info':
                            translate(
                                slate_custom_attributes[
                                    'slate_slateframe_timecode_info'],
                                [values, item]),
                        'slate_frame_version_info':
                            translate(
                                slate_custom_attributes[
                                    'slate_slateframe_version_info'],
                                [values, item])
                    }
                    item['slate_overlays'] = slate_overlays
                    str_slate_type = item['slate_name'].replace(' ', '')
                    slatefile = os.path.join(
                        temp_dir, '{}_{}_slate.nk'.format(
                            item['asset_name'],
                            str_slate_type)).replace('\\', '/')
                    job_info = os.path.join(
                        temp_dir, '{}_{}_nuke_job_info.job'.format(
                            item['asset_name'],
                            str_slate_type)).replace('\\', '/')
                    plugin_info = os.path.join(
                        temp_dir, '{}_{}_nuke_plugin_info.job'.format(
                            item['asset_name'],
                            str_slate_type)).replace('\\', '/')
                    item['slate_output'] = slate_custom_attributes[
                        'slate_output']
                    item['slate_output'] = translate(
                        item['slate_output'], [values, item])
                    # A dedicated slate frame extends the range by one
                    # frame at the head.
                    slate_frame_subtract = 1
                    if item['slate_frame'] == False:
                        slate_frame_subtract = 0
                    frames = "{0}-{1}".format(firstframe, lastframe)
                    sframes = "{0}-{1}".format(
                        str(int(firstframe) - slate_frame_subtract),
                        lastframe)
                    CreateNukeSlateFile(slatefile, template_path, item)
                    CreateNukeJob(
                        slatefile, sframes,
                        item['asset_name'] + ' ' + item['slate_name'],
                        item['slate_output'], user, job_info, plugin_info)
                    SubmitJobToDeadline(job_info, plugin_info, slatefile)
        retSuccess = {
            'success': True,
            'message': 'Slating {0}/{1} items!'.format(
                totalSlatedItems, len(master_dict))
        }
        if not totalSlatedItems:
            retSuccess['success'] = False
        return retSuccess
    if ('ftrack' in sys.modules) and (len(selection) > 1):
        job.setStatus('done')
        job.setDescription('Gathered {} items'.format(len(selection)))
    return retUI
def launch(self, event):
    '''Build the shot-package dialog and, on submit, dispatch packaging jobs.

    First pass (no ``values`` in the event data): queries the selected
    context, gathers task outputs per shot via ``get_task_outputs`` and
    returns a dialog definition (``retUI``) listing nuke scripts, frame
    ranges and existing versions for each shot.

    Second pass (``values`` present): writes the user's choices back onto
    ``master_dict`` in place, submits one Deadline packaging job per
    selected nuke script, and returns a success/failure summary instead
    of the UI.
    '''
    data = event['data']
    selection = data.get('selection', [])
    session = ftrack_api.Session()
    job = None
    # Progress is only tracked in an ftrack job for multi-item selections.
    if ('ftrack' in sys.modules) and (len(selection) > 1):
        userId = event['source']['user']['id']
        job = ftrack.createJob(description='Gathering Shot Information',
                               status='running',
                               user=userId)
    #item = session.query('select name, object_type.name, descendants, ancestors, custom_attributes from TypedContext where id is {}'.format( selection[0]['entityId'] )).first()
    # Only the FIRST selected entity drives the project lookup below.
    item = session.query(
        'select name, object_type.name, descendants, ancestors, custom_attributes, status.name from TypedContext where id is {}'
        .format(selection[0]['entityId'])).first()
    project_id = item['project_id']
    # NOTE(review): `slates` is never used below — confirm it can be dropped.
    slates = get_slates(session, selection, project_id)
    try:
        master_dict = get_task_outputs(session,
                                       selection,
                                       oneitem=item,
                                       job=job)
    except TypeError:
        retFail = {
            'success': False,
            'message':
            'Try Again, CIS succesfully failed for no reason! @ master dict'
        }
        return retFail
    # Hard-coded studio share that holds the nuke slate templates.
    template_files_dir = "L:/HAL/LUX_SLATE/nuke_templates"
    template_files = get_nk_files_from_folder(template_files_dir)
    # NOTE(review): this selector is built but never appended to the UI items.
    template_files_selection = {
        'label': 'Slate Template',
        'type': 'enumerator',
        'name': 'my_enumerator',
        'data': [{
            'label': i,
            'value': i
        } for i in template_files]
    }
    # Static header widgets shown above the per-shot rows in the dialog.
    slate_settings = [
        {
            'value': '<hr>',
            'type': 'label'
        },
        {
            'label': ' + <i>Output</i>',
            'type': 'text',
            # Tokens in braces are expanded later by translate().
            'value': '{defaultoutput}/CompPackage_{date}/{shotname}/',
            'name': 'package_output'
        },
        {
            'value': '<hr>',
            'type': 'label'
        },
        {
            'value': '<i><b>OVERLAYS & LABELS</b></i>',
            'type': 'label'
        },
        {
            'value': '<hr>',
            'type': 'label'
        },
        {
            'label': 'Date',
            'type': 'date',
            'name': 'date',
            'value': datetime.date.today().isoformat()
        },
        {
            'label': 'Description Text',
            'type': 'text',
            'value': '',
            'name': 'description_text'
        },
        {
            'label': 'Custom Text',
            'type': 'text',
            'value': '',
            'name': 'custom_text'
        },
        {
            'value': '<hr>',
            'type': 'label'
        },
        {
            'value': '<i><b>SUBMISSION RESULTS</b></i>',
            'type': 'label'
        },
        {
            'value': '<hr>',
            'type': 'label'
        }
    ]
    # The per-shot widgets are appended onto the same list as the header.
    shot_items = slate_settings
    for shot in master_dict:
        asset_name = shot['asset_name']
        index = shot['index']
        version_items = []
        frame_range = '{0}-{1}'.format(str(shot['first_frame']),
                                       str(shot['last_frame']))
        text = 'Could not locate rendered files.....\r'
        success_msg = 'FAILED'
        if shot['success']:
            success_msg = 'SUCCESSFUL'
            text = ''' Offline sequence : {0}\r Online sequence : {1}\r Slate Location : {2}\r '''.format(
                shot['image_file'], shot['metad_file'], shot['slate_output'])
            if shot['matte_file'] != '':
                #text = text + 'Mattes sequence : {0}\r'.format( shot['matte_file'] )
                text = ''' Offline sequence : {0}\r Online sequence : {1}\r Matte sequence : {3}\r Slate Location : {2}\r '''.format(
                    shot['image_file'], shot['metad_file'],
                    shot['slate_output'], shot['matte_file'])
        # NOTE(review): `text` is never placed into the UI after this point.
        text = text + '<hr>'
        vers_ext = ''
        if asset_name != '':
            vers_ext = '_' + asset_name.split('_')[-1]
        for i in shot['versions']:
            new_entry = {}
            new_entry['label'] = i
            new_entry['value'] = i
            version_items.append(new_entry)
        # NOTE(review): this first `title` dict is immediately overwritten by
        # the next assignment — the item_title/success layout is dead code.
        title = {
            'value': '<b>{0}{1}</b> : {3}<i>{2}</i>'.format(
                shot['item_title'], vers_ext, success_msg, ' ' * 2),
            'type': 'label'
        }
        title = {
            'value': '<b>{}</b>'.format(shot['shot_name']),
            'type': 'label'
        }
        message = {
            'value':
            '<b>Existing Versions : </b>' + ', '.join(shot['versions']),
            'type': 'label'
        }
        framerange = {
            'label': 'Frame Range',
            'type': 'text',
            'value': frame_range,
            'name': 'framerange_{0}'.format(index)
        }
        br = {'value': '<hr>', 'type': 'label'}
        if len(version_items) == 0:
            version_items.append({'label': 'None Found..', 'value': None})
        # NOTE(review): `versions` is built but its append below is commented
        # out; only `versions_enum` (same widget name!) reaches the UI.
        versions = {
            'label': 'Version',
            'type': 'enumerator',
            'value': version_items[0].get('label'),
            'data': version_items,
            'name': 'version_{0}'.format(index)
        }
        comp_items = []
        for i in shot['comp_scripts']:
            new_entry = {}
            new_entry['label'] = i
            new_entry['value'] = i
            comp_items.append(new_entry)
        if len(comp_items) == 0:
            comp_items.append({'label': 'None Found..', 'value': None})
        versions_enum = {
            'label': '<font color="green">Nuke Scripts</font>',
            'type': 'enumerator',
            # Default to the newest (last) script found.
            'value': comp_items[-1].get('label'),
            'data': comp_items,
            'multi_select': True,
            'name': 'version_{0}'.format(index)
        }
        shot_items.append(title)
        #shot_items.append(versions)
        shot_items.append(versions_enum)
        shot_items.append(framerange)
        shot_items.append(message)
        shot_items.append(br)
    totalSlatedItems = 0
    totalPackagedItems = 0
    retUI = {'items': shot_items}
    if 'values' in data:
        # Second pass: the dialog was submitted — apply choices and package.
        values = data['values']
        commit = False  # NOTE(review): never used.
        totalPackagedItems = 0
        self.logger.info(u'Got values: {0}'.format(values))
        # Widget names of the form '<kind>_<index>' belong to a shot row.
        shot_related_params = []
        for value in values:
            if ('_' in value):
                if (value.split('_')[-1]).isdigit():
                    shot_related_params.append(value)
        for item in master_dict:
            #print item
            # Copy the submitted per-shot values back onto the master entry.
            for p in shot_related_params:
                p_index = int(p.split('_')[-1])
                if item['index'] == p_index:
                    if p.split('_')[0] == 'version':
                        item['version'] = values[p]
                    if p.split('_')[0] == 'framerange':
                        first_last = values[p].split('-')
                        item['first_frame'] = str(first_last[0])
                        item['last_frame'] = str(first_last[-1])
                        item['duration'] = str(
                            int(first_last[-1]) - int(first_last[0]) + 1)
            if item['success']:
                totalPackagedItems += 1
                item['default_base_output'] = os.path.join(
                    item['base_path'], "03_COORDINATION/PACKAGED_SHOTS")
                #print item['version']
                # One packaging submission per selected nuke script.
                for nk in item['version']:
                    values['version'] = nk
                    item['package_output'] = values['package_output']
                    item['slate_name'] = item['shot_name'] + '_Shot_Package'
                    # NOTE(review): `item2 = item` aliases the SAME dict —
                    # this is not a copy; confirm that is intentional.
                    item2 = item
                    item2['version'] = nk
                    item['package_output'] = translate(
                        item['package_output'], [values, item2])
                    #item['image_file'] = item['image_file'].replace(item['asset_name'],item['version'])
                    #item['metad_file'] = item['metad_file'].replace(item['asset_name'],item['version'])
                    #item['slate_output'] = item['slate_output'].replace(item['asset_name'],item['version'])
                    #item['asset_name'] = item['version']
                    nk_file = '{}05_COMP/{}/{}'.format(
                        item['base_path'], item['shot_name'], nk)
                    firstframe = item['first_frame']
                    lastframe = item['last_frame']
                    inputImage = item['image_file']
                    inputTCImage = item['slate_name']
                    outputImage = item['slate_output']
                    user = getpass.getuser()
                    # Scratch area for generated Deadline submission scripts.
                    temp_dir = "L:\\tmp\\deadline_submission_scripts\\package_slates"
                    if not os.path.isdir(temp_dir):
                        os.makedirs(temp_dir)
                    asset_name = item['asset_name']
                    # NOTE(review): these first two assignments are dead —
                    # both names are reassigned immediately below.
                    _nuke_script = item['image_file'].split('.')[0] + '.nk'
                    #_nuke_script = nk_file
                    _replaced_script = os.path.join(
                        os.path.dirname(item['slate_output']),
                        os.path.basename(_nuke_script))
                    _nuke_script = nk_file
                    _replaced_script = '{}/{}'.format(
                        item['package_output'], nk)
                    _replaced_script = _replaced_script.replace('//', '/')
                    #print _nuke_script
                    #print _replaced_script
                    # Import the repath helper from the pipeline share.
                    sys.path.append(
                        '//qumulo/Libraries/HAL/LIVEAPPS/apps/Scripts/FTRACK/ftrack_hooks/package_hook/hook/'
                    )
                    import node_parser_006 as nodeparser
                    title = nk + ' Package'
                    package_path = os.path.dirname(_replaced_script)
                    date = datetime.datetime.now().strftime(
                        "%d.%m.%Y %H:%M:%S")
                    # Rewrite the nuke script's file paths into the package.
                    package_files = nodeparser.replace_nuke(
                        _nuke_script, _replaced_script)
                    #print package_files
                    # print 'inputs'
                    # Submit one copy job per input and per output the
                    # repathed script references.
                    for i in package_files['inputs']:
                        #print i['original'], '---->', i['new']
                        _i = i['original'].replace('"', '').replace("'", '')
                        _o = (package_path + '/' + i['new']).replace(
                            '//', '/').replace('\\', '/').replace('"',
                                                                  '').replace("'", '')
                        io = [_i, _o]
                        #print io
                        weird = _o.split('SOURCED_ASSETS')[-1].split('.')[0]
                        #print weird
                        #copyNukeFiles(_i,_o)
                        submitPackagePyToDeadline(title, date, weird, io)
                    #print 'outputs'
                    for o in package_files['outputs']:
                        #print o['original'], '---->', o['new']
                        _i = o['original'].replace('"', '').replace("'", '')
                        _o = (package_path + '/' + o['new']).replace(
                            '//', '/').replace('\\', '/').replace('"',
                                                                  '').replace("'", '')
                        io = [_i, _o]
                        weird = _o.split('SOURCED_ASSETS')[-1].split('.')[0]
                        #print io
                        #print weird
                        #copyNukeFiles(_i,_o)
                        submitPackagePyToDeadline(title, date, weird, io)
        retSuccess = {
            'success': True,
            'message': 'Packaging {0}/{1} items'.format(
                totalPackagedItems, len(master_dict))
        }
        if not totalPackagedItems:
            retSuccess['success'] = False
        return retSuccess
    # First pass: close the progress job and show the dialog.
    if ('ftrack' in sys.modules) and (len(selection) > 1):
        job.setStatus('done')
        job.setDescription('Gathered {} items'.format(len(selection)))
    return retUI
def createPDF(userId=None, entityType=None, entity=None, values=None):
    '''Generate an HTML review-session summary and attach it to an ftrack job.

    Builds a Bootstrap-styled report for *entity* (thumbnails, notes, note
    replies and attachments for each reviewed item), writes it to a temp
    ``review-session-<uuid>.html`` file, attaches that file to a newly
    created ftrack job, and optionally emails it to the invoking user.

    :param userId: id of the user the job/email belongs to.
    :param entityType: ftrack entity type string used by the helpers.
    :param entity: the review session (or equivalent) entity.
    :param values: dialog values — expects 'email', 'subject', 'message'.
    '''
    description = u'Review summary'
    job = ftrack.createJob(description=description,
                           status='running',
                           user=userId)
    email = bool(values['email'])
    subject = values['subject']
    message = values['message']
    # Replace the {session} token with the session name.
    subject = subject.replace("{session}", entity.get('name'))
    message = message.replace("{session}", entity.get('name'))
    try:
        # Document head + session header. The trailing backslashes continue
        # the string literal across lines; the whitespace they introduce is
        # insignificant in rendered HTML.
        html = "\
            <html>\
            <head>\
            <style media='all'>\
            @page { padding: 0pt}\
            @page { margin: 0pt; }\
            @page { size: A4}\
            img { page-break-inside: avoid; }\
            .break { clear:both; page-break-after:always; }\
            td, th { page-break-inside: avoid; word-wrap: break-word; }\
            </style>\
            <link rel='stylesheet' href='https://maxcdn.bootstrapcdn.com/bootstrap/3.3.2/css/bootstrap.min.css' media='all'>\
            <link rel='stylesheet' href='https://maxcdn.bootstrapcdn.com/bootstrap/3.3.2/css/bootstrap-theme.min.css' media='all'>\
            </head>\
            <body>\
            <div class='jumbotron' style='padding:20px; padding-top;0px; margin-bottom:0px'>\
            <h1>" + entity.get('name') + "</h1>\
            <p>" + entity.get('description') + "</p>\
            <div class='media' style='margin-top:5px; margin-bottom: 5px'>\
            <div class='media-left'>\
            <img width='40px' src='" + xstr(
            ftrack.User(entity.get('created_by')).getThumbnail()
        ) + "' class='media-object img-circle' style='width:40px;'>\
            </div>\
            <div class='media-body text-muted'>\
            Created by " + ftrack.User(
            entity.get('created_by')).getName() + "<br/>\
            " + str(
            entity.get('created_at').strftime('%A %d, %Y')) + "\
            </div>\
            </div>\
            </div>\
            <table class='table table-striped' style='' >\
            <tr>\
            <th style='min-width:3px; max-width:3px; width:3px'>#</th>\
            <th>Media information</th>\
            <th>Comments</th>\
            </tr>"
        # One table row per reviewed item: thumbnail + name/path + notes.
        lst = getEntityChildren(entityType=entityType, entity=entity)
        for i, reviewSessionObject in enumerate(lst):
            html = html + "\
            <tr>\
            <td class=''>\
            <h4 class='text-muted' style='margin-top:0px'>" + str(
                i + 1) + "</h4>\
            </td>\
            <td style='width:250px'>\
            <div class='thumbnail'>\
            <img class='img-responsive' src='" + xstr(
                reviewSessionObject[0].getThumbnail()) + "'>\
            <div class='caption'>\
            <small><strong>" + getName(
                entityType=entityType,
                entity=reviewSessionObject[0]) + "</strong></small>\
            <p class='text-muted small'>" + getPath(
                entityType=entityType,
                entity=reviewSessionObject[0]) + "</p>\
            <p>\
            <div style='margin-top:10px'>\
            <span class='text-success glyphicon glyphicon-thumbs-up' style='padding-right:5px' aria-hidden='true'></span><strong><span style='padding-right:10px'>0</span></strong><span class='text-danger glyphicon glyphicon-thumbs-down' style='padding-right:5px' aria-hidden='true'></span><strong>0</strong>\
            </div>\
            </p>\
            </div>\
            </div>\
            </td>\
            <td style=''>\
            <small>\
            <ul class='media-list'>"
            notes = reviewSessionObject[1].getNotes()
            if not len(notes):
                html = html + "\
            <li class='media' style='max-width:430px;'>\
            <p class='lead text-muted text-center' style='padding-top:60px'>\
            Bummer, there are no comments here!\
            </p>\
            </li>"
            for note in notes:
                html = html + "\
            <li class='media' style='max-width:430px;'>\
            <div class='media-left'>\
            <img src='https://www.ftrack.com/wp-content/uploads/haz2.png' class='media-object img-circle' style='width:40px'>\
            </div>\
            <div class='media-body'>\
            <h4 class='media-heading'>\
            Collaborator\
            </h4>\
            <small class='text-muted'>" + str(
                    note.getDate().strftime('%I:%M%p, %A %d, %Y')) + "</small>\
            <p>" + note.getText() + "</p>"
                # Frame annotation, if the note was made on a specific frame.
                frame = note.getMeta('reviewFrame')
                if frame is not None:
                    html = html + "\
            <p><span class='label label-primary'>Frame " + str(
                        json.loads(frame)['number']) + "</span></p>"
                attachments = note.getAttachments()
                for a in attachments:
                    html = html + "\
            <img src='" + a.getURL(
                    ) + "' class='' style='max-width:120px; margin-bottom:5px'>"
                # Threaded replies are rendered nested under the note.
                replies = note.getNotes()
                for reply in replies:
                    html = html + "\
            <div class='media' style='max-width:380px;'>\
            <div class='media-left'>\
            <img src='https://www.ftrack.com/wp-content/uploads/fl.png' class='media-object img-circle' style='width:40px'>\
            </div>\
            <div class='media-body'>\
            <h4 class='media-heading'>\
            Collaborator\
            </h4>\
            <small class='text-muted'>" + str(
                        reply.getDate().strftime(
                            '%I:%M%p, %A %d, %Y')) + "</small>\
            <p>" + reply.getText() + "</p>\
            </div>\
            </div>"
                html = html + "\
            </div>\
            </li>"
            html = html + "\
            <br/>\
            </ul>\
            </small>\
            </td>\
            </tr>"
        html = html + "\
            </table>\
            </body>\
            </html>"
        # Write the HTML report to a uniquely-named temp file.
        filename = "review-session-{0}.html".format(str(uuid.uuid1()))
        html_file = open(filename, "w")
        html_file.write(html.encode("utf-8"))
        html_file.close()
        # signup for docraptor (free trial) or use other PDF generator library
        # install docraptor with "pip install python-docraptor"
        # docraptor = DocRaptor(ADD YOUR API KEY HERE)
        # filename = "review-session-{0}.pdf".format(str(uuid.uuid1()))
        # resp = docraptor.create({
        #     'document_content': html,
        #     'document_type': 'pdf',
        #     'test': False,
        #     'strict': 'none',
        #     'async': True,
        #     'prince_options': {'media': 'screen', 'insecure': False, 'input': 'html'}
        # })
        # status_id = resp['status_id']
        # resp = docraptor.status(status_id)
        # while resp['status'] != 'completed':
        #     time.sleep(3)
        #     resp = docraptor.status(status_id)
        # f = open(filename, "w+b")
        # f.write(docraptor.download(resp['download_key']).content)
        # f.seek(0)
        # FIX: the original passed `f` here, but `f` only exists in the
        # commented-out docraptor path above, so this always raised a
        # NameError and the job was marked 'failed'. Attach the HTML file
        # that was actually written.
        with open(filename, "rb") as attachment_file:
            job.createAttachment(attachment_file, fileName=filename)
        job.setStatus('done')
        if email:
            # FIX: pass the real filename, not the literal string "filename".
            sendEmail(userId=userId,
                      subject=subject,
                      message=message,
                      recipients=[ftrack.User(userId).getEmail()],
                      filename=filename)
        os.remove(filename)
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # propagate; any report failure still flags the job.
        job.setStatus('failed')
def launch(self, event):
    '''Recompute filesystem-path custom attributes for the selection.

    For each selected project/context, walks the context itself plus all
    of its descendants and rewrites the ``base_path``, ``base_cis``,
    ``base_edl``, ``path``, ``out_path`` custom attributes according to
    the studio directory conventions (per object type and task type).
    Only changed values are written; a single commit happens at the end.
    Progress and errors are reported through an ftrack job.
    '''
    start_time = time.time()
    data = event['data']
    userId = event['source']['user']['id']
    selection = data.get('selection', [])
    job = ftrack.createJob(description='Updating Paths',
                           status='running',
                           user=userId)
    try:
        total = 0
        sel_count = 0
        update = False  # set whenever any custom attribute actually changes
        for entity in selection:
            sel_count += 1
            entityId = entity['entityId']
            selType = entity['entityType']
            # 'Slates' selections carry no path attributes — skip them.
            if selType in ['Slates']:
                continue
            # All descendants of the selection (or of the whole project).
            items = self.session.query(
                'select custom_attributes, object_type.name, type.name, parent.name, link, name from TypedContext where ancestors.id is "{0}" or project_id is "{0}"'
                .format(entityId)).all()
            sel = self.session.query(
                'select custom_attributes, object_type.name, type.name, parent.name, link, name from TypedContext where id is "{0}"'
                .format(entityId)).first()
            # NOTE(review): `== None` works but `is None` is the idiom.
            if sel == None:
                # Not a TypedContext — the selection is the project itself.
                sel = self.session.query(
                    'select custom_attributes, full_name from Project where id is "{0}"'
                    .format(entityId)).first()
                proj = sel
            else:
                proj = self.session.query(
                    'select custom_attributes, full_name from Project where id is "{0}"'
                    .format(sel['project_id'])).first()
            #episodes are found from Projet Looking downwards instead of finding it in the items iterator.
            episodes = self.session.query(
                'select name from Episode where project_id is "{0}"'.format(
                    proj['id'])).all()
            episode_names = [e['name'] for e in episodes]
            project_path = proj['custom_attributes']['Project_Path']
            project_name = proj['full_name']
            #print 'Project:',proj
            #print 'Selected:', sel
            #print 'Nested:', items
            #print 'Episodes:', episodes
            if sel == proj:
                # Project-level selection: refresh the project base path.
                base_path = '{project_path}{project_name}/'.format(
                    project_path=project_path, project_name=project_name)
                if proj['custom_attributes']['base_path'] != base_path:
                    proj['custom_attributes']['base_path'] = base_path
                    update = True
            else:
                # Context-level selection: also process the context itself.
                items.append(sel)
            items_length = len(items)
            item_count = 0
            for i in items:
                item_count += 1
                total += 1
                if len(selection) > 1:
                    job.setDescription(
                        'Updated {}/{} paths from selection {}/{}'.format(
                            item_count, items_length, sel_count,
                            len(selection)))
                else:
                    job.setDescription('Updated {}/{} paths'.format(
                        item_count, items_length))
                # Resolve which episode (if any) this item belongs to: the
                # item itself, or the nearest ancestor whose name matches.
                episode = ''
                if i['object_type']['name'] == 'Episode':
                    episode = i['name']
                else:
                    episode = [
                        x['name'] for x in i['ancestors']
                        if x['name'] in episode_names
                    ]
                    if len(episode) > 0:
                        episode = episode[0]
                    else:
                        episode = ''
                base_path = '{project_path}{project_name}/{episode}/'.format(
                    project_path=project_path,
                    project_name=project_name,
                    episode=episode).replace('//', '/')
                if i['custom_attributes']['base_path'] != base_path:
                    i['custom_attributes']['base_path'] = base_path
                    update = True
                if i['object_type']['name'] == 'Sequence':
                    # Sequences additionally carry editorial CIS/EDL paths.
                    base_cis = '{base_path}09_QT/EDITORIAL/_base_cis/'.format(
                        base_path=base_path)
                    if i['custom_attributes']['base_cis'] != base_cis:
                        i['custom_attributes']['base_cis'] = base_cis
                        update = True
                    base_edl = '{base_path}09_QT/EDITORIAL/EDL/'.format(
                        base_path=base_path)
                    if i['custom_attributes']['base_edl'] != base_edl:
                        i['custom_attributes']['base_edl'] = base_edl
                        update = True
                if i['object_type']['name'] == 'Task':
                    shot_name = i['parent'][
                        'name']  #this may break when it comes to asset builds since they are not nested under a shot..
                    task_type = i['type']['name']
                    task_name = i['name']
                    path = ''  #base_path
                    out_path = ''  #base_path
                    # Map the task type to the department work/output dirs.
                    if task_type.lower() in [
                            'compositing', 'precomp', 'cleanplate', 'retime',
                            'rotoscoping', 'paintout'
                    ]:
                        comp_out_dir = '02_OUTPUT/03_comp'
                        # A task named differently from its type is treated
                        # as a precomp and gets its own output subfolder.
                        if task_type.lower() != task_name.lower():
                            comp_out_dir = '02_OUTPUT/01_precomp/{task_name}'.format(
                                task_name=task_name)
                        path = '{base_path}{dept_name}/{shot_name}/'.format(
                            base_path=base_path,
                            shot_name=shot_name,
                            dept_name='05_COMP')
                        out_path = '{base_path}{dept_name}/{shot_name}/{comp_out_dir}/'.format(
                            base_path=base_path,
                            shot_name=shot_name,
                            dept_name='05_COMP',
                            comp_out_dir=comp_out_dir)
                    if task_type.lower() in ['matchmove', 'tracking']:
                        path = '{base_path}{dept_name}/scenes/{shot_name}/tracking/'.format(
                            base_path=base_path,
                            shot_name=shot_name,
                            dept_name='04_3D')
                        out_path = '{base_path}{dept_name}/{shot_name}/TRAC/'.format(
                            base_path=base_path,
                            shot_name=shot_name,
                            dept_name='06_RENDERS')
                    if task_type.lower() in ['animation']:
                        path = '{base_path}{dept_name}/scenes/{shot_name}/anim/'.format(
                            base_path=base_path,
                            shot_name=shot_name,
                            dept_name='04_3D')
                        out_path = '{base_path}{dept_name}/{shot_name}/ANIM/'.format(
                            base_path=base_path,
                            shot_name=shot_name,
                            dept_name='06_RENDERS')
                    if task_type.lower() in ['layout']:
                        path = '{base_path}{dept_name}/scenes/{shot_name}/layout/'.format(
                            base_path=base_path,
                            shot_name=shot_name,
                            dept_name='04_3D')
                        out_path = '{base_path}{dept_name}/{shot_name}/LYT/'.format(
                            base_path=base_path,
                            shot_name=shot_name,
                            dept_name='06_RENDERS')
                    if task_type.lower() in ['lighting']:
                        path = '{base_path}{dept_name}/scenes/{shot_name}/lighting/'.format(
                            base_path=base_path,
                            shot_name=shot_name,
                            dept_name='04_3D')
                        out_path = '{base_path}{dept_name}/{shot_name}/FINL/'.format(
                            base_path=base_path,
                            shot_name=shot_name,
                            dept_name='06_RENDERS')
                    if task_type.lower() in ['fx']:
                        path = '{base_path}{dept_name}/scenes/{shot_name}/fx/'.format(
                            base_path=base_path,
                            shot_name=shot_name,
                            dept_name='04_3D')
                        out_path = '{base_path}{dept_name}/{shot_name}/FX/'.format(
                            base_path=base_path,
                            shot_name=shot_name,
                            dept_name='06_RENDERS')
                    path = path.replace('//', '/')
                    out_path = out_path.replace('//', '/')
                    if i['custom_attributes'][
                            'path'] != path:  #only make changes if they dont already exist
                        i['custom_attributes']['path'] = path
                        update = True
                    if i['custom_attributes'][
                            'out_path'] != out_path:  #only make changes if they dont already exist
                        i['custom_attributes']['out_path'] = out_path
                        update = True
                #We could expand this so it browses the directorty for the most applicable plate... for now, run it at plate level, and let get_version choose _O vs _R
                if i['object_type']['name'] == 'Shot':
                    shot_name = i['name']
                    #print shot_name
                    out_path = '{base_path}01_PLATES/{shot_name}/PLATE/'.format(
                        base_path=base_path, shot_name=shot_name)
                    #plate_path = out_path
                    #plate_path = plate_path.replace('//', '/')
                    out_path = out_path.replace('//', '/')
                    #print out_path
                    #print
                    # NOTE(review): dead disabled plate_path block below —
                    # a bare string literal, kept as found.
                    '''
                    if i['custom_attributes']['plate_path']!= plate_path: #only make changes if they dont already exist
                        i['custom_attributes']['plate_path'] = plate_path
                        update = True
                    '''
                    if i['custom_attributes'][
                            'out_path'] != out_path:  #only make changes if they dont already exist
                        i['custom_attributes']['out_path'] = out_path
                        update = True
        # Single commit for the whole run — avoids per-item round trips.
        if update:
            self.session.commit()
        job.setStatus('done')
        job.setDescription('Updated {} paths'.format(total))
    except Exception as e:
        job.setStatus('failed')
        job.setDescription('Error : {}'.format(e))
    #print("--- %s seconds ---" % (time.time() - start_time))
    return {
        'success': True,
        'message':
        'Updating Ftrack links for {0} items nested under selected hierarchy!'
        .format(total)
    }
def uploadToFtrack(self, filename, assetName, comment, addSlate, taskid,
                   shot, user):
    '''Publish a media file to ftrack as a reviewable asset version.

    Syncs the file from CPT if it is not on disk, optionally burns in a
    slate, then either publishes still images directly or transcodes
    movies to mp4/webm review components, creates an asset version with
    note/thumbnail, and sets it to 'Pending Internal Review'. Progress
    and failures are reported on an ftrack job.

    :param filename: source media path, possibly with a 'file:' scheme.
    :param assetName: asset to version up under *shot*.
    :param comment: optional note text ('' for none).
    :param addSlate: 'Yes' to burn a slate into the media first.
    :param taskid: id of the task the version is published against.
    :param shot: ftrack shot object the asset belongs to.
    :param user: user the upload job is created for.
    '''
    job = ftrack.createJob(
        'Uploading media for shot {0}'.format(shot.getName()), 'queued',
        user)
    job.setStatus('running')
    try:
        fps = int(shot.getParent().get('fps'))
    except Exception:
        # Parent has no usable fps — fall back to a common default.
        fps = 24.0
    # Strip a leading 'file:' URI scheme and normalise to one leading slash.
    # FIX: the original used filename.strip('file:/'), which removes any of
    # the characters {f,i,l,e,:,/} from BOTH ends and corrupts real paths
    # (e.g. a path ending in '...file' loses its tail).
    if filename.startswith('file:'):
        filename = filename[len('file:'):]
    filename = '/' + filename.lstrip('/')
    fname, fext = os.path.splitext(filename)
    if not os.path.exists(filename):
        # Media not local yet — pull it from CPT first.
        result = self.cptSync(filename, job)
        job.setDescription(
            'Uploading media for shot {0}'.format(shot.getName()))
        if result == 0:
            job.setDescription(
                'File sync from CPT failed for shot {0}'.format(
                    shot.getName()))
            job.setStatus('failed')
            return
    if addSlate == 'Yes':
        job.setDescription(
            'Adding slate for shot {0}'.format(shot.getName()))
        slateFile = self.addSlateToMedia(filename, taskid, shot, user)
        # Only switch to the slated media if it was actually produced.
        if os.path.exists(slateFile):
            filename = slateFile
        job.setDescription(
            'Uploading media for shot {0}'.format(shot.getName()))
    # Still images are published directly — no transcode needed.
    if fext in ['.jpeg', '.jpg', '.png', '.bmp', '.dpx']:
        self.publishImage(filename, taskid, shot, job)
        return
    # Movie: transcode to the web-reviewable mp4/webm pair.
    outfilemp4, outfilewebm, thumbnail, metadata = self.prepMediaFiles(
        filename)
    ff, lf = self.getFrameLength(filename)
    result = self.convertFiles(filename, outfilemp4, outfilewebm)
    status = ftrack.Status('Pending Internal Review')
    if result:
        self.createThumbnail(outfilemp4, thumbnail)
    asset = self.getAsset(shot, assetName)
    version = asset.createVersion("Upload for Internal Review", taskid)
    # FIX: was `comment is not ''` — an identity check against a literal,
    # which is implementation-dependent; compare by value instead.
    if comment != '':
        note = version.createNote(comment)
    try:
        self.createAttachment(version, 'ftrackreview-mp4', outfilemp4, ff,
                              lf, fps, metadata)
        self.createAttachment(version, 'ftrackreview-webm', outfilewebm, ff,
                              lf, fps, metadata)
    except Exception as e:  # FIX: modernised from Py2-only `except X, e`.
        job.setDescription(
            'Failed to Upload Media for shot {0}'.format(shot.getName()))
        job.setStatus('failed')
        logging.error(e)
        return
    version.createComponent(name='movie', path=filename)
    version.publish()
    version.setStatus(status)
    if os.path.exists(thumbnail):
        try:
            attachment = version.createThumbnail(thumbnail)
            task = ftrack.Task(taskid)
            task.setThumbnail(attachment)
        except Exception as e:
            job.setDescription(
                'Failed to Upload Thumbnail for shot {0}'.format(
                    shot.getName()))
            job.setStatus('failed')
            self.deleteFiles(outfilemp4, outfilewebm, thumbnail)
            logging.error(e)
            return
    # NOTE(review): the job is never set to 'done' on success here —
    # confirm a caller finalises it, otherwise it stays 'running'.
def createTimelogBreakdown(entityType=None, entity=None, userId=None, values=None): description = u'Generating timelog report' job = ftrack.createJob( description=description, status='running', user=userId ) try: html = "" range=values['date_range'] report=values['report_type'] filterOnUserId=values['user'] today = datetime.date.today() date_range = (today - datetime.timedelta(days=1), today) if range == "yesterday": date_range = (today - datetime.timedelta(days=2), today - datetime.timedelta(days=1)) if range == "this_week": date_range = getWeek(today) if range == "prev_week": date_range = getWeek(today - datetime.timedelta(weeks=1)) if range == "this_month": date_range = getMonth(today) if range == "prev_month": date_range = getMonth(today.replace(day=1) - datetime.timedelta(days=1)) if range == "this_year": date_range = (datetime.date(datetime.date.today().year, 1, 1), datetime.date(datetime.date.today().year, 12, 31)) if range == "prev_year": date_range = (datetime.date(datetime.date.today().year-1, 1, 1), datetime.date(datetime.date.today().year-1, 12, 31)) title = '' if filterOnUserId != 'all': title = entity.getName() + " <small>[" + ftrack.User(filterOnUserId).getName() + "] " + date_range[0].strftime('%d') + " " + date_range[0].strftime('%B') + ", " + date_range[0].strftime('%Y') + " - " + date_range[1].strftime('%d') + " " + date_range[1].strftime('%B') + ", " + date_range[1].strftime('%Y') + " </small>" else: title = entity.getName() + " <small>"+ date_range[0].strftime('%d') + " " + date_range[0].strftime('%B') + ", " + date_range[0].strftime('%Y') + " - " + date_range[1].strftime('%d') + " " + date_range[1].strftime('%B') + ", " + date_range[1].strftime('%Y') + " </small>" # HTML Header html = " <html><head>\ <style>\ @page { padding: 10pt}\ @page { margin: 20px; }\ @page { size: A3}\ @page { size: A3 landscape }\ body{margin:30px; padding: 10px}\ img { page-break-inside: avoid; }\ .break { clear:both; page-break-after:always; }\ td, th { 
page-break-inside: avoid; }\ </style>\ <link rel='stylesheet' href='https://maxcdn.bootstrapcdn.com/bootstrap/3.3.2/css/bootstrap.min.css'>\ <link rel='stylesheet' href='https://maxcdn.bootstrapcdn.com/bootstrap/3.3.2/css/bootstrap-theme.min.css'>\ <script type='text/javascript' src='https://www.google.com/jsapi'></script>" # Load Google charts html = html + "\ <script type='text/javascript'>\ google.load('visualization', '1.0', {'packages':['timeline', 'calendar', 'corechart']});\ google.setOnLoadCallback(drawChart);" # Create Google Timeline object html = html + "\ var chartTimeline = null;\ var dataTableTimeline = null;\ var optionsTimeline = null;\ function drawChart() {\ var timeline = document.getElementById('timeline');\ chartTimeline = new google.visualization.Timeline(timeline);\ \ dataTableTimeline = new google.visualization.DataTable();\ dataTableTimeline.addColumn({ type: 'string', id: 'Task' });\ dataTableTimeline.addColumn({ type: 'string', id: 'User' });\ dataTableTimeline.addColumn({ type: 'date', id: 'Start' });\ dataTableTimeline.addColumn({ type: 'date', id: 'End' });\ dataTableTimeline.addRows([" # Helpers to keep track of accumulated timelogs/duration accumulatedSecondsPerTask = {} accumulatedSecondsPerDay = {} accumulatedNonBillableSecondsPerDay = {} accumulatedSecondsPerUser = {} billable = 0 totalSeconds = 0 # If selection is a task/folder, add [filtered] timelog events. 
if entityType != 'user': tasks = [] user=None if filterOnUserId != 'all': user = ftrack.User(filterOnUserId) tasks = getTasks(entityType=entityType, entity=entity) #tasks = getTasks(entityType=entityType, entity=entity, users=[user.getUsername()]) else: tasks = getTasks(entityType=entityType, entity=entity) job.setDescription("Generating timelog report") numberOfTasks = len(tasks) for i, task in enumerate(tasks): if numberOfTasks > 100 and i % 10 == 0: job.setDescription("Generating timelog report - {0:.2f}%".format((float(i)/numberOfTasks)*100)) timelogs = task.getTimelogs(start=date_range[0], end=date_range[1], includeChildren=False) if len(timelogs) == 0: continue taskname = task.getName() taskId = task.getId() isBillable = task.getType().get('isbillable') for timelog in timelogs: u = timelog.getUser() if user != None: if u.getId() != filterOnUserId: continue start=timelog.get('start') duration = timelog.get('duration') end=start + datetime.timedelta(seconds = duration) # a fail safe - Google charts will crash if end < start if end < start: end = start + datetime.timedelta(seconds = 60) # accumulate helpers totalSeconds += duration if isBillable: billable += duration else: if start.date() in accumulatedNonBillableSecondsPerDay: accumulatedNonBillableSecondsPerDay[start.date()] += duration else: accumulatedNonBillableSecondsPerDay[start.date()] = duration #create or update dictionary keys/values if start.date() in accumulatedSecondsPerDay: accumulatedSecondsPerDay[start.date()] += duration else: accumulatedSecondsPerDay[start.date()] = duration if taskId in accumulatedSecondsPerTask: accumulatedSecondsPerTask[taskId] += duration else: accumulatedSecondsPerTask[taskId] = duration if u.getId() in accumulatedSecondsPerUser: accumulatedSecondsPerUser[u.getId()] += duration else: accumulatedSecondsPerUser[u.getId()] = duration # add row to timeline object html=html + "\ [ '"+ taskname +"', '" + u.getName() + "', new Date(" + start.strftime('%Y') + "," + 
str(int(start.strftime('%m'))-1) + "," + start.strftime('%d') + ","+start.strftime('%H') + "," + start.strftime('%M') + "," + start.strftime('%S')+"), new Date(" + end.strftime('%Y') + "," + str(int(end.strftime('%m'))-1) + "," + end.strftime('%d') + ","+end.strftime('%H') + "," + end.strftime('%M') + "," + end.strftime('%S')+")]," #[ '{0}', '{1}', new Date({2},{3},{4},{5},{6},{7}), new Date({8},{9},{10},{11},{12},{13})],".format(taskname, u.getName(), start.strftime('%Y'), str(int(start.strftime('%m'))-1), start.strftime('%d'), start.strftime('%H'), start.strftime('%M'), start.strftime('%S'), end.strftime('%Y'), str(int(end.strftime('%m'))-1), end.strftime('%d'), end.strftime('%H'), end.strftime('%M'), end.strftime('%S')) #[ '"+ taskname +"', '" + u.getName() + "', new Date(" + start.strftime('%Y') + "," + str(int(start.strftime('%m'))-1) + "," + start.strftime('%d') + ","+start.strftime('%H') + "," + start.strftime('%M') + "," + start.strftime('%S')+"), new Date(" + end.strftime('%Y') + "," + str(int(end.strftime('%m'))-1) + "," + end.strftime('%d') + ","+end.strftime('%H') + "," + end.strftime('%M') + "," + end.strftime('%S')+")]," # If selection is a user, add [filtered] user timelog events. 
else: user = entity timelogs = user.getTimelogs(start=date_range[0], end=date_range[1], includeChildren=True) job.setDescription("Generating timelog report") for timelog in timelogs: try: taskId = timelog.get('context_id') task = ftrack.Task(taskId) isBillable = task.getType().get('isbillable') start=timelog.get('start') duration = timelog.get('duration') end=start + datetime.timedelta(seconds = duration) # a fail safe - Google charts will crash if end < start if end < start: end = start + datetime.timedelta(seconds = 60) # accumulate helpers totalSeconds += duration if isBillable: billable += duration else: if start.date() in accumulatedNonBillableSecondsPerDay: accumulatedNonBillableSecondsPerDay[start.date()] += duration else: accumulatedNonBillableSecondsPerDay[start.date()] = duration #create or update dictionary keys/values if start.date() in accumulatedSecondsPerDay: accumulatedSecondsPerDay[start.date()] += duration else: accumulatedSecondsPerDay[start.date()] = duration if taskId in accumulatedSecondsPerTask: accumulatedSecondsPerTask[taskId] += duration else: accumulatedSecondsPerTask[taskId] = duration if user.getId() in accumulatedSecondsPerUser: accumulatedSecondsPerUser[user.getId()] += duration else: accumulatedSecondsPerUser[user.getId()] = duration # add row to timeline object html=html + "\ [ '{0}', '', new Date({1},{2},{3},{4},{5},{6}), new Date({7},{8},{9},{10},{11},{12})],".format(task.getName(), start.strftime('%Y'), str(int(start.strftime('%m'))-1), start.strftime('%d'), start.strftime('%H'), start.strftime('%M'), start.strftime('%S'), end.strftime('%Y'), str(int(end.strftime('%m'))-1), end.strftime('%d'), end.strftime('%H'), end.strftime('%M'), end.strftime('%S')) #[ '" + task.getName() + "', '', new Date(" + start.strftime('%Y') + "," + str(int(start.strftime('%m'))-1) + "," + start.strftime('%d') + ","+start.strftime('%H') + "," + start.strftime('%M') + "," + start.strftime('%S')+"), new Date(" + end.strftime('%Y') + "," + 
str(int(end.strftime('%m'))-1) + "," + end.strftime('%d') + ","+end.strftime('%H') + "," + end.strftime('%M') + "," + end.strftime('%S')+")]," except: pass html = html + "]);\ optionsTimeline = {\ timeline: { rowLabelStyle: {fontName: 'Helvetica', fontSize: 10, color: '#000000'}},\ avoidOverlappingGridLines: false,\ enableInteractivity: true,\ };\ chartTimeline.draw(dataTableTimeline, optionsTimeline);" # Create Google Pie chart options used by all Pie Charts in document html = html + "\ var optionsPieChart = {\ legend: {position: 'labeled'},\ is3D: false,\ pieHole: 0,\ chartArea:{width:'92%',height:'92%'},\ pieSliceText: 'value'\ };" # Create Google Pie chart object - Total hours html = html + "\ var dataBillable = google.visualization.arrayToDataTable([\ ['Task', 'Hours'],\ ['Billable', {0:.2f}],\ ['Non-billable',{1:.2f}]]);\ \ var chartBillable = new google.visualization.PieChart(document.getElementById('billable'));\ chartBillable.draw(dataBillable, optionsPieChart);".format(billable/3600, (totalSeconds-billable)/3600) # Create Google Pie chart object - User who've logged most hours html = html + "\ var dataUserMostHours = google.visualization.arrayToDataTable([\ ['User', 'Hours']," # Loop through accumulatedSecondsPerUser and add records to chart u = sorted(accumulatedSecondsPerUser.items(),key=itemgetter(1),reverse=True) for i, t in enumerate(u): html = html + "\ ['{0}',{1:.2f}],".format(ftrack.User(t[0]).getName().encode('ascii', 'replace'), t[1]/3600) if i == 9: break html = html + "\ ]);\ var chartUserMostHours = new google.visualization.PieChart(document.getElementById('userMostHours'));\ chartUserMostHours.draw(dataUserMostHours, optionsPieChart);" # Create Google Pie chart object - Tasks with most hours logged html = html + "\ var dataTaskMostHours = google.visualization.arrayToDataTable([\ ['Task', 'Hours']," # Loop through accumulatedSecondsPerTask and add records to chart l = sorted(accumulatedSecondsPerTask.items(),key=itemgetter(1),reverse=True) 
for i, t in enumerate(l): html = html + "\ ['{0}',{1:.2f}],".format(ftrack.Task(t[0]).getName(), t[1]/3600) if i == 9: break html = html + "\ ]);\ var chartTaskMostHours = new google.visualization.PieChart(document.getElementById('taskMostHours'));\ chartTaskMostHours.draw(dataTaskMostHours, optionsPieChart);" # Create Google Calendar object - Heatmap with hours per day html = html + "\ var dataTableCalendar = new google.visualization.DataTable();\ dataTableCalendar.addColumn({ type: 'date', id: 'Date' });\ dataTableCalendar.addColumn({ type: 'number', id: 'Won/Loss' });\ dataTableCalendar.addRows([" # Loop through accumulatedSecondsPerDay and add records to chart for day, duration in accumulatedSecondsPerDay.items(): # adjusting months to match javascript months that starts on 0 html=html + "\ [ new Date({0},{1},{2}),{3:.2f}],".format(day.strftime('%Y'), str(int(day.strftime('%m'))-1), day.strftime('%d'), duration/3600) html= html + "]);\ var chartCalendar = new google.visualization.Calendar(document.getElementById('calendar'));\ var optionsCalendar = {\ title: '',\ height: 200,\ yearLabel: 'none'\ };\ \ chartCalendar.draw(dataTableCalendar, optionsCalendar);" # Create Google Area Chart object - Show looged hours per day html= html + "\ var data = google.visualization.arrayToDataTable([\ ['Date', 'Total hours','Non-billable hours']," for day, duration in sorted(accumulatedSecondsPerDay.items()): html=html + "\ ['{0}',{1:.2f}, {2:.2f}],".format(day.strftime('%d %b, %Y'), duration/3600, (accumulatedNonBillableSecondsPerDay[day]/3600 if day in accumulatedNonBillableSecondsPerDay.keys() else 0)) #[ new Date({0},{1},{2}),{3:.2f}, {4:.2f}],".format(day.strftime('%Y'), str(int(day.strftime('%m'))-1), day.strftime('%d'), duration/3600, (accumulatedNonBillableSecondsPerDay[day] if day in accumulatedNonBillableSecondsPerDay.keys() else 0)) html=html + "\ ]);\ \ var optionsAreaChart = {\ vAxis: {minValue: 0},\ isStacked: false,\ curveType: 'function',\ legend: { position: 
'bottom' }\ };\ \ var chartAreaChart = new google.visualization.SteppedAreaChart(document.getElementById('areachart'));\ chartAreaChart.draw(data, optionsAreaChart);" html = html + "\ }" # add event listener to expand Timeline div based on SVG height attribute html = html + "\ window.addEventListener('load', function(){\ var svglist = document.getElementsByTagName('svg');\ var svg = svglist[svglist.length-1];\ console.log(svg.clientHeight || svg.parentNode.clientHeight);\ document.getElementById('timeline').setAttribute('style','height:'+(svg.clientHeight+60 || svg.parentNode.clientHeight+60)+'px');\ chartTimeline.draw(dataTableTimeline, optionsTimeline);\ });\ </script>" # HTML Body html= html + "\ </head><body>\ <ol class='breadcrumb'>" # Create Boothstrap Breadcrum parents = [] try: parents = entity.getParents() parents.reverse() except: pass for parent in parents: html = html + "<li>" + parent.getName() + "</li>" html= html + "\ <li class='active'>" + entity.getName() + "</li>\ </ol>\ <div class='page-header'>\ <h1>"+title+"</h1>\ </div>\ <div class='row'>\ <div class='col-sm-6 col-md-4'>\ <div class='panel panel-default'>\ <div class='panel-heading'><h3 class='panel-title'>"+ '{:.2f}'.format(totalSeconds/3600) + " total hours</h3></div>\ <div class='panel-body'>\ <div id='billable' style='text-align:center'></div>\ </div>\ </div>\ </div>\ <div class='col-sm-6 col-md-4'>\ <div class='panel panel-default'>\ <div class='panel-heading'><h3 class='panel-title'>Who logged most hours?</h3></div>\ <div class='panel-body'>\ <div id='userMostHours' style='text-align:center'></div>\ </div>\ </div>\ </div>\ <div class='col-sm-6 col-md-4'>\ <div class='panel panel-default'>\ <div class='panel-heading'><h3 class='panel-title'>Task with most hours logged</h3></div>\ <div class='panel-body'>\ <div id='taskMostHours' style='text-align:center'></div>\ </div>\ </div>\ </div>\ </div>\ <div class='panel panel-default'>\ <div class='panel-heading'><h3 class='panel-title'>Calendar 
heatmap</h3></div>\ <div class='panel-body' style='overflow:hidden;'>\ <div id='calendar' style='margin:auto; width:920px;'></div>\ </div>\ </div>\ <div class='panel panel-default'>\ <div class='panel-heading'><h3 class='panel-title'>Hours logged per day</h3></div>\ <div class='panel-body' style='overflow:hidden;'>\ <div id='areachart' style=''></div>\ </div>\ </div>\ <div class='break'></div>\ <div class='panel panel-default'>\ <div class='panel-heading'><h3 class='panel-title'>Timeline</h3></div>\ <div class='panel-body'>\ <div id='timeline' style='height:100%'></div>\ </div>\ </div>\ </body>\ </html>" filename = "timelogs-{0}.html".format(str(uuid.uuid1())) html_file= open(filename,"w") html_file.write(html.encode("utf-8")) html_file.close() job.createAttachment(filename, fileName=filename) job.setDescription("Timelog report") job.setStatus('done') os.remove(filename) except: print 'Failed:' job.setDescription("Timelog report") job.setStatus('failed')