Code example #1
0
def GetIncompleteFrames(job):
    """Return the frame numbers of every task of *job* that is not 'Done'.

    Frames are returned in task order, duplicates preserved.
    """
    # Second argument to GetJobTasks is ``invalidate``; unclear if it is
    # actually required here.
    task_list = RepositoryUtils.GetJobTasks(job, False).TaskCollectionTasks
    return [frame
            for task in task_list
            if task.TaskStatus != 'Done'
            for frame in task.TaskFrameList]
Code example #2
0
def __main__():
    """Write one tab-separated row per job (active and deleted) to log_api.csv.

    Each row holds: job id, user name, completion time (ISO 8601), and the
    job's total task render time in .NET ticks.
    """
    rows = []
    all_jobs = (tuple(RepositoryUtils.GetJobs(True)) +
                tuple(RepositoryUtils.GetDeletedJobs()))
    for job in all_jobs:
        stats = JobUtils.CalculateJobStatistics(
            job, RepositoryUtils.GetJobTasks(job, True))
        done = job.JobCompletedDateTime
        # .NET ticks are 100 ns; ticks-within-the-second // 10 -> microseconds.
        completed = datetime(
            done.Year, done.Month, done.Day,
            done.Hour, done.Minute, done.Second,
            done.Ticks % 10**7 // 10,
        )
        rows.append('{}\t{}\t{}\t{}\n'.format(
            job.JobId,
            job.JobUserName,
            completed.isoformat(),
            stats.TotalTaskRenderTime.Ticks,
        ))
    with open('log_api.csv', 'w') as output:
        output.writelines(rows)
    return None
Code example #3
0
def process(frames):
    """Resume every suspended task of the selected jobs whose frame list
    overlaps *frames*.

    :param frames: frame-range string, parsed by ``parse_frames`` into a set
        of frame numbers.
    :returns: ``True`` on completion, ``None`` if the frame list was empty
        (after showing an error dialog).
    """
    frame_nums = parse_frames(frames)
    if not frame_nums:
        scriptDialog.ShowMessageBox("Empty Frame List!", "Error")
        return

    for jobId in MonitorUtils.GetSelectedJobIds():
        job = RepositoryUtils.GetJob(jobId, True)
        tasks = RepositoryUtils.GetJobTasks(job, True)

        # isdisjoint tests overlap without materializing an intersection set
        # (the old code built set(task.TaskFrameList) per task just to throw
        # the intersection away).
        resume = [
            task for task in tasks
            if task.TaskStatus == "Suspended"
            and not frame_nums.isdisjoint(task.TaskFrameList)
        ]

        RepositoryUtils.ResumeTasks(job, resume)

    return True
Code example #4
0
File: stat_gather.py  Project: M2ools/deadline_stat
    def OnJobFinished(self, job):
        print '=============== stat_gathering start ==============='

        # check plugin. If not Maya, ignore
        if 'maya' in job.JobPlugin.lower():
            print 'This is a Maya job.'

            # prepare data to store
            info = job.JobName.split('_') if '_' in job.JobName else []

            if len(info) >= 4:
                print 'Enough tokens in job name. Start gathering stats.'
                projects = util.read_json('project.json')

                if info[0] not in projects['projects']:
                    print 'Not in collecting projects. Skipping.'

                else:
                    jobname = job.JobName
                    jobid = job.JobId
                    project = info[0]
                    episode = info[1]
                    shot = '{}_{}'.format(info[2], info[3])
                    shot_unique = '{}_{}_{}'.format(project, episode, shot)
                    renderlayer = job.JobExtraInfo8
                    # convert version to int
                    version = job.JobExtraInfo9
                    version = int(version.replace('v', '') if version else 0)

                    stats = JobUtils.CalculateJobStatistics(
                        job, RepositoryUtils.GetJobTasks(job, True))

                    rendertime = to_sec(stats.AverageFrameRenderTimeAsString)
                    errortime = to_sec(stats.WastedErrorTimeAsString)
                    peakram = to_gb(stats.PeakRamUsage)
                    framecount = stats.FrameCount
                    errorcount = stats.ErrorReports

                    print 'JobId = {}'.format(jobid)
                    print 'project = {}'.format(project)
                    print 'episode = {}'.format(episode)
                    print 'shot = {}'.format(shot)
                    print 'version = {}'.format(version)
                    print 'renderlayer = {}'.format(renderlayer)

                    print 'WastedErrorTime in seconds = {}'.format(errortime)
                    print 'rendertime in seconds = {}'.format(rendertime)
                    print 'PeakRamUsage = {} Gb'.format(peakram)
                    print 'FrameCount = {}'.format(framecount)
                    print 'ErrorReports (error count) = {}'.format(errorcount)
                    print ''

                    tokens = pgdb.get_connection_tokens()
                    try:
                        print 'Connecting to database @{}'.format(
                            tokens['host'])
                        conn = pgdb.init_connection(**tokens)
                        print 'Connection success.'

                        curr = conn.cursor()

                        print 'Insert project'
                        curr.execute(
                            """
                            INSERT INTO project (nameshort) VALUES (%s)
                            ON CONFLICT (nameshort) DO NOTHING""", (project, ))
                        conn.commit()

                        print 'Insert episode'
                        curr.execute(
                            """
                            INSERT INTO episode (nameshort, project_id)
                            VALUES (
                                %s,
                                (SELECT project_id FROM project WHERE nameshort=%s))
                            ON CONFLICT (nameshort) DO NOTHING""",
                            (episode, project))
                        conn.commit()

                        print 'Insert shot'
                        curr.execute(
                            """
                            INSERT INTO shot (nameshort, nameunique, episode_id)
                            VALUES (
                                %s, %s,
                                (SELECT episode_id FROM episode WHERE nameshort=%s))
                            ON CONFLICT (nameunique) DO NOTHING""",
                            (shot, shot_unique, episode))
                        conn.commit()

                        print 'Insert render job.'
                        arg = {
                            'job_id': jobid,
                            'version': version,
                            'renderlayer': renderlayer,
                            'errortime': errortime,
                            'errorcount': errorcount,
                            'rendertime': rendertime,
                            'peakram': peakram,
                            'framecount': framecount,
                            'shot_unique': shot_unique
                        }
                        curr.execute(
                            """
                            INSERT INTO shot_render_stat
                            (job_id, version, renderlayer, errortime, errorcount, rendertime, peakram, framecount, shot_id)
                            VALUES (
                                %(job_id)s, %(version)s, %(renderlayer)s, %(errortime)s, %(errorcount)s,
                                %(rendertime)s, %(peakram)s, %(framecount)s, 
                                (SELECT shot_id FROM shot WHERE nameunique=%(shot_unique)s))
                            ON CONFLICT (job_id) DO UPDATE SET
                                errortime = %(errortime)s, errorcount = %(errorcount)s,
                                rendertime = %(rendertime)s, peakram = %(peakram)s, timestamp = now()""",
                            arg)
                        conn.commit()

                        curr.close()
                        conn.close()
                    except:
                        print 'Database processing error.'

            else:
                print 'not enough tokens in job name. Abort.'
        else:
            print 'Not a Maya job. Abort.'

        print '=============== stat_gather end ================='
            # NOTE(review): fragment — the enclosing function and loop start
            # before this chunk. ``suph`` here appears to be one line of a
            # "slave, multiplier" file; TODO confirm against the full source.
            data = suph.replace(',', ' ').split()
            if 1 < len(data):
                suph_slave[data[0]] = float(data[1])

    # One group name per line; blank lines skipped, names lowercased.
    with open(fldr + 'ch_group.txt') as input_ch:
        ch_group = {
            group.strip().lower() for group in input_ch if group.strip()}

    all_group = set()
    all_slave = set()
    all_job = []
    report_users = {}
    # Walk both active and deleted jobs so the report covers everything.
    for job in (tuple(RepositoryUtils.GetJobs(True)) +
                tuple(RepositoryUtils.GetDeletedJobs())):
        all_group.add(job.JobGroup.lower())
        task_job = RepositoryUtils.GetJobTasks(job, True)
        ticks = 0.0
        for task in task_job:
            slave = task.TaskSlaveName.lower()
            all_slave.add(slave)
            # Per-slave weighting factor; ``core`` is the default when the
            # slave has no entry in suph_slave (both defined outside this view).
            if slave in suph_slave:
                suph = suph_slave[slave]
            else:
                suph = core
            # Only count positive render times below ``ticks_null`` —
            # presumably a sanity cap on bogus task times; TODO confirm.
            if 0 < task.TaskRenderTime.Ticks < ticks_null:
                ticks += task.TaskRenderTime.Ticks * suph
        # Ticks are 100 ns, so /3.6e9 would be hours; the //360000 /100000
        # split looks like hours rounded via floor — verify intent.
        su = round(ticks // 360000 / 100000, 8)
        stats = JobUtils.CalculateJobStatistics(job, task_job)
        ch = round(
            stats.TotalTaskRenderTime.Ticks * core // 360000 / 100000, 8)
        ch = max(ch, 0.0)