Example #1
0
 def render_DELETE(self, request):
     """Cancel the jobs whose UUIDs are listed in the JSON request body.

     The body, when non-empty, is a JSON array of job UUIDs.  Matching
     jobs are removed from self.inst.jobs.jobs and cancelled via
     cancel_job().  Returns a JSON array of the UUIDs still queued.
     """
     request.setHeader('Content-type', 'application/json')
     content = request.content.read()
     if content:
         del_uuids = json.loads(content)
         # List comprehension instead of filter(): under Python 3 filter()
         # returns a one-shot lazy iterator, so storing it as the jobs list
         # would leave it exhausted after the first traversal (e.g. by the
         # json.dumps below).
         self.inst.jobs.jobs = [
             j for j in self.inst.jobs.jobs if j.uuid not in del_uuids
         ]
         self.inst.jobs.cancel_job(del_uuids)
     # List comprehension instead of map(): json.dumps cannot serialize a
     # Python 3 map object (TypeError).
     return json.dumps([j.uuid for j in self.inst.jobs.jobs])
Example #2
0
 def render_PUT(self, request):
     """Create jobs from the JSON request body.

     The body, when non-empty, is parsed as JSON and handed to
     self.add_jobs(); the resulting uids are returned as a JSON array.
     An empty body yields None.
     """
     request.setHeader('Content-type', 'application/json')
     payload = request.content.read()
     if not payload:
         return None
     new_jobs = json.loads(payload)
     uids = self.add_jobs(new_jobs)
     return json.dumps(uids)
Example #3
0
 def render_DELETE(self, request):
     """Cancel the jobs whose UUIDs are listed in the JSON request body.

     The body, when non-empty, must be a JSON array of job UUIDs; matching
     job dicts are removed from self.inst.jobs.jobs and cancelled.
     Returns the remaining jobs serialized as JSON.
     """
     request.setHeader('Content-type', 'application/json')
     content = request.content.read()
     if content:
         del_uuids = json.loads(content)
         assert isinstance(del_uuids, (list, tuple))
         # List comprehension instead of filter(): under Python 3 filter()
         # yields a lazy iterator, so the jobs list would be consumed by the
         # json.dumps below (which also cannot serialize a filter object).
         self.inst.jobs.jobs = [
             x for x in self.inst.jobs.jobs if x['uuid'] not in del_uuids
         ]
         self.inst.jobs.cancel(del_uuids)
     return json.dumps(self.inst.jobs.jobs)
Example #4
0
 def render_meta(self):
     """Render the template's 'meta' and 'post_content' blocks.

     The 'meta' block, if present, is rendered line by line (each line
     stripped), parsed with sjson, merged into self.settings, and followed
     by self.patch().  The 'post_content' block, if present, is rendered
     verbatim and joined into self.body.
     """
     template = self.get_template()
     blocks = template.blocks
     if 'meta' in blocks:
         raw = '\n'.join(
             line.strip() for line in blocks['meta'](template.new_context()))
         self.settings.update(sjson.loads(raw))
         self.patch()
     if 'post_content' in blocks:
         self.body = '\n'.join(blocks['post_content'](template.new_context()))
Example #5
0
 def _run(self, cases):
     """Assert that sjson.loads() decodes each input to its expected value.

     cases -- iterable of (sjson_string, expected_python_value) pairs.
     """
     for js, py in cases:
         r = sjson.loads(js)
         # assertEqual: assertEquals is a deprecated alias that was
         # removed in Python 3.12.
         self.assertEqual(r, py)
 def _config(self, entry, config, caller):
     if 'render_meta' in self.environment.globals:
         settings = sjson.loads(config)
         entry.settings.update(settings)
     return ''
Example #7
0
def run_stat_parsing(options, stat_queue, result_queue):
    """Worker loop: parse compression-stat SJSON files and aggregate them.

    Reads filenames from ``stat_queue`` until a ``None`` sentinel arrives.
    For every run entry in each file it normalizes the range-reduction
    labels, attaches filename/clip/description fields, feeds the running
    aggregates (aggregate_stats / track_best_runs / track_worst_runs) and,
    when ``options['csv_summary']`` / ``options['csv_error']`` request it,
    collects rows for the CSV outputs.  Progress is reported by putting
    ``('progress', filename)`` on ``result_queue``; the final aggregate
    dict is delivered as ``('done', results)``.

    options      -- dict with at least 'csv_summary' and 'csv_error' flags.
    stat_queue   -- queue of stat-file paths, terminated by a None sentinel.
    result_queue -- queue receiving ('progress', ...) / ('done', ...) tuples.
    """
    #signal.signal(signal.SIGINT, signal.SIG_IGN)

    try:
        agg_run_stats = {}
        # Extreme sentinel values so the first run always replaces them.
        best_runs = {
            'best_error': 100000000.0,
            'best_error_entry': None,
            'best_ratio': 0.0,
            'best_ratio_entry': None,
        }
        worst_runs = {
            'worst_error': -100000000.0,
            'worst_error_entry': None,
            'worst_ratio': 100000000.0,
            'worst_ratio_entry': None,
        }
        num_runs = 0
        total_compression_time = 0.0
        stats_summary_data = []
        stats_error_data = []

        while True:
            stat_filename = stat_queue.get()
            if stat_filename is None:
                break  # sentinel: producer has no more work

            # 'stat_file' rather than 'file' to avoid shadowing the builtin.
            with open(stat_filename, 'r') as stat_file:
                file_data = sjson.loads(stat_file.read())
                runs = file_data['runs']
                for run_stats in runs:
                    run_stats['range_reduction'] = shorten_range_reduction(
                        run_stats['range_reduction'])
                    run_stats['filename'] = stat_filename
                    run_stats['clip_name'] = os.path.splitext(
                        os.path.basename(stat_filename))[0]

                    # Segmented runs carry their own per-segment range
                    # reduction; include it in the description labels.
                    if 'segmenting' in run_stats:
                        run_stats['segmenting'][
                            'range_reduction'] = shorten_range_reduction(
                                run_stats['segmenting']['range_reduction'])
                        run_stats[
                            'desc'] = '{}, {}, Clip {}, Segment {}'.format(
                                run_stats['rotation_format'],
                                run_stats['translation_format'],
                                run_stats['range_reduction'],
                                run_stats['segmenting']['range_reduction'])
                        run_stats[
                            'csv_desc'] = '{} {} Clip {} Segment {}'.format(
                                run_stats['rotation_format'],
                                run_stats['translation_format'],
                                run_stats['range_reduction'],
                                run_stats['segmenting']['range_reduction'])
                    else:
                        run_stats['desc'] = '{}, {}, Clip {}'.format(
                            run_stats['rotation_format'],
                            run_stats['translation_format'],
                            run_stats['range_reduction'])
                        run_stats['csv_desc'] = '{} {} Clip {}'.format(
                            run_stats['rotation_format'],
                            run_stats['translation_format'],
                            run_stats['range_reduction'])

                    aggregate_stats(agg_run_stats, run_stats)
                    track_best_runs(best_runs, run_stats)
                    track_worst_runs(worst_runs, run_stats)

                    num_runs += 1
                    total_compression_time += run_stats['compression_time']

                    if options['csv_summary']:
                        #(name, raw_size, compressed_size, compression_ratio, compression_time, duration, num_animated_tracks, max_error)
                        num_animated_tracks = run_stats.get(
                            'num_animated_tracks', 0)
                        data = (run_stats['csv_desc'], run_stats['raw_size'],
                                run_stats['compressed_size'],
                                run_stats['compression_ratio'],
                                run_stats['compression_time'],
                                run_stats['duration'], num_animated_tracks,
                                run_stats['max_error'])
                        stats_summary_data.append(data)

                    if 'segments' in run_stats and len(
                            run_stats['segments']) > 0:
                        # enumerate instead of a manual counter.
                        for segment_index, segment in enumerate(
                                run_stats['segments']):
                            if 'error_per_frame_and_bone' in segment and len(
                                    segment['error_per_frame_and_bone']) > 0:
                                # Convert to array https://docs.python.org/3/library/array.html
                                # Lower memory footprint and more efficient
                                # Drop the data if we don't write the csv files, otherwise aggregate it
                                if options['csv_error']:
                                    #(name, segment_index, data)
                                    data = (
                                        run_stats['clip_name'], segment_index,
                                        segment['error_per_frame_and_bone'])
                                    stats_error_data.append(data)

                                # Data isn't needed anymore, discard it to
                                # keep the worker's memory footprint low.
                                segment['error_per_frame_and_bone'] = []

                result_queue.put(('progress', stat_filename))

        # Done: hand the aggregated results back to the consumer.
        results = {
            'agg_run_stats': agg_run_stats,
            'best_runs': best_runs,
            'worst_runs': worst_runs,
            'num_runs': num_runs,
            'total_compression_time': total_compression_time,
            'stats_summary_data': stats_summary_data,
            'stats_error_data': stats_error_data,
        }
        result_queue.put(('done', results))
    except KeyboardInterrupt:
        print('Interrupted')
Example #8
0
    # --- CLI argument validation (fragment of a larger entry point) ---
    # Expect exactly one argument: the SJSON stats file to process.
    if len(sys.argv) != 2:
        print(
            'Usage: python gen_full_error_stats.py <path/to/input_file.sjson>')
        sys.exit(1)

    input_sjson_file = sys.argv[1]
    if not input_sjson_file.endswith('.sjson'):
        print('Expected SJSON input file, found: {}'.format(input_sjson_file))
        sys.exit(1)

    if not os.path.exists(input_sjson_file):
        print('Input file not found: {}'.format(input_sjson_file))
        sys.exit(1)

    # Parse the whole stats file with the simplified-JSON parser.
    with open(input_sjson_file, 'r') as file:
        input_sjson_data = sjson.loads(file.read())

    # numpy structured-dtype description: a 128-byte clip-name string
    # paired with a float32 error value.
    input_data_type_def = {
        'names': ('clip_names', 'errors'),
        'formats': ('S128', 'f4')
    }
    # Column indices pulled from each row -- presumably (clip name, error);
    # TODO confirm against the sjson layout.
    columns_to_extract = (0, 3)

    # NOTE(review): hard-coded absolute Windows output paths -- these only
    # work on the original author's machine.
    output_csv_file_path = 'D:\\acl-dev\\tools\\graph_generation\\full_errors.csv'
    output_csv_file_path_top10 = 'D:\\acl-dev\\tools\\graph_generation\\full_errors_top10.csv'

    # Percentiles 0.0..100.0 in 0.1 steps, plus a zoomed 90.0..100.0 range
    # in 0.01 steps for the top-10% view.
    desired_percentiles = [x * 0.1 for x in range(0, 1001)]
    desired_percentiles_top10 = [90.0 + (x * 0.01) for x in range(0, 1001)]

    output_csv_data = []
    output_csv_data_top10 = []