Example #1
    def run(self):
        """Runs subprocess with Popen.

        This method must not be called directly. Use the blocking
        :py:meth:`~dlbs.Worker.work` method instead.
        """
        try:
            # Dump parameters to a log file or to standard output
            DictUtils.ensure_exists(self.params, 'exp.log_file', default_value='')
            if self.params['exp.log_file'].strip() == '':
                self.params['exp.log_file'] = '/dev/stdout'
            IOUtils.mkdirf(self.params['exp.log_file'])
            with open(self.params['exp.log_file'], 'a+') as log_file:
                self.__dump_parameters(log_file)
            # This is where we launch the process. Keep in mind that the log file that is
            # supposed to be created is exp.log_file (exp_log_file in the script).
            # Any other output of the launching script is printed by this Python code
            # to standard output.
            self.process = subprocess.Popen(self.command, universal_newlines=True,
                                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
                                            env=self.environ)
            while True:
                output = self.process.stdout.readline()
                if output == '' and self.process.poll() is not None:
                    break
                if output:
                    sys.stdout.write(output)
                    sys.stdout.flush()
            self.ret_code = self.process.poll()
        except Exception as err:
            logging.warn('Exception has been caught for experiment %s: %s', self.params.get('exp.id'), str(err))
            logging.warn(traceback.format_exc())
            self.ret_code = -1
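For clarity, here is a minimal standalone sketch of the same launch-and-stream pattern used in run() above: Popen with merged stdout/stderr, echoed line by line until the process exits. The helper name and the command are placeholders, not part of DLBS.

import subprocess
import sys

def stream_command(command, environ=None):
    """Run `command`, echoing its combined stdout/stderr line by line; return the exit code."""
    process = subprocess.Popen(command, universal_newlines=True,
                               stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
                               env=environ)  # env=None inherits the parent environment
    while True:
        line = process.stdout.readline()
        if line == '' and process.poll() is not None:
            break
        if line:
            sys.stdout.write(line)
            sys.stdout.flush()
    return process.poll()

if __name__ == '__main__':
    print('exit code: %d' % stream_command(['echo', 'hello']))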
Example #2
def build_tensorflow_synsets(imagenet_dir, synset_file):
    """Builds a textual file with one synset on a line."""
    IOUtils.mkdirf(synset_file)
    labels = ImageNetTools.get_labels()
    with open(synset_file, 'w') as fobj:
        for label in labels:
            fobj.write("%s\n" % label)
Example #3
def build_tensorflow_human_labels(imagenet_dir, human_labels_file):
    """Builds a textual file mapping each synset to its human-readable labels, one per line."""
    IOUtils.mkdirf(human_labels_file)
    labels = ImageNetTools.get_labels()
    with open(human_labels_file, 'w') as fobj:
        for label in labels:
            fobj.write("%s\t%s\n" % (label, labels[label]['human_labels']))
Example #4
def build_mxnet_labels(imagenet_dir, labels_file):
    """Generates a textual file with the following content:
       0   45  n02093256/n02093256_3032.JPEG
       1   45  n02093256/n02093256_3353.JPEG
       ...
       image_index   image_class_label   image_path
    """
    IOUtils.mkdirf(labels_file)
    img_files = ImageNetTools.get_image_files(imagenet_dir)
    labels = ImageNetTools.get_labels()
    with open(labels_file, 'w') as fobj:
        for img_index, img_file in enumerate(img_files):
            synset, fname, finfo = ImageNetTools.get_file_info(img_file, labels)
            fobj.write("%d\t%d\t%s/%s\n" % (img_index, finfo['label'], synset, fname))
Example #5
def build_caffe_labels(imagenet_dir, labels_file):
    """Generates a textual file with the following content:
       img_0000.jpeg 1
       img_0001.jpeg 0
       ...
       mapping image file name to its class label
    """
    IOUtils.mkdirf(labels_file)
    img_files = ImageNetTools.get_image_files(imagenet_dir)
    labels = ImageNetTools.get_labels()
    with open(labels_file, 'w') as fobj:
        for img_file in img_files:
            synset, fname, finfo = ImageNetTools.get_file_info(img_file, labels)
            fobj.write("%s/%s %d\n" % (synset, fname, finfo['label']))
Example #6
def main():
    """Does all log parsing work."""
    opts = parse_args()

    files = IOUtils.gather_files(opts['inputs'], "*.log", opts['recursive'])
    succeeded, failed = LogParser.parse_log_files(files, opts)

    def _dump_data(file_name, opts, data):
        with gzip.open(file_name, 'wb') if opts['_gz'] is True else open(file_name, 'w') as file_obj:
            json.dump({'data': data}, file_obj, indent=4)

    if opts['output_file'] is None:
        json.dump(succeeded, sys.stdout, indent=4, sort_keys=True)
        print ("")
    else:
        IOUtils.mkdirf(opts['output_file'])
        output_files = []
        if len(failed) > 0:
            _dump_data(opts['_failed_file'], opts, failed)
            output_files.append(opts['_failed_file'])

        num_benchmarks = len(succeeded)
        if opts['num_output_files'] is not None:
            opts['benchmarks_per_file'] = int(math.ceil(float(num_benchmarks) / opts['num_output_files']))

        if opts['benchmarks_per_file'] is not None:
            file_index = 0
            while True:
                start_index = file_index * opts['benchmarks_per_file']
                end_index = min(start_index + opts['benchmarks_per_file'], num_benchmarks)
                file_name = IOUtils.get_non_existing_file(
                    "%s_%d.%s" % (opts['_output_file_without_ext'], file_index, opts['_ext'])
                )
                _dump_data(
                    file_name,
                    opts,
                    succeeded[start_index:end_index]
                )
                output_files.append(file_name)
                if end_index >= num_benchmarks:
                    break
                file_index += 1
        else:
            _dump_data(opts['output_file'], opts, succeeded)
            output_files.append(opts['output_file'])
        print("Log parser summary.")
        print("Following files have been created:")
        json.dump(output_files, sys.stdout, indent=4, sort_keys=True)
        print ("")