Example #1
File: integrate.py Project: kif/pyFAI
    def prepare_write(self, data_info, engine):
        if data_info.source_filename:
            output_name = os.path.splitext(data_info.source_filename)[0]
        else:
            output_name = "array_%d" % data_info.data_id

        if self._is_2d:
            extension = ".azim"
        else:
            extension = ".dat"

        if data_info.frame_id is not None:
            output_name = "%s_%04d" % (output_name, data_info.frame_id)

        output_name = "%s%s" % (output_name, extension)

        if self._output_path:
            if os.path.isdir(self._output_path):
                basename = os.path.basename(output_name)
                outpath = os.path.join(self._output_path, basename)
            else:
                outpath = os.path.abspath(self._output_path)
        else:
            outpath = output_name

        if os.path.exists(outpath):
            if self._mode == HDF5Writer.MODE_DELETE:
                os.unlink(outpath)
        self._writer = DefaultAiWriter(outpath, engine)
        self._writer.init(fai_cfg=self._fai_cfg, lima_cfg=self._lima_cfg)
Example #2
    def create_file_header(self):
        try:
            # pyFAI version 0.12.0
            return self.pattern_geometry.makeHeaders(polarization_factor=self.polarization_factor)
        except AttributeError:
            # pyFAI after version 0.12.0
            from pyFAI.io import DefaultAiWriter
            return DefaultAiWriter(None, self.pattern_geometry).make_headers()
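
A minimal usage sketch for the fallback branch above, assuming `ai` is an already-configured `pyFAI.azimuthalIntegrator.AzimuthalIntegrator`: passing `None` as the filename gives a `DefaultAiWriter` that only builds the ASCII header string and never writes to disk.

from pyFAI.io import DefaultAiWriter

# Build the "#"-prefixed header that pyFAI would prepend to a .dat/.azim file,
# without writing any file (the filename argument is None).
header = DefaultAiWriter(None, ai).make_headers()
print(header)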
Example #3
File: integrate.py Project: kif/pyFAI
class MultiFileWriter(pyFAI.io.Writer):
    """Broadcast writing to differnet files for each frames"""

    def __init__(self, output_path, mode=HDF5Writer.MODE_ERROR):
        super(MultiFileWriter, self).__init__()
        if mode in [HDF5Writer.MODE_OVERWRITE, HDF5Writer.MODE_APPEND]:
            raise ValueError("Mode %s unsupported" % mode)
        self._writer = None
        self._output_path = output_path
        self._mode = mode

    def init(self, fai_cfg=None, lima_cfg=None):
        self._fai_cfg = fai_cfg
        self._lima_cfg = lima_cfg
        self._is_2d = self._fai_cfg.get("do_2D", False) is True

    def prepare_write(self, data_info, engine):
        if data_info.source_filename:
            output_name = os.path.splitext(data_info.source_filename)[0]
        else:
            output_name = "array_%d" % data_info.data_id

        if self._is_2d:
            extension = ".azim"
        else:
            extension = ".dat"

        if data_info.frame_id is not None:
            output_name = "%s_%04d" % (output_name, data_info.frame_id)

        output_name = "%s%s" % (output_name, extension)

        if self._output_path:
            if os.path.isdir(self._output_path):
                basename = os.path.basename(output_name)
                outpath = os.path.join(self._output_path, basename)
            else:
                outpath = os.path.abspath(self._output_path)
        else:
            outpath = output_name

        if os.path.exists(outpath):
            if self._mode == HDF5Writer.MODE_DELETE:
                os.unlink(outpath)
        self._writer = DefaultAiWriter(outpath, engine)
        self._writer.init(fai_cfg=self._fai_cfg, lima_cfg=self._lima_cfg)

    def write(self, data):
        self._writer.write(data)
        self._writer.close()
        self._writer = None

    def close(self):
        pass
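
A hedged usage sketch for the MultiFileWriter above. The `DataInfo` namedtuple is a hypothetical stand-in for the application's `data_info` object (only the three attributes read by `prepare_write` are needed), `ai` is assumed to be a configured `AzimuthalIntegrator`, `result` a pyFAI integration result such as the `Integrate1dResult` returned by `AzimuthalIntegrator.integrate1d()`, and `processed/` an existing directory.

from collections import namedtuple

# Hypothetical stand-in for the application's data_info object.
DataInfo = namedtuple("DataInfo", ["source_filename", "data_id", "frame_id"])

writer = MultiFileWriter("processed/", mode=HDF5Writer.MODE_DELETE)
writer.init(fai_cfg={"do_2D": False})   # 1D integration -> ".dat" extension

# One output file per frame, here processed/sample_0003.dat
writer.prepare_write(DataInfo("sample.edf", 0, 3), ai)
writer.write(result)
writer.close()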
Example #4
File: integrate.py Project: vallsv/pyFAI
def integrate_shell(options, args):
    import json
    with open(options.json) as f:
        config = json.load(f)

    ai = pyFAI.worker.make_ai(config)
    worker = pyFAI.worker.Worker(azimuthalIntegrator=ai)
    # TODO this will init again the azimuthal integrator, there is a problem on the architecture
    worker.setJsonConfig(options.json)
    worker.safe = False  # all processing are expected to be the same.
    start_time = time.time()

    # Skip non-existent files
    image_filenames = []
    for item in args:
        if os.path.exists(item) and os.path.isfile(item):
            image_filenames.append(item)
        else:
            logger.warning("File %s do not exists. Ignored." % item)
    image_filenames = sorted(image_filenames)

    progress_bar = ProgressBar("Integration", len(image_filenames), 20)

    # Integrate files one by one
    for i, item in enumerate(image_filenames):
        logger.debug("Processing %s" % item)

        if len(item) > 100:
            message = os.path.basename(item)
        else:
            message = item
        progress_bar.update(i + 1, message=message)

        img = fabio.open(item)
        multiframe = img.nframes > 1

        custom_ext = True
        if options.output:
            if os.path.isdir(options.output):
                outpath = os.path.join(options.output, os.path.splitext(os.path.basename(item))[0])
            else:
                outpath = os.path.abspath(options.output)
                custom_ext = False
        else:
            outpath = os.path.splitext(item)[0]

        if custom_ext:
            if multiframe:
                outpath = outpath + "_pyFAI.h5"
            else:
                if worker.do_2D():
                    outpath = outpath + ".azim"
                else:
                    outpath = outpath + ".dat"
        if multiframe:
            writer = HDF5Writer(outpath)
            writer.init(config)

            for i in range(img.nframes):
                fimg = img.getframe(i)
                normalization_factor = get_monitor_value(fimg, options.monitor_key)
                data = fimg.data
                res = worker.process(data=data,
                                     metadata=fimg.header,
                                     normalization_factor=normalization_factor)
                if not worker.do_2D():
                    res = res.T[1]
                writer.write(res, index=i)
            writer.close()
        else:
            normalization_factor = get_monitor_value(img, options.monitor_key)
            data = img.data
            writer = DefaultAiWriter(outpath, worker.ai)
            worker.process(data,
                           normalization_factor=normalization_factor,
                           writer=writer)
            writer.close()

    progress_bar.clear()
    logger.info("Processing done in %.3fs !" % (time.time() - start_time))
Example #5
File: integrate.py Project: picca/pyFAI
def process(input_data, output, config, monitor_name, observer):
    """
    Integrate a set of data.

    :param List[str] input_data: List of input filenames
    :param str output: Filename or directory for the output
    :param dict config: Dictionary to configure `pyFAI.worker.Worker`
    :param str monitor_name: Name of the monitor used to compute the normalization factor; overrides the one from the configuration file
    :param IntegrationObserver observer: Observer of the processing
    """
    worker = pyFAI.worker.Worker()
    worker_config = config.copy()

    json_monitor_name = worker_config.pop("monitor_name", None)
    if monitor_name is None:
        monitor_name = json_monitor_name
    elif json_monitor_name is not None:
        logger.warning("Monitor name from command line argument override the one from the configuration file.")
    worker.set_config(worker_config, consume_keys=True)
    worker.output = "raw"

    # Check unused keys
    for key in worker_config.keys():
        # FIXME this should be read also
        if key in ["application", "version"]:
            continue
        logger.warning("Configuration key '%s' from json is unused", key)

    worker.safe = False  # all processing are expected to be the same.
    start_time = time.time()

    if observer is not None:
        observer.worker_initialized(worker)

    # Skip invalid data
    valid_data = []
    for item in input_data:
        if isinstance(item, six.string_types):
            if os.path.isfile(item):
                valid_data.append(item)
            else:
                if "::" in item:
                    try:
                        fabio.open(item)
                        valid_data.append(item)
                    except Exception:
                        logger.warning("File %s do not exists. File ignored.", item)
                else:
                    logger.warning("File %s do not exists. File ignored.", item)
        elif isinstance(item, fabio.fabioimage.FabioImage):
            valid_data.append(item)
        elif isinstance(item, numpy.ndarray):
            valid_data.append(item)
        else:
            logger.warning("Type %s unsopported. Data ignored.", item)

    if observer is not None:
        observer.processing_started(len(valid_data))

    # Integrate files one by one
    for iitem, item in enumerate(valid_data):
        logger.debug("Processing %s", item)

        # TODO rework it as source
        if isinstance(item, six.string_types):
            kind = "filename"
            fabio_image = fabio.open(item)
            filename = fabio_image.filename
            multiframe = fabio_image.nframes > 1
        elif isinstance(item, fabio.fabioimage.FabioImage):
            kind = "fabio-image"
            fabio_image = item
            multiframe = fabio_image.nframes > 1
            filename = fabio_image.filename
        elif isinstance(item, numpy.ndarray):
            kind = "numpy-array"
            filename = None
            fabio_image = None
            multiframe = False

        if observer is not None:
            observer.processing_data(iitem + 1, filename=filename)

        if filename:
            output_name = os.path.splitext(filename)[0]
        else:
            output_name = "array_%d" % iitem

        if multiframe:
            extension = "_pyFAI.h5"
        else:
            if worker.do_2D():
                extension = ".azim"
            else:
                extension = ".dat"
        output_name = "%s%s" % (output_name, extension)

        if output:
            if os.path.isdir(output):
                basename = os.path.basename(output_name)
                outpath = os.path.join(output, basename)
            else:
                outpath = os.path.abspath(output)
        else:
            outpath = output_name

        if fabio_image is None:
            if item.ndim == 3:
                writer = HDF5Writer(outpath)
                writer.init(fai_cfg=config)
                for iframe, data in enumerate(item):
                    result = worker.process(data=data,
                                            writer=writer)
                    if observer is not None:
                        if observer.is_interruption_requested():
                            break
                        observer.data_result(iitem, result)
                writer.close()
            else:
                data = item
                writer = DefaultAiWriter(outpath, worker.ai)
                result = worker.process(data=data,
                                        writer=writer)
                if observer is not None:
                    observer.data_result(iitem, result)
                writer.close()
        else:
            if multiframe:
                writer = HDF5Writer(outpath, append_frames=True)
                writer.init(fai_cfg=config)

                for iframe in range(fabio_image.nframes):
                    fimg = fabio_image.getframe(iframe)
                    normalization_factor = get_monitor_value(fimg, monitor_name)
                    data = fimg.data
                    result = worker.process(data=data,
                                            metadata=fimg.header,
                                            normalization_factor=normalization_factor,
                                            writer=writer)
                    if observer is not None:
                        if observer.is_interruption_requested():
                            break
                        observer.data_result(iitem, result)
                writer.close()
            else:
                writer = DefaultAiWriter(outpath, worker.ai)

                normalization_factor = get_monitor_value(fabio_image, monitor_name)
                data = fabio_image.data
                result = worker.process(data,
                                        normalization_factor=normalization_factor,
                                        writer=writer)
                if observer is not None:
                    observer.data_result(iitem, result)
                writer.close()

        if observer is not None:
            if observer.is_interruption_requested():
                break

    if observer is not None:
        if observer.is_interruption_requested():
            logger.info("Processing was aborted")
            observer.processing_interrupted()
        else:
            observer.processing_succeeded()
        observer.processing_finished()
    logger.info("Processing done in %.3fs !", (time.time() - start_time))
    return 0
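
A hedged sketch of calling process() above on an in-memory frame: a single numpy array goes through the `fabio_image is None` branch and is written under out/ (assumed to be an existing directory). `config` is assumed to be a pyFAI worker configuration dictionary, for instance the contents of a JSON file written by the integration GUI.

import numpy

frame = numpy.random.random((1024, 1024))   # placeholder detector image
# No monitor and no observer; the output name defaults to array_0.dat
# (or array_0.azim for a 2D configuration) inside out/.
process([frame], "out/", config, monitor_name=None, observer=None)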
Example #6
def integrate_shell(options, args):
    import json
    with open(options.json) as f:
        config = json.load(f)

    ai = pyFAI.worker.make_ai(config)
    worker = pyFAI.worker.Worker(azimuthalIntegrator=ai)
    # TODO this will init again the azimuthal integrator, there is a problem on the architecture
    worker.setJsonConfig(options.json)
    worker.safe = False  # all processing are expected to be the same.
    start_time = time.time()

    # Skip non-existent files
    image_filenames = []
    for item in args:
        if os.path.exists(item) and os.path.isfile(item):
            image_filenames.append(item)
        else:
            logger.warning("File %s do not exists. Ignored.", item)
    image_filenames = sorted(image_filenames)

    progress_bar = ProgressBar("Integration", len(image_filenames), 20)

    # Integrate files one by one
    for i, item in enumerate(image_filenames):
        logger.debug("Processing %s", item)

        if len(item) > 100:
            message = os.path.basename(item)
        else:
            message = item
        progress_bar.update(i + 1, message=message)

        img = fabio.open(item)
        multiframe = img.nframes > 1

        custom_ext = True
        if options.output:
            if os.path.isdir(options.output):
                outpath = os.path.join(
                    options.output,
                    os.path.splitext(os.path.basename(item))[0])
            else:
                outpath = os.path.abspath(options.output)
                custom_ext = False
        else:
            outpath = os.path.splitext(item)[0]

        if custom_ext:
            if multiframe:
                outpath = outpath + "_pyFAI.h5"
            else:
                if worker.do_2D():
                    outpath = outpath + ".azim"
                else:
                    outpath = outpath + ".dat"
        if multiframe:
            writer = HDF5Writer(outpath)
            writer.init(config)

            for i in range(img.nframes):
                fimg = img.getframe(i)
                normalization_factor = get_monitor_value(
                    fimg, options.monitor_key)
                data = fimg.data
                res = worker.process(data=data,
                                     metadata=fimg.header,
                                     normalization_factor=normalization_factor)
                if not worker.do_2D():
                    res = res.T[1]
                writer.write(res, index=i)
            writer.close()
        else:
            normalization_factor = get_monitor_value(img, options.monitor_key)
            data = img.data
            writer = DefaultAiWriter(outpath, worker.ai)
            worker.process(data,
                           normalization_factor=normalization_factor,
                           writer=writer)
            writer.close()

    progress_bar.clear()
    logger.info("Processing done in %.3fs !", (time.time() - start_time))
    return 0