Пример #1
0
    def __init__(self, target_type="Kodak", image=None, scale_factor=1.0):
        """Set up grayscale analysis for a target calibration strip.

        :param target_type: Name of a grayscale definition in GRAYSCALES.
        :param image: Optional image data; when given, analysis runs at once.
        :param scale_factor: Multiplier applied to scalable grayscale values.
        """
        self.grayscale_type = target_type

        # Copy each grayscale setting onto the instance, scaling it when the
        # target type supports scaling. A conditional expression replaces the
        # fragile `cond and scaled or raw` idiom, which silently fell back to
        # the raw value whenever the scaled result was falsy (e.g. zero).
        for k in GRAYSCALES[target_type]:
            value = GRAYSCALES[target_type][k]
            setattr(self, "_grayscale_{0}".format(k),
                    value * scale_factor
                    if target_type in grayscale.GRAYSCALE_SCALABLE
                    else value)

        self._logger = logger.Logger("Analyse Grayscale")
        self._img = image

        # Variables populated by analysis
        self._grayscale_pos = None
        self._grayscaleSource = None
        self._grayscale_X = None
        self._sectionAreaSlices = []

        if image is not None:
            self._grayscale_pos, self._grayscaleSource = self.get_grayscale()
            self._grayscale_X = self.get_grayscale_X(self._grayscale_pos)
        else:
            self._logger.warning("No analysis run yet")
 def __init__(self, *args, **kwargs):
     """Create the XML-RPC server and reset its life-cycle flags.

     All positional and keyword arguments are forwarded verbatim to
     SimpleXMLRPCServer.
     """
     self.logger = logger.Logger("RPC Server")
     self.logger.info(
         "Starting server with {0} and {1}".format(args, kwargs))

     self._server = SimpleXMLRPCServer(*args, **kwargs)

     # Life-cycle flags: not yet started, not running, but willing to live.
     self._started = False
     self._running = False
     self._keepAlive = True
Пример #3
0
    def __init__(self, job, parent_pipe):
        """Stand-in process wrapper for a job, wired to the parent pipe.

        Stores the job, wraps the parent end of the pipe in an effector,
        and logs the current liveness state of this wrapper.
        """
        self._job = job
        self._parent_pipe = pipes.ParentPipeEffector(parent_pipe)
        self._logger = logger.Logger("Fake Process {0}".format(job.id))
        # NOTE(review): is_alive() is called before __init__ finishes;
        # presumably it relies only on state set above — confirm.
        self._logger.info("Running ({0}) with pid {1}".format(
            self.is_alive(), job.pid))

        # Not abandoned until explicitly marked so.
        self.abandoned = False
Пример #4
0
    def __init__(self, job, job_effector, parent_pipe, child_pipe):
        """Process wrapper for a job, holding both ends of its pipe."""
        super(RpcJob, self).__init__()

        self._logger = logger.Logger("Job {0} Process".format(job.id))
        self._job = job
        self._job_effector = job_effector

        # Communication endpoints: effector for the parent side, raw pipe
        # handed over to the child process.
        self._parent_pipe = pipes.ParentPipeEffector(parent_pipe)
        self._childPipe = child_pipe

        self.abandoned = False
Пример #5
0
 def __init__(self, path):
     """Prepare an empty loader for the meta-data workbook at *path*."""
     self._logger = logger.Logger("MetaDataLoader")
     self._path = path
     self._sheet = -1

     # Per-sheet collections, filled in as the workbook is parsed.
     self._entries = []
     self._row_iterators = []
     self._columns = []
     self._sheet_names = []
     # Sheet header rows (list of str)
     self._headers = []
Пример #6
0
    def run(self):
        """Run the job's effector to completion in this process.

        A background thread polls the pipe while this method steps the
        effector iterator; after each step the current status is pushed
        back through the pipe. The loop ends when the effector raises
        StopIteration or the communications thread dies.
        """
        def _communicator():
            # Poll the pipe until either side decides to stop.
            while pipe_effector.keepAlive and job_running:
                pipe_effector.poll()
                sleep(0.07)

            _l.info("Will not recieve any more communications")

        job_running = True
        _l = logger.Logger("RPC Job {0} (proc-side)".format(self._job.id))

        pipe_effector = pipes.ChildPipeEffector(self._childPipe,
                                                self._job_effector(self._job))

        # Rename the OS process so it is identifiable in process listings.
        setproctitle.setproctitle("SoM {0}".format(
            pipe_effector.procEffector.TYPE.name))

        t = Thread(target=_communicator)
        t.start()

        _l.info("Communications thread started")

        effector_iterator = pipe_effector.procEffector

        _l.info("Starting main loop")

        while t.is_alive() and job_running:

            if pipe_effector.keepAlive:

                try:

                    # Advance the effector one work unit (Python 2 iterator).
                    effector_iterator.next()

                except StopIteration:

                    _l.info("Next returned stop iteration, job is done.")
                    job_running = False
                    # pipe_effector.keepAlive = False

                # Report progress only while the comms thread still runs.
                if t.is_alive():
                    pipe_effector.sendStatus(
                        pipe_effector.procEffector.status())
                sleep(0.05)

            else:
                # Job asked not to be kept alive; back off with a longer sleep.
                _l.info("Job doesn't want to be kept alive")
                sleep(0.29)

        # Final status push before shutting down the comms thread.
        if t.is_alive():
            pipe_effector.sendStatus(pipe_effector.procEffector.status())
        t.join(timeout=1)
        _l.info("Job completed")
Пример #7
0
    def __init__(self,
                 file_path=None,
                 data=None,
                 meta_data=None,
                 scan_times=None):
        """XML project reader; reads *file_path* immediately when given."""
        self._file_path = file_path
        self._data = data
        self._meta_data = meta_data
        self._scan_times = scan_times

        # Considered loaded if any content was handed over directly.
        self._loaded = data is not None or meta_data is not None

        if not file_path:
            self._logger = logger.Logger('XML-reader')
            return

        self._logger = logger.Logger("XML-reader '{0}'".format(
            os.path.basename(file_path)))

        if self.read():
            self._loaded = True
        else:
            self._logger.error("XML Reader not fully initialized!")
Пример #8
0
    def __one_init__(self, jobs):
        """One-time initialization of the job queue.

        :type jobs: scanomatic.server.jobs.Jobs
        """
        self._logger = logger.Logger("Job Queue")
        self._paths = paths.Paths()
        self._jobs = jobs
        self._next_priority = rpc_job_models.JOB_TYPE.Scan
        self._scanner_manager = ScannerPowerManager()

        # Restore any queued jobs persisted from a previous run.
        self._queue = list(
            RPC_Job_Model_Factory.serializer.load(self._paths.rpc_queue))

        decorators.register_type_lock(self)
Пример #9
0
    def __one_init__(self):
        """One-time init: restore persisted jobs and reset status state."""
        self._logger = logger.Logger("Jobs Handler")
        self._paths = paths.Paths()
        self._scanner_manager = scanner_manager.ScannerPowerManager()

        # Maps rpc_job_models.RPCJobModel -> server.rpcjob.RpcJob
        self._jobs = {}

        self._load_from_file()

        self._statuses = []
        self._forcingStop = False
Пример #10
0
 def locking_wrapper(self, *args, **kwargs):
     """Call the wrapped *f* while holding the per-type lock.

     The lock for ``type(self)`` is acquired before the call and released
     in a ``finally`` block, so success and failure paths share a single
     release site and the lock can no longer leak on non-``Exception``
     exits (e.g. KeyboardInterrupt), which the old duplicated-release
     version allowed.
     """
     global _TYPE_LOCK
     object_type = type(self)
     _acquire(object_type)
     try:
         result = f(self, *args, **kwargs)
     except Exception:
         logger.Logger("Type Lock").critical(
             "Something failed attempting to call {0} with '{1}' as args and '{2}' as kwargs"
             .format(f, args, kwargs))
         # Bare `raise` preserves the original traceback (unlike `raise e`).
         raise
     finally:
         _TYPE_LOCK[object_type].release()
     return result
Пример #11
0
    def __init__(self):
        """Set up server state: config, flags, jobs, queue and scanners."""
        config = app_config.Config()

        self.logger = logger.Logger("Server")
        self.admin = config.rpc_server.admin

        # Life-cycle flags
        self._running = self._started = False
        self._waitForJobsToTerminate = False
        self._server_start_time = None

        # Job bookkeeping: the queue feeds off the jobs collection.
        self._jobs = jobs.Jobs()
        self._queue = queue.Queue(self._jobs)

        self._scanner_manager = scanner_manager.ScannerPowerManager()
Пример #12
0
    def __init__(self, plate_shapes, *paths):
        """Allocate per-plate meta-data arrays and load the given files."""
        self._logger = logger.Logger("MetaData")
        self._plate_shapes = plate_shapes

        # One object-array per plate; None where a plate has no shape.
        self._data = tuple(
            np.empty(shape, dtype=np.object) if shape is not None else None
            for shape in plate_shapes)

        # One header slot per plate, filled during loading.
        self._headers = [None for _ in plate_shapes]

        self._loading_plate = 0
        self._loading_offset = []
        self._paths = paths

        self._load(*paths)

        if not self.loaded:
            self._logger.warning("Not enough meta-data to fill all plates")
Пример #13
0
    def __one_init__(self):
        """One-time init: discover scanners and kick off power-manager load."""
        self._logger = logger.Logger("Scanner Manager")
        self._conf = app_config.Config()
        self._paths = paths.Paths()
        self._fixtures = fixtures.Fixtures()
        self._orphan_usbs = set()

        self._scanners = self._initiate_scanners()
        self._scanner_queue = []
        self._pm = None

        self._reported_sane_missing = STATE.Unknown

        # Power-manager loading happens off-thread.
        Thread(target=self._load_pm).start()

        decorators.register_type_lock(self)
Пример #14
0
    def __init__(self, host, port, userID=None, log_level=None):
        """Proxy for talking to the RPC server at *host*:*port*."""
        self._logger = logger.Logger("Client Proxy")
        if log_level is not None:
            self._logger.level = log_level

        self._userID = userID

        # RPC calls that require admin credentials.
        self._adminMethods = ('communicateWith', 'createFeatureExtractJob',
                              'createAnalysisJob', 'removeFromQueue',
                              'reestablishMe', 'flushQueue', 'serverRestart',
                              'serverShutDown')

        self._client = None
        self._host = None
        self._port = None

        # Assigning through the public attributes sets up the connection.
        self.host = host
        self.port = port
Пример #15
0
class GridCellSizes(object):
    """Lookup of approximate grid cell pixel sizes per pinning format."""

    _LOGGER = logger.Logger("Grid Cell Sizes")

    # Known pinning formats mapped to their approximate cell sizes.
    _APPROXIMATE_GRID_CELL_SIZES = {
        (8, 12): (212, 212),
        (16, 24): (106, 106),
        (32, 48): (53.64928854, 52.69155633),
        (64, 96): (40.23696640, 39.5186672475),
    }

    @staticmethod
    def get(item):
        """Return the approximate cell size tuple for pinning format *item*.

        The format is tried in the orientations given by IMAGE_ROTATIONS,
        flipping *item* between attempts; None is returned when no known
        format matches.

        :type item: tuple
        """
        if not isinstance(item, tuple):
            GridCellSizes._LOGGER.error(
                "Grid formats can only be tuples {0}".format(type(item)))
            return None

        approximate_size = None
        # noinspection PyTypeChecker
        reverse_slice = slice(None, None, -1)

        for rotation in IMAGE_ROTATIONS:

            if rotation is IMAGE_ROTATIONS.Unknown:
                continue

            elif item in GridCellSizes._APPROXIMATE_GRID_CELL_SIZES:
                approximate_size = GridCellSizes._APPROXIMATE_GRID_CELL_SIZES[
                    item]
                # Portrait orientation swaps the two size components.
                if rotation is IMAGE_ROTATIONS.Portrait:
                    approximate_size = approximate_size[reverse_slice]
                break
            else:
                # Try the flipped format on the next rotation.
                item = item[reverse_slice]

        if not approximate_size:
            GridCellSizes._LOGGER.warning(
                "Unknown pinning format {0}".format(item))

        return approximate_size
Пример #16
0
    def __init__(self, sourceValues=None, targetValues=None, polyCoeffs=None):
        """Map measured source values onto target reference values.

        When no polynomial coefficients are supplied but both source and
        target values are, a third-degree polynomial is fitted to them.

        :param sourceValues: Measured values.
        :param targetValues: Reference values the sources should map onto.
        :param polyCoeffs: Pre-computed polynomial coefficients (optional).
        :raises Exception: Re-raises whatever np.polyfit failed with.
        """
        self._logger = logger.Logger("Image Transpose")
        self._source = sourceValues
        self._target = targetValues
        self._polyCoeffs = polyCoeffs

        if (self._polyCoeffs is None and self._target is not None
                and self._source is not None):

            try:
                self._polyCoeffs = np.polyfit(self._source, self._target, 3)
            # `except Exception, e` was Python-2-only syntax; the `as` form
            # works on Python 2.6+ and 3.x alike.
            except Exception as e:
                self._logger.critical(
                    "Could not produce polynomial from source " +
                    "{0} and target {1}".format(self._source, self._target))

                raise e
Пример #17
0
    def __init__(self, output_directory, xml_model):
        """Prepare full and slimmed XML outputs in *output_directory*.

        :type xml_model: scanomatic.models.analysis_model.XMLModel
        """
        self._logger = logger.Logger("XML writer")
        self._directory = output_directory
        self._formatting = xml_model
        self._paths = Paths()

        # Two parallel outputs: the complete XML and a slimmed variant.
        self._outdata_full = os.sep.join((output_directory, "analysis.xml"))
        self._outdata_slim = os.sep.join(
            (output_directory, "analysis_slimmed.xml"))

        self._open_tags = list()
        self._file_handles = {'full': None, 'slim': None}

        self._initialized = self._open_outputs(file_mode='w')
Пример #18
0
    def __init__(self, job, logger_name="Process Effector"):
        """Base effector wrapping one RPC job.

        :type job: scanomatic.models.rpc_job_models.RPCjobModel
        :type logger_name: str
        :return:
        """
        self._job = job
        self._job_label = job.id
        self._logger = logger.Logger(logger_name)

        self._fail_vunerable_calls = tuple()
        self._specific_statuses = {}

        # Remote-procedure entry points exposed over the pipe.
        self._allowed_calls = {
            'email': self.email,
            'pause': self.pause,
            'resume': self.resume,
            'setup': self.setup,
            'status': self.status,
            'stop': self.stop
        }

        # Life-cycle flags
        self._allow_start = False
        self._running = False
        self._started = False
        self._stopping = False
        self._paused = False

        self._messages = []

        self._iteration_index = None
        self._start_time = None
        self._pipe_effector = None
        self._pid = os.getpid()

        decorators.register_type_lock(self)
Пример #19
0
    def __init__(self, pipe, loggerName="Pipe effector"):
        """Wrap one end of a pipe with call dispatch and send buffering."""
        self._logger = logger.Logger(loggerName)

        # The actual communications object
        self._pipe = pipe

        # Calls this side accepts
        self._allowedCalls = dict()

        # Calls the remote side has announced it will accept
        self._allowedRemoteCalls = None

        # Flag indicating whether the other side is still reachable
        self._hasContact = True

        # Sends that failed are buffered here for retry
        self._sendBuffer = []

        # Calls that should trigger a special reaction when the pipe is not
        # working; the reaction depends on server vs client side.
        self._failVunerableCalls = []

        self._pid = os.getpid()
__author__ = 'martin'

import os
import glob
import re

import scanomatic.io.logger as logger
import scanomatic.io.paths as paths

_logger = logger.Logger("Legacy compatibility")


def patch_image_file_names_by_interval(path, interval=20.0):
    """

    :param path: Directory containing the images.
    :type path: str
    :param interval: Interval between images
    :type interval: float
    :return: None
    """

    pattern = re.compile(r"(.*)_\d{4}\.tiff")
    sanity_threshold = 3

    source_pattern = "{0}_{1}.tiff"
    target_pattern = paths.Paths().experiment_scan_image_pattern

    images = tuple(
        os.path.basename(i) for i in glob.glob(os.path.join(path, '*.tiff')))
Пример #21
0
import scanomatic.io.logger as logger
from scanomatic.models.factories.fixture_factories import FixtureFactory
from scanomatic.models.factories.compile_project_factory import CompileImageAnalysisFactory
from scanomatic.image_analysis.first_pass_image import FixtureImage

#
# GLOBALS
#

_logger = logger.Logger("1st Pass Analysis")

#
# EXCEPTIONS
#


class MarkerDetectionFailed(Exception):
    pass


#
# FUNCTION
#


def analyse(compile_image_model, fixture_settings, issues):
    """
    :type fixture_settings: scanomatic.io.fixtures.FixtureSettings
    :type compile_image_model: scanomatic.models.compile_project_model.CompileImageModel
    :type issues: dict
    :rtype : scanomatic.models.compile_project_model.CompileImageAnalysisModel
Пример #22
0
class GridArray(object):
    """One plate of the analysis: grid placement and per-colony grid cells.

    The grid is either detected automatically (detect_grid) or loaded from
    file with a manual offset (set_grid); grid cells are then positioned
    from the grid and analysed per image (analyse).
    """

    _LOGGER = logger.Logger("Grid Array")

    def __init__(self, image_identifier, pinning, analysis_model):
        # Grid state stays empty until detect_grid/set_grid is called.

        self._paths = paths.Paths()

        self._identifier = _create_grid_array_identifier(image_identifier)
        self._analysis_model = analysis_model
        self._pinning_matrix = pinning

        self._guess_grid_cell_size = None
        self._grid_cell_size = None
        self._grid_cells = {}
        """:type:dict[tuple|scanomatic.image_analysis.grid_cell.GridCell]"""
        self._grid = None
        self._grid_cell_corners = None

        self._features = AnalysisFeaturesFactory.create(
            index=self._identifier[-1], shape=tuple(pinning), data=set())
        self._first_analysis = True

    @property
    def features(self):
        """Feature collection for this plate."""
        return self._features

    @property
    def grid_cell_size(self):
        """Current grid cell size (None until a grid is established)."""
        return self._grid_cell_size

    @property
    def index(self):
        """Plate index within the project."""
        return self._identifier[-1]

    @property
    def image_index(self):
        """Index of the image this grid array was last analysed against."""
        return self._identifier[0]

    @image_index.setter
    def image_index(self, value):
        self._identifier[0] = value

    def set_grid(self, im, analysis_directory=None, offset=None, grid=None):
        """Apply a manual re-gridding from a saved grid file plus offset.

        Falls back to automatic detection when no offset is given or the
        grid file cannot be read. Returns True on success.
        """
        self._LOGGER.info(
            "Setting manual re-gridding for plate {0} using offset {1} on reference grid {2}"
            .format(self.index + 1, offset, grid))

        if not offset:
            return self.detect_grid(im,
                                    analysis_directory=analysis_directory,
                                    grid_correction=offset)

        try:
            grid = np.load(grid)
        except IOError:
            self._LOGGER.error("No grid file named '{0}'".format(grid))
            self._LOGGER.info("Invoking grid detection instead")
            return self.detect_grid(im,
                                    analysis_directory=analysis_directory,
                                    grid_correction=offset)

        self._init_grid_cells(
            _get_grid_to_im_axis_mapping(self._pinning_matrix, im))

        # Mean spacing between consecutive grid positions along each axis.
        spacings = ((grid[0, 1:] - grid[0, :-1]).ravel().mean(),
                    (grid[1, :, 1:] - grid[1, :, :-1]).ravel().mean())

        if offset and not all(o == 0 for o in offset):

            # The direction of the first axis is flipped to make offsetting more logical from user perspective
            # this inversion must be matched by equal inversion in detect_grid

            o = offset[0] * -1
            delta = spacings[0]
            if o > 0:

                # Shift grid rows and extrapolate the vacated edge positions.
                grid[0, :-o] = grid[0, o:]
                for idx in range(-o, 0):
                    grid[0, idx] = grid[0, idx - 1] + delta

            elif o < 0:

                grid[0, -o:] = grid[0, :o]
                for idx in range(-o)[::-1]:
                    grid[0, idx] = grid[0, idx + 1] - delta

            o = offset[1]
            delta = spacings[1]
            if o > 0:

                grid[1, :, :-o] = grid[1, :, o:]
                for idx in range(-o, 0):
                    grid[1, :, idx] = grid[1, :, idx - 1] + delta

            elif o < 0:

                grid[1, :, -o:] = grid[1, :, :o]
                for idx in range(-o)[::-1]:
                    grid[1, :, idx] = grid[1, :, idx + 1] - delta

        self._grid = grid

        if not self._is_valid_grid_shape():

            raise InvalidGridException(
                "Grid shape {0} missmatch with pinning matrix {1}".format(
                    self._grid.shape, self._pinning_matrix))

        self._grid_cell_size = map(lambda x: int(round(x)), spacings)
        self._set_grid_cell_corners()
        self._update_grid_cells()

        if analysis_directory is not None:

            # Persist the grid and cell size for later inspection.
            np.save(
                os.path.join(analysis_directory,
                             self._paths.grid_pattern.format(self.index + 1)),
                self._grid)

            np.save(
                os.path.join(
                    analysis_directory,
                    self._paths.grid_size_pattern.format(self.index + 1)),
                self._grid_cell_size)
        return True

    def detect_grid(self, im, analysis_directory=None, grid_correction=None):
        """Detect the grid on *im* automatically.

        Returns True on success, False when detection fails (in which case
        the offending image is saved for debugging).
        """
        self._LOGGER.info(
            "Detecting grid on plate {0} using grid correction {1}".format(
                self.index + 1, grid_correction))

        # The direction of the first axis is flipped to make offsetting more logical from user perspective
        # this inversion must be matched by equal inversion in set_grid

        if grid_correction:
            grid_correction = list(grid_correction)
            grid_correction[0] *= -1

        self._init_grid_cells(
            _get_grid_to_im_axis_mapping(self._pinning_matrix, im))

        spacings = self._calculate_grid_and_get_spacings(
            im, grid_correction=grid_correction)

        if self._grid is None or np.isnan(spacings).any():

            # Save the image that failed gridding for later inspection.
            error_file = os.path.join(
                self._analysis_model.output_directory,
                self._paths.experiment_grid_error_image.format(self.index))

            np.save(error_file, im)
            self._LOGGER.warning("Failed to detect grid on plate {0}".format(
                self.index))

            return False

        if not self._is_valid_grid_shape():

            raise InvalidGridException(
                "Grid shape {0} missmatch with pinning matrix {1}".format(
                    self._grid.shape, self._pinning_matrix))

        self._grid_cell_size = map(lambda x: int(round(x)), spacings)
        self._set_grid_cell_corners()
        self._update_grid_cells()

        if analysis_directory is not None:

            np.save(
                os.path.join(analysis_directory,
                             self._paths.grid_pattern.format(self.index + 1)),
                self._grid)

            np.save(
                os.path.join(
                    analysis_directory,
                    self._paths.grid_size_pattern.format(self.index + 1)),
                self._grid_cell_size)

        return True

    def _calculate_grid_and_get_spacings(self, im, grid_correction=None):
        """Run grid detection, store the validated grid, return spacings."""
        validate_parameters = False
        expected_spacings = self._guess_grid_cell_size
        expected_center = tuple([s / 2.0 for s in im.shape])

        draft_grid, _, _, _, spacings, adjusted_values = grid.get_grid(
            im,
            expected_spacing=expected_spacings,
            expected_center=expected_center,
            validate_parameters=validate_parameters,
            grid_shape=self._pinning_matrix,
            grid_correction=grid_correction)

        dx, dy = spacings

        self._grid, _ = grid.get_validated_grid(im, draft_grid, dy, dx,
                                                adjusted_values)

        return spacings

    def _is_valid_grid_shape(self):
        """True when the grid's shape matches the pinning matrix."""
        return all(g == i
                   for g, i in zip(self._grid.shape[1:], self._pinning_matrix))

    def _set_grid_cell_corners(self):
        """Compute lower/upper cell boundaries around each grid position."""
        self._grid_cell_corners = np.zeros(
            (2, 2, self._grid.shape[1], self._grid.shape[2]))

        # For all sets lower values boundaries
        self._grid_cell_corners[
            0, 0, :, :] = self._grid[0] - self._grid_cell_size[0] / 2.0
        self._grid_cell_corners[
            1, 0, :, :] = self._grid[1] - self._grid_cell_size[1] / 2.0

        # For both dimensions sets higher value boundaries
        self._grid_cell_corners[
            0, 1, :, :] = self._grid[0] + self._grid_cell_size[0] / 2.0
        self._grid_cell_corners[
            1, 1, :, :] = self._grid[1] + self._grid_cell_size[1] / 2.0

    def _update_grid_cells(self):
        """Push the freshly computed corners down to every grid cell."""
        for grid_cell in self._grid_cells.itervalues():

            grid_cell.set_grid_coordinates(self._grid_cell_corners)

    def _init_grid_cells(self, dimension_order=(0, 1)):
        """Rebuild the grid cell objects for the (reordered) pinning matrix.

        Resets all grid state and creates one GridCell per position, unless
        non-focal positions are suppressed by the analysis model.
        """
        self._pinning_matrix = (self._pinning_matrix[dimension_order[0]],
                                self._pinning_matrix[dimension_order[1]])
        pinning_matrix = self._pinning_matrix

        self._guess_grid_cell_size = GridCellSizes.get(pinning_matrix)
        self._grid = None
        self._grid_cell_size = None
        self._grid_cells.clear()
        self._features.data.clear()

        polynomial_coeffs = get_calibration_polynomial_coeffs()
        # NOTE: axes 1 and 2 of the model's focus position are swapped here.
        focus_position = (self._analysis_model.focus_position[0],
                          self._analysis_model.focus_position[2],
                          self._analysis_model.focus_position[1]
                          ) if self._analysis_model.focus_position else None

        for row in xrange(pinning_matrix[0]):

            for column in xrange(pinning_matrix[1]):
                cur_position = (self.index, row, column)
                if not self._analysis_model.suppress_non_focal or focus_position == cur_position:

                    is_focus = focus_position == cur_position if focus_position else False
                    grid_cell = GridCell([self._identifier, (row, column)],
                                         polynomial_coeffs,
                                         save_extra_data=is_focus)
                    self._features.data.add(grid_cell.features)
                    self._grid_cells[grid_cell.position] = grid_cell

    def clear_features(self):
        """Clear accumulated features on every grid cell."""
        for grid_cell in self._grid_cells.itervalues():
            grid_cell.clear_features()

    def analyse(self, im, image_model):
        """Analyse every grid cell of this plate against image *im*.

        Detects the grid first if none is set; aborts (clearing features)
        when detection fails.

        :type image_model: scanomatic.models.compile_project_model.CompileImageAnalysisModel
        """

        index = image_model.image.index
        self.image_index = index
        self._LOGGER.info("Processing {0}, index {1}".format(
            self._identifier, index))

        # Grayscale transpose is best-effort; analysis proceeds without it.
        # noinspection PyBroadException
        try:
            transpose_polynomial = image_basics.Image_Transpose(
                sourceValues=image_model.fixture.grayscale.values,
                targetValues=getGrayscale(
                    image_model.fixture.grayscale.name)['targets'])

        except Exception:

            transpose_polynomial = None

        if self._grid is None:
            if not self.detect_grid(im):
                self.clear_features()
                return

        m = self._analysis_model

        for grid_cell in self._grid_cells.itervalues():

            if grid_cell.save_extra_data:
                self._LOGGER.info(
                    "Starting analysis of extra monitored position {0}".format(
                        grid_cell.position))
            _analyse_grid_cell(grid_cell, im, transpose_polynomial, index,
                               None, m)

        self._LOGGER.info("Plate {0} completed".format(self._identifier))
Пример #23
0
    def __one_init__(self):
        """One-time init: start the SoM server, then the RPC front-end."""
        self.logger = logger.Logger("Server Manager")

        # SoM server first, RPC server second (original startup order).
        self._start_som_server()
        self._start_rpc_server()
Пример #24
0
import glob
import numpy as np
import os

from scanomatic.io.paths import Paths
from scanomatic.models.factories.compile_project_factory import CompileImageAnalysisFactory
from scanomatic.io import logger
from scanomatic.image_analysis.image_basics import load_image_to_numpy

_logger = logger.Logger("Image loader")


def _get_project_compilation(analysis_directory, file_name=None):

    experiment_directory = os.sep.join(analysis_directory.split(os.sep)[:-1])
    if file_name:
        project_compilation = os.path.join(experiment_directory, file_name)
    else:
        experiment_name = experiment_directory.split(os.sep)[-1]

        project_compilation = os.path.join(
            experiment_directory,
            Paths().project_compilation_pattern.format(experiment_name))

    if not os.path.isfile(project_compilation):

        candidates = glob.glob(
            os.path.join(experiment_directory,
                         Paths().project_compilation_pattern.format("*")))

        if not candidates:
Пример #25
0
import numpy as np
import matplotlib.pyplot as plt

#
#   INTERNAL DEPENDENCIES
#

import scanomatic.io.logger as logger

#
#   GLOBALS
#

_L = logger.Logger("Strain Handler Module")

_PLATE_STRING = "Plate {0}"

#
#   MEMEBER METHODS
#


def loadCSV2Numpy(path, measure=-1, delim='\t', dtype=np.float):
    """Loads a csv-file produced by QC as a numpy array of plates.

    Args:

        path (string):  The path to the csv-file

    Kwargs:
Пример #26
0
    def __init__(self, path=None, image=None, pattern_image_path=None,
                 scale=1.0, resource_paths=None):
        """Load an analysis image and an optional orientation-guide pattern.

        :param path: Path to the image to analyse (used when *image* absent).
        :param image: Image data submitted directly (takes precedence).
        :param pattern_image_path: Path to the orientation guide image.
        :param scale: Scale factor; stored as its inverse conversion factor.
        :param resource_paths: Fallback location for the pattern image.
        """
        self._path = path
        self._img = None
        self._pattern_img = None
        self._load_error = None
        self._transformed = False
        self._conversion_factor = 1.0 / scale
        self._logger = logger.Logger("Resource Image Analysis")

        # Only probe the filesystem when a pattern path was actually given;
        # the old unconditional os.path.isfile(None) raised TypeError.
        if (pattern_image_path and not os.path.isfile(pattern_image_path)
                and resource_paths is not None):

            pattern_image_path = os.path.join(
                resource_paths.images, os.path.basename(pattern_image_path))

        if pattern_image_path:

            try:
                pattern_img = plt.imread(pattern_image_path)
            except Exception:
                # Narrowed from a bare `except:` so SystemExit and
                # KeyboardInterrupt still propagate.
                self._logger.error(
                    "Could not open orientation guide image at " +
                    str(pattern_image_path))
                self._load_error = True

            if self._load_error is not True:

                # Collapse RGB(A) data to a single channel.
                if len(pattern_img.shape) > 2:
                    pattern_img = pattern_img[:, :, 0]

                self._pattern_img = pattern_img

        if image is not None:
            self._img = np.asarray(image)

        if path:

            if self._img is not None:
                self._logger.warning(
                    "Won't load from path since actually submitted")
            else:
                try:
                    self._img = plt.imread(path)
                except Exception:
                    self._logger.error("Could not open image at " + str(path))
                    self._load_error = True

        # Guard on _img as well: with neither image nor a readable path the
        # old code crashed on None.shape here.
        if self._load_error is not True and self._img is not None:

            if len(self._img.shape) > 2:
                self._img = self._img[:, :, 0]
Пример #27
0
import signal as signal
import grayscale
import scanomatic.io.logger as logger

#
# GLOBALS
#

DEFAULT_GRAYSCALE = grayscale.getDefualtGrayscale()

GRAYSCALE_NAMES = grayscale.getGrayscales()

GRAYSCALES = {gsName: grayscale.getGrayscale(gsName) for
              gsName in GRAYSCALE_NAMES}

_logger = logger.Logger("Resource Image")
'''
DEFAULT_GRAYSCALE = 'Kodak'

GRAYSCALES = {
    'Kodak': {
        'targets': [0, 2, 4, 6, 10, 14, 18, 22, 26, 30, 34, 38,
                    42, 46, 50, 54, 58, 62, 66, 70, 74, 78, 82],
        'width': 55,
        'min_width': 30,
        'sections': 23,
        'lower_than_half_width': 350,
        'higher_than_half_width': 150,
        'length': 28.3,  # 28.57 was previous
    },
    'SilverFast': {
Пример #28
0
except ImportError:
    from PIL import Image
import numpy as np

#
# SCANNOMATIC LIBRARIES
#

import scanomatic.io.logger as logger
import scanomatic.io.app_config as app_config

#
# GLOBALS
#

_logger = logger.Logger("Resource Analysis Support")

#
# FUNCTIONS
#


def save_image_as_png(from_path, **kwargs):
    """Save the image at *from_path* next to itself with a .png extension.

    :param from_path: Path to the source image file.
    :param kwargs: Extra keyword arguments forwarded to PIL ``Image.save``.
    :return: None
    """
    # `base` instead of `file`: the old name shadowed the builtin, and the
    # extension part was unused.
    base, _ = os.path.splitext(from_path)
    Image.open(from_path).save(os.path.extsep.join((base, "png")), **kwargs)


def get_first_rotated(A, B):
    """Evaluates if both have the same orientation (lanscape or standing)
    returns the first so it matches the orientation of the second
Пример #29
0
import scanomatic.io.logger as logger

from types import StringTypes
import smtplib
import socket
import requests
from struct import unpack

try:
    from email import MIMEText, MIMEMultipart
except ImportError:
    from email.MIMEMultipart import MIMEMultipart
    from email.MIMEText import MIMEText

_logger = logger.Logger("Mailer")

_IP = None


def ip_is_local(ip):
    """Determines if ip is local

    Code from http://stackoverflow.com/a/8339939/1099682

    :param ip: and ip-adress
    :type ip : str
    :return: bool
    """
    f = unpack('!I', socket.inet_pton(socket.AF_INET, ip))[0]
    private = (
        [2130706432, 4278190080
Пример #30
0
ORTH_T1 = 0.15
ORTH_T2 = 0.3
GS_ROUGH_INTENSITY_T1 = (256 * 1 / 4)
GS_ROUGH_INTENSITY_T2 = 125
GS_ROUGH_INTENSITY_T3 = 170
SPIKE_UP_T = 1.2
SPIKE_BEST_TOLLERANCE = 0.05
SAFETY_PADDING = 0.2
SAFETY_COEFF = 0.5
NEW_GS_ALG_L_DIFF_T = 0.1
NEW_GS_ALG_L_DIFF_SPIKE_T = 0.3
NEW_GS_ALG_SPIKES_FRACTION = 0.8
NEW_SAFETY_PADDING = 0.2
DEBUG_DETECTION = False

_logger = logger.Logger("Analyze Grayscale")

#
# CLASSES
#


def get_ortho_trimmed_slice(im, grayscale):
    half_width = grayscale['width'] / 2
    im_scaled = im / float(im.max()) - 0.5
    kernel = np.array(grayscale['targets']).repeat(grayscale['length'])
    kernel = kernel.reshape((kernel.size, 1))
    if kernel.size > im.shape[0]:
        return np.array([])

    kernel_scaled = kernel / float(kernel.max()) - 0.5