Example #1
def _get_project_compilation(analysis_directory, file_name=None):

    experiment_directory = os.sep.join(analysis_directory.split(os.sep)[:-1])
    if file_name:
        project_compilation = os.path.join(experiment_directory, file_name)
    else:
        experiment_name = experiment_directory.split(os.sep)[-1]

        project_compilation = os.path.join(
            experiment_directory,
            Paths().project_compilation_pattern.format(experiment_name))

    if not os.path.isfile(project_compilation):

        candidates = glob.glob(
            os.path.join(experiment_directory,
                         Paths().project_compilation_pattern.format("*")))

        if not candidates:
            _logger.error(
                "Could not find any project.compilation file in '{0}'".format(
                    experiment_directory))
            raise ValueError("No project compilation file found")
        elif len(candidates) != 1:
            _logger.error(
                "Found several project.compilation files in '{0}', unsure which to use."
                .format(experiment_directory) +
                "Either remove one of {0} or specify compilation-file in function call"
                .format(candidates))
            raise ValueError("Ambiguous project compilation files")

        project_compilation = candidates[0]

    return project_compilation
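A hedged usage sketch of the path derivation above (hypothetical POSIX layout; assumes Paths().project_compilation_pattern is "{0}.project.compilation", which this snippet does not confirm):

import os

# Hypothetical experiment layout:
#   /projects/exp1/                          <- experiment directory
#   /projects/exp1/exp1.project.compilation  <- compilation file
#   /projects/exp1/analysis/                 <- analysis_directory argument
analysis_directory = "/projects/exp1/analysis"
experiment_directory = os.sep.join(analysis_directory.split(os.sep)[:-1])
experiment_name = experiment_directory.split(os.sep)[-1]
expected = os.path.join(experiment_directory,
                        "{0}.project.compilation".format(experiment_name))
assert expected == "/projects/exp1/exp1.project.compilation"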
Example #2
    def _fixture_remove(name):
        """Remove a fixture by name

        Args:
            name: The name of the fixture to remove

        Returns: json-object with keys
            "success" if removed else
            "reason" to explain why not.

        """
        name = Paths().get_fixture_name(name)
        known_fixtures = tuple(Paths().get_fixture_name(f) for f in rpc_client.get_fixtures())
        if name not in known_fixtures:
            return jsonify(success=False, reason="Unknown fixture")
        source = Paths().get_fixture_path(name)
        path, ext = os.path.splitext(source)
        i = 0
        pattern = "{0}.deleted{1}"
        while os.path.isfile(pattern.format(path, i)):
            i += 1
        try:
            shutil.move(source, pattern.format(path, i))
        except IOError:
            return jsonify(success=False, reason="Error while removing")
        return jsonify(success=True, reason="Happy")
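A small, self-contained sketch of the collision-free "soft delete" naming used above: the file keeps its base path and gains the first unused .deletedN suffix (next_deleted_name is a hypothetical helper, not part of Scan-o-Matic):

import os

def next_deleted_name(path):
    # Mirror the loop above: probe .deleted0, .deleted1, ... until free
    base, _ = os.path.splitext(path)
    i = 0
    while os.path.isfile("{0}.deleted{1}".format(base, i)):
        i += 1
    return "{0}.deleted{1}".format(base, i)

# e.g. "my_fixture.config" -> "my_fixture.deleted0" on first removal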
Example #3
def get_data_file_path(file_path=None, label=''):
    if file_path is None:
        if label:
            file_path = Paths().analysis_calibration_data.format(label + ".")
        else:
            file_path = Paths().analysis_calibration_data.format(label)

    return file_path
Example #4
def _load_grid_info(analysis_directory, plate):
    # Grid files on disk are numbered from 1, hence "plate + 1" below
    grid = np.load(
        os.path.join(analysis_directory,
                     Paths().grid_pattern.format(plate + 1)))
    grid_size = np.load(
        os.path.join(analysis_directory,
                     Paths().grid_size_pattern.format(plate + 1)))
    return grid, grid_size
Example #5
    def get_compile_instructions(project=None):

        base_url = "/api/compile/instructions"

        path = convert_url_to_path(project)

        model = CompileProjectFactory.serializer.load_first(path)
        """:type model: CompileInstructionsModel"""

        if model is None:
            scan_instructions = [
                convert_path_to_url("/api/scan/instructions", p) for p in glob(
                    os.path.join(path,
                                 Paths().scan_project_file_pattern.format(
                                     "*")))
            ]

        else:
            scan_instructions = [
                convert_path_to_url("/api/scan/instructions", p) for p in glob(
                    os.path.join(os.path.dirname(path),
                                 Paths().scan_project_file_pattern.format(
                                     "*")))
            ]

        compile_instructions = [
            convert_path_to_url(base_url, c) for c in glob(
                os.path.join(
                    path,
                    Paths().project_compilation_instructions_pattern.format(
                        "*")))
        ]

        if model is not None:

            return jsonify(**json_response(
                ["urls", "compile_instructions", "scan_instructions"],
                dict(instructions={
                    'fixture': model.fixture_name,
                    'fixture_type': model.fixture_type.name,
                    'compilation': [dict(**i) for i in model.images],
                    'email': model.email,
                },
                     compile_instructions=compile_instructions,
                     scan_instructions=scan_instructions,
                     **get_search_results(path, base_url))))

        else:
            return jsonify(**json_response(
                ["urls", "compile_instructions", "scan_instructions"],
                dict(compile_instructions=compile_instructions,
                     scan_instructions=scan_instructions,
                     **get_search_results(path, base_url))))
Example #6
    def setup(self, job, redirect_logging=True):

        if self._running:
            self.add_message("Cannot change settings while running")
            return

        self._redirect_logging = redirect_logging

        if not self._analysis_job.output_directory:
            AnalysisModelFactory.set_default(
                self._analysis_job,
                [self._analysis_job.FIELD_TYPES.output_directory])
            self._logger.info("Using default '{0}' output directory".format(
                self._analysis_job.output_directory))
        if not self._analysis_job.compile_instructions:
            self._analysis_job.compile_instructions = \
                Paths().get_project_compile_instructions_path_from_compilation_path(self._analysis_job.compilation)
            self._logger.info(
                "Setting to default compile instructions path {0}".format(
                    self._analysis_job.compile_instructions))

        allow_start = AnalysisModelFactory.validate(self._analysis_job)

        self._orginal_model = AnalysisModelFactory.copy(self._analysis_job)
        AnalysisModelFactory.set_absolute_paths(self._analysis_job)

        try:
            self._scanning_instructions = ScanningModelFactory.serializer.load_first(
                Paths(
                ).get_scan_instructions_path_from_compile_instructions_path(
                    self._analysis_job.compile_instructions))
        except IOError:
            self._logger.warning(
                "No information found about how the scanning was done," +
                " using empty instructions instead")

        if not self._scanning_instructions:
            self._scanning_instructions = ScanningModelFactory.create()

        self.ensure_default_values_if_missing()

        self._allow_start = allow_start
        if not self._allow_start:
            self._logger.error(
                "Can't perform analysis; instructions don't validate.")
            for bad_instruction in AnalysisModelFactory.get_invalid(
                    self._analysis_job):
                self._logger.error("Bad value {0}={1}".format(
                    bad_instruction, self._analysis_job[bad_instruction.name]))
            self.add_message(
                "Can't perform analysis; instructions don't validate.")
            self._stopping = True
Example #7
    def _compile():

        if request.args.get("run"):

            if not rpc_client.online:
                return jsonify(success=False,
                               reason="Scan-o-Matic server offline")

            path = request.values.get('path')
            path = os.path.abspath(
                path.replace('root',
                             Config().paths.projects_root))
            fixture_is_local = bool(int(request.values.get('local')))
            fixture = request.values.get("fixture")
            chain_steps = bool(request.values.get('chain', default=1,
                                                  type=int))
            images = request.values.getlist('images[]')

            _logger.info(
                "Attempting to compile on path {0}, as {1} fixture{2} (Chaining: {3}), images {4}"
                .format(
                    path, 'local' if fixture_is_local else 'global',
                    fixture_is_local and "."
                    or " (Fixture {0}).".format(fixture), chain_steps, images))

            dict_model = CompileProjectFactory.dict_from_path_and_fixture(
                path,
                fixture=fixture,
                is_local=fixture_is_local,
                compile_action=COMPILE_ACTION.InitiateAndSpawnAnalysis
                if chain_steps else COMPILE_ACTION.Initiate)

            if images:
                dict_model['images'] = [
                    p for p in dict_model['images']
                    if os.path.basename(p['path']) in images
                ]
                if len(dict_model['images']) != len(images):
                    return jsonify(
                        success=False,
                        reason="The manually set list of images could not be "
                               "satisfied with the images in the specified folder")

            job_id = rpc_client.create_compile_project_job(dict_model)

            return jsonify(success=bool(job_id),
                           reason="" if job_id else "Invalid parameters")

        return send_from_directory(Paths().ui_root, Paths().ui_compile_file)
Example #8
    def _markers_detect(fixture_name):

        markers = request.values.get('markers', default=3, type=int)
        image = request.files.get('image')
        if image is None:
            return jsonify(success=False, reason="No image provided")
        name = os.path.basename(fixture_name)
        image_name, ext = os.path.splitext(image.filename)
        _logger.info("Working on detecting markers for fixture {0} using image {1} ({2})".format(
            name, image.filename, image_is_allowed(ext)))

        if name and image_is_allowed(ext):

            fixture_file = Paths().get_fixture_path(name)

            path = os.path.extsep.join((fixture_file, ext.lstrip(os.path.extsep)))
            image.save(path)

            fixture = get_fixture_image(name, path)
            fixture.run_marker_analysis(markings=markers)

            save_image_as_png(path)

            return jsonify(
                success=True,
                markers=json_data(fixture['current'].get_marker_positions()),
                image=os.path.basename(fixture_file))

        _logger.warning("Refused detection (keys files: {0} values: {1})".format(
            request.files.keys(), request.values.keys()))

        return jsonify(
            success=False,
            reason="No fixture image name" if image_is_allowed(ext) else "Image type not allowed")
Example #9
def remove_calibration(label, degree=None, file_path=None):

    if file_path is None:
        file_path = Paths().analysis_polynomial

    data = load_calibrations(file_path)
    keys = tuple(data.keys())
    has_changed = False

    for key in keys:

        if degree:

            if key == "{0}_{1}".format(label, degree):
                del data[key]
                has_changed = True
                break
        elif key.startswith("{0}_".format(label)):
            del data[key]
            has_changed = True

    if has_changed:
        _safe_copy_file_if_needed(file_path)
        with open(file_path, 'w') as fh:
            json.dump(data, fh)

        return True

    else:
        _logger.warning(
            "No polynomial was found matching the criteria (label={0}, degree={1})"
            .format(label, degree))
        return False
Example #10
def load_calibrations(file_path=None):

    if file_path is None:
        file_path = Paths().analysis_polynomial

    try:

        with open(file_path, 'r') as fh:

            try:
                data = json.load(fh)
            except ValueError:
                data = {}
                fh.seek(0)
                for i, l in enumerate(fh):
                    try:
                        # Legacy format: one eval()-able (key, value) tuple per line
                        key, value = eval(l)
                        data[key] = value
                    except (TypeError, ValueError, SyntaxError):
                        _logger.info(
                            "Skipping line {0}: '{1}' (can't parse)".format(
                                i, l.strip()))

    except IOError:
        _logger.warning("Could not locate file '{0}'".format(file_path))
        data = {}

    return data
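For reference, a hedged sketch (hypothetical contents, not copied from a real install) of the two on-disk formats the loader above accepts:

import json

# Current format: one JSON object mapping "<label>_<degree>" keys to
# polynomial coefficient lists.
current_format = json.dumps({"default_5": [0.0, 0.0, 0.0, 0.0, 0.0, 1.0]})

# Legacy format handled by the eval() fallback: one (key, value) tuple
# per line.
legacy_format = "('default_5', [0.0, 0.0, 0.0, 0.0, 0.0, 1.0])\n"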
Example #11
    def setup(self, job):

        self._logger.info("Setup called")

        self._compile_job = RPC_Job_Model_Factory.serializer.load_serialized_object(
            job)[0].content_model
        self._job.content_model = self._compile_job

        if self._compile_job.images is None:
            self._compile_job.images = tuple()

        self._logger.set_output_target(
            Paths().get_project_compile_log_path_from_compile_model(
                self._compile_job),
            catch_stdout=True,
            catch_stderr=True)
        self._logger.surpress_prints = True

        self._logger.info("Doing setup")
        self._logger.info("Action {0}".format(
            self._compile_job.compile_action))
        self._compile_instructions_path = Paths(
        ).get_project_compile_instructions_path_from_compile_model(
            self._compile_job)
        if self._compile_job.compile_action in (
                COMPILE_ACTION.Initiate,
                COMPILE_ACTION.InitiateAndSpawnAnalysis):

            # Start fresh: remove any previous compile instructions file
            try:
                os.remove(self._compile_instructions_path)
            except OSError:
                pass

            CompileProjectFactory.serializer.dump(
                self._compile_job, self._compile_instructions_path)

        self._tweak_path()
        self._load_fixture()
        self._allow_start = True

        if self._fixture_settings is None:
            self._logger.critical(
                "No fixture loaded, name probably not recognized or old fixture settings file"
            )
            self._stopping = True

        self._start_time = time.time()
Example #12
    def dump(self,
             directory,
             new_name=None,
             force_dump_scan_instructions=False):

        self._logger.warning(
            """This functionality has not been fully tested; if you test it and it works fine, let Martin know.
            If it doesn't work, let him know too.""")
        directory = os.path.abspath(directory)
        os.makedirs(directory)
        if new_name is None:
            new_name = os.path.basename(directory)

        try:
            with open(
                    os.path.join(
                        directory,
                        Paths().project_compilation_pattern.format(new_name)),
                    'w') as fh:
                while True:
                    next_model = self.get_next_image_model()
                    if next_model is None:
                        break
                    model = CompileImageAnalysisFactory.copy(next_model)
                    self._update_image_path_if_needed(model, directory)
                    CompileImageAnalysisFactory.serializer.dump_to_filehandle(
                        model, fh)
        except IOError:
            self._logger.error("Could not save to directory")
            return

        # Use the instructions pattern here so the compilation file written
        # above is not overwritten
        compile_instructions = os.path.join(
            directory,
            Paths().project_compilation_instructions_pattern.format(new_name))
        CompileProjectFactory.serializer.dump(self._compile_instructions,
                                              compile_instructions)

        if not glob(os.path.join(directory, Paths().scan_project_file_pattern.format('*'))) or \
                force_dump_scan_instructions:

            scan_instructions = os.path.join(
                directory,
                Paths().scan_project_file_pattern.format(new_name))
            ScanningModelFactory.serializer.dump(self._scanner_instructions,
                                                 scan_instructions)
Example #13
    def get_save_data_path(self, base_path):

        if base_path is None:
            base_path = Paths().log

        return os.path.join(
            base_path, "grid_cell_{0}_{1}_{2}".format(
                self.image_index, self._identifier[0][1],
                "_".join(map(str, self._identifier[-1][::-1]))))
Example #14
    def _status(status_type=""):

        if status_type != "" and not rpc_client.online:
            return jsonify(success=False, reason="Server offline")

        if status_type == 'queue':
            return jsonify(success=True, data=rpc_client.get_queue_status())
        elif 'scanner' in status_type:
            return jsonify(success=True, data=rpc_client.get_scanner_status())
        elif 'job' in status_type:
            return jsonify(success=True, data=rpc_client.get_job_status())
        elif status_type == 'server':
            return jsonify(success=True, data=rpc_client.get_status())
        elif status_type == "":

            return send_from_directory(Paths().ui_root, Paths().ui_status_file)
        else:
            return jsonify(success=False, reason='Unknown status request')
Example #15
    def _fixture_get_image(name):
        """Get downscaled png image for the fixture.

        Args:
            name: Name of the fixture

        Returns: image

        """
        image = os.path.extsep.join((name, "png"))
        _logger.info("Sending fixture image {0}".format(image))
        return send_from_directory(Paths().fixtures, image)
Example #16
def produce_grid_images(path=".", image=None, mark_position=None):

    project_path = os.path.dirname(os.path.abspath(path))
    compilations = glob.glob(os.path.join(project_path,
                                          Paths().project_compilation_pattern.format("*")))

    if not compilations:
        raise ValueError("There are no compilations in the parent directory")

    compilation = compilations[0]
    _logger.info("Using {0}".format(os.path.basename(compilation)))
    compilation = CompileImageAnalysisFactory.serializer.load(compilation)

    image_path = compilation[-1].image.path
    plates = compilation[-1].fixture.plates
    if image is not None:
        for c in compilation:
            if os.path.basename(c.image.path) == os.path.basename(image):
                image_path = c.image.path
                plates = c.fixture.plates
                break

    try:
        image = load_image_to_numpy(image_path)
    except IOError:

        try:
            image = load_image_to_numpy(os.path.join(project_path, os.path.basename(image_path)))
        except IOError:
            raise ValueError("Image doesn't exist, can't show gridding")

    for plate in plates:

        plate_image = image[plate.y1: plate.y2, plate.x1: plate.x2]
        grid = np.load(os.path.join(path, Paths().grid_pattern.format(plate.index)))
        make_grid_im(plate_image, grid, os.path.join(path, Paths().experiment_grid_image_pattern.format(plate.index)),
                     marked_position=mark_position)
Example #17
    def _fixture_local_data(project):

        path = os.path.join(convert_url_to_path(project), Paths().experiment_local_fixturename)

        try:
            fixture = FixtureFactory.serializer.load_first(path)
            if fixture is None:
                return jsonify(
                    success=False,
                    reason="File is missing")
            return jsonify(
                success=True, grayscale=dict(**fixture.grayscale),
                plates=[dict(**plate) for plate in fixture.plates],
                markers=zip(fixture.orientation_marks_x, fixture.orientation_marks_y))
        except IndexError:
            return jsonify(success=False, reason="Fixture without data")
        except ConfigError:
            return jsonify(success=False, reason="Fixture data corrupted")
Example #18
    def __init__(self, output_directory, xml_model):
        """

        :type xml_model: scanomatic.models.analysis_model.XMLModel
        """
        self._directory = output_directory
        self._formatting = xml_model
        self._logger = logger.Logger("XML writer")
        self._paths = Paths()

        self._outdata_full = os.sep.join((output_directory, "analysis.xml"))
        self._outdata_slim = os.sep.join(
            (output_directory, "analysis_slimmed.xml"))

        self._file_handles = {'full': None, 'slim': None}
        self._open_tags = list()

        self._initialized = self._open_outputs(file_mode='w')
Example #19
    def _validate_fixture(cls, model):
        """
        :type model: scanomatic.models.compile_project_model.CompileInstructionsModel
        """
        if model.fixture_type is compile_project_model.FIXTURE.Local:
            if os.path.isfile(
                    os.path.join(model.path,
                                 Paths().experiment_local_fixturename)):
                return True
            else:
                return model.FIELD_TYPES.fixture_type
        elif model.fixture_type is compile_project_model.FIXTURE.Global:
            if model.fixture_name in Fixtures():
                return True
            else:
                return model.FIELD_TYPES.fixture_name
        else:
            return model.FIELD_TYPES.fixture_type
Example #20
def add_calibration(label, poly, file_path=None):

    if file_path is None:
        file_path = Paths().analysis_polynomial

    _safe_copy_file_if_needed(file_path)

    data = load_calibrations(file_path)

    key = "{0}_{1}".format(label, len(poly) - 1)
    if key in data:
        _logger.warning("Replacing previous calibration {0}: {1}".format(
            key, data[key]))

    data[key] = poly.tolist() if hasattr(poly, 'tolist') else poly

    with open(file_path, 'w') as fh:
        json.dump(data, fh)
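A minimal sketch of the key convention that add_calibration, load_calibrations, and remove_calibration share: keys are "<label>_<degree>", with the degree inferred from the coefficient count (label and poly here are hypothetical):

poly = [2.0, 0.0, 1.0]  # hypothetical coefficients, degree 2
label = "example"       # hypothetical label
key = "{0}_{1}".format(label, len(poly) - 1)
assert key == "example_2"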
Example #21
def get_project_name(project_path):
    no_name = None

    if not path_is_in_jail(project_path):
        return no_name

    candidates = glob.glob(
        os.path.join(project_path,
                     Paths().scan_project_file_pattern.format("*")))
    if candidates:
        for candidate in candidates:
            model = ScanningModelFactory.serializer.load_first(candidate)
            if model:
                return model.project_name if model.project_name else no_name

    if project_path:
        return get_project_name(os.path.dirname(project_path))

    return no_name
Example #22
    def _load_fixture(self):

        if self._compile_job.fixture_type is FIXTURE.Global:
            self._fixture_settings = Fixtures()[self._compile_job.fixture_name]
            if self._fixture_settings and \
                    self._compile_job.compile_action in (COMPILE_ACTION.Initiate,
                                                         COMPILE_ACTION.InitiateAndSpawnAnalysis):

                self._fixture_settings.update_path_to_local_copy(
                    os.path.dirname(self._compile_job.path))
                self._fixture_settings.save()

        else:
            dir_path = os.path.dirname(self._compile_job.path)
            self._logger.info(
                "Attempting to load local fixture copy in directory {0}".
                format(dir_path))
            self._fixture_settings = FixtureSettings(
                Paths().experiment_local_fixturename, dir_path=dir_path)
Example #23
    def load_scanner_instructions(self, path=None):
        """

        Args:
            path: Path to the instructions, or None to infer it

        """
        if path is None:
            try:
                path = glob(
                    os.path.join(
                        os.path.dirname(self._compilation_path),
                        Paths().scan_project_file_pattern.format('*')))[0]
            except IndexError:
                self._logger.warning(
                    "No information of start time of project, can't safely be joined with others"
                )
                return

        self._scanner_instructions = ScanningModelFactory.serializer.load_first(
            path)
Example #24
    def _config():

        app_conf = Config()

        action = request.args.get("action")
        if action == "update":

            data_object = request.get_json(silent=True, force=True)
            if not data_object:
                data_object = request.values

            app_conf.number_of_scanners = data_object["number_of_scanners"]
            app_conf.power_manager.number_of_sockets = data_object[
                "power_manager"]["sockets"]
            app_conf.power_manager.host = data_object["power_manager"]["host"]
            app_conf.power_manager.mac = data_object["power_manager"]["mac"]
            app_conf.power_manager.name = data_object["power_manager"]["name"]
            app_conf.power_manager.password = data_object["power_manager"][
                "password"]
            app_conf.power_manager.type = POWER_MANAGER_TYPE[
                data_object["power_manager"]["type"]]
            app_conf.paths.projects_root = data_object["paths"][
                "projects_root"]
            app_conf.computer_human_name = data_object["computer_human_name"]
            app_conf.mail.warn_scanning_done_minutes_before = data_object[
                "mail"]["warn_scanning_done_minutes_before"]

            bad_data = []
            success = app_conf.validate(bad_data)
            app_conf.save_current_settings()
            return jsonify(success=success,
                           reason=None
                           if success else "Bad data for {0}".format(bad_data))
        elif action:
            return jsonify(success=False, reason="Not implemented")

        return render_template(Paths().ui_settings_template,
                               **app_conf.model_copy())
Example #25
    def _fixture_data(name=None):
        """Get the specifications of a fixture

        Args:
            name: The name of the fixture

        Returns: json-object with keys
            "plates": an array of key-value arrays of the
                included plates' specs
            "grayscale": a key-value array of its specs
            "markers": a 2D array of the marker centers
            "success" if the fixture was found and valid, else
            "reason" to explain why not.

        """
        if not rpc_client.online:
            return jsonify(success=False, reason="Scan-o-Matic server offline")
        elif name in rpc_client.get_fixtures():
            path = Paths().get_fixture_path(name)
            try:
                fixture = FixtureFactory.serializer.load_first(path)
                if fixture is None:
                    return jsonify(
                        success=False,
                        reason="File is missing"
                    )
                return jsonify(
                    success=True, grayscale=dict(**fixture.grayscale),
                    plates=[dict(**plate) for plate in fixture.plates],
                    markers=zip(fixture.orientation_marks_x, fixture.orientation_marks_y))
            except IndexError:
                return jsonify(success=False, reason="Fixture without data")
            except ConfigError:
                return jsonify(success=False, reason="Fixture data corrupted")
        else:
            return jsonify(success=False, reason="Unknown fixture")
Example #26
    def get_analysis_instructions(project=None):

        base_url = "/api/analysis/instructions"

        path = convert_url_to_path(project)

        analysis_file = os.path.join(path, Paths().analysis_model_file)
        model = AnalysisModelFactory.serializer.load_first(analysis_file)
        """:type model: AnalysisModel"""

        if model is None:

            return jsonify(**json_response(
                ["urls"], dict(**get_search_results(path, base_url))))

        return jsonify(**json_response(
            ["urls", "compile_instructions"],
            dict(instructions={
                'grayscale':
                "one-time" if model.one_time_grayscale else "dynamic",
                'positioning':
                "one-time" if model.one_time_positioning else "dynamic",
                'compilation': model.compilation,
                'compile_instructions': model.compile_instructions,
                'email': model.email,
                'grid_model': {
                    'gridding_offsets': model.grid_model.gridding_offsets,
                    'reference_grid_folder':
                    model.grid_model.reference_grid_folder
                },
            },
                 compile_instructions=[
                     convert_path_to_url("/api/compile/instructions",
                                         model.compile_instructions)
                 ],
                 **get_search_results(path, base_url))))
Example #27
    If it doesn't work, you need to check the setup
    output above to see where the files were copied and
    extend the path accordingly.

    Alternatively, if you install Scan-o-Matic for all
    users then the launch scripts should be copied
    into a folder that is already in path.

    If you use a USB-connected PowerManager, make sure
    sispmctl is installed.

"""

import json
import os
import sys

from scanomatic.io.paths import Paths

try:
    with open(Paths().source_location_file, mode='w') as fh:
        directory = os.path.dirname(
            os.path.join(os.path.abspath(os.path.expanduser(os.path.curdir)),
                         sys.argv[0]))
        json.dump({'location': directory, 'branch': branch}, fh)

except IOError:
    _logger.warning(
        "Could not write info for future upgrades. You should stick to manual upgrades"
    )

# postSetup.CheckDependencies(package_dependencies)

_logger.info("Install Complete")
Example #28
def get_fixture_image_by_name(name, ext="tiff"):

    fixture_file = Paths().get_fixture_path(name)
    image_path = os.path.extsep.join((fixture_file, ext))
    return get_fixture_image(name, image_path)
Example #29
    def path(self):

        return Paths().fixture_grid_history_pattern.format(
            self._fixture_settings.path)
Example #30
def detect_grayscale(im_trimmed, grayscale):

    gray_scale = []
    grayscale_segment_centers = []

    if im_trimmed is None or sum(im_trimmed.shape) == 0:

        _logger.error("No image loaded or null image")
        return None, None

    rect = ([0, 0], im_trimmed.shape)
    mid_ortho_slice = (rect[1][1] + rect[0][1]) / 2.0
    mid_ortho_trimmed = mid_ortho_slice - rect[0][1]
    _logger.info("Loaded pre-trimmed image slice for GS detection")

    if DEBUG_DETECTION:
        np.save(os.path.join(Paths().log, 'gs_section_used_in_detection.npy'),
                im_trimmed)

    # THE 1D SIGNAL ALONG THE GS
    para_signal_trimmed_im = np.mean(im_trimmed, axis=1)

    if DEBUG_DETECTION:
        np.save(os.path.join(Paths().log, 'gs_para_signal_trimmed_im.npy'),
                para_signal_trimmed_im)

    # FOUND GS-SEGMENT DIFFERENCE TO EXPECTED SIZE
    expected_strip_size = float(grayscale['length'] * grayscale['sections'])

    gs_l_diff = abs(1 - para_signal_trimmed_im.size / expected_strip_size)

    up_spikes = signal.get_signal(para_signal_trimmed_im, SPIKE_UP_T)

    if DEBUG_DETECTION:
        np.save(os.path.join(Paths().log, "gs_up_spikes.npy"), up_spikes)

    if gs_l_diff < NEW_GS_ALG_L_DIFF_T:

        deltas, observed_spikes, observed_to_expected_map = signal.get_signal_data(
            para_signal_trimmed_im, up_spikes, grayscale,
            grayscale["length"] * NEW_GS_ALG_L_DIFF_SPIKE_T)

        # IF GS-SECTION SEEMS TO BE RIGHT SIZE FOR THE WHOLE GS
        # THEN THE SECTIONING PROBABLY IS A GOOD ESTIMATE FOR THE GS
        # IF SPIKES MATCHES MOST OF THE EXPECTED EDGES
        if ((np.isfinite(deltas).sum() - np.isnan(deltas[0]) -
             np.isnan(deltas[-1])) / float(grayscale['sections']) >
                NEW_GS_ALG_SPIKES_FRACTION):

            if DEBUG_DETECTION:
                np.save(os.path.join(Paths().log, "gs_pos_diffs.npy"),
                        observed_to_expected_map)
                np.save(os.path.join(Paths().log, "gs_deltas.npy"), deltas)
                np.save(os.path.join(Paths().log, "gs_observed_spikes.npy"),
                        observed_spikes)

            edges = signal.get_signal_edges(observed_to_expected_map, deltas,
                                            observed_spikes,
                                            grayscale['sections'])

            fin_edges = np.isfinite(edges)
            where_fin_edges = np.where(fin_edges)[0]

            if DEBUG_DETECTION:
                np.save(os.path.join(Paths().log, "gs_edges.npy"), edges)

            # GET THE FREQ
            frequency = np.diff(edges[where_fin_edges[0]:where_fin_edges[-1]],
                                1)
            frequency = frequency[np.isfinite(frequency)].mean()

            if not np.isfinite(frequency):
                _logger.critical(
                    "No frequency was detected, thus no grayscale")
                return None, None

            edges = signal.extrapolate_edges(edges, frequency,
                                             para_signal_trimmed_im.size)

            if edges.size != grayscale['sections'] + 1:
                _logger.critical(
                    "Number of edges doesn't correspond to the grayscale segments ({0}!={1})"
                    .format(edges.size, grayscale['sections'] + 1))
                return None, None

            # EXTRACTING SECTION MIDPOINTS
            grayscale_segment_centers = np.interp(
                np.arange(grayscale['sections']) + 0.5,
                np.arange(grayscale['sections'] + 1), edges)

            _logger.info("GRAYSCALE: Got signal with new method")

            # CHECKING OVERFLOWS
            if grayscale_segment_centers[
                    0] - frequency * NEW_SAFETY_PADDING < 0:
                grayscale_segment_centers += frequency
            if (grayscale_segment_centers[-1] + frequency * NEW_SAFETY_PADDING
                    > para_signal_trimmed_im.size):
                grayscale_segment_centers -= frequency

            # SETTING ABS POS REL TO WHOLE IM-SECTION
            grayscale_segment_centers += rect[0][0]
            _logger.info("Offsetting centers with {0}".format(rect[0][0]))

            if DEBUG_DETECTION:
                np.save(os.path.join(Paths().log, "gs_segment_centers.npy"),
                        grayscale_segment_centers)

            val_orth = grayscale['width'] * NEW_SAFETY_PADDING
            val_para = frequency * NEW_SAFETY_PADDING

            # SETTING VALUE TOP
            top = mid_ortho_trimmed - val_orth
            if top < 0:
                top = 0

            # SETTING VALUE BOTTOM
            bottom = mid_ortho_trimmed + val_orth + 1
            if bottom >= im_trimmed.shape[1]:
                bottom = im_trimmed.shape[1] - 1

            if DEBUG_DETECTION:
                np.save(os.path.join(Paths().log, "gs_slice.npy"), im_trimmed)

            for i, pos in enumerate(grayscale_segment_centers):

                left = pos - val_para

                if left < 0:
                    left = 0

                right = pos + val_para

                if right >= im_trimmed.shape[0]:
                    right = im_trimmed.shape[0] - 1

                gray_scale.append(
                    iqr_mean(im_trimmed[int(round(left)):int(round(right)),
                                        int(round(top)):int(round(bottom))]))

                if DEBUG_DETECTION:
                    np.save(
                        os.path.join(Paths().log,
                                     "gs_segment_{0}.npy".format(i)),
                        im_trimmed[int(round(left)):int(round(right)),
                                   int(round(top)):int(round(bottom))])

        else:

            _logger.warning("New method failed, using fallback")

    else:

        _logger.warning(
            "Skipped new method, threshold not met ({0} > {1}; slice {2})".
            format(gs_l_diff, NEW_GS_ALG_L_DIFF_T, rect))

    if len(grayscale_segment_centers) == 0:

        _logger.warning("Using fallback method")

        best_spikes = signal.get_best_spikes(up_spikes,
                                             grayscale['length'],
                                             tollerance=SPIKE_BEST_TOLLERANCE,
                                             require_both_sides=False)

        frequency = signal.get_perfect_frequency2(best_spikes,
                                                  grayscale['length'])

        # Sections + 1 because actually looking at edges to sections
        offset = signal.get_best_offset(grayscale['sections'] + 1,
                                        best_spikes,
                                        frequency=frequency)

        s = signal.get_true_signal(im_trimmed.shape[0],
                                   grayscale['sections'] + 1,
                                   up_spikes,
                                   frequency=frequency,
                                   offset=offset)

        if s is None:

            _logger.warning(
                ("GRAYSCALE, no signal detected for f={0} and"
                 " offset={1} in best_spikes={2} from spikes={3}").format(
                     frequency, offset, best_spikes, up_spikes))

            return None, None

        if s[0] - frequency * SAFETY_PADDING < 0:

            _logger.warning("GRAYSCALE, the signal got adjusted one interval"
                            " due to lower bound overshoot")

            s += frequency

        if s[-1] + frequency * SAFETY_PADDING > para_signal_trimmed_im.size:

            _logger.warning("GRAYSCALE, the signal got adjusted one interval"
                            " due to upper bound overshoot")

            s -= frequency

        ortho_half_height = grayscale['width'] / 2.0 * SAFETY_COEFF

        # SETTING TOP
        top = mid_ortho_trimmed - ortho_half_height
        if top < 0:
            top = 0

        # SETTING BOTTOM
        bottom = mid_ortho_trimmed + ortho_half_height
        if bottom >= im_trimmed.shape[1]:
            bottom = im_trimmed.shape[1] - 1

        for pos in xrange(s.size - 1):

            mid = s[pos:pos + 2].mean() + rect[0][0]

            grayscale_segment_centers.append(mid)

            left = grayscale_segment_centers[
                -1] - 0.5 * frequency * SAFETY_COEFF

            if left < 0:
                left = 0

            right = grayscale_segment_centers[
                -1] + 0.5 * frequency * SAFETY_COEFF

            if right >= im_trimmed.shape[0]:
                right = im_trimmed.shape[0] - 1

            gray_scale.append(iqr_mean(
                im_trimmed[int(round(left)):int(round(right)),
                           int(round(top)):int(round(bottom))]))

    gray_scale, grayscale_segment_centers = signal.get_higher_second_half_order_according_to_first(
        gray_scale, grayscale_segment_centers)

    if DEBUG_DETECTION:
        np.save(os.path.join(Paths().log, "gs_final_values.npy"), gray_scale)

    return grayscale_segment_centers, gray_scale
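iqr_mean is imported from elsewhere in Scan-o-Matic; as a rough sketch of what such a helper typically computes (an assumption, not the project's implementation):

import numpy as np

def iqr_mean_sketch(values):
    # Assumption: mean of the values inside the interquartile range.
    flat = np.asarray(values, dtype=float).ravel()
    q1, q3 = np.percentile(flat, [25, 75])
    inside = flat[(flat >= q1) & (flat <= q3)]
    return inside.mean() if inside.size else flat.mean()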