Example 1
class TestImageCache(unittest.TestCase):
    """
    Checks image cache is working as expected.
    """
    def setUp(self):
        """
        Create a fresh cache backed by a throwaway temporary directory.

        Previously the directory was created in ``__init__`` with
        ``tempfile.mkdtemp`` and never removed; since ``unittest``
        instantiates one test-case object per test method, that leaked one
        directory per test. ``setUp`` + ``addCleanup`` removes it after
        each test.
        """
        tmpdir = tempfile.TemporaryDirectory(prefix="flatisfy-")
        self.addCleanup(tmpdir.cleanup)
        self.IMAGE_CACHE = ImageCache(  # pylint: disable=invalid-name
            storage_dir=tmpdir.name)

    def test_invalid_url(self):
        """
        Check that it returns nothing on an invalid URL.
        """
        # HTTP error statuses must yield None, not a cached entry.
        # See https://framagit.org/phyks/Flatisfy/issues/116.
        self.assertIsNone(
            self.IMAGE_CACHE.get("https://httpbin.org/status/404"))
        self.assertIsNone(
            self.IMAGE_CACHE.get("https://httpbin.org/status/500"))

    def test_invalid_data(self):
        """
        Check that it returns nothing on an invalid data.
        """
        # The httpbin root serves HTML, not an image; the cache is
        # expected to return None for it.
        # See https://framagit.org/phyks/Flatisfy/issues/116.
        self.assertIsNone(self.IMAGE_CACHE.get("https://httpbin.org/"))
Example 2
def download_images(flats_list, config):
    """
    Download images for all flats in the list, to serve them locally.

    :param flats_list: A list of flats dicts.
    :param config: A config dict.
    """
    storage_dir = os.path.join(config["data_directory"], "images")
    cache = ImageCache(storage_dir=storage_dir)
    for flat in flats_list:
        for photo in flat["photos"]:
            # Fetch the photo through the cache; a falsy result means the
            # download failed.
            fetched = cache.get(photo["url"])
            # Record the local filename only when fetching succeeded.
            if fetched:
                photo["local"] = cache.compute_filename(photo["url"])
Example 3
 def __init__(self, *args, **kwargs):
     """
     Build the test case with an image cache stored in a fresh
     temporary directory.
     """
     # pylint: disable=invalid-name
     storage = tempfile.mkdtemp(prefix="flatisfy-")
     self.IMAGE_CACHE = ImageCache(storage_dir=storage)
     super(TestImageCache, self).__init__(*args, **kwargs)
Example 4
def deep_detect(flats_list):
    """
    Deeper detection of duplicates based on any available data.

    Every flat is compared pairwise against every other flat; a score of
    "common items" is accumulated and a pair is marked as duplicates once
    the score reaches a minimal threshold.  The checks are implemented as
    ``assert`` statements inside a ``try`` block: the first failing check
    (or a ``TypeError`` from a ``None`` field) aborts the comparison and
    the pair is considered not duplicates.

    :param flats_list: A list of flats dicts.
    :return: A tuple of the deduplicated list of flat dicts and the list of all
    the flats objects that should be removed and considered as duplicates (they
    were already merged).
    """

    # Cache downloaded photos so each photo URL is fetched at most once
    # across all pairwise comparisons.
    photo_cache = ImageCache()

    LOGGER.info("Running deep duplicates detection.")
    # Maps a flat id to the list of flat ids it matches (including itself).
    matching_flats = collections.defaultdict(list)
    for i, flat1 in enumerate(flats_list):
        matching_flats[flat1["id"]].append(flat1["id"])
        for j, flat2 in enumerate(flats_list):
            # Only compare each unordered pair once (j < i).
            if i <= j:
                continue

            # Pair already marked as matching by a previous iteration.
            if flat2["id"] in matching_flats[flat1["id"]]:
                continue

            n_common_items = 0
            try:
                # They should have the same area, up to one unit
                assert abs(flat1["area"] - flat2["area"]) < 1
                n_common_items += 1

                # They should be at the same price, up to one unit
                assert abs(flat1["cost"] - flat2["cost"]) < 1
                n_common_items += 1

                # They should have the same number of bedrooms if this was
                # fetched for both
                if flat1["bedrooms"] and flat2["bedrooms"]:
                    assert flat1["bedrooms"] == flat2["bedrooms"]
                    n_common_items += 1

                # They should have the same utilities (included or excluded for
                # both of them), if this was fetched for both
                if flat1["utilities"] and flat2["utilities"]:
                    assert flat1["utilities"] == flat2["utilities"]
                    n_common_items += 1

                # They should have the same number of rooms if it was fetched
                # for both of them
                if flat1["rooms"] and flat2["rooms"]:
                    assert flat1["rooms"] == flat2["rooms"]
                    n_common_items += 1

                # They should have the same postal code, if available
                if (flat1["flatisfy"].get("postal_code", None)
                        and flat2["flatisfy"].get("postal_code", None)):
                    assert (flat1["flatisfy"]["postal_code"] ==
                            flat2["flatisfy"]["postal_code"])
                    n_common_items += 1

                # TODO: Compare texts (one is included in another? fuzzymatch?)

                # They should have the same phone number if it was fetched for
                # both
                flat1_phone = homogeneize_phone_number(flat1["phone"])
                flat2_phone = homogeneize_phone_number(flat2["phone"])
                if flat1_phone and flat2_phone:
                    assert flat1_phone == flat2_phone
                    n_common_items += 10  # Counts much more than the rest

                # They should have at least two photos in common if both
                # have photos (a single common photo is not enough)
                if flat1["photos"] and flat2["photos"]:
                    n_common_photos = find_number_common_photos(
                        photo_cache, flat1["photos"], flat2["photos"])
                    assert n_common_photos > 1

                    min_number_photos = min(len(flat1["photos"]),
                                            len(flat2["photos"]))

                    # Either all the photos are the same, or there are at least
                    # three common photos.
                    if n_common_photos == min_number_photos:
                        n_common_items += 15
                    else:
                        n_common_items += 5 * min(n_common_photos, 3)

                # Minimal score to consider they are duplicates
                assert n_common_items >= 15
            except (AssertionError, TypeError):
                # Skip and consider as not duplicates whenever the conditions
                # are not met
                # TypeError occurs when an area or a cost is None, which should
                # not be considered as duplicates
                continue

            # Mark flats as duplicates
            LOGGER.info(("Found duplicates using deep detection: (%s, %s). "
                         "Score is %d."), flat1["id"], flat2["id"],
                        n_common_items)
            matching_flats[flat1["id"]].append(flat2["id"])
            matching_flats[flat2["id"]].append(flat1["id"])

    if photo_cache.total():
        LOGGER.debug("Photo cache: hits: %d%% / misses: %d%%.",
                     photo_cache.hit_rate(), photo_cache.miss_rate())

    seen_ids = []
    duplicate_flats = []
    unique_flats_list = []
    for flat_id in [flat["id"] for flat in flats_list]:
        if flat_id in seen_ids:
            continue

        seen_ids.extend(matching_flats[flat_id])
        # Sort the matching group by backend precedence so that merging
        # keeps the most authoritative backend's data last (merge_dicts
        # gives precedence accordingly).
        to_merge = sorted([
            flat
            for flat in flats_list if flat["id"] in matching_flats[flat_id]
        ],
                          key=lambda flat:
                          next(i for
                               (i, backend) in enumerate(BACKENDS_PRECEDENCE)
                               if flat["id"].endswith(backend)),
                          reverse=True)
        unique_flats_list.append(tools.merge_dicts(*to_merge))
        # The ID of the added merged flat will be the one of the last item
        # in ``matching_flats``. Then, any flat object that was before in
        # the ``matching_flats`` list is to be considered as a duplicate
        # and should have a ``duplicate`` status.
        duplicate_flats.extend(to_merge[:-1])

    return unique_flats_list, duplicate_flats
Example 5
def deep_detect(flats_list, config):
    """
    Deeper detection of duplicates based on any available data.

    :param flats_list: A list of flats dicts.
    :param config: A config dict.
    :return: A tuple of the deduplicated list of flat dicts and the list of all
        the flats objects that should be removed and considered as duplicates
        (they were already merged).
    """
    # Persist downloaded photos on disk only when images are served locally.
    storage_dir = (
        os.path.join(config["data_directory"], "images")
        if config["serve_images_locally"]
        else None
    )
    photo_cache = ImageCache(storage_dir=storage_dir)

    LOGGER.info("Running deep duplicates detection.")
    # Maps a flat id to the list of flat ids it matches (including itself).
    matching_flats = collections.defaultdict(list)
    for i, first in enumerate(flats_list):
        matching_flats[first["id"]].append(first["id"])
        for j, second in enumerate(flats_list):
            # Only consider each unordered pair once (j < i).
            if i <= j:
                continue

            # Pair already marked as matching by a previous iteration.
            if second["id"] in matching_flats[first["id"]]:
                continue

            score = get_duplicate_score(
                first, second, photo_cache,
                config["duplicate_image_hash_threshold"])

            # Minimal score to consider they are duplicates
            if score >= config["duplicate_threshold"]:
                # Mark flats as duplicates
                LOGGER.info(
                    ("Found duplicates using deep detection: (%s, %s). Score is %d."
                     ),
                    first["id"],
                    second["id"],
                    score,
                )
                matching_flats[first["id"]].append(second["id"])
                matching_flats[second["id"]].append(first["id"])

    if photo_cache.total():
        LOGGER.debug(
            "Photo cache: hits: %d%% / misses: %d%%.",
            photo_cache.hit_rate(),
            photo_cache.miss_rate(),
        )

    def _backend_rank(flat):
        # Position of the flat's backend in BACKENDS_BY_PRECEDENCE.
        return next(
            rank
            for rank, backend in enumerate(BACKENDS_BY_PRECEDENCE)
            if flat["id"].endswith(backend)
        )

    seen_ids = []
    duplicate_flats = []
    unique_flats_list = []
    for flat_id in [flat["id"] for flat in flats_list]:
        if flat_id in seen_ids:
            continue

        seen_ids.extend(matching_flats[flat_id])
        group = [
            flat
            for flat in flats_list
            if flat["id"] in matching_flats[flat_id]
        ]
        to_merge = sorted(group, key=_backend_rank, reverse=True)
        unique_flats_list.append(tools.merge_dicts(*to_merge))
        # The ID of the added merged flat will be the one of the last item
        # in ``matching_flats``. Then, any flat object that was before in
        # the ``matching_flats`` list is to be considered as a duplicate
        # and should have a ``duplicate`` status.
        duplicate_flats.extend(to_merge[:-1])

    return unique_flats_list, duplicate_flats