Example no. 1
def _copy_file(source_file, destination_file):
    if os.path.exists(destination_file):
        LOGGER.warning(
            __name__,
            f"Archive file {destination_file} already exists. The existing file will be "
            f"overwritten.")
    copy2(source_file, destination_file)
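This helper is an excerpt, so its imports are not shown. A minimal sketch of what it presumably needs in scope, with the logger import matching the one visible in Example no. 10:

import os
from shutil import copy2

from src.Logger import LOGGER  # project-specific logger, as imported in Example no. 10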
Example no. 2
def get_mappenavn(image_path, exif):
    dirs = image_path.split(os.sep)[:-1]
    if config.exif_top_dir in dirs:
        # Join the path below config.exif_top_dir with "/" as the separator. Uncomment the
        # second line instead to use the OS-specific separator (os.sep).
        rel_path = "/".join(dirs[(dirs.index(config.exif_top_dir) + 1):])
        # rel_path = os.sep.join(dirs[(dirs.index(config.exif_top_dir) + 1):])
    else:
        LOGGER.warning(
            __name__,
            f"Top directory '{config.exif_top_dir}' not found in image path '{image_path}'. "
            f"'rel_path' will be empty")
        rel_path = ""

    timestamp = iso8601.parse_date(exif["exif_tid"])
    format_values = dict(aar=timestamp.year,
                         maaned=timestamp.month,
                         dag=timestamp.day,
                         fylke=str(exif["exif_fylke"]).zfill(2),
                         vegkat=exif["exif_vegkat"],
                         vegstat=exif["exif_vegstat"],
                         vegnr=exif["exif_vegnr"],
                         hp=exif["exif_hp"],
                         meter=exif["exif_meter"],
                         feltkode=exif["exif_feltkode"],
                         strekningreferanse=exif["exif_strekningreferanse"],
                         relative_input_dir=rel_path)
    folder_name = config.exif_mappenavn.format(**format_values)
    assert "{" not in folder_name and "}" not in folder_name, f"Invalid `Mappenavn`: {config.db_folder_name} -> " \
                                                              f"{folder_name}."
    return folder_name
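For context, `config.exif_mappenavn` is expected to be a format template using the keys assembled in `format_values`. The template below is purely a hypothetical illustration (the real value lives in `config.py`):

# Hypothetical template, for illustration only.
exif_mappenavn = "{relative_input_dir}/{aar}-{maaned:02d}-{dag:02d}_{fylke}_" \
                 "{vegkat}{vegstat}{vegnr}_hp{hp}_m{meter}_f{feltkode}"

Keys the template does not use (such as `strekningreferanse` here) are simply ignored by `str.format`.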
Example no. 3
def wait_until_path_is_found(paths,
                             retry_interval=config.file_access_retry_seconds,
                             timeout=config.file_access_timeout_seconds):
    """
    Blocks execution until all elements of `paths` exist, waiting at most `timeout` seconds. If the timeout is
    reached and one or more paths still do not exist, a `PathNotReachableError` will be raised.

    :param paths: Iterable where each element is a path string. The elements can also be `bytes`. A single path is
                  also accepted and will be wrapped in a list.
    :type paths: list of str | tuple of str | np.ndarray
    :param retry_interval: Number of seconds to wait between each retry.
    :type retry_interval: int
    :param timeout: Total number of seconds to wait.
    :type timeout: int
    :return: 0, if the existence of all paths is confirmed before the timeout is reached.
    :rtype: int
    """
    total_wait_time = 0

    if not isinstance(paths, (list, tuple, np.ndarray)):
        paths = [paths]

    while not all_exists(paths):
        time.sleep(retry_interval)
        total_wait_time += retry_interval
        if total_wait_time > timeout:
            raise PathNotReachableError(
                f"At least one of the paths in {paths} could not be reached in {timeout}s. "
                f"Aborting.")
        else:
            LOGGER.warning(
                __name__,
                f"At least one of the paths in {paths} could not be reached. Retrying."
            )
    return 0
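`all_exists` is defined elsewhere in the project; a minimal sketch of the behaviour the loop relies on, written as an assumption:

def all_exists(paths):
    # Assumed helper: True only if every element of `paths` currently exists on disk.
    # os.path.exists accepts both str and bytes paths.
    return all(os.path.exists(path) for path in paths)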
Example no. 4
    def remove_cache_file(self):
        if os.path.isfile(self.cache_file):
            os.remove(self.cache_file)
        else:
            LOGGER.warning(
                __name__,
                f"Attempted to remove cache file '{self.cache_file}', but it does not exist."
            )
Example no. 5
def ID(json_data):
    # Try to get 'bildeid' from the json_data.
    image_id = json_data.get("bildeid", None)

    # If 'bildeid' could not be found in the json_data, create it from the contents.
    if image_id is None:
        LOGGER.warning(__name__, "Could not find 'bildeid' in JSON data. The ID will be created from the contents of "
                                 "the JSON data instead.")
        image_id = get_deterministic_id(json_data)

    return image_id
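`get_deterministic_id` is not shown in this excerpt. One plausible implementation, given only as an assumption, is to hash a canonical serialisation of the JSON data so that equal contents always yield the same ID:

import hashlib
import json

def get_deterministic_id(json_data):
    # Hypothetical sketch: sort the keys so logically equal dicts serialise identically,
    # then hash the canonical string.
    canonical = json.dumps(json_data, sort_keys=True, ensure_ascii=False, default=str)
    return hashlib.sha1(canonical.encode("utf-8")).hexdigest()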
Example no. 6
def get_exif(img, image_path):
    """
    Parse the EXIF data from `img`.

    :param img: Input image
    :type img: PIL.Image
    :param image_path: Path to input image. Used to recreate metadata when the EXIF header is missing.
    :type image_path: str
    :return: EXIF data
    :rtype: dict
    """
    # Make a copy of the template dictionary. Values from the EXIF header will be inserted into this dict.
    parsed_exif = EXIF_TEMPLATE.copy()

    # Get the EXIF data
    exif = img._getexif()

    if exif is not None:
        # Convert the integer keys in the exif dict to text
        labeled = label_exif(exif)
        # Process the `ImageProperties` XML
        image_properties_xml = labeled.get("ImageProperties", None)
        assert image_properties_xml is not None, "Unable to get key 40055:`ImageProperties` from EXIF."
        process_image_properties(image_properties_xml, parsed_exif)
        # Process the `ReflinkInfo` XML if it is available
        reflink_info_xml = labeled.get("ReflinkInfo", None)
        process_reflink_info(reflink_info_xml, parsed_exif)
        # Title of image.
        XPTitle = labeled.get("XPTitle", b"").decode("utf16")
        parsed_exif["exif_xptitle"] = XPTitle
    else:
        LOGGER.warning(
            __name__,
            "No EXIF data found for image. Attempting to reconstruct data from image path."
        )
        if image_path is not None:
            get_metadata_from_path(image_path, parsed_exif)

    # Get a deterministic ID from the exif data.
    parsed_exif["bildeid"] = get_deterministic_id(parsed_exif)
    # Insert the folder name
    parsed_exif["mappenavn"] = get_mappenavn(image_path, parsed_exif)
    return parsed_exif
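`label_exif` converts the integer tag IDs returned by `img._getexif()` into readable names. A minimal sketch, assuming Pillow's standard `ExifTags.TAGS` mapping plus a manual entry for the vendor tag 40055 (`ImageProperties`) referenced in the assert above:

from PIL.ExifTags import TAGS

def label_exif(exif):
    # Map numeric EXIF tag IDs to names. 40055 (ImageProperties) is a vendor-specific
    # tag, so it is added by hand here; the same would apply to ReflinkInfo (assumption).
    tag_names = dict(TAGS)
    tag_names[40055] = "ImageProperties"
    return {tag_names.get(tag_id, tag_id): value for tag_id, value in exif.items()}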
Example no. 7
def check_config(args):
    """ Check that the specified configuration variables are valid. """
    if config.archive_json and not config.remote_json:
        raise ValueError("Parameter 'archive_json' requires remote_json=True.")
    if config.archive_mask and not config.remote_mask:
        raise ValueError("Parameter 'archive_mask' requires remote_mask=True.")

    if config.delete_input:
        LOGGER.warning(
            __name__,
            "Parameter 'delete_input' is enabled. This will permanently delete the original"
            " image from the input directory!")
        assert args.archive_folder, "Argument 'delete_input' requires a valid archive directory to be specified."

    if config.uncaught_exception_email or config.processing_error_email or config.finished_email:
        # Try to import the email_sender module, which checks if the `email_config.py` file is present.
        # Otherwise this will raise an exception prompting the user to create the file.
        import src.email_sender

    valid_log_levels = ["DEBUG", "INFO", "WARNING", "ERROR"]
    assert config.log_level in valid_log_levels, f"config.log_level must be one of {valid_log_levels}"
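For illustration only, a sketch of how `check_config` might be wired into a command-line entry point; the parser and argument names are assumptions based on the single attribute (`args.archive_folder`) used above:

import argparse

def parse_args():
    # Hypothetical argument parser; only `archive_folder` is implied by check_config.
    parser = argparse.ArgumentParser(description="Image anonymisation")
    parser.add_argument("-a", "--archive-folder", dest="archive_folder", default=None,
                        help="Optional archive directory, required when config.delete_input is enabled.")
    return parser.parse_args()

if __name__ == "__main__":
    check_config(parse_args())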
Example no. 8
    def create_row(self, json_dict):
        """
        Create a database row from the given `json_dict`.

        :param json_dict: EXIF data
        :type json_dict: dict
        :return: Dict representing the database row.
        :rtype: dict
        """
        out = {}
        for col in self.columns:
            try:
                value = col.get_value(json_dict)
            except Exception as err:
                LOGGER.warning(
                    __name__,
                    f"Got error '{type(err).__name__}: {err}' while getting value for database "
                    f"column {col.name}. Value will be set to None")
                value = None
            out[col.name] = value
        return out
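For this loop to work, each element of `self.columns` only needs a `name` attribute and a `get_value(json_dict)` method. A minimal sketch of such a column object, given purely as an assumption about the surrounding code:

class Column:
    # Hypothetical column descriptor: `getter` extracts the column's value from the EXIF dict.
    def __init__(self, name, getter):
        self.name = name
        self.getter = getter

    def get_value(self, json_dict):
        return self.getter(json_dict)

# Hypothetical usage with keys seen in the EXIF examples above.
columns = [Column("bildeid", lambda d: d["bildeid"]),
           Column("mappenavn", lambda d: d["mappenavn"])]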
Example no. 9
    def insert_accumulated_rows(self):
        """
        Insert all accumulated rows into the database
        """
        try:
            # Insert the rows
            self.insert_or_update_rows(self.accumulated_rows)
            # Clear the list of accumulated rows
            self.accumulated_rows = []

            if self.enable_cache:
                # Delete the cached files
                while self.cached_rows:
                    cache_file = self.cached_rows.pop(0)
                    if os.path.exists(cache_file):
                        os.remove(cache_file)
                    else:
                        LOGGER.warning(
                            __name__,
                            f"Could not find cache file to remove: {cache_file}"
                        )

        except cxo.DatabaseError as err:
            raise DatabaseError(f"cx_Oracle.DatabaseError: {str(err)}")
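The method relies on `self.accumulated_rows`, `self.enable_cache` and `self.cached_rows` being filled elsewhere. A possible companion method, sketched purely as an assumption about how rows might be accumulated and cached (`self.cache_dir` is hypothetical; `import os` and `import pickle` are assumed at module level):

    def accumulate_row(self, row):
        # Hypothetical: queue the row for the next bulk insert.
        self.accumulated_rows.append(row)
        if self.enable_cache:
            # Write the row to a cache file so it can be recovered after a crash, and remember
            # the file so insert_accumulated_rows can delete it once the row is inserted.
            cache_file = os.path.join(self.cache_dir, f"row_{len(self.cached_rows)}.pkl")
            with open(cache_file, "wb") as f:
                pickle.dump(row, f)
            self.cached_rows.append(cache_file)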
Example no. 10
import os
import sys
import smtplib
import traceback
from datetime import datetime
from email.message import EmailMessage
from socket import gethostname

import config
from src.Logger import LOGGER
try:
    from config import email_config
except ImportError:
    LOGGER.warning(
        __name__,
        "Could not find `email_config.py` in the project root. Please create it if you want to use"
        " the emailing feature. See `config.py` for more details.")
    email_config = None

CRITICAL_SUBJECT = "[image-anonymisation] {hostname} : Execution stopped due to uncaught {etype}."
ERROR_SUBJECT = "[image-anonymisation] {hostname} : Processing error encountered."
FINISHED_SUBJECT = "[image-anonymisation] {hostname} : Anonymisation finished."


def email_excepthook(etype, ex, tb):
    """
    Function which can be assigned to `sys.excepthook` in order to send an email when the program exits due to an
    uncaught exception.

    :param etype: Exception type
    :type etype: