Example #1
def get_service_url(service, url=index_url):
    """
    Summary.

        Retrieve Amazon API Global Offer File (Service API Index) File

    Args:
        :url (str): universal resource locator for Amazon API Index file.
            index file details the current url locations for retrieving the
            most up to date API data files
    Returns:
        Current URL of EC2 Price file (str), Publication date (str)

    """
    url_prefix = 'https://pricing.us-east-1.amazonaws.com'
    converted_name = name_lookup(service, url)

    if not converted_name:
        logger.critical(
            f'{inspect.stack()[0][3]}: The boto3 service name provided '
            'could not be found in the index file')
        return None

    r = requests.get(url)
    f1 = json.loads(r.content)
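    # NOTE: this local index_url shadows the module-level default used above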
    index_url = url_prefix + f1['offers'][converted_name][
        'currentRegionIndexUrl']
    data = json.loads(requests.get(index_url).content)
    url_suffix = data['regions']['us-east-1']['currentVersionUrl']
    return url_prefix + url_suffix
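
A hypothetical call (assumes the module-level index_url default and the name_lookup() helper used above; 'ec2' is only an example service name):

price_file_url = get_service_url('ec2')
if price_file_url:
    print(f'Price file: {price_file_url}')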
Example #2
def read_list_from_file(
    files_list_path: str = FILES_LIST_PATH,
    files_list_filename: str = FILES_LIST_FILENAME,
) -> list:
    """ Import list from file """

    if os.path.exists(f"{files_list_path}/{files_list_filename}"):
        try:
            with open(f"{files_list_path}/{files_list_filename}", "r") as r:
                data = r.read().splitlines()

            statistics.append(["read_list_from_file", len(data)])
            logger.info(f"{len(data)} items imported from file.")
        except Exception as e:
            logger.error(e)
            raise
        return data
    else:
        logger.critical(
            f'Cannot open the file "{files_list_path}/{files_list_filename}", looks like it does not exist.'
        )
        return False
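
A minimal usage sketch (assumes the module-level FILES_LIST_PATH and FILES_LIST_FILENAME defaults):

files = read_list_from_file()
if files:
    print(f"{len(files)} entries loaded.")

Example #3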
def remote_video_encoder(files_list_path: str = FILES_LIST_PATH) -> bool:
    """ Send movies list to SQS -> lambda/ffmpeg """

    logger.info("Starting remote movie re-encoding operations...")

    data_path = f"{files_list_path}/defered_encode.json"
    if os.path.exists(data_path):
        try:
            # defered_encode.json holds a JSON list of movie entries;
            # parse it instead of iterating over the raw string.
            with open(data_path, "r") as r:
                movies = json.load(r)
            for movie in movies:
                queue_message = send_to_queue(movie)
                logger.info(f"Re-encoding process launched for '{movie}'.")
                logger.debug(queue_message)
        except Exception as e:
            logger.error(e)
            raise
    else:
        logger.critical(f"Path does not exist: '{data_path}'. Stopping here.")
        return False

    logger.info("...done.")

    return True
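
A hypothetical invocation (relies on the module-level FILES_LIST_PATH default and the send_to_queue() helper defined elsewhere in this module):

if not remote_video_encoder():
    logger.error("Remote encoding could not be started.")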
Example #4
def build_media_files_from_list(
    local_files_list: list = None,
    output_image_width: int = OUTPUT_IMAGE_WIDTH,
    output_image_height: int = OUTPUT_IMAGE_HEIGHT,
    output_path: str = LOCAL_MEDIA_OUTPUT_PATH,
    log_path: str = LOG_PATH,
) -> bool:
    """ Generates web friendly resized images and copy other media files """

    logger.info("Generating web friendly images...")
    processed_files_count = 0
    unprocessed_files = []
    path_pattern = re.compile("^.*?/[0-9]{8}/.*[.][a-zA-Z0-9]+$")
    ts_pattern = re.compile("^[0-9]{8}$")

    try:
        for media in local_files_list:
            ts = media.split("/")[-2]

            if path_pattern.match(media):
                media_ts = ts
            elif not ts_pattern.match(ts):
                media_ts = media_ts_format(ts, media)
            else:
                logger.warning(
                    'The file path format should be like e.g. "path/ts/image.jpg".'
                )
                logger.critical(
                    f'Input file path format "{media}" is incorrect! Stopping here!'
                )
                return False

            if not media_ts:
                unprocessed_files.append(media)
                logger.warning(
                    f'Could not identify the date format for "{media}". Skipping.'
                )
            else:
                gen = media_generate(
                    media=media,
                    output_path=output_path,
                    media_ts=media_ts,
                    output_image_width=output_image_width,
                    output_image_height=output_image_height,
                    processed_files_count=processed_files_count,
                    unprocessed_files=unprocessed_files,
                )
                processed_files_count, unprocessed_files = gen

        statistics.append(
            ["build_media_files_from_list", processed_files_count]
        )
        logger.info(
            f"{processed_files_count} images have been generated successfully."
        )

        log_file = f"{log_path}/unprocessed_files.log"

        if len(unprocessed_files) > 0:
            up_files = [item + "\n" for item in unprocessed_files]

            with open(log_file, "w") as w:
                w.writelines(up_files)

            logger.warning(f"{len(unprocessed_files)} unprocessed file(s)!")
            logger.debug(f"Unprocessed file(s): {unprocessed_files}")
        elif os.path.exists(log_file):
            # no unprocessed files this run: empty any stale log
            with open(log_file, "r+") as t:
                t.truncate(0)

        logger.info("Image files tree generation done.")

        if len(unprocessed_files) > 0:
            logger.info(
                f'Some files were not processed, please review the list: "{log_file}".'
            )
    except Exception as e:
        logger.error(e)
        raise

    return True
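
A hypothetical end-to-end sketch chaining this with get_local_medias_files() from the next example (all other parameters fall back to module-level constants):

medias = get_local_medias_files()
if medias:
    build_media_files_from_list(local_files_list=medias)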
Example #5
def get_local_medias_files(
    path: str = LOCAL_MEDIA_PATH,
    save_to_disk: bool = True,
    files_list_path: str = FILES_LIST_PATH,
    files_list_filename: str = FILES_LIST_FILENAME,
    config_path: str = CONFIG_PATH,
) -> list:
    """ Generates a list of local media files """

    if os.path.exists(path):
        local_medias = []
        filtered_files = []

        try:
            logger.info("Generating list of local files...")
            for dirpath, _, files in os.walk(path):
                for filename in files:
                    fname = os.path.join(dirpath, filename)
                    if is_filtered(filename):
                        filtered_files.append(fname)
                    else:
                        local_medias.append(fname)

            if len(local_medias) > 0:
                statistics.append(
                    ["get_local_medias_files", len(local_medias)]
                )

                logger.info("List successfully generated.")
                logger.debug(f"Count: {len(local_medias)} local files.")
            else:
                logger.critical(
                    f'No files found in source directory: "{path}".'
                )
                return False

            if save_to_disk:
                logger.info("Writing local files list to disk...")
                data_to_write = [item + "\n" for item in local_medias]

                with open(
                    f"{files_list_path}/{files_list_filename}", "w"
                ) as w:
                    w.writelines(data_to_write)

                logger.info(
                    f'The list has been saved successfully: "{files_list_path}/{files_list_filename}".'
                )

            if len(filtered_files) > 0:
                logger.info(
                    f'Number of file(s) excluded by filter specified in "{config_path}/exclude_local.txt": {len(filtered_files)}.'
                )
                logger.debug(f"excluded by filter: {filtered_files}")
        except Exception as e:
            logger.error(e)
            raise
        return local_medias
    else:
        logger.critical(f'Input path does not exist: "{path}"! Stopping here!')
        return False
Example #6
def media_sync(
    local_path: str = LOCAL_MEDIA_OUTPUT_PATH,
    bucket_name: str = BUCKET_NAME,
    remote_path_prefix: str = S3_PREFIX,
    log_path: str = LOG_PATH,
    aws_region: str = AWS_REGION,
    config_path: str = CONFIG_PATH,
) -> bool:
    """ Synchronize local/S3 media files tree """

    exclude_s3_file_path = f"{config_path}/exclude_s3.txt"
    if os.path.exists(exclude_s3_file_path):
        with open(exclude_s3_file_path, "r") as r:
            common_oses_filter = r.read().splitlines()
        cli_filter_args = "".join(
            [
                f' --exclude "{item}"'
                for item in common_oses_filter
                # skip comment lines and blank lines in the exclude file
                if item and not item.startswith("#")
            ]
        )
    else:
        cli_filter_args = ""

    logger.info("Starting sync...")
    logger.info(f"S3 sync task log => tail -F {log_path}/s3_sync.log")

    try:
        cli_cmd = f"aws s3 sync {local_path}/ s3://{bucket_name}/{remote_path_prefix}/ --delete --region {aws_region} {cli_filter_args}"
        logger.debug(f"cli command: {cli_cmd}")
        with open(f"{log_path}/s3_sync.log", "w") as w:
            proc = subprocess.run(
                cli_cmd,
                shell=True,
                check=True,
                stdout=w,
                stderr=subprocess.STDOUT,
                universal_newlines=True,
            )

        if proc.returncode == 0:
            with open(f"{log_path}/s3_sync.log", "r") as r:
                processed_objects = r.read().splitlines()

            processed_objects = [
                item for item in processed_objects if "upload" in item
            ]
            statistics.append(["media_sync", len(processed_objects)])

            logger.info("Sync completed successfully.")
            logger.debug(
                f"{len(processed_objects)} files have been synchronized successfully."
            )
            logger.debug(f"S3 CLI returned code: {proc.returncode} => OK")
        else:
            logger.critical("Something wrong happened during sync operation!")
            return False
    except Exception as e:
        logger.error(e)
        raise

    return True
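
A hypothetical sync invocation (defaults come from the module constants; the AWS CLI must be on PATH, since the function shells out to "aws s3 sync"):

if media_sync():
    logger.info("Media tree synchronized to S3.")

Example #7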
def media_generate(
    media: str = None,
    output_path: str = None,
    media_ts: str = None,
    output_image_width: int = None,
    output_image_height: int = None,
    processed_files_count: int = None,
    unprocessed_files: list = None,
    video_encode: str = VIDEO_ENCODE,
    log_path: str = LOG_PATH,
    s3_prefix: str = S3_PREFIX,
    media_encode_platform: str = MEDIA_ENCODE_PLATFORM,
) -> tuple:
    """ invoked by build_media_files_from_list() - gemerates media files """

    media_name = media.split("/")[-1]
    media_type = get_media_type(media_name)

    if not os.path.exists(f"{output_path}/{media_ts}"):
        os.mkdir(f"{output_path}/{media_ts}")
        logger.debug(f'Created directory: "{output_path}/{media_ts}".')

    if media_type == "picture":
        logger.info(
            "Picture type identified, starting generation of media...")
        try:
            # Image.open() raises OSError for unreadable or unidentified
            # files rather than returning None, so handle that explicitly.
            with Image.open(media) as im:
                im.thumbnail((output_image_width, output_image_height))
                im.save(
                    f"{output_path}/{media_ts}/{media_name}",
                    format="JPEG",
                    quality="web_high",
                    dpi=(72, 72),
                )
            processed_files_count += 1
            logger.info(
                f'Generated media: "{output_path}/{media_ts}/{media_name}".')
        except OSError:
            unprocessed_files.append(media)
            logger.warning(
                f'Impossible to open or convert the image file: "{media_name}"! File identified format is: "{media_type}". Skipping it.'
            )
    elif media_type == "movie":
        if video_encode:
            logger.info(f"Movie type identified...")

            if media_encode_platform == "local":
                if os.path.exists(f"{log_path}/ffmpeg.log"):
                    # empty any previous ffmpeg log before local encoding
                    with open(f"{log_path}/ffmpeg.log", "r+") as w:
                        w.truncate(0)
                video_encoder(media, media_ts, output_path)
            elif media_encode_platform == "cloud":
                logger.info(f"Movie type identified, starting copy of file...")
                shutil.copyfile(media,
                                f"{output_path}/{media_ts}/{media_name}")
                logger.info(
                    f'File copied successfully: "{media}" => "{output_path}/{media_ts}/{media_name}"'
                )
                movie = f"{s3_prefix}/{media_ts}/{media_name}"
                cloud_video_encoder_list.append({
                    "src": movie,
                    "ts": media_ts,
                    "delete_old": True
                })
                logger.info(
                    f"Added movie '{movie}' to queue for deferred remote re-encoding."
                )
            else:
                logger.critical(
                    'Wrong or missing value! Valid values for "media_encode_platform": local|cloud'
                )
        else:
            logger.info(f"Movie type identified, starting copy of file...")
            shutil.copyfile(media, f"{output_path}/{media_ts}/{media_name}")
            logger.info(
                f'File copied successfully: "{media}" => "{output_path}/{media_ts}/{media_name}"'
            )

        processed_files_count += 1
    else:
        unprocessed_files.append(media)
        logger.warning(f'Impossible to process file: "{media}". Skipping it.')

    return (processed_files_count, unprocessed_files)
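
A hypothetical direct call; normally this runs via build_media_files_from_list(), and the path and dimensions below are illustrative only:

count, skipped = media_generate(
    media="photos/20200101/holiday.jpg",
    output_path="/tmp/out",
    media_ts="20200101",
    output_image_width=1024,
    output_image_height=768,
    processed_files_count=0,
    unprocessed_files=[],
)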
Example #8
def build_media_objects(
    items: list = None,
    aws_region: str = AWS_REGION,
    bucket_name: str = BUCKET_NAME,
) -> list:
    """ Build media objects """

    media_items: list = []
    ts = None
    logger.info("Building media list dictionaries...")
    logger.debug(
        f"Context Parameters: {build_media_objects.__name__} => {build_media_objects.__code__.co_varnames}"
    )

    try:
        for item in items:
            key = item.split("/")
            name = key[3]
            ts = key[2]
            ts = f"{ts[0:4]}-{ts[4:6]}-{ts[6:8]}"
            path = f"{key[0]}/{key[1]}/{key[2]}"
            url = f"https://s3-{aws_region}.amazonaws.com/{bucket_name}/{path}/{name}"

            media_type = get_media_type(name)

            if ts != "" and name != "":
                media = {
                    "ts": ts,
                    "name": name,
                    "kind": media_type,
                    "path": path,
                    "url": url,
                }
                media_items.append(media)
            else:
                logger.warning(f"ts = {ts} and name = {name}. Stopping here.")
                return False

        data = sorted(media_items, key=itemgetter("ts"))

        nbr_data = len(data)
        nbr_items = len(items)

        statistics.append(["build_media_objects", len(data)])

        logger.info("Media list dictionaries built successfully.")
        logger.debug(f"{nbr_data} objects in media list.")

        if nbr_data != nbr_items:
            logger.critical(
                "Inconsistency found between data input and output! Stopping here!"
            )
            logger.debug(
                f"Input objects list count [{nbr_items}] and generated media objects count [{nbr_data}] are uneven. Stopping here."
            )
            return False
    except Exception as e:
        logger.error(e)
        raise

    return data
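
A hypothetical usage; the function expects keys shaped like prefix/sub/ts/name, so that split("/") finds the timestamp at index 2 and the file name at index 3 (the key below is illustrative):

objects = build_media_objects(items=["static/media/20200101/photo.jpg"])
if objects:
    print(objects[0]["url"])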