Example 1
def test_dynamic_images_priority(mocker, image_data):
    """Dynamic images should be first in images list."""
    mocker.patch('config.IMAGES_REQUIRED', False)
    mocker.patch('config.imageconf', {'image_reduction': [30, 30, 40, 0]})
    calculate.run()
    with open(config.LOG_REDUCCION, 'rt', encoding='utf-8') as fh:
        first_image_info = fh.read().split()[0]
    assert 'bar.bmp' in first_image_info
Example 2
def test_no_repeated_images(mocker, image_data):
    """Image with same name should be included only once."""
    mocker.patch('config.IMAGES_REQUIRED', True)
    mocker.patch('config.imageconf', {'image_reduction': [100, 0, 0, 0]})
    calculate.run()
    with open(config.LOG_REDUCCION, 'rt', encoding='utf-8') as fh:
        images = fh.read()
    assert images.count('foo.png') == 1
Example 3
def test_required_images(reduction, mocker, image_data):
    """Test that required images are included independently of reduction values."""
    mocker.patch('config.IMAGES_REQUIRED', True)
    mocker.patch('config.imageconf', {'image_reduction': reduction})
    calculate.run()
    with open(config.LOG_REDUCCION, 'r', encoding='utf-8') as fh:
        images = fh.read()
    assert 'baz.svg' in images
    assert 'spam.svg' in images
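A note on Example 3: the test takes a `reduction` argument, which implies it is parametrized; the decorator is not part of the snippet. A minimal sketch of what that parametrization could look like, with made-up reduction vectors (the project's real values are not shown here):

import pytest

# Hypothetical parametrization: each vector supplies the percentages used for
# config.imageconf['image_reduction']; the concrete values below are
# illustrative assumptions, not the project's actual test data.
@pytest.mark.parametrize('reduction', [
    [100, 0, 0, 0],
    [50, 30, 20, 0],
    [0, 0, 0, 100],
])
def test_required_images(reduction, mocker, image_data):
    ...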
Example 4
def test_required_images_only(mocker, image_data):
    """Test inclusion of required images only."""
    mocker.patch('config.IMAGES_REQUIRED', True)
    reduction = [0, 0, 0, 100]  # no optional images
    mocker.patch('config.imageconf', {'image_reduction': reduction})
    calculate.run()
    with open(config.LOG_REDUCCION, 'r', encoding='utf-8') as fh:
        images = fh.read()
    assert 'baz.svg' in images
    assert 'spam.svg' in images
    # only the two required SVG images should be included
    assert len(images.split()) == 2
Example 5
def test_no_images(mocker, image_data):
    """Test no images included."""
    mocker.patch('config.IMAGES_REQUIRED', False)
    mocker.patch('config.imageconf', {'image_reduction': [0, 0, 0, 100]})
    calculate.run()
    assert os.path.getsize(config.LOG_REDUCCION) == 0
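All of the tests above also rely on an `image_data` fixture (together with `mocker` from pytest-mock) that is defined elsewhere in the project. A minimal sketch of what such a fixture might look like, assuming it only redirects config.LOG_REDUCCION to a temporary file and provides a few fake image names (the names are taken from the assertions above; the real fixture will differ):

import pytest


@pytest.fixture
def image_data(mocker, tmp_path):
    """Hypothetical fixture sketch: patch the reduction log path and return
    a small fake image set; not the project's actual fixture."""
    log_path = tmp_path / 'reduction.txt'
    log_path.touch()
    mocker.patch('config.LOG_REDUCCION', str(log_path))
    return ['bar.bmp', 'foo.png', 'baz.svg', 'spam.svg']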
Example 6
def main(lang,
         src_info,
         version,
         lang_config,
         gendate,
         images_dump_dir,
         verbose=False,
         desconectado=False,
         process_articles=True):
    """Generate the CDPedia tarball or iso."""
    # don't affect the rest of the machine
    make_it_nicer()

    # set language in config
    if config.LANGUAGE is None:
        config.LANGUAGE = lang
        config.URL_WIKIPEDIA = config.URL_WIKIPEDIA_TPL.format(lang=lang)

    # validate lang and versions, and fix config with selected data
    logger.info("Fixing config for lang=%r version=%r", lang, version)
    try:
        _lang_conf = config.imagtypes[lang]
    except KeyError:
        available_langs = list(config.imagtypes.keys())
        logger.error("%r is not a valid language! try one of %s", lang,
                     available_langs)
        sys.exit()
    try:
        config.imageconf = _lang_conf[version]
    except KeyError:
        available_versions = list(_lang_conf.keys())
        logger.error("%r is not a valid version! try one of %s", version,
                     available_versions)
        sys.exit()
    config.langconf = lang_config

    logger.info("Starting!")
    prepare_temporary_dirs(process_articles)

    logger.info("Copying the assets and locale files")
    dst_assets = os.path.join(config.DIR_CDBASE, 'assets')
    copy_assets(src_info, dst_assets)
    link(os.path.join(src_info, 'portal_pages.txt'), config.DIR_TEMP)
    copy_dir('locale', path.join(config.DIR_CDBASE, "locale"))
    set_locale(lang_config.get('second_language'), record=True)

    logger.info("Copying '%s' stylesheet and associated media resources",
                config.CSS_FILENAME)
    copy_css(src_info, dst_assets)

    articulos = path.join(src_info, "articles")
    if process_articles:
        logger.info("Preprocessing")
        if not path.exists(articulos):
            logger.error("Couldn't find articles dir: %r", articulos)
            raise EnvironmentError("Directory not found, can't continue")
        preprocess.run(articulos)

        logger.info("Calculating which stay and which don't")
        preprocess.pages_selector.calculate()

        logger.info("Generating the images log")
        taken, adesc = extract.run()
        logger.info("Extracted %d images, need to download %d", taken, adesc)
    else:
        logger.info("Avoid processing articles and generating images log")

    logger.info("Recalculating the reduction percentages.")
    calculate.run()

    if not desconectado:
        logger.info("Downloading the images from the internet")
        download.retrieve(images_dump_dir)

    logger.info("Reducing the downloaded images")
    scale.run(verbose, images_dump_dir)

    if config.EMBED_IMAGES:
        logger.info("Embedding selected images")
        embed.run(images_dump_dir)

    logger.info("Putting the reduced images into blocks")
    # group the images into blocks
    q_blocks, q_images = ImageManager.generar_bloques(verbose)
    logger.info("Got %d blocks with %d images", q_blocks, q_images)

    if not process_articles:
        logger.info("Not generating index and blocks (by user request)")
    elif preprocess.pages_selector.same_info_through_runs:
        logger.info("Same articles than previous run "
                    "(not generating index and blocks)")
    else:
        logger.info("Generating the index")
        result = cdpindex.generate_from_html(articulos, verbose)
        logger.info("Got %d files", result)
        logger.info("Generating the articles blocks")
        q_blocks, q_files, q_redirs = ArticleManager.generar_bloques(
            lang, verbose)
        logger.info("Got %d blocks with %d files and %d redirects", q_blocks,
                    q_files, q_redirs)

    logger.info("Copying the sources and libs")
    copy_sources()
    generate_libs()

    # Copy python docs
    pydocs.clone(lang, lang_config, os.path.dirname(src_info))

    logger.info("Generating the links to blocks and indexes")
    # pages blocks
    dest = path.join(config.DIR_CDBASE, "pages")
    if os.path.exists(dest):
        os.remove(dest)
    os.symlink(path.abspath(config.DIR_PAGES_BLOCKS), dest)
    # images blocks
    dest = path.join(config.DIR_CDBASE, "images")
    if os.path.exists(dest):
        os.remove(dest)
    os.symlink(path.abspath(config.DIR_IMAGES_BLOCKS), dest)
    # indexes
    dest = path.join(config.DIR_CDBASE, "indice")
    if os.path.exists(dest):
        os.remove(dest)
    os.symlink(path.abspath(config.DIR_INDICE), dest)

    if config.imageconf["windows"]:
        logger.info("Copying Windows stuff")
        copy_dir("resources/autorun.win/cdroot", config.DIR_CDBASE)
        # unpack embeddable python distribution for win32
        py_win_zip = "resources/autorun.win/python-win32.zip"
        py_win_dst = os.path.join(config.DIR_CDBASE, 'python')
        with zipfile.ZipFile(py_win_zip, 'r') as zh:
            zh.extractall(py_win_dst)

    logger.info("Generating runtime config")
    gen_run_config(lang_config)

    base_dest_name = "cdpedia-%s-%s-%s-%s" % (lang, config.VERSION, gendate,
                                              version)
    if config.imageconf["type"] == "iso":
        logger.info("Building the ISO: %r", base_dest_name)
        build_iso(base_dest_name)
    elif config.imageconf["type"] == "tarball":
        logger.info("Building the tarball: %r", base_dest_name)
        build_tarball(base_dest_name)
    else:
        raise ValueError("Unrecognized image type")

    logger.info("All done!")