Exemplo n.º 1
0
def download(file=None, url=None, thumbnail_size=None, download_dir=None):
    """Download a single image or a batch of images.

    Exactly one of *file* (path to a UTF-8 text file with one URL per
    line) or *url* (a single image URL) should be given.  When
    *thumbnail_size* is a ``(width, height)`` pair, a thumbnail is
    produced for each downloaded image.  Files are written below
    *download_dir* (resolved via ``get_download_dir``).

    Returns a list of per-URL results in the *file* case, a single
    result in the *url* case, and ``None`` when neither was supplied.
    """
    download_dir = get_download_dir(download_dir)

    # Module-level workers shared with do_work() — kept global because
    # do_work() apparently reads them (not visible here; TODO confirm).
    global downloader, thumbnailer
    downloader = ImageDownloader()
    thumbnailer = None

    if thumbnail_size:
        width = int(thumbnail_size[0])
        height = int(thumbnail_size[1])
        thumbnailer = Thumbnailer(width, height)

    if file:
        # Context manager guarantees the handle is closed (the original
        # opened the file and never closed it).
        with codecs.open(file, 'r', 'utf-8') as f:
            url_list = f.readlines()

        result_list = []
        for line in url_list:
            result_list.append(
                do_work(url=line.strip(), download_dir=download_dir))
        return result_list

    elif url:
        return do_work(url=url, download_dir=download_dir)
    else:
        # Call form works identically on Python 2 and 3 (the original
        # print statement was Python-2-only).
        print("no arguments")
Exemplo n.º 2
0
    def test_image_good(self):
        """A valid source image yields a 100x100 thumbnail."""
        source = Image.open(
            os.path.join(os.path.dirname(__file__), 'rainier.jpg'))
        thumbnail = Thumbnailer().make_thumbnail(source)

        assert thumbnail.size == (100, 100)
        # Pop up viewers so both images can be eyeballed manually.
        source.show()
        thumbnail.show()
Exemplo n.º 3
0
def main(cache_dir):
    """Build the full static site/data bundle for a set of geocaches.

    Steps: fetch (or reuse cached) cache/log JSON via the OKAPI client,
    download missing preview images, scale thumbnails, then emit the
    SQLite DB, log data, list pages, RSS feed and index page — all under
    *cache_dir*.
    """
    okapi = Okapi(OC_OKAPI_KEY, user_agent=USER_AGENT)

    # Ensure the on-disk cache layout exists.
    os.makedirs(cache_dir, exist_ok=True)
    os.makedirs(os.path.join(cache_dir, "json"), exist_ok=True)
    os.makedirs(os.path.join(cache_dir, "orig"), exist_ok=True)
    os.makedirs(os.path.join(cache_dir, "small"), exist_ok=True)
    os.makedirs(os.path.join(cache_dir, "big"), exist_ok=True)

    file_name = os.path.join(cache_dir, "json", "caches.json")
    if os.path.isfile(file_name):
        # Reuse previously fetched cache data.
        json_data = load_json(file_name)
    else:
        print("-- downloading query...")
        oc_codes = download_query(OC_USERNAME, OC_PASSWORD, OC_QUERYID)
        # Optionally merge hand-maintained extra OC codes; the file is
        # allowed to be absent (best-effort).
        try:
            with open(MANUAL_CACHES_FILE, "r") as f:
                for oc_code in f:
                    oc_code = oc_code.strip()
                    if oc_code.startswith("OC"):
                        print("-- adding manual code {}".format(oc_code))
                        oc_codes.append(oc_code)
        except IOError:
            pass

        print("-> codes: {}".format(len(oc_codes)))
        fields = [
            'code', 'name', 'location', 'status', 'url', 'owner', 'founds',
            'date_hidden', 'date_created', 'short_description', 'description',
            'images', 'preview_image', 'internal_id'
        ]
        json_data = okapi.get_caches(oc_codes, fields)
        store_json(file_name, json_data)

    print("-- analyzing cache data...")
    caches = load_caches(json_data)
    caches = sorted(caches, key=lambda c: c._date, reverse=True)
    print("-> caches: {}".format(len(caches)))

    print("-- analyzing log data...")
    total_logs = 0
    logs_without_coords = 0
    for cache in caches:
        file_name = os.path.join(cache_dir, "json", f"{cache._code}-logs.json")
        if os.path.isfile(file_name):
            json_data = load_json(file_name)
        else:
            fields = [
                'uuid', 'date', 'user', 'type', 'comment', 'images',
                'internal_id'
            ]
            json_data = okapi.get_logs(cache._code, fields)
            store_json(file_name, json_data)
        cache._logs = load_logs(json_data)

        for log in cache._logs:
            total_logs += 1
            if log._coordinates is None:
                logs_without_coords += 1
    print("-- logs without coordinates: {}/{}".format(logs_without_coords,
                                                      total_logs))

    print("-- downloading missing images...")
    downloader = Downloader(threads=4, user_agent=USER_AGENT)
    thumbnailer = Thumbnailer(threads=4)
    # Pull the file extension from the URL, ignoring any query string.
    # Raw string fixes the invalid escape sequences of the original
    # pattern; compiled once instead of on every loop iteration.
    extension_re = re.compile(r'^.*\.([^.\?]+)(\?.*)?$')
    for cache in caches:
        if cache._preview_image is not None:
            extension = 'noext'
            m = extension_re.match(cache._preview_image)
            if m:
                extension = m.group(1)
            raw_image = '{}/{}/{}.{}'.format(cache_dir, "orig", cache._code,
                                             extension)
            downloader.add_job(cache._preview_image, raw_image)
            thumb_small = '{}/{}/{}.jpg'.format(cache_dir, "small",
                                                cache._code)
            thumbnailer.add_job(raw_image, thumb_small, SIZE_SMALL)
            thumb_big = '{}/{}/{}.jpg'.format(cache_dir, "big", cache._code)
            thumbnailer.add_job(raw_image, thumb_big, SIZE_BIG)
    downloader.run()

    print("-- scaling images...")
    thumbnailer.run()

    print("-- creating files...")
    create_db(caches, os.path.join(cache_dir, "safari.sqlite"))
    collect_logs(caches, os.path.join(cache_dir, "log-data.js"))
    createlist(caches, 30, cache_dir)
    create_feed(caches, os.path.join(cache_dir, "feed.xml"))
    create_sidebar(caches, "static/index.html",
                   os.path.join(cache_dir, "index.html"), cache_dir)
Exemplo n.º 4
0
 def test_image_bad(self):
     """make_thumbnail must raise when handed a non-image object."""
     t = Thumbnailer()
     # Unused `thumb` binding removed; only the raising call matters.
     with pytest.raises(Exception):
         t.make_thumbnail("Not an image")
Exemplo n.º 5
0
from flask import Flask, send_file
from flask_restful import Resource, Api, reqparse
from flask_cors import CORS
import numpy as np
from data_backend import Dataset as HDF_Dataset
from dataset_manager import DatasetManager
from thumbnailer import Thumbnailer
from utils import merge_overlapping_filters

# Location of the HDF5 datasets and the URL prefix of the REST API.
DATASET_PATH = "./datasets"
API_BASE_STR = "/api/v1"

dataset_manager = DatasetManager(DATASET_PATH)

# Thumbnail store starts from a clean directory on every launch.
thumbnailer = Thumbnailer("./thumbnails")
thumbnailer.clean()

dataset_list = []
for dset_index, name in enumerate(dataset_manager.get_dataset_names()):
    dset = HDF_Dataset(DATASET_PATH, name)
    dataset_list.append({
        "id": dset_index,
        "name": name,
        "device": {
            "name": dset.device.name,
            "version": dset.device.version
        },
        "subsets": [
            {
                "id": subset_index,
                "name": subset,