Example #1
def cal_similarity(image_name, image_files, directory):
    # Compare image_name against every other image in the directory and
    # keep only the two highest SSIM scores.
    original = io.imread(os.path.join(directory, image_name))
    original = rgb2gray(original)
    similarity = {}
    for image in image_files:
        if image_name != image:
            compare = rgb2gray(io.imread(os.path.join(directory, image)))
            sim = ssim(original, compare)
            if len(similarity) >= 2:
                # already holding two entries: evict the weakest only if
                # the new score beats it, otherwise skip this image
                min_ssim = min(similarity, key=similarity.get)
                if sim > similarity[min_ssim]:
                    del similarity[min_ssim]
                else:
                    continue
            similarity[image] = sim

            # update the cached top-2 list of the compared image as well
            if image in redis_cache.keys():
                image_similarity = pickle.loads(redis_cache.get(image))
                if len(image_similarity) < 2:
                    # still room: just record the new score
                    image_similarity[image_name] = sim
                    redis_cache.set(image, pickle.dumps(image_similarity, pickle.HIGHEST_PROTOCOL))
                else:
                    # full: replace the weakest entry only if the new score beats it
                    min_ssim = min(image_similarity, key=image_similarity.get)
                    if sim > image_similarity[min_ssim]:
                        del image_similarity[min_ssim]
                        image_similarity[image_name] = sim
                        redis_cache.set(image, pickle.dumps(image_similarity, pickle.HIGHEST_PROTOCOL))
    return similarity
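
This snippet relies on a module-level `redis_cache` client and on scikit-image helpers that the excerpt does not show. A minimal setup sketch, assuming scikit-image provides `io`, `rgb2gray`, and `ssim` and that Redis runs locally with default settings (the connection details are placeholders, not taken from the project):

import os
import pickle

import redis
from skimage import io
from skimage.color import rgb2gray
from skimage.metrics import structural_similarity as ssim

# assumed module-level client; host/port/db are placeholders
redis_cache = redis.StrictRedis(host='localhost', port=6379, db=0)

With this in place, `cal_similarity(name, files, directory)` returns a dict mapping the two most similar file names to their SSIM scores.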
Example #2
File: client.py  Project: luthy2/tinypin
def get_content(url):
    # serve from the cache when possible
    if redis_cache.get(url):
        resp = redis_cache.get(url)
        resp = json.loads(resp.decode('utf-8'))
        print resp, "item loaded from cache"
    else:
        # ask Embedly first; fall back to lassie if it returns no raw payload
        resp = cli.oembed(url, raw=True, words=30)
        if resp.get('raw'):
            r = redis_cache.set(url, resp.get('raw'))
            print 'item cached:', r
        else:
            resp = lassie.fetch(url)
            j = json.dumps(resp)
            r = redis_cache.set(url, j)
    print resp
    if resp:
        # pick a renderer based on the provider / oembed type
        if resp.get("provider_name") == "Twitter":
            return render_twitter(url)
        elif resp.get("provider_name") == "YouTube":
            return render_youtube(resp.get("html"))
        elif resp.get("type") == "rich":
            print "rich"
            # aspect ratio as a percentage for the responsive embed container
            ratio = (float(resp.get("height", 1)) / resp.get("width", 1)) * 100
            print ratio
            if ratio <= 0:
                ratio = 100
            return render_template("video.html", content=resp.get("html"), ratio=str(ratio))
        elif resp.get("type") == "video":
            print "video"
            ratio = (float(resp.get("height", 1)) / resp.get("width", 1)) * 100
            print ratio
            if ratio <= 0:
                ratio = 100
            return render_template("video.html", content=resp.get("html"), ratio=ratio)
        elif resp.get("type") == "link":
            return render_template("article.html", title=resp.get("title"),
                                   image=resp.get("thumbnail_url"),
                                   description=resp.get("description"),
                                   _url=resp.get("url"),
                                   provider=resp.get("provider_name"))
        elif resp.get("type") == "photo":
            print "photo"
            return render_template("photo.html", _url=str(resp.get("url")), source=url)
        else:
            return render_nostyle(url)
    else:
        return render_nostyle(url)
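
The calls to `render_template` and the renderer helpers suggest this function backs a Flask view. A small usage sketch, assuming a Flask `app` object and a hypothetical `/pin` route (both are illustrative, not taken from tinypin):

from flask import Flask, request

app = Flask(__name__)

@app.route('/pin')
def pin():
    # e.g. GET /pin?url=https://example.com/some/page
    return get_content(request.args.get('url'))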
Example #3
File: client.py  Project: luthy2/tinypin
def render_nostyle(url):
    # fallback renderer: scrape metadata with lassie when Embedly had nothing useful
    resp = redis_cache.get(url)
    print resp, type(resp)
    if not resp:
        resp = lassie.fetch(url)
        # cache the serialized response, but keep working with the dict
        redis_cache.set(url, json.dumps(resp))
    else:
        resp = json.loads(resp)
    thumbnail = resp.get('images')
    if thumbnail:
        thumbnail = thumbnail[0].get('src')
    title = resp.get('title')
    description = resp.get('description')
    parse_obj = urlparse(url)
    provider = parse_obj.netloc
    return render_template('article.html', _url=url, image=thumbnail, title=title,
                           description=description, provider=provider)
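
Examples #2 and #3 lean on module-level imports in client.py that the excerpts omit. A plausible header, assuming the lassie and embedly packages, Flask's render_template, and the Python 2 stdlib urlparse (the API key is a placeholder):

import json

import lassie
from embedly import Embedly
from flask import render_template
from urlparse import urlparse

cli = Embedly('EMBEDLY_API_KEY')  # placeholder key; the real project configures this elsewhere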
Example #4
def cache_urls(urls):
    # check which urls are not in the cache yet
    not_cached_urls = []
    for u in urls:
        r = redis_cache.get(u)
        if not r:
            not_cached_urls.append(u)

    # takes a list and splits it into a list of lists of size n
    def chunks(u, n=20):
        for i in xrange(0, len(u), n):
            yield u[i:i + n]

    # send each batch to the embedly api and cache the results
    for batch in list(chunks(not_cached_urls)):
        resp = cli.oembed(batch, raw=True, words=30)
        for i in resp:
            c = redis_cache.set(i.get('original_url'), i.get('raw'))
        print 'Item cached', c
    return True
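
A short usage sketch for `cache_urls`, warming the cache before individual lookups; the URLs are illustrative and `cli`/`redis_cache` are assumed to be configured as in the earlier sketches:

urls = ['https://www.youtube.com/watch?v=dQw4w9WgXcQ',
        'https://twitter.com/jack/status/20']
cache_urls(urls)                # fetches and caches only the URLs Redis does not have yet
print redis_cache.get(urls[0])  # the raw oembed JSON is now served straight from the cache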
Example #5
def details(image_name):
    # show an image together with its cached most-similar images, best match first
    similarity = {}
    if image_name in redis_cache.keys():
        similarity = pickle.loads(redis_cache.get(image_name))
        # sort by SSIM score, highest first
        similarity = collections.OrderedDict(
            sorted(similarity.items(), key=lambda x: x[1], reverse=True))
    return render_template("details.html", image=image_name, similar=similarity)