Example #1
def CreateAvatar(user):
    image_w = 500
    image_h = 500

    color = (random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))

    img = Image.new('RGB', (image_w, image_h), color=color)
    draw = ImageDraw.Draw(img)
    font = ImageFont.truetype(config.get('ROOT_PATH')+'/static/fonts/Roboto-Bold.ttf', size=300)

    user_letters = user['first_name'][0]+user['last_name'][0]

    text_w, text_h = draw.textsize(user_letters, font=font)  # textsize was removed in Pillow 10; textbbox is the modern replacement

    draw.text(
        (
            (image_w - text_w)/2,
            (image_h - text_h - 80)/2,
        ),
        user_letters, 
        font=font, 
        fill=(int(color[0] - (color[0]/10)), int(color[1] - (color[1]/10)), int(color[2]  - (color[2]/10))),
    )

    image_path = user['id']+'/avatar.png'

    img.save(config['UPLOAD_FOLDER_PROFILE']+image_path)

    webp.cwebp(config['UPLOAD_FOLDER_PROFILE']+image_path, config['UPLOAD_FOLDER_PROFILE']+user['id']+'/avatar.webp', "-q 90")

    return image_path
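Example #1 is an excerpt from a larger Flask-style project, so its imports and config mapping live elsewhere. A minimal sketch of the names it assumes, with hypothetical paths standing in for the project's real configuration:

import random
from PIL import Image, ImageDraw, ImageFont
from webptools import webplib as webp

# Hypothetical stand-in for the project's config; the real keys point at
# the application root and the profile upload directory.
config = {
    'ROOT_PATH': '/srv/app',
    'UPLOAD_FOLDER_PROFILE': '/srv/app/uploads/profile/',
}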
Example #2
def SaveImage(userId, type):
    if type == 'profile':
        file_name, file_ext = os.path.splitext(request.files['avatarimg'].filename)
        picture_path = config['UPLOAD_FOLDER_PROFILE']+str(userId)+'/avatar'+file_ext  # file_ext already includes the leading dot
    elif type == 'cover':
        file_name, file_ext = os.path.splitext(request.files['coverimg'].filename)
        picture_path = config['UPLOAD_FOLDER_PROFILE']+str(userId)+'/cover'+file_ext

    if type == 'profile':
        i = Image.open(request.files['avatarimg'])
        output_size = (500, 500)
        i.thumbnail(output_size)
    elif type == 'cover':
        i = Image.open(request.files['coverimg'])

    i.save(picture_path)

    if type == 'profile':
        webp.cwebp(config['UPLOAD_FOLDER_PROFILE']+str(userId)+'/avatar'+file_ext,
                   config['UPLOAD_FOLDER_PROFILE']+str(userId)+'/avatar.webp', "-q 80")
        return str(userId)+'/avatar.webp'
    elif type == 'cover':
        webp.cwebp(config['UPLOAD_FOLDER_PROFILE']+str(userId)+'/cover'+file_ext,
                   config['UPLOAD_FOLDER_PROFILE']+str(userId)+'/cover.webp', "-q 80")
        return str(userId)+'/cover.webp'
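Note that os.path.splitext returns the extension with its leading dot, which is why the paths above append 'avatar'+file_ext rather than 'avatar.'+file_ext. A quick check:

import os.path

print(os.path.splitext("photo.jpg"))        # ('photo', '.jpg')
print(os.path.splitext("archive.tar.gz"))   # ('archive.tar', '.gz')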
Example #3
def webp(x):
    x2 = x.replace('.png', '') + '.webp'
    webplib.cwebp(x, x2, '-q 70')
    if removepng:
        os.system('rm -f ' + x)
    if sendimages2ftp:
        ftpput(x2)
    return x2
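The webp(x) helper above relies on names defined at module level in its source project: webplib (the webptools module), the removepng and sendimages2ftp flags, and an ftpput helper. A hedged sketch of plausible stand-ins, just to make the excerpt self-contained:

import os
from webptools import webplib

# Hypothetical module-level settings; the real project defines its own.
removepng = True        # delete the source .png after conversion
sendimages2ftp = False  # upload the .webp to an FTP server afterwards

def ftpput(path):
    # Placeholder for the project's FTP upload helper.
    raise NotImplementedError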
Example #4
def SaveImage(post_id):
    # if(form_img.data):
    file_name, file_ext = os.path.splitext(request.files['image'].filename)
    picture_fn = 'post_' + str(post_id) + file_ext
    picture_path = os.path.join(config['UPLOAD_FOLDER_POST'], picture_fn)

    i = Image.open(request.files['image'])
    i.save(picture_path)
    webp.cwebp(
        os.path.join(config['UPLOAD_FOLDER_POST'], picture_fn),
        os.path.join(config['UPLOAD_FOLDER_POST'],
                     'post_' + str(post_id) + '.webp'), "-q 80")
    os.remove(os.path.join(config['UPLOAD_FOLDER_POST'], picture_fn))

    picture_fn = 'post_' + str(post_id) + '.webp'

    return picture_fn
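SaveImage in Example #4 reads the upload from Flask's request.files, so it only works inside a request. A hedged usage sketch; the 'image' form field matches the code above, while the route and response shape are assumptions:

from flask import Flask, request  # request is used inside SaveImage above

app = Flask(__name__)

@app.route('/posts/<int:post_id>/image', methods=['POST'])
def upload_post_image(post_id):
    # The client is expected to post the file under the 'image' field.
    picture_fn = SaveImage(post_id)
    return {'image': picture_fn}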
Example #5
    def transcode_file(self, worker_id, files):
        processed = 0

        for file in files:

            if self.should_die():
                sys.exit(1)

            original_file = str(file)

            webp_file = self.get_new_file_path('{0}.webp'.format(original_file))

            self.progress += 1
            processed += 1
            result = '✓'

            if self.purge:
                if file_exists(webp_file):
                    print('({0}/{1}) Deleting existing {2} {3}'.format(self.progress, self.file_no, '?', webp_file))
                    remove_file(webp_file)

            if not file_exists(webp_file):
                try:
                    create_file_path(webp_file)
                    webp.cwebp(original_file, webp_file, self.config)
                except Exception as e:
                    print('Failed transcoding file: {0} {1}'.format(webp_file, repr(e)))
                    continue

            if self.check_size:
                if os.path.getsize(webp_file) > os.path.getsize(original_file):
                    remove_file(webp_file)
                    result = '×'

            print('({0}/{1}) {2} {3}'.format(self.progress, self.file_no, result, webp_file))

        print('Thread {0} Completed. Processed {1}/{2}'.format(worker_id, processed, len(files)))

        return True
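transcode_file is a worker method from a batch-transcoding class; should_die, get_new_file_path and self.config belong to that class, while file_exists, create_file_path and remove_file are small helpers from its module. Hedged stand-ins for the file helpers, under the assumption that they are thin os/pathlib wrappers:

import os
from pathlib import Path

# Hypothetical helpers matching how the excerpt uses them.
def file_exists(path):
    return os.path.isfile(path)

def remove_file(path):
    os.remove(path)

def create_file_path(path):
    # Make sure the directory that will hold `path` exists.
    Path(path).parent.mkdir(parents=True, exist_ok=True)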
Example #6
def converter(queue_in, path):  # Queue handler running in a separate process
    # Lower the process priority via libc (nice value 20)
    pid = libc.getpid()
    libc.setpriority(0, pid, 20)

    filter = {}
    moved = {}

    while True:
        event = queue_in.get()  # Take the next item from the queue
        mask = event.mask
        is_dir = event.dir
        item = event.pathname

        if mask == "SIG_TERM":
            break

        # Drop filter entries for events older than 2 seconds
        for key, value in list(filter.items()):
            if value['time'] + 2 < time.time():
                del filter[key]

        for p in path:
            if (item + "/").startswith(p + "/"):
                #Init
                extension = Path(item).suffix.lower()
                dest = p + result_path
                dest_item = item.replace(p, dest)
                base_dest_item = Path(dest_item).parent
                base_item = Path(item).parent

                uid = os.stat(p).st_uid
                gid = os.stat(p).st_gid

                if not Path(dest).is_dir():  # create the /webp directory if it does not exist
                    Path(dest).mkdir(parents=True, exist_ok=True)
                    os.chown(dest, uid, gid)

                if item.startswith(dest):  # skip items that are already inside /webp
                    break

                if mask == "IN_MOVED_FROM":  # Перемещение файла или каталога
                    if event.wait == False:
                        if item in moved:  # Внутреннее - переименовываем
                            # Проверить перемещение из разных точек наблюдения
                            moved_dest_item = moved[item].replace(p, dest)
                            log(p,
                                "Rename: " + dest_item,
                                mask="IN_MOVED_FROM Rename")
                            Path(dest_item).rename(moved_dest_item)
                            del moved[item]
                            break

                        else:  # moved outside the watched tree: delete
                            log(p,
                                "Delete: " + dest_item,
                                mask="IN_MOVED_FROM Delete")
                            if Path(dest_item).is_file():
                                Path(dest_item).unlink()
                                rm_empty_dir(base_dest_item)
                            else:
                                rm_tree(dest_item)
                            break

                    if event.wait == True:  # wait for IN_MOVED_TO to learn the direction of the move
                        event.wait = False
                        queue_in.put(event)
                        break

                if mask == "IN_DELETE" and Path(dest_item).is_file():
                    log(p, "Delete: " + dest_item, mask="IN_DELETE Delete")
                    Path(dest_item).unlink()  # delete the mirrored file

                    # Remove the subdirectory if it is now empty
                    if rm_empty_dir(base_dest_item):
                        log(p,
                            "Delete dir: " + str(base_dest_item),
                            mask="IN_DELETE Delete dir")

                # Skip duplicate events
                if Path(item).exists():
                    if item in filter and filter[item]['st_mtime'] == Path(
                            item).stat(
                            ).st_mtime and filter[item]['mask'] == mask:
                        break
                    filter[item] = {
                        'time': time.time(),
                        'st_mtime': Path(item).stat().st_mtime,
                        'mask': mask
                    }
                else:
                    break

                if mask == "IN_MOVED_TO":
                    src_pathname = getattr(event, 'src_pathname', False)

                    if src_pathname != False:  # a rename: handled on the next iteration, though doing it here would be better
                        moved[src_pathname] = item
                        break
                    else:  # a move
                        if Path(item).is_dir():  # a directory: run the tree scanner
                            convert_tree(p)
                            break
                        else:  # a file: start the converter
                            mask = "IN_CLOSE_WRITE"

                if mask == "IN_CLOSE_WRITE" and Path(item).is_file():
                    # Filter out glitches and duplicates; re-check the result of the previous pass
                    if (Path(dest_item).is_file()
                            and Path(dest_item).stat().st_mtime > Path(item).stat().st_mtime):
                        break
                    if not Path(item).is_file():
                        break

                    log(p,
                        "Converting: " + dest_item,
                        mask="IN_CLOSE_WRITE Converting")

                    if not Path(base_dest_item).is_dir():  # create the subdirectory if it does not exist
                        Path(base_dest_item).mkdir(parents=True, exist_ok=True)
                        os.chown(base_dest_item, uid, gid)

                    if extension == '.jpg' or extension == '.jpeg':
                        webp.cwebp(item, dest_item,
                                   "-quiet -pass 10 -m 6 -mt -q 80")
                        os.chown(dest_item, uid, gid)

                    if extension == '.png':
                        webp.cwebp(
                            item, dest_item,
                            "-quiet -pass 10 -m 6 -alpha_q 100 -mt -alpha_filter best -alpha_method 1 -q 80"
                        )
                        os.chown(dest_item, uid, gid)

                    break
                break

        # Mark the queue_in item as processed with task_done
        queue_in.task_done()
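converter expects a few things from its module: a ctypes handle to libc for setpriority, pyinotify-style events carrying mask/dir/pathname attributes, and helpers such as log, rm_empty_dir, rm_tree, convert_tree and the result_path suffix. A hedged sketch of the libc setup only, since the rest is project-specific:

import ctypes
import ctypes.util

# Hypothetical libc handle used for libc.getpid() / libc.setpriority();
# 0 as the first setpriority argument means PRIO_PROCESS.
libc = ctypes.CDLL(ctypes.util.find_library('c'), use_errno=True)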
Example #7
def builder():
    with open('app.yaml', 'r') as stream:
        o = load(stream, Loader=Loader)
        app_config = o['app']
        blueprints = o['blueprints']
        models = o['models']

    # Sitemap Data
    sitemap = []

    # Build static pages
    for template_key in blueprints:
        template_options = blueprints[template_key]

        # one-to-one
        if type(template_options) is dict:
            build_template(template_key, {
                **app_config,
                **template_options
            }, template_key)
        # one-to-many
        else:
            for page in template_options:
                page_name = [x for x in page.keys()][0]
                page_options = page[page_name] or {}
                build_template(template_key, {
                    **app_config,
                    **page_options
                }, page_name)
                sitemap.append(page_name)

    # Build modeled pages
    for model_key in models:
        model = models[model_key]
        templates = model['templates']
        items = model['items']
        for template in templates:
            t = templates[template]
            if type(t) is str:  # list views
                build_template(template, {**app_config, **{'items': items}}, t)
                sitemap.append(t)
            else:  # detail views
                t_search = [k for k in t][0]
                t_glob = t[t_search]

                kvs = {}
                for page_name in items:
                    item = items[page_name]

                    if t_glob.endswith('*'):
                        page_options = {**app_config, **item}
                        build_template(template, page_options,
                                       t_glob.replace('*', page_name))
                        sitemap.append(t_glob.replace('*', page_name))
                    else:
                        for v in item[t_search]:
                            if v not in kvs:
                                kvs[v] = []
                            kvs[v].append({**item, **{'key': page_name}})

                if t_glob.endswith('[*]'):
                    for k in kvs:
                        page_options = {
                            **app_config,
                            **{
                                'kvs': kvs[k],
                                'title': k
                            }
                        }
                        build_template(template, page_options,
                                       t_glob.replace('[*]', k))
                        sitemap.append(t_glob.replace('[*]', k))

    # Compile special items
    with open('dist/sitemap.xml', 'w+') as stream:
        env = Environment(loader=PackageLoader(__name__, 'assets/templates'))
        template = env.get_template('sitemap.xml')
        html = template.render({
            **app_config,
            **{
                'urls': sitemap,
                'date': '2020-04-13'
            }
        })
        stream.write(html)

    with open('dist/robots.txt', 'w+') as stream:
        env = Environment(loader=PackageLoader(__name__, 'assets/templates'))
        template = env.get_template('robots.txt')
        html = template.render(**app_config)
        stream.write(html)

    with open('dist/404.html', 'w+') as stream:
        env = Environment(loader=PackageLoader(__name__, 'assets/templates'))
        template = env.get_template('404.html')
        html = template.render(**app_config)
        stream.write(html)

    # Minify Images
    img_task = False
    for (root, dirs, files) in os.walk('assets/static/img'):
        if files:
            img_task = True
        break

    if img_task:
        os.system(
            "imagemin --plugin=pngquant assets/static/img/*.png --out-dir=assets/static/img/min"
        )
        os.system(
            "imagemin --plugin=mozjpeg assets/static/img/*.jpeg --out-dir=assets/static/img/min"
        )
        os.system(
            "imagemin --plugin=mozjpeg assets/static/img/*.jpg --out-dir=assets/static/img/min"
        )
        os.system(
            "imagemin --plugin=gifsicle assets/static/img/*.gif --out-dir=assets/static/img/min"
        )
        #os.system("imagemin --plugin=svgo assets/static/img/*.svg --out-dir=assets/static/img/min")

        for (root, dirs, files) in os.walk('assets/static/img'):
            for file in files:
                old_fp = root + '/' + file
                raw_fp = root + '/raw/' + file
                webp_fp = root + '/webp/' + '.'.join(
                    file.split('.')[:-1]) + '.webp'

                shutil.move(old_fp, raw_fp)

                if not os.path.exists(webp_fp):
                    webp.cwebp(raw_fp, webp_fp, "-q 80")
            break

    # Collect static assets
    try:
        shutil.rmtree('dist/static')
    except FileNotFoundError:  # dist/static may not exist on a clean build
        pass
    shutil.copytree(src='assets/static', dst='dist/static')

    # CSS Minification
    static_cache = {}
    for (root, dirs, files) in os.walk('dist/static/css'):
        for file in files:
            with open(root + '/' + file, 'r') as stream:
                css = csscompress(stream.read())
                m = hashlib.md5()
                m.update(str.encode(css))
                hashsum = m.hexdigest()
                new_file = "{}.{}.css".format(".".join(file.split('.')[:-1]),
                                              hashsum)
                with open('dist/static/css/' + new_file, 'w+') as stream:
                    stream.write(css)
                static_cache[file] = new_file
            os.remove(root + '/' + file)
        break

    # JS Minification
    for (root, dirs, files) in os.walk('dist/static/js'):
        for file in files:
            with open(root + '/' + file, 'r') as stream:
                js = jsminify(stream.read(), mangle=False)
                m = hashlib.md5()
                m.update(str.encode(js))
                hashsum = m.hexdigest()
                new_file = "{}.{}.js".format(".".join(file.split('.')[:-1]),
                                             hashsum)
                with open('dist/static/js/' + new_file, 'w+') as stream:
                    stream.write(js)
                static_cache[file] = new_file
            os.remove(root + '/' + file)
        break

    # Cache Busting
    for (root, dirs, files) in os.walk('dist'):
        for file in files:
            if file.endswith('html'):
                fp = root + '/' + file
                html = open(fp, 'r').read()
                for key in static_cache:
                    html = html.replace(key, static_cache[key])
                html = htmlminify(html, remove_comments=True)
                with open(fp, 'w+') as stream:
                    stream.write(html)
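The image step in Example #7 moves each original into a raw/ subdirectory and writes the WebP copy into webp/, so both directories need to exist before the loop runs; a minimal sketch of that one-time setup (the builder itself is assumed to have them already):

import os

# Hypothetical one-off setup for the directories the image step writes into.
for sub in ('raw', 'webp'):
    os.makedirs(os.path.join('assets/static/img', sub), exist_ok=True)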
Example #8
from webptools import webplib as webp

path = "/Users/vikas/Documents/aws/Screen Shot 2020-04-19 at 08.23.49.png"
print(webp.cwebp(path, "Screen08.23.49.webp", "-q 80"))
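A quick way to confirm the conversion in the last snippet succeeded is to check for the output file; a minimal sketch using the same output name:

import os

print(os.path.exists("Screen08.23.49.webp"))  # True once cwebp has produced the file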