Example #1
0
def main():
    """Entry point: install SIGINT handling, parse CLI args, dispatch the sub-command."""
    from mylib.osutil import ensure_sigint_signal
    ensure_sigint_signal()
    parser = argument_parser()
    namespace = parser.parse_args()
    # Sub-parsers attach a `func` attribute; when no sub-command was given,
    # fall back to the default command handler.
    handler = getattr(namespace, 'func', cmd_mode_func)
    handler(namespace)
Example #2
0
#!/usr/bin/env python

import sys
import logging

from mylib.hentai import HentaiCafeKit
from mylib.misc import LOG_FMT_MESSAGE_ONLY
from mylib.osutil import ensure_sigint_signal

logging.basicConfig(level=logging.INFO, format=LOG_FMT_MESSAGE_ONLY)

if __name__ == '__main__':
    # Install Ctrl-C handling before any work starts.
    ensure_sigint_signal()
    target_uri = sys.argv[1]
    # Optional second CLI argument selects the worker count; default is 5.
    worker_count = int(sys.argv[2]) if len(sys.argv) >= 3 else 5
    kit = HentaiCafeKit(worker_count)
    kit.save_entry_to_cbz(target_uri)
Example #3
0
def main(url: str = None):
    """Download every photo of a photosmasters.com album.

    Configures logging from the parsed CLI arguments, resolves the album
    URL (from *url* when given, otherwise from the CLI), scrapes each
    album page, and downloads every photo using a bounded pool of worker
    threads.  Progress/metadata are persisted in the ``finally`` block so
    they survive Ctrl-C.

    :param url: album URL; when falsy, the URL is taken from the parsed
        CLI arguments instead (despite the ``str`` hint, ``None`` is the
        default).
    """
    ensure_sigint_signal()
    args = parse_args()
    # Get verbose level from args
    if args.verbose:
        lvl = logging.DEBUG
        fmt = LOG_FMT
    else:
        lvl = logging.INFO
        fmt = LOG_FMT_MESSAGE_ONLY
    logging.basicConfig(
        stream=sys.stderr,
        level=lvl,
        format=fmt,
    )
    # Get URL from args
    if url:
        album_url = url
    else:
        album_url = args.url
        # Normalize a thumbnails-page URL down to just `...album=<id>`;
        # anything that doesn't match the site pattern aborts the run.
        if re.search(r'photosmasters\.com/thumbnails\.php\?album=', album_url):
            album_url = re.sub(r'(^.*?album=\d*).*$', r'\1', album_url)
        else:
            _logger.warning('NOT A PHOTOSMASTERS.COM URL!')
            sys.exit()
        db['url'] = album_url
    global server
    # Keep the scheme+host part globally: workers use it to build
    # absolute photo URLs from relative hrefs below.
    server, _ = url_split(album_url)
    # Parse album page
    album_soup, _ = soup(album_url)
    # A `cpg_message_warning` div on the page means the album id does not exist.
    if album_soup('div', class_='cpg_message_warning'):
        _logger.warning('ALBUM NOT EXISTS!')
        sys.exit()
    album_title = album_soup.title.contents[0]
    # Strip the " - photosmasters.com" suffix, then unwrap a trailing
    # "(...)" group if present, to get a clean album name.
    album_name = re.sub(r'^(.*) - photosmasters\.com', r'\1', album_title)
    album_name = re.sub(r'^.* \((.*)\)', r'\1', album_name)
    album_name = album_name.strip()
    db['name'] = album_name
    _logger.info(album_title)
    # Get album directory to store downloaded photos
    album_id = get_query_dict(album_url)['album']
    album_dir = os.path.join(args.dir, 'photosmasters',
                             '{} - {}'.format(album_id, album_name))
    if not os.path.exists(album_dir):
        os.makedirs(album_dir)
    _logger.info('{} -->> {}'.format(album_url, album_dir))
    # Meta
    info_file = os.path.join(album_dir, INFO_FILE)
    db_file = os.path.join(album_dir, DB_FILE)
    restore_db(db_file)
    # First `tableh1` cell holds the counters text; assumes a 5-token
    # layout with image count at index 1 and page count at index 4 —
    # TODO confirm against the live page markup.
    totals = album_soup('td', class_='tableh1')[0].contents[0]
    _, total_images, _, _, total_pages = totals.split()
    global stat
    # Shared progress state read by display_stat() and the workers.
    stat = {
        'dd': album_dir,  # download directory
        'at': album_title,  # title
        'ti': int(total_images),  # total images
        'tp': int(total_pages),  # total pages
        'li': 0,  # last image
        'lp': 0,  # last page
    }
    display_stat()
    # Let's roll out
    try:
        for _ in range(stat['tp']):
            stat['lp'] += 1
            page_url = '{}&page={}'.format(album_url, stat['lp'])
            _logger.debug('Page: {}'.format(page_url))
            page_soup, _ = soup(page_url)
            thumbnails = page_soup('td', class_='thumbnails')
            for thumb in thumbnails:
                # A thumbnail cell without a nested <td> marks the end of
                # real entries on this page (padding cells follow).
                if not thumb.td:
                    break
                stat['li'] += 1
                thumb_title = thumb.td.span.contents[0]
                # Swap the anchor fragment for the fullsize query flag to
                # get the direct full-resolution photo URL.
                photo_url = server + '/' + thumb.a['href'].replace(
                    '#top_display_media', '&fullsize=1')
                _logger.debug('Photo: {}'.format(photo_url))
                photo_id = get_query_dict(photo_url)['pid']
                # Go!
                th = Thread(target=download,
                            args=(photo_id, photo_url, thumb_title))
                # Throttle: busy-wait until the live thread count drops
                # below the configured limit, reaping finished workers.
                while len(thl) >= args.threads:
                    for t in thl:
                        if not t.is_alive():
                            thl.remove(t)
                    sleep(0.1)
                display_stat()
                th.start()
                thl.append(th)
                sleep(0.1)
    except KeyboardInterrupt:
        _logger.info('Wait: {} threads to finish.'.format(len(thl)))
        # SystemExit propagates after the finally block below runs.
        sys.exit(ExitCode.CTRL_C)
    finally:
        # Always drain outstanding workers and persist progress/metadata.
        for th in thl:
            # NOTE(review): `reunion` is not a stdlib threading.Thread
            # method — presumably a custom join(); confirm which Thread
            # class is imported here.
            th.reunion()
        save_db(db_file)
        save_info(info_file)