Example #1
# stdlib imports used by this snippet; _curl, _util, _g and loc are
# module-level helpers and configuration from the enclosing module
from io import BytesIO
import urllib.parse

def search_exact(name='', have_path=False):
    """Fetch the exact FTP directory listing for *name* into a buffer."""
    buf = BytesIO()

    # need to unquote for LIST to work properly with nocwd
    name = urllib.parse.unquote(name)
    path = _util.create_nwo_path(name) if not have_path else ''

    c = _curl.curl_common_init(buf)

    #c.setopt(c.DIRLISTONLY, True)
    c.setopt(c.USE_SSL, True)
    c.setopt(c.SSL_VERIFYPEER, False)
    c.setopt(c.USERPWD, '{}:{}'.format(loc['USER'], loc['PASS']))
    c.setopt(c.PORT, loc['FTPPORT'])

    # when have_path is set, *name* already carries the full remote path,
    # so the MLOC prefix and the derived path stay empty
    ml = loc['MLOC'] if not have_path else ''

    path_noscheme = '{}{}{}/{}/'.format(loc['DOMAIN'], ml, path, name)

    _g.log.info('ftp://' + path_noscheme)

    return _curl.curl_to_buf('ftp://' + path_noscheme, 'FTP', c, buf)
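
A minimal usage sketch (not part of the original module), mirroring the call made in Example #2, where the returned buffer is decoded into listing text; 'some/path' is a placeholder for a match path obtained elsewhere:

# hypothetical call; 'some/path' stands in for a real match path
dirls = search_exact('some/path', True).getvalue().decode()
print(dirls)
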
Example #2
# stdlib imports used by this snippet; _curl, _parsers, _util, _out, _g, loc,
# breaks, search_query and search_exact come from the enclosing module
import json
import os

def get_listing(manga):
    badret = ('', '')

    if _g.conf._usecache:
        # XXX move this
        def match_dir(diriter, ldict):
            # walk the cached JSON tree one path component at a time;
            # once the components are exhausted, match the manga title
            # case-insensitively against mlow (set by the caller below)
            global mlow

            try:
                cdir = next(diriter)
            except StopIteration:
                for cdict in ldict:
                    if cdict['name'].lower() == mlow:
                        return (cdict['contents'], cdict['name'])
                return None

            for cdict in ldict:
                if cdict['name'] == cdir:
                    return match_dir(diriter, cdict['contents'])
            return None
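
        # the cache walk above assumes files.json holds nested lists of dicts,
        # roughly of this shape (hypothetical illustration, not the real file):
        #   [{'name': ..., 'contents': [{'name': ..., 'contents': [...]}, ...]}]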

        # default cache location unless the user configured one explicitly
        jsonloc = _g.conf._cachefile or os.path.join(
            _g.conf._home, '.cache', 'madodl', 'files.json')

        jsondirloc = os.path.dirname(jsonloc)

        if not os.path.exists(jsonloc):
            # first run: create the cache directory and fetch the JSON listing
            os.makedirs(jsondirloc, 0o770, exist_ok=True)
            _curl.curl_json_list(jsonloc, True)

        assert os.path.exists(jsonloc)

        path = _util.create_nwo_path(manga)
        d1, d2, d3 = path.split('/')
        mdir = None

        with breaks(open(jsonloc, errors='surrogateescape')) as f:
            jobj = json.load(f)

            for o in jobj[0].get('contents'):
                if o['name'] == 'Manga':
                    jobj = o['contents']
                    break

            global mlow
            mlow = manga.lower()
            mdir, title = match_dir(iter((d1, d2, d3)), jobj) or badret

            if not mdir:
                _g.log.warning("couldn't find title in JSON file. Trying "
                               "online query.")
                _g.conf._found_in_cache = False
                raise breaks.Break

            _g.conf._found_in_cache = True
            _g.conf._cururl = 'https://{}{}{}/{}/{}/{}'.format(loc['DOMAIN'],
                                            loc['MLOC'], d1, d2, d3, title)

            _g.log.info('\n-----\n{}-----'.format(mdir))

            path = '/'.join((path, title))

            return (mdir, title, path)

    qout = search_query(manga).getvalue().decode()
    qp   = _parsers.ParseQuery()
    qp.feed(qout)

    # FIXME:
    # this is a temporary workaround to
    # filter out non-manga results until
    # madokami allows for this granularity itself.
    qp.mresultnum = 0
    qp.mresults   = []
    for url, r in qp.results:
        if r.startswith('/Manga') and r.count('/') == 5:
            qp.mresults.append([url,r])
            qp.mresultnum += 1

    if qp.mresultnum == 0:
        _out.die('manga not found')

    if qp.mresultnum > 1:
        print('Multiple matches found. Please choose from the '
              'selection below:\n')
        i = 1
        for url, f in qp.mresults:
            print('{}: {}'.format(i, os.path.basename(f)))
            i += 1

        print()

        while True:
            try:
                ch = int(input('choice > '))
                if ch in range(1, i):
                    break
                print('Pick a number between 1 and {}'.format(i-1))
            except ValueError:
                print('Invalid input.')

        m = qp.mresults[ch-1][0]
        title = os.path.basename(qp.mresults[ch-1][1])
    else:
        m = qp.mresults[0][0]
        title = os.path.basename(qp.mresults[0][1])
        _out._('one match found: {}'.format(title))

    # the selected match already includes the remote path, so pass have_path=True
    dirls = search_exact(m, True).getvalue().decode()

    _g.log.info('\n-----\n{}-----'.format(dirls))

    return (dirls, title, m)
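
A minimal, hypothetical sketch of how the return value might be consumed; both return paths above yield a (listing-or-contents, title, path-or-match) tuple:

# hypothetical caller; 'Some Title' is a placeholder manga name
listing, title, where = get_listing('Some Title')
print('resolved title:', title)
print('listing obtained via:', where)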