Example #1
def search_by_file(name):
    """ search by file

        :param name:
        :return:
    """

    with open(name, "rb") as f:
        d = [s.strip() for s in cuelib.as_unicode(f.read())[0].split("\n")]
        s = ' '.join(d)
    #

    if not s:
        return name
    #

    return s
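
For illustration, a minimal usage sketch. It assumes a hypothetical file "queries.txt" whose lines hold free-form search terms, and that cuelib.as_unicode() returns a (text, encoding) tuple, as the [0] index above suggests:

# hypothetical file "queries.txt" containing the two lines
# "Aphex Twin" and "Selected Ambient Works"
query = search_by_file("queries.txt")
print(query)  # => "Aphex Twin Selected Ambient Works"
              #    (plus a trailing space if the file ends with a newline)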
Example #2
def main():
    """ main """

    global VERBOSE_LEVEL

    cwd = os.path.basename(os.getcwdu())
    cwd_search = search_by_dir(cwd)

    parser = argparse.ArgumentParser(description="search on discogs.com")
    parser.add_argument('search', help='search string', nargs='*')
    parser.add_argument('-a',
                        '--as_dir',
                        help='add current dir for search',
                        action="store_true")
    parser.add_argument('-A',
                        '--as_dir_log',
                        help='add current dir for search & output to .lst',
                        action="store_true")
    parser.add_argument('-l',
                        '--log',
                        help='save stdout to log-file, use "." for as-dir',
                        type=str,
                        action="store")
    parser.add_argument('-t',
                        '--tag_cue',
                        help='tag .cue for one release',
                        type=str,
                        action="store")
    parser.add_argument('-s',
                        '--tag_skip',
                        help='skip some tracks',
                        type=int,
                        action="store",
                        default=0)
    parser.add_argument(
        '-p',
        '--param',
        help='search parameter: <name>=<value> (e.g. type=artist)',
        action='append')
    parser.add_argument('-d',
                        '--deep',
                        help='deep search, "-dd" for labels',
                        action='count',
                        default=0)
    parser.add_argument('-i',
                        '--id',
                        help='client id [%r]' % CLIENT_ID,
                        default=CLIENT_ID)
    parser.add_argument('-q',
                        '--quality',
                        help='search quality threshold (0.0 .. 1.0)',
                        type=float,
                        default=0.1)
    parser.add_argument('-J',
                        '--make_json',
                        help='make .json',
                        action='store_true')
    parser.add_argument('-R',
                        '--raw_print',
                        help='print raw data (for debug)',
                        action='store_true')
    parser.add_argument('-P',
                        '--print_release',
                        help='print formatted release info',
                        action='store_true')
    parser.add_argument('-I',
                        '--object_id',
                        help='search by id, default for "type=release"',
                        action='store_true')
    parser.add_argument('-c',
                        '--cache',
                        help='use cache [%r]' % CACHE_USE,
                        action='store_true',
                        default=CACHE_USE)
    parser.add_argument('-C',
                        '--cache_db',
                        help='cache name [%r]' % CACHE_STORAGE,
                        default=CACHE_STORAGE)
    parser.add_argument('-U',
                        '--update_db',
                        help='update cache, "-UU" for full update',
                        action='count',
                        default=0)
    parser.add_argument('-T',
                        '--table_db',
                        help='table name for cache [%r]' % CACHE_TABLE,
                        default=CACHE_TABLE)
    parser.add_argument('-v',
                        '--verbose',
                        help='increase output verbosity, "-vv" for more',
                        action="count",
                        default=0)
    parser.add_argument('--timeout',
                        help='maximum timeout for network operation(s)',
                        type=float,
                        default=120.0)
    parser.add_argument('--images',
                        help='load release image(s)',
                        action="store_true")
    parser.add_argument('--all_images',
                        help='load all image(s)',
                        action="store_true")
    parser.add_argument('--user_token',
                        help='user token [%r]' % USER_TOKEN,
                        default=USER_TOKEN)
    parser.add_argument('--user_secret',
                        help='user secret [%r]' % USER_SECRET,
                        default=USER_SECRET)
    args = parser.parse_args()

    # global param
    VERBOSE_LEVEL = args.verbose
    logger("{%r}" % args, level=2)

    if args.quality < 0.0:
        args.quality = 0.1
    #

    if args.quality > 1.0:
        args.quality = 1.0
    #

    if args.as_dir_log:
        args.as_dir = True
        args.log = cwd + ".lst"
    #

    if args.log:
        if args.log == ".":
            args.log = cwd + ".lst"
        #

        # need unicode log-name
        name = args.log
        if isinstance(name, str):
            name = name.decode(DEFAULT_ENCODING)
        #

        logger("info: stdout => {%s}" % name)
        log = open(name, "w")
        sys.stdout = log
    #
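
    # For illustration: running with "-l ." in a directory named "Some Album"
    # (hypothetical) redirects stdout to "Some Album.lst"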

    if args.as_dir:
        args.search.append(cwd_search)
        logger("info: dir-search for {%s} => {%s}" % (cwd, cwd_search))
        # args.search.append(os.getcwdu())
    #

    dc = discogs_client.Client(args.id)
    dc.set_consumer_key(CONSUMER_KEY, CONSUMER_SECRET)

    if not (args.user_token and args.user_secret):
        logger("authorization (discogs.com):")
        access_token, access_secret, authorize_url = dc.get_authorize_url()
        logger("access_token{%s} access_secret{%s}" %
               (access_token, access_secret))
        logger("authorize_url{%s}" % authorize_url)
        verifier = raw_input("verifier> ")
        token, secret = dc.get_access_token(verifier)
        logger("user token{%s}" % token)
        logger("user secret{%s}" % secret)
        return None
    else:
        dc.set_token(args.user_token, args.user_secret)
    #

    # keep the client's original fetcher so the cache wrapper can delegate to it
    dc_fetcher = getattr(dc, "_fetcher", None)

    # setup verbose
    if VERBOSE_LEVEL > 1:
        dc.verbose = True
    #

    # setup cache
    if args.cache:
        logger("cache: db{%r}, table{%r}, update{%r}" %
               (args.cache_db, args.table_db, args.update_db),
               level=2)
        db = sqlitedict.open(filename=args.cache_db, tablename=args.table_db)
        cache = CacheFetcher(dc_fetcher,
                             db,
                             args.update_db,
                             commit_max=CACHE_SYNC,
                             compression=CACHE_COMPRESS)
        dc._fetcher = cache
    else:
        logger("cache: internal (temporary)")
        cache = CacheFetcherDict(dc_fetcher)
        dc._fetcher = cache
    #
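
    # For illustration: with -c, fetched responses are memoized in the
    # sqlitedict table selected by -C/-T; per the help text above, "-U"
    # updates the cache and "-UU" forces a full update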

    # parse params
    p = {}
    if args.param:
        for params in args.param:
            for param in params.split(","):
                name, _, value = param.partition("=")
                name = name.strip()
                value = value.strip()

                if not (name or value):
                    continue
                #

                if not value:
                    value = name
                    name = "type"
                #

                p[name.strip()] = value.strip()
            #
        #
    #
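
    # For illustration (hypothetical input): args.param == ["type=artist", "per_page=50"]
    # yields p == {'type': 'artist', 'per_page': '50'}; a bare entry such as
    # "-p master" is stored as type=master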

    s_type = p.get("type", "release")
    s_meth = getattr(dc, s_type, None)

    # all search strings must be unicode from here on
    logger("search [1]: %s" % args.search, level=1)
    args.search = [
        cuelib.as_unicode(s, DEFAULT_ENCODING, True)[0] for s in args.search
    ]
    logger("search [2]: %s" % args.search, level=1)

    # load info from db
    rt = []
    tt = time.time()
    deep_data = None
    for search in args.search:

        # if isinstance(search, unicode):
        #     search = search.encode(encoding='utf-8')
        # #

        # auto-detect: .cue & folder
        if search.lower().endswith(".cue") and os.path.isfile(search):
            old_search = search
            search = search_by_cue(old_search)
            logger("info: cue-search for {%s} => {%s}" % (old_search, search))
        elif os.path.isdir(search):
            old_search = search
            search = search_by_dir(os.path.basename(old_search))
            logger("info: dir-search for {%s} => {%s}" % (old_search, search))
        elif search.startswith("@") and os.path.isfile(search[1:]):
            old_search = search[1:]
            search = search_by_file(old_search)
            logger("info: file-search for {%s} => {%s}" % (old_search, search))
        #

        search = search.lower()
        a_search = filter(
            None,
            search.replace(".", " ").replace(",", " ").replace("&", " ").split(" "))
        a_search = [s.strip() for s in a_search]
        search = ' '.join(a_search)
        logger("info: search for {%s}" % a_search, level=1)

        # warning
        # if len(a_search) == 1:
        #     # #args.quality = 1.0
        #     # if args.quality != 1.0:
        #     #     logger("warning: use quality setting as 1.0 for better result")
        #     # #
        #     pass
        # #

        if search.isdigit() and args.object_id:
            object_id = int(search)
            if callable(s_meth):
                o = s_meth(object_id)
                sr = [o]
                url = o.url

                # ignore "quality" setting
                args.quality = 0
            else:
                sr = []
                url = ""
            #
        else:
            if isinstance(search, unicode):
                searchu = search
            else:
                searchu = unicode(search, DEFAULT_ENCODING)
            #
            # search8 = searchu.encode('utf-8')
            sq = dc.search(searchu, **p)
            url = sq.url
            logger("search{%s} url{%s}" % (search, sq.url), level=1)
            sr = load_result(sq, a_search, args.quality, timeout=args.timeout)

            # logger("for %r found %s item(s)" % (search, len(sr)), level=1)
            # sr = [x for x in sr if find_words(get_name_or_title(x), a_search)[-1] >= args.quality]
            logger("after filter{%r} found %s item(s)" %
                   (args.quality, len(sr)),
                   level=1)
        #
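
        # For illustration: "-I 1234567" (hypothetical id) with the default
        # type=release calls dc.release(1234567) directly and skips the
        # quality filter; any other query goes through dc.search()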

        # deep loading
        deep_data = {}
        if sr:
            for o in sr:
                if args.raw_print:
                    print_raw(o)
                #

                ok = False

                if isinstance(o, Artist) and args.deep >= 1:
                    ok = True
                elif isinstance(o, Label) and args.deep > 1:
                    ok = True
                elif isinstance(o, Master):
                    ok = True
                #

                if ok:
                    deep_result(o, deep_data, a_search, args.quality,
                                args.timeout)
                #
            #

            # add found objects from deep loading (object => list-of-releases)
            for o in deep_data:
                if o not in sr:
                    sr.append(o)
                #
            #

            logger("deep{%s}" % repr(deep_data), level=2)
        #

        if sr:
            rt.append((search, url, sort_result(sr), deep_data))
            logger("items{%s}" % repr(rt[-1]), level=2)
        #
    #

    # collect all releases, artists, labels
    releases = []
    artists = []
    labels = []
    for _search, _url, sr, dd in rt:
        releases.extend(sr[Release])
        for o_type in (Label, Artist, Master):
            for o in sr[o_type]:
                releases.extend(dd.get(o, []))
            #
        #

        artists.extend(sr[Artist])
        labels.extend(sr[Label])
        # logger("%r: labels: %r, artists: %r, releases: %r" % (_search, len(labels), len(artists), len(releases)))
    #

    if cache:
        cache.commit()
    #

    if len(releases) == 1:
        args.print_release = True
    #

    # total time
    tt = time.time() - tt

    logger()
    s = "time {%.4f}, search {%s}" % (tt, ' | '.join(args.search))
    logger(s, level=0)
    logger("~" * len(s), level=0)
    logger()

    dump = ((Label, labels, "%10d | %s"), (Artist, artists, "%10d | %s {%s}"),
            (Release, releases, "%10d | %s - %s (%s) @ %s # %s <%s>"))

    for t, data, s in dump:
        d = []
        _ = [obj_dump(o, d) for o in data]
        d = obj_sort(t, d)
        if not d:
            continue
        #

        ids = {}
        for x in d:
            _id = x[0]
            if _id not in ids:
                logger(s % x)
                ids[_id] = True
            #
        #

        logger()
        logger("%r: found %d item(s)" % (t.__name__, len(ids)))
        logger()
    #

    if args.print_release:
        head_fmt = (("id", "%d"), ("title", "%s - %s (%s)"), ("format", "%s"),
                    ("label", "%s"), ("genre", "%s"))
        head = '\n'.join(["%-6s / %s" % x for x in head_fmt])

        for o in releases:
            print_release(o, head=head)
            logger()
        #
    #

    if args.make_json:
        for o in releases:
            make_json(o)
        #
    #

    # try to tag .cue
    if args.tag_cue:
        _, cue_name = cue_tagger(releases, args.tag_cue, args.tag_skip)
        logger("tag: '%s'" % cue_name)
    #

    # load release image(s)
    if args.images:
        for o in releases:
            load_images(o)
        #
    #

    return rt, deep_data
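
For illustration, a standalone sketch of the query normalization that main() applies before calling dc.search(): dots, commas and ampersands become spaces, empty tokens are dropped, and the tokens are re-joined (the sample query is hypothetical):

def normalize_query(search):
    """ mirrors the token clean-up inside main() """
    search = search.lower()
    cleaned = search.replace(".", " ").replace(",", " ").replace("&", " ")
    tokens = [t.strip() for t in cleaned.split(" ") if t]
    return ' '.join(tokens), tokens

# e.g. normalize_query("Autechre & Aphex.Twin, EP")
#      => ('autechre aphex twin ep', ['autechre', 'aphex', 'twin', 'ep'])

# Typical invocations (the script name and queries are hypothetical):
#   python discogs_search.py "aphex twin" -p type=artist -d
#   python discogs_search.py -I 1234567
#   python discogs_search.py -A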