def main_generator():
    nml_file = os.path.join(os.getcwd(), 'output.nml')
    cprint(u'Writing Traktor file to {}'.format(nml_file))
    with codecs.open(nml_file, "w", "utf-8") as out_fh:
        # TODO(trow): Don't hard-wire the drive letter.
        writer = nml_writer.NMLWriter("T:", "/Library", out_fh)
        db = database.Database(conf.LIBRARY_DB)
        count = 0
        start_t = time.time()
        for au_file in db.get_all():
            writer.write(au_file)
            count += 1
            elapsed_t = time.time() - start_t
            cprint(type='count', count=count, elapsed_seconds=elapsed_t)
            if count % 1000 == 0:
                sys.stderr.write("{count} ({rate:.1f}/s)...\n".format(
                    count=count, rate=count / elapsed_t))
            yield
        writer.close()

    # Copy the file to where Traktor users expect to find it.
    cprint(u'Copying NML file to {}'.format(conf.TRAKTOR_NML_FILE))
    cmd = [
        'install',  # command that combines cp with chown, chmod, and strip
        '-m',
        '0775',
        '-g',
        'traktor',
        nml_file,
        conf.TRAKTOR_NML_FILE
    ]
    subprocess.check_call(cmd)

    cprint("Wrote %d tracks to collection\n" % count, type='success')
def main_generator(start_timestamp):
    #chirpradio.connect("10.0.1.98:8000")
    chirpradio.connect()

    sql_db = database.Database(conf.LIBRARY_DB)
    pending_albums = []
    this_album = []
    # TODO(trow): Select the albums to import in a saner way.
    for vol, import_timestamp in sql_db.get_all_imports():
        if start_timestamp is not None and import_timestamp < start_timestamp:
            continue
        cprint("***")
        cprint("*** import_timestamp = %s" %
               timestamp.get_human_readable(import_timestamp))
        cprint("***")
        for au_file in sql_db.get_by_import(vol, import_timestamp):
            if this_album and this_album[0].album_id != au_file.album_id:
                alb = album.Album(this_album)
                pending_albums.append(alb)
                cprint('Adding "%s"' % alb.title())
                pending_albums = maybe_flush(pending_albums)
                this_album = []
            this_album.append(au_file)
            yield

    # Add the last album to the list of pending albums, then do the
    # final flush.
    if this_album:
        alb = album.Album(this_album)
        cprint('Adding "%s"' % alb.title())
        pending_albums.append(alb)
        this_album = []
    flush(pending_albums)
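
# flush() and maybe_flush() are used above but not defined in this snippet.
# Below is a sketch of module-level equivalents, mirroring the nested
# versions inside do_push() in Example #7 further down; calling idx.save()
# without an explicit rpc deadline is an assumption, and process_one_album()
# is also defined in Example #7.
_NUM_ALBUMS_PER_FLUSH = 3

def flush(list_of_pending_albums):
    # Index every pending album and save the batch in one shot.
    if not list_of_pending_albums:
        return
    idx = search.Indexer()
    for alb in list_of_pending_albums:
        process_one_album(idx, alb)
    idx.save()

def maybe_flush(list_of_pending_albums):
    # Only flush once enough albums have accumulated.
    if len(list_of_pending_albums) < _NUM_ALBUMS_PER_FLUSH:
        return list_of_pending_albums
    flush(list_of_pending_albums)
    return []
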
def main():
    out_fh = codecs.open("output.nml", "w", "utf-8")
    # TODO(trow): Don't hard-wire the drive letter.
    writer = nml_writer.NMLWriter("T:", "/Library", out_fh)
    db = database.Database(conf.LIBRARY_DB)
    count = 0
    start_t = time.time()
    for au_file in db.get_all():
        writer.write(au_file)
        count += 1
        if count % 1000 == 0:
            elapsed_t = time.time() - start_t
            sys.stderr.write("%d (%.1f/s)...\n" % (count, count / elapsed_t))
    writer.close()
    out_fh.close()
    sys.stderr.write("Wrote %d tracks to collection\n" % count)
Example #4
def main():
    db = database.Database("/home/trow/library/catalog.sqlite3_db")

    def new_txn():
        # TODO(trow): Use a better /tmp directory.
        return import_transaction.ImportTransaction(db,
                                                    1,
                                                    timestamp.now(),
                                                    "/tmp/import",
                                                    dry_run=False)

    # TODO(trow): Use a better prefix.
    TARGET = "/home/trow/prefix"
    SIZE_LIMIT = 0.95 * (4 << 30)  # 95% of 4 GiB, our basic import size.
    txn = None

    dir_hash_map = load_dir_hash_map()
    form = bulk_tagging_form.parse_file(
        codecs.open("/home/trow/initial_import/form2.txt", "r", "utf-8"))
    verified = sorted(
        [x for x in form.iteritems() if x[1][0] == bulk_tagging_form.VERIFIED],
        key=lambda x: x[1])
    for i, (dir_hash, val) in enumerate(verified):
        code = val[0]
        if code != bulk_tagging_form.VERIFIED:
            continue
        path = dir_hash_map[dir_hash].encode("utf-8")
        _, artist, talb = val
        sys.stderr.write("%d of %d\n" % (i, len(verified)))
        sys.stderr.write("%s\n" % path)
        sys.stderr.write("Artist: %s\n" % artist.encode("utf-8"))
        if not txn:
            txn = new_txn()
        txn.add_album_from_directory(path, new_album_name=talb)
        if txn.total_size_in_bytes > SIZE_LIMIT:
            txn.commit(TARGET)
            txn = None

    if txn:
        txn.commit(TARGET)
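
# load_dir_hash_map() is called above but never defined in this example. A
# hypothetical sketch, assuming a simple "hash<space>path" text file; the
# file name and format here are guesses, not the project's actual layout:
def load_dir_hash_map():
    dir_hash_map = {}
    with codecs.open("/home/trow/initial_import/dir_hashes.txt",
                     "r", "utf-8") as fh:
        for line in fh:
            dir_hash, path = line.rstrip(u"\n").split(u" ", 1)
            dir_hash_map[dir_hash] = path
    return dir_hash_map
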
Example #5
def setUp(self):
    self.name = TEST_DB_NAME_PATTERN % int(time.time() * 1000000)
    self.db = database.Database(self.name)
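
# A matching tearDown is assumed so each test's throwaway database file is
# removed afterwards; a minimal sketch (assumes self.name is a plain file path):
def tearDown(self):
    os.unlink(self.name)
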
Example #6
def import_albums(dry_run):
    inbox = dropbox.Dropbox()
    prescan_timestamp = timestamp.now()
    error_count = 0
    album_count = 0
    seen_fp = {}

    db = database.Database(LIBRARY_DB)

    try:
        for alb in inbox.albums():
            alb.drop_payloads()
            album_count += 1
            cprint(u'#{num} "{title}"'.format(num=album_count,
                                              title=alb.title()))
            if alb.tags():
                cprint(u"(%s)" % ", ".join(alb.tags()))
            else:
                print
            duration_ms = sum(au.duration_ms for au in alb.all_au_files)
            if alb.is_compilation():
                cprint("Compilation")
                for i, au in enumerate(alb.all_au_files):
                    artist = au.mutagen_id3["TPE1"]
                    cprint(u"  {:02d}: {}".format(i + 1, artist))
            else:
                cprint(alb.artist_name())
            cprint(u"{} tracks / {} minutes".format(len(alb.all_au_files),
                                                    int(duration_ms / 60000)))
            cprint(u"ID=%015x" % alb.album_id)
            sys.stdout.flush()

            # Check that the album isn't already in library.
            collision = False
            for au in alb.all_au_files:
                if au.fingerprint in seen_fp:
                    cprint(u"***** ERROR: DUPLICATE TRACK WITHIN IMPORT",
                           type='error')
                    cprint(u"This one is at %s" % au.path)
                    cprint(u"Other one is at %s" %
                           seen_fp[au.fingerprint].path)
                    collision = True
                    break
                fp_au_file = db.get_by_fingerprint(au.fingerprint)
                if fp_au_file is not None:
                    cprint(u"***** ERROR: TRACK ALREADY IN LIBRARY",
                           type='error')
                    cprint(fp_au_file.mutagen_id3)
                    collision = True
                    break
                seen_fp[au.fingerprint] = au

            if collision:
                sys.stdout.flush()
                error_count += 1

            # Attach a dummy volume # and timestamp
            alb.set_volume_and_import_timestamp(0xff, prescan_timestamp)
            try:
                alb.standardize()
                cprint("OK!\n")
            except (import_file.ImportFileError, album.AlbumError) as ex:
                cprint("***** IMPORT ERROR")
                cprint("*****   %s\n" % str(ex))
                error_count += 1

            sys.stdout.flush()
            yield  # scanned an album
    except analyzer.InvalidFileError as ex:
        cprint("***** INVALID FILE ERROR", type='error')
        cprint("*****   %s\n" % str(ex), type='error')
        error_count += 1

    cprint("-" * 40)
    cprint("Found %d albums" % album_count)
    if error_count > 0:
        cprint("Saw %d errors" % error_count, type='failure')
        return
    cprint("No errors found")

    if dry_run:
        cprint("Dry run --- terminating", type='success')
        return

    txn = None
    for alb in inbox.albums():
        if txn is None:
            txn = import_transaction.ImportTransaction(db,
                                                       VOLUME_NUMBER,
                                                       timestamp.now(),
                                                       LIBRARY_TMP_PREFIX,
                                                       dry_run=dry_run)
        txn.add_album(alb)
        # If the transaction has grown too large, commit it.
        if txn.total_size_in_bytes > IMPORT_SIZE_LIMIT:
            txn.commit(LIBRARY_PREFIX)
            txn = None
        yield  # import an album
    # Flush out any remaining tracks.
    if txn:
        txn.commit(LIBRARY_PREFIX)
    return
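
# A sketch of how a caller might drive the generator above: each yield marks
# one scanned or one imported album, so the caller can report progress or
# interleave other work. (Hypothetical driver, not part of the original code.)
def run_import(dry_run=True):
    for _ in import_albums(dry_run):
        pass  # one album scanned or imported per iteration
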
Example #7
    def do_push(self):

        # Import timestamp recorded by the import step (see Example #8).
        START_TIMESTAMP = ImportTimeStamp.import_time_stamp
        # TODO(trow): Is this optimal?
        _NUM_ALBUMS_PER_FLUSH = 3

        _DISC_NUM_RE = re.compile(r"disc\s+(\d+)", re.IGNORECASE)

        class UnknownArtistError(Exception):
            pass

        # Cache artist lookups so repeated names skip the datastore fetch.
        # (Defined here because this snippet has no module-level _artist_cache;
        # the nested helper below only mutates it, so no global is needed.)
        _artist_cache = {}

        def get_artist_by_name(name):
            if name in _artist_cache:
                return _artist_cache[name]
            while True:
                try:
                    art = models.Artist.fetch_by_name(name)
                    if art is None:
                        raise UnknownArtistError("Unknown artist: %s" % name)
                    _artist_cache[name] = art
                    return art
                except urllib2.URLError:
                    #print "Retrying fetch_by_name for '%s'" % name
                    pass

        def seen_album(album_id):
            while True:
                try:
                    for alb in models.Album.all().filter(
                            "album_id =", album_id):
                        if not alb.revoked:
                            return True
                    return False
                except urllib2.URLError:
                    #print "Retrying fetch of album_id=%s" % album_id
                    pass

        def process_one_album(idx, alb):
            # Build up an Album entity.
            kwargs = {}
            kwargs["parent"] = idx.transaction
            kwargs["title"] = alb.title()
            kwargs["album_id"] = alb.album_id
            kwargs["import_timestamp"] = datetime.datetime.utcfromtimestamp(
                alb.import_timestamp())
            kwargs["num_tracks"] = len(alb.all_au_files)
            kwargs["import_tags"] = alb.tags()

            if alb.is_compilation():
                kwargs["is_compilation"] = True
            else:
                kwargs["is_compilation"] = False
                kwargs["album_artist"] = get_artist_by_name(alb.artist_name())

            #for key, val in sorted(kwargs.iteritems()):
            #print "%s: %s" % (key, val)
            if seen_album(alb.album_id):
                #print "   Skipping"
                return

            album = models.Album(**kwargs)

            # Look for a disc number in the tags.
            for tag in kwargs["import_tags"]:
                m = _DISC_NUM_RE.search(tag)
                if m:
                    album.disc_number = int(m.group(1))
                    break

            idx.add_album(album)

            for au_file in alb.all_au_files:
                track_title, import_tags = titles.split_tags(au_file.tit2())
                track_num, _ = order.decode(
                    unicode(au_file.mutagen_id3["TRCK"]))
                kwargs = {}
                if alb.is_compilation():
                    kwargs["track_artist"] = get_artist_by_name(au_file.tpe1())
                track = models.Track(
                    parent=idx.transaction,
                    ufid=au_file.ufid(),
                    album=album,
                    title=track_title,
                    import_tags=import_tags,
                    track_num=track_num,
                    sampling_rate_hz=au_file.mp3_header.sampling_rate_hz,
                    bit_rate_kbps=int(au_file.mp3_header.bit_rate_kbps),
                    channels=au_file.mp3_header.channels_str,
                    duration_ms=au_file.duration_ms,
                    **kwargs)
                idx.add_track(track)

        def flush(list_of_pending_albums):
            if not list_of_pending_albums:
                return
            idx = search.Indexer()
            for alb in list_of_pending_albums:
                process_one_album(idx, alb)
            # This runs as a batch job, so set a very long deadline.
            while True:
                try:
                    rpc = db.create_rpc(deadline=120)
                    idx.save(rpc=rpc)
                    return
                except urllib2.URLError:
                    #print "Retrying indexer flush"
                    pass

        def maybe_flush(list_of_pending_albums):
            if len(list_of_pending_albums) < _NUM_ALBUMS_PER_FLUSH:
                return list_of_pending_albums
            flush(list_of_pending_albums)
            return []

        # main

        #chirpradio.connect("10.0.1.98:8000")
        chirpradio.connect()

        Messages.add_message('Beginning to push albums.', 'warning')

        sql_db = database.Database(conf.LIBRARY_DB)
        pending_albums = []
        this_album = []
        # TODO(trow): Select the albums to import in a saner way.
        for vol, import_timestamp in sql_db.get_all_imports():
            if START_TIMESTAMP is not None and import_timestamp < START_TIMESTAMP:
                continue
            #print "***"
            #print "*** import_timestamp = %s" % timestamp.get_human_readable(
            #import_timestamp)
            #print "***"
            for au_file in sql_db.get_by_import(vol, import_timestamp):
                if this_album and this_album[0].album_id != au_file.album_id:
                    alb = album.Album(this_album)
                    pending_albums.append(alb)
                    Messages.add_message('Adding "%s"' % alb.title(),
                                         'success')
                    pending_albums = maybe_flush(pending_albums)
                    this_album = []
                this_album.append(au_file)

        # Add the last album to the list of pending albums, then do the
        # final flush.
        if this_album:
            alb = album.Album(this_album)
            Messages.add_message('Adding "%s"' % alb.title(), 'success')
            pending_albums.append(alb)
            this_album = []
        flush(pending_albums)

        Messages.add_message('Album push complete. OK!', 'success')
        Messages.add_message('Import process complete. OK!', 'success')

        current_route.CURRENT_ROUTE = 'import'
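
    # The nested helpers in do_push() above (get_artist_by_name, seen_album,
    # flush) all hand-roll the same retry-on-URLError loop. A hypothetical
    # shared method could factor that pattern out:
    def _retry_on_url_error(self, fn, *args, **kwargs):
        while True:
            try:
                return fn(*args, **kwargs)
            except urllib2.URLError:
                pass  # transient RPC failure; retry indefinitely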
Example #8
    def import_albums(self, inbox):
        prescan_timestamp = timestamp.now()

        # timestamp to be referenced by push step
        ImportTimeStamp.import_time_stamp = timestamp.now()
        Messages.add_message(
            'Import time stamp set: %s' % ImportTimeStamp.import_time_stamp,
            'warning')
        error_count = 0
        album_count = 0
        seen_fp = {}
        albums = []
        transaction = []

        db = database.Database(LIBRARY_DB)

        dirs = inbox._dirs
        for path in sorted(dirs):
            try:
                albs = album.from_directory(path)
            except analyzer.InvalidFileError as ex:
                album_message = "<br>***** INVALID FILE ERROR<br>"
                album_message += "<br>%s" % str(ex)
                Messages.add_message(album_message, 'error')

                error_count += 1
                albums.append({
                    'path': path,
                    'title': 'There was an error at %s' % path,
                    'error': True
                })
                continue

            for alb in albs:

                # generate response
                album_path = path
                album_response = album_to_json(alb, album_path)

                # initialize error state
                # import process will halt if an error is seen
                album_error = False

                alb.drop_payloads()
                album_count += 1

                # start album_message
                album_message = (u'"%s"<br>' % alb.title()).encode("utf-8")

                if alb.tags():
                    album_message += "(%s)" % ", ".join(alb.tags())

                duration_ms = sum(au.duration_ms for au in alb.all_au_files)
                if alb.is_compilation():
                    album_message += "Compilation<br>"
                    for i, au in enumerate(alb.all_au_files):
                        album_message += "  %02d:" % (i + 1, )
                        try:
                            album_message += unicode(
                                au.mutagen_id3["TPE1"]).encode("utf-8")
                        except UnicodeDecodeError, e:
                            album_message += "<br>***** Encoding ERROR<br>"
                            album_message += "<br>%s" % str(ex)
                            error_count += 1
                            album_error = True
                else:
                    album_message += alb.artist_name().encode("utf-8")
                album_message += "<br>%d tracks / %d minutes<br>" % (len(
                    alb.all_au_files), int(duration_ms / 60000))
                album_message += "ID=%015x<br>" % alb.album_id

                # Check that the album isn't already in library.
                collision = False
                for au in alb.all_au_files:
                    if au.fingerprint in seen_fp:
                        album_message += "<br>***** ERROR: DUPLICATE TRACK WITHIN IMPORT<br>"
                        collision = True
                        break
                    fp_au_file = db.get_by_fingerprint(au.fingerprint)
                    if fp_au_file is not None:
                        album_message += "<br>***** ERROR: TRACK ALREADY IN LIBRARY"
                        collision = True
                        break
                    seen_fp[au.fingerprint] = au

                if collision:
                    album_error = True
                    error_count += 1

                # Attach a dummy volume # and timestamp
                alb.set_volume_and_import_timestamp(0xff, prescan_timestamp)
                try:
                    alb.standardize()
                except (import_file.ImportFileError, album.AlbumError) as ex:
                    album_message += "<br>***** IMPORT ERROR<br>"
                    album_message += "<br>%s" % str(ex)
                    error_count += 1
                    album_error = True
def import_albums(inbox):
    prescan_timestamp = timestamp.now()
    error_count = 0
    album_count = 0
    seen_fp = {}

    db = database.Database(LIBRARY_DB)

    try:
        for alb in inbox.albums():
            alb.drop_payloads()
            album_count += 1
            print "#%d" % album_count,
            print (u'"%s"' % alb.title()).encode("utf-8"),
            if alb.tags():
                print "(%s)" % ", ".join(alb.tags())
            else:
                print
            duration_ms = sum(au.duration_ms for au in alb.all_au_files)
            if alb.is_compilation():
                print "Compilation"
                for i, au in enumerate(alb.all_au_files):
                    print "  %02d:" % (i+1,),
                    print unicode(au.mutagen_id3["TPE1"]).encode("utf-8")
            else:
                print alb.artist_name().encode("utf-8")
            print "%d tracks / %d minutes" % (
                len(alb.all_au_files), int(duration_ms / 60000))
            print "ID=%015x" % alb.album_id
            sys.stdout.flush()

            # Check that the album isn't already in library.
            collision = False
            for au in alb.all_au_files:
                if au.fingerprint in seen_fp:
                    print "***** ERROR: DUPLICATE TRACK WITHIN IMPORT"
                    print "This one is at %s" % au.path
                    print "Other one is at %s" % seen_fp[au.fingerprint].path
                    collision = True
                    break
                fp_au_file = db.get_by_fingerprint(au.fingerprint)
                if fp_au_file is not None:
                    print "***** ERROR: TRACK ALREADY IN LIBRARY"
                    print unicode(fp_au_file.mutagen_id3).encode("utf-8")
                    collision = True
                    break
                seen_fp[au.fingerprint] = au

            if collision:
                sys.stdout.flush()
                error_count += 1

            # Attach a dummy volume # and timestamp
            alb.set_volume_and_import_timestamp(0xff, prescan_timestamp)
            try:
                alb.standardize()
                print "OK!\n"
            except (import_file.ImportFileError, album.AlbumError), ex:
                print "***** IMPORT ERROR"
                print "*****   %s\n" % str(ex)
                error_count += 1
            
            sys.stdout.flush()
    except analyzer.InvalidFileError, ex:
        print "***** INVALID FILE ERROR"
        print "*****   %s\n" % str(ex)
        error_count += 1