def main_generator(start_timestamp):
    """Push library imports to chirpradio, starting at *start_timestamp*.

    Walks every import in the local library database whose timestamp is
    at or after *start_timestamp* (None means all imports), groups the
    audio files into albums, and flushes them to the indexer in batches.
    Yields once per track processed so the caller can show progress.
    """
    #chirpradio.connect("10.0.1.98:8000")
    chirpradio.connect()

    sql_db = database.Database(conf.LIBRARY_DB)
    pending_albums = []
    # Audio files accumulated for the album currently being read.
    this_album = []
    # TODO(trow): Select the albums to import in a saner way.
    for vol, import_timestamp in sql_db.get_all_imports():
        # Skip imports that predate the requested start time.
        if start_timestamp is not None and import_timestamp < start_timestamp:
            continue
        cprint("***")
        cprint("*** import_timestamp = %s" % timestamp.get_human_readable(
            import_timestamp))
        cprint("***")
        for au_file in sql_db.get_by_import(vol, import_timestamp):
            # A change of album_id means the previous album is complete:
            # package it up and let maybe_flush decide whether to push
            # the pending batch.
            if this_album and this_album[0].album_id != au_file.album_id:
                alb = album.Album(this_album)
                pending_albums.append(alb)
                cprint('Adding "%s"' % alb.title())
                pending_albums = maybe_flush(pending_albums)
                this_album = []
            this_album.append(au_file)
            yield

    # Add the last album to the list of pending albums, then do the
    # final flush.
    if this_album:
        alb = album.Album(this_album)
        cprint('Adding "%s"' % alb.title())
        pending_albums.append(alb)
        this_album = []
    flush(pending_albums)
def main_generator(start_timestamp):
    """Push library imports to chirpradio, starting at *start_timestamp*.

    NOTE(review): near-duplicate of another main_generator in this file;
    only line-wrapping differs.  Yields once per track processed.
    """
    #chirpradio.connect("10.0.1.98:8000")
    chirpradio.connect()

    sql_db = database.Database(conf.LIBRARY_DB)
    pending_albums = []
    # Audio files accumulated for the album currently being read.
    this_album = []
    # TODO(trow): Select the albums to import in a saner way.
    for vol, import_timestamp in sql_db.get_all_imports():
        # Skip imports that predate the requested start time.
        if start_timestamp is not None and import_timestamp < start_timestamp:
            continue
        cprint("***")
        cprint("*** import_timestamp = %s" %
               timestamp.get_human_readable(import_timestamp))
        cprint("***")
        for au_file in sql_db.get_by_import(vol, import_timestamp):
            # A change of album_id means the previous album is complete:
            # package it up and let maybe_flush decide whether to push
            # the pending batch.
            if this_album and this_album[0].album_id != au_file.album_id:
                alb = album.Album(this_album)
                pending_albums.append(alb)
                cprint('Adding "%s"' % alb.title())
                pending_albums = maybe_flush(pending_albums)
                this_album = []
            this_album.append(au_file)
            yield

    # Add the last album to the list of pending albums, then do the
    # final flush.
    if this_album:
        alb = album.Album(this_album)
        cprint('Adding "%s"' % alb.title())
        pending_albums.append(alb)
        this_album = []
    flush(pending_albums)
# Example #3
# 0
def update_artist_whitelist():
    """Regenerate the artist whitelist from the dropbox and show the diff.

    Generator: yields once per artist handled by main_generator so the
    caller can report progress.  Has git side effects (checkout + diff)
    in the whitelist's directory.
    """
    from chirp.library import artists
    from chirp.library.do_dump_new_artists_in_dropbox import main_generator
    cwd = op.dirname(artists._WHITELIST_FILE)

    # Make sure the committed version of the whitelist is checked out.
    # This allows operators to fix mistakes by editing mp3 tags
    # and continuously re-running this task.
    cmd = ['git', 'checkout', artists._WHITELIST_FILE]
    exec_and_print(cmd, cwd)

    # This will reload the artist whitelist file
    # in python memory.
    artists._init()

    # rewrite=True makes main_generator write the merged whitelist back
    # to disk instead of just printing the new artists.
    for _ in main_generator(rewrite=True):
        yield

    # Show changes to the artist whitelist file
    cprint('Changes made to artist whitelist:')
    cmd = ['git', 'diff', artists._WHITELIST_FILE]
    exec_and_print(cmd, cwd)

    # Once again, this reloads the artist whitelist file
    # in python memory.
    artists._init()
def new_artists():
    """Simulate discovering 30 new artists, yielding after each one."""
    artist_num = 0
    while artist_num < 30:
        artist_num += 1
        cprint('Artist #%d' % artist_num)
        time.sleep(0.2)
        yield
    cprint('Found 30 new artists!', type='success')
def seen_album(album_id):
    """Return True iff a non-revoked Album with *album_id* already exists.

    Retries the datastore query indefinitely on transient URLError.
    """
    while True:
        try:
            matches = models.Album.all().filter("album_id =", album_id)
            return any(not candidate.revoked for candidate in matches)
        except urllib2.URLError:
            cprint("Retrying fetch of album_id=%s" % album_id)
def update_artist_whitelist():
    """Simulated whitelist update that fails on artist #11 (error demo)."""
    artist_num = 1
    while artist_num <= 30:
        cprint('Artist #%d' % artist_num)
        time.sleep(0.2)
        if artist_num == 11:
            raise Exception('Oh no random unexpected error!')
        yield
        artist_num += 1
    cprint('Added 30 new artists!', type='success')
def seen_album(album_id):
    """Return True if a non-revoked Album with *album_id* already exists.

    Retries forever on transient urllib2.URLError.
    """
    while True:
        try:
            for alb in models.Album.all().filter("album_id =", album_id):
                if not alb.revoked:
                    return True
            return False
        except urllib2.URLError:
            cprint("Retrying fetch of album_id=%s" % album_id)
def check_music():
    """Simulate scanning 12 tracks; every 5th track is flagged as an error."""
    for track_num in range(1, 13):
        if track_num % 5 == 0:
            cprint('Track #%d' % track_num, type='error')
        else:
            cprint('Track #%d' % track_num)
        time.sleep(0.2)
        yield
    cprint('Encountered some errors', type='failure')
# Example #9
# 0
def push_artist_whitelist():
    """Commit the artist whitelist file and push it to the git remote.

    Generator: yields exactly once (see trailing yield) so it can be
    driven like the other task generators in this file.
    """
    from chirp.library import artists
    cwd = op.dirname(artists._WHITELIST_FILE)

    # Commit and push.
    cmd = ['git', 'commit', artists._WHITELIST_FILE, '-m', 'Adding new artists']
    exec_and_print(cmd, cwd)
    cmd = ['git', 'push']
    exec_and_print(cmd, cwd)
    cprint('Changes to artist whitelist pushed to git', type='success')

    yield   # to make this function a generator function
def process_one_album(idx, alb):
    """Add one album, with all of its tracks, to the search indexer *idx*.

    Skips albums already present in the datastore (per seen_album).
    May raise UnknownArtistError via get_artist_by_name for artists
    missing from the whitelist.
    """
    # Build up an Album entity.
    kwargs = {}
    kwargs["parent"] = idx.transaction
    kwargs["title"] = alb.title()
    kwargs["album_id"] = alb.album_id
    kwargs["import_timestamp"] = datetime.datetime.utcfromtimestamp(
        alb.import_timestamp())
    kwargs["num_tracks"] = len(alb.all_au_files)
    kwargs["import_tags"] = alb.tags()

    if alb.is_compilation():
        kwargs["is_compilation"] = True
    else:
        # Only non-compilations carry a single album-level artist.
        kwargs["is_compilation"] = False
        kwargs["album_artist"] = get_artist_by_name(alb.artist_name())

    for key, val in sorted(kwargs.iteritems()):
        cprint("%s: %s" % (key, val))
    if seen_album(alb.album_id):
        cprint("   Skipping")
        return

    album = models.Album(**kwargs)

    # Look for a disc number in the tags.
    for tag in kwargs["import_tags"]:
        m = _DISC_NUM_RE.search(tag)
        if m:
            album.disc_number = int(m.group(1))
            break

    idx.add_album(album)

    for au_file in alb.all_au_files:
        track_title, import_tags = titles.split_tags(au_file.tit2())
        track_num, _ = order.decode(unicode(au_file.mutagen_id3["TRCK"]))
        kwargs = {}
        # On compilations each track carries its own artist.
        if alb.is_compilation():
            kwargs["track_artist"] = get_artist_by_name(au_file.tpe1())
        track = models.Track(
            parent=idx.transaction,
            ufid=au_file.ufid(),
            album=album,
            title=track_title,
            import_tags=import_tags,
            track_num=track_num,
            sampling_rate_hz=au_file.mp3_header.sampling_rate_hz,
            bit_rate_kbps=int(au_file.mp3_header.bit_rate_kbps),
            channels=au_file.mp3_header.channels_str,
            duration_ms=au_file.duration_ms,
            **kwargs)
        idx.add_track(track)
def process_one_album(idx, alb):
    """Add one album, with all of its tracks, to the search indexer *idx*.

    NOTE(review): duplicate of another process_one_album in this file.
    Skips albums already present in the datastore (per seen_album).
    """
    # Build up an Album entity.
    kwargs = {}
    kwargs["parent"] = idx.transaction
    kwargs["title"] = alb.title()
    kwargs["album_id"] = alb.album_id
    kwargs["import_timestamp"] = datetime.datetime.utcfromtimestamp(
        alb.import_timestamp())
    kwargs["num_tracks"] = len(alb.all_au_files)
    kwargs["import_tags"] = alb.tags()

    if alb.is_compilation():
        kwargs["is_compilation"] = True
    else:
        # Only non-compilations carry a single album-level artist.
        kwargs["is_compilation"] = False
        kwargs["album_artist"] = get_artist_by_name(alb.artist_name())

    for key, val in sorted(kwargs.iteritems()):
        cprint("%s: %s" % (key, val))
    if seen_album(alb.album_id):
        cprint("   Skipping")
        return

    album = models.Album(**kwargs)

    # Look for a disc number in the tags.
    for tag in kwargs["import_tags"]:
        m = _DISC_NUM_RE.search(tag)
        if m:
            album.disc_number = int(m.group(1))
            break

    idx.add_album(album)

    for au_file in alb.all_au_files:
        track_title, import_tags = titles.split_tags(au_file.tit2())
        track_num, _ = order.decode(unicode(au_file.mutagen_id3["TRCK"]))
        kwargs = {}
        # On compilations each track carries its own artist.
        if alb.is_compilation():
            kwargs["track_artist"] = get_artist_by_name(au_file.tpe1())
        track = models.Track(
            parent=idx.transaction,
            ufid=au_file.ufid(),
            album=album,
            title=track_title,
            import_tags=import_tags,
            track_num=track_num,
            sampling_rate_hz=au_file.mp3_header.sampling_rate_hz,
            bit_rate_kbps=int(au_file.mp3_header.bit_rate_kbps),
            channels=au_file.mp3_header.channels_str,
            duration_ms=au_file.duration_ms,
            **kwargs)
        idx.add_track(track)
def get_artist_by_name(name):
    """Fetch an Artist entity by name, memoizing hits in _artist_cache.

    Raises UnknownArtistError when the artist does not exist.  Retries
    indefinitely on transient urllib2.URLError.
    """
    global _artist_cache
    try:
        return _artist_cache[name]
    except KeyError:
        pass
    while True:
        try:
            art = models.Artist.fetch_by_name(name)
        except urllib2.URLError:
            cprint("Retrying fetch_by_name for '%s'" % name)
            continue
        if art is None:
            raise UnknownArtistError("Unknown artist: %s" % name)
        _artist_cache[name] = art
        return art
def get_artist_by_name(name):
    """Fetch an Artist entity by name, memoizing hits in _artist_cache.

    Raises UnknownArtistError when the artist does not exist.  Retries
    forever on transient urllib2.URLError.
    """
    global _artist_cache
    if name in _artist_cache:
        return _artist_cache[name]
    while True:
        try:
            art = models.Artist.fetch_by_name(name)
            if art is None:
                raise UnknownArtistError("Unknown artist: %s" % name)
            _artist_cache[name] = art
            return art
        except urllib2.URLError:
            cprint("Retrying fetch_by_name for '%s'" % name)
# Example #14
# 0
def main_generator(rewrite):
    """Scan the dropbox for artist names missing from the whitelist.

    When *rewrite* is true the whitelist file is rewritten to contain
    both the existing and the newly found artists; otherwise the new
    names are just printed.  Yields once per artist emitted.
    """
    drop = dropbox.Dropbox()
    new_artists = set()
    for au_file in drop.tracks():
        try:
            tpe1 = au_file.mutagen_id3["TPE1"].text[0]
        except:
            # Identify which file had a bad/missing TPE1 (artist) frame
            # before re-raising.
            cprint('** file: %r' % au_file.path)
            raise
        # standardize() returns None for names not on the whitelist.
        if artists.standardize(tpe1) is None:
            new_artists.add(tpe1)

    to_print = list(new_artists)
    if rewrite:
        to_print.extend(artists.all())
    to_print.sort(key=artists.sort_key)

    output = None
    if rewrite:
        output = codecs.open(artists._WHITELIST_FILE, "w", "utf-8")
    for tpe1 in to_print:
        if output:
            output.write(tpe1)
            output.write("\n")
        else:
            cprint(tpe1.encode("utf-8"))
        yield

    if rewrite:
        cprint('Artist whitelist updated', type='highlight')
    else:
        cprint('Found %d new artists' % len(to_print), type='success')
def main_generator(rewrite):
    """Scan the dropbox for artist names missing from the whitelist.

    NOTE(review): near-duplicate of another main_generator(rewrite) in
    this file; differs only in cprint formatting and message types.
    Yields once per artist emitted.
    """
    drop = dropbox.Dropbox()
    new_artists = set()
    for au_file in drop.tracks():
        try:
            tpe1 = au_file.mutagen_id3["TPE1"].text[0]
        except:
            # Identify which file had a bad/missing TPE1 (artist) frame
            # before re-raising.
            cprint(u'** file: %r' % au_file.path)
            raise
        # standardize() returns None for names not on the whitelist.
        if artists.standardize(tpe1) is None:
            new_artists.add(tpe1)

    to_print = list(new_artists)
    if rewrite:
        to_print.extend(artists.all())
    to_print.sort(key=artists.sort_key)

    output = None
    if rewrite:
        output = codecs.open(artists._WHITELIST_FILE, "w", "utf-8")
    for tpe1 in to_print:
        if output:
            output.write(tpe1)
            output.write("\n")
        else:
            cprint(tpe1)
        yield

    if rewrite:
        cprint('Artist whitelist updated', type='success')
    else:
        cprint('Found %d new artists' % len(to_print), type='success')
def main_generator():
    """Export the whole library to a Traktor NML file, then install it.

    Yields once per track written so the caller can report progress.
    After the export the file is copied to conf.TRAKTOR_NML_FILE via
    install(1) with the expected mode and group.
    """
    nml_file = os.path.join(os.getcwd(), 'output.nml')
    cprint(u'Writing Traktor file to {}'.format(nml_file))
    with codecs.open(nml_file, "w", "utf-8") as out_fh:
        # TODO(trow): Don't hard-wire the drive letter.
        writer = nml_writer.NMLWriter("T:", "/Library", out_fh)
        db = database.Database(conf.LIBRARY_DB)
        count = 0
        start_t = time.time()
        for au_file in db.get_all():
            writer.write(au_file)
            count += 1
            elapsed_t = time.time() - start_t
            cprint(type='count', count=count, elapsed_seconds=elapsed_t)
            # Periodic throughput report on stderr for operators.
            if count % 1000 == 0:
                sys.stderr.write("{count} ({rate:.1f}/s)...\n".format(
                    count=count, rate=count / elapsed_t))
            yield
        writer.close()

    # Move the file to where Traktor users expect to find it.
    cprint(u'Copying NML file to {}'.format(conf.TRAKTOR_NML_FILE))
    cmd = [
        'install',  # command that combines cp with chown, chmod, and strip
        '-m',
        '0775',
        '-g',
        'traktor',
        nml_file,
        conf.TRAKTOR_NML_FILE
    ]
    subprocess.check_call(cmd)

    cprint("Wrote %d tracks to collection\n" % count, type='success')
def main_generator():
    """Export the whole library to a Traktor NML file, then install it.

    NOTE(review): near-duplicate of another Traktor main_generator in
    this file; only formatting differs.  Yields once per track written.
    """
    nml_file = os.path.join(os.getcwd(), 'output.nml')
    cprint('Writing Traktor file to {}'.format(nml_file))
    with codecs.open(nml_file, "w", "utf-8") as out_fh:
        # TODO(trow): Don't hard-wire the drive letter.
        writer = nml_writer.NMLWriter("T:", "/Library", out_fh)
        db = database.Database(conf.LIBRARY_DB)
        count = 0
        start_t = time.time()
        for au_file in db.get_all():
            writer.write(au_file)
            count += 1
            elapsed_t = time.time() - start_t
            cprint(type='count', count=count, elapsed_seconds=elapsed_t)
            # Periodic throughput report on stderr for operators.
            if count % 1000 == 0:
                sys.stderr.write("{count} ({rate:.1f}/s)...\n".format(count=count, rate=count / elapsed_t))
            yield
        writer.close()

    # Move the file to where Traktor users expect to find it.
    cprint('Copying NML file to {}'.format(conf.TRAKTOR_NML_FILE))
    cmd = [
        'install',      # command that combines cp with chown, chmod, and strip
        '-m', '0775',
        '-g', 'traktor',
        nml_file,
        conf.TRAKTOR_NML_FILE]
    subprocess.check_call(cmd)

    cprint("Wrote %d tracks to collection\n" % count, type='success')
# Example #18
# 0
def upload(date):
    """Push artist and then track metadata changed since *date*.

    *date* is a string in '%m/%d/%Y' form.  Yields as the two
    underlying generators make progress.
    """
    from chirp.library.do_push_artists_to_chirpradio import main_generator
    for _ in main_generator():
        yield

    # Deliberately shadows the previous import: phase two drives the
    # track-push generator of the same name.
    from chirp.library.do_push_to_chirpradio import main_generator

    # Parse the date string we got from the client.
    dt = datetime.datetime.strptime(date, '%m/%d/%Y')
    cprint('Uploading track changes made since: {:%m/%d/%Y %H:%M}'.format(dt))
    timestamp = time.mktime(dt.timetuple())
    for _ in main_generator(start_timestamp=timestamp):
        yield

    cprint('Finished!', type='success')
def flush(list_of_pending_albums):
    """Index every pending album and persist the batch.

    No-op for an empty list or when DRY_RUN is set.  The final save is
    retried forever on transient urllib2.URLError.
    """
    if not list_of_pending_albums:
        return
    if DRY_RUN:
        cprint("Dry run -- skipped flush")
        return
    idx = search.Indexer()
    for pending in list_of_pending_albums:
        process_one_album(idx, pending)
    # Batch job: use a very generous RPC deadline and keep retrying on
    # transient network failures until the save succeeds.
    saved = False
    while not saved:
        try:
            idx.save(rpc=db.create_rpc(deadline=120))
            saved = True
        except urllib2.URLError:
            cprint("Retrying indexer flush")
def flush(list_of_pending_albums):
    """Index every pending album and persist the batch.

    No-op for an empty list or when DRY_RUN is set.  The save is
    retried forever on transient urllib2.URLError.
    """
    if not list_of_pending_albums:
        return
    if DRY_RUN:
        cprint("Dry run -- skipped flush")
        return
    idx = search.Indexer()
    for alb in list_of_pending_albums:
        process_one_album(idx, alb)
    # This runs as a batch job, so set a very long deadline.
    while True:
        try:
            rpc = db.create_rpc(deadline=120)
            idx.save(rpc=rpc)
            return
        except urllib2.URLError:
            cprint("Retrying indexer flush")
# Example #21
# 0
def main_generator():
    """Export the whole library to a Traktor NML file (output.nml).

    Yields once per track written so the caller can report progress.
    """
    # Fix: use a context manager so the output file is closed even if
    # an exception is raised mid-export or the generator is abandoned
    # (the original leaked out_fh on any non-happy path).  This also
    # matches the sibling Traktor exporter in this file.
    with codecs.open("output.nml", "w", "utf-8") as out_fh:
        # TODO(trow): Don't hard-wire the drive letter.
        writer = nml_writer.NMLWriter("T:", "/Library", out_fh)
        db = database.Database(conf.LIBRARY_DB)
        count = 0
        start_t = time.time()
        for au_file in db.get_all():
            writer.write(au_file)
            count += 1
            elapsed_t = time.time() - start_t
            cprint(type='count', count=count, elapsed_seconds=elapsed_t)
            # Periodic throughput report on stderr for operators.
            if count % 1000 == 0:
                sys.stderr.write("{count} ({rate:.1f}/s)...\n".format(
                    count=count, rate=count / elapsed_t))
            yield
        writer.close()
    cprint("Wrote %d tracks to collection\n" % count, type='success')
 def commit(self, target_prefix):
     """Commit this import transaction: write DB rows, move files live.

     No-op in dry-run mode.  Files staged under the temporary prefix
     are renamed under *target_prefix*, the database transaction is
     committed, and a _source_files manifest is written next to them.
     """
     if self._dry_run:
         return
     # Start a database transaction to add the files.
     txn = self._db.begin_add(self._volume, self._import_timestamp)
     # Write each new file into the database.
     for au_file in self._all_au_files:
         txn.add(au_file)
     ufid_prefix = ufid.ufid_prefix(self._volume, self._import_timestamp)
     # Strip off trailing "/"
     if ufid_prefix.endswith("/"):
         ufid_prefix = ufid_prefix[:-1]
     tmp_dir = os.path.join(self._tmp_prefix, ufid_prefix)
     real_dir = os.path.join(target_prefix, ufid_prefix)
     cprint("*** Committing %d albums / %d bytes" % (
         self.num_albums, self.total_size_in_bytes))
     cprint("*** tmp_dir=%s" % tmp_dir)
     cprint("*** real_dir=%s" % real_dir)
     sys.stdout.flush()
     # Atomically (per-directory) move the staged tree into place
     # before committing the DB transaction.
     os.renames(tmp_dir, real_dir)
     txn.commit()
     # Write out a list of source files that were just committed.
     out = open(os.path.join(real_dir, "_source_files"), "w")
     for path in sorted(af.path for af in self._all_au_files):
         out.write(path)
         out.write("\n")
     out.close()
# Example #23
# 0
 def commit(self, target_prefix):
     """Commit this import transaction: write DB rows, move files live.

     NOTE(review): near-duplicate of another commit in this file; only
     cprint formatting differs.  No-op in dry-run mode.
     """
     if self._dry_run:
         return
     # Start a database transaction to add the files.
     txn = self._db.begin_add(self._volume, self._import_timestamp)
     # Write each new file into the database.
     for au_file in self._all_au_files:
         txn.add(au_file)
     ufid_prefix = ufid.ufid_prefix(self._volume, self._import_timestamp)
     # Strip off trailing "/"
     if ufid_prefix.endswith("/"):
         ufid_prefix = ufid_prefix[:-1]
     tmp_dir = os.path.join(self._tmp_prefix, ufid_prefix)
     real_dir = os.path.join(target_prefix, ufid_prefix)
     cprint("*** Committing %d albums / %d bytes" %
            (self.num_albums, self.total_size_in_bytes))
     cprint("*** tmp_dir=%s" % tmp_dir)
     cprint("*** real_dir=%s" % real_dir)
     sys.stdout.flush()
     # Move the staged tree into place before committing the DB txn.
     os.renames(tmp_dir, real_dir)
     txn.commit()
     # Write out a list of source files that were just committed.
     out = open(os.path.join(real_dir, "_source_files"), "w")
     for path in sorted(af.path for af in self._all_au_files):
         out.write(path)
         out.write("\n")
     out.close()
# Example #24
# 0
    def add_album(self, alb, new_album_name=None):
        """Stage one album's files into this import transaction.

        Writes each audio file under the temporary prefix (skipped in
        dry-run mode) and updates the album/byte counters.
        NOTE(review): new_album_name is accepted but unused here.
        """
        # Plug in the volume and import timestamp for this transaction.
        alb.set_volume_and_import_timestamp(self._volume,
                                            self._import_timestamp)
        alb.ensure_payloads()

        cprint(u'Adding Album "%s"' % alb.title())
        sys.stdout.flush()

        # Write the files to our temporary prefix.
        for au_file in alb.all_au_files:
            # Might raise an ImportFileError.
            if not self._dry_run:
                import_file.write_file(au_file, self._tmp_prefix)
        # We forget the payloads immediately to save RAM.
        alb.drop_payloads()

        # Everything checks out!
        self._all_au_files.extend(alb.all_au_files)
        self.num_albums += 1
        self.total_size_in_bytes += sum(au.frame_size
                                        for au in alb.all_au_files)
    def add_album(self, alb, new_album_name=None):
        """Stage one album's files into this import transaction.

        NOTE(review): near-duplicate of another add_album in this file;
        only line-wrapping differs.  new_album_name is unused here.
        """
        # Plug in the volume and import timestamp for this transaction.
        alb.set_volume_and_import_timestamp(
            self._volume, self._import_timestamp)
        alb.ensure_payloads()

        cprint(u'Adding Album "%s"' % alb.title())
        sys.stdout.flush()

        # Write the files to our temporary prefix.
        for au_file in alb.all_au_files:
            # Might raise an ImportFileError.
            if not self._dry_run:
                import_file.write_file(au_file, self._tmp_prefix)
        # We forget the payloads immediately to save RAM.
        alb.drop_payloads()

        # Everything checks out!
        self._all_au_files.extend(alb.all_au_files)
        self.num_albums += 1
        self.total_size_in_bytes += sum(
            au.frame_size for au in alb.all_au_files)
def main_generator():
    """Synchronize the artist list with chirpradio.

    Pass 1: walk every Artist entity in the cloud, rewriting any whose
    stored name is not in canonical (standardized) form.  Pass 2: push
    library artists still missing from the cloud, in batches of 50.
    Yields once per cloud artist and once per pushed batch.
    """
    chirpradio.connect()

    # NOTE(review): hard-coded off -- indexer saves really happen.
    dry_run = False

    # Find all of the library artists
    all_library_artists = set(artists.all())

    # Find all of the artists in the cloud.
    all_chirpradio_artists = set()
    mapped = 0
    # NOTE(review): t1 is assigned but never used.
    t1 = time.time()
    for art in models.Artist.fetch_all():
        if art.revoked:
            continue
        std_name = artists.standardize(art.name)
        # Rewrite artists whose stored name is not canonical.
        if std_name != art.name:
            cprint(u"Mapping {}: {} => {}".format(mapped, art.name, std_name))
            mapped += 1
            art.name = std_name
            idx = search.Indexer()
            idx._transaction = art.parent_key()
            idx.add_artist(art)
            if not dry_run:
                idx.save()
        all_chirpradio_artists.add(art.name)
        yield

    to_push = list(all_library_artists.difference(all_chirpradio_artists))

    cprint("Pushing %d artists" % len(to_push))
    while to_push:
        # Push the artists in batches of 50
        this_push = to_push[:50]
        to_push = to_push[50:]
        idx = search.Indexer()
        for name in this_push:
            cprint(name)
            art = models.Artist.create(parent=idx.transaction, name=name)
            idx.add_artist(art)
        if not dry_run:
            idx.save()
        cprint("+++++ Indexer saved")
        yield
def import_albums(dry_run):
    """Scan the dropbox for albums and, unless *dry_run*, import them.

    Phase 1 walks every album, printing a summary and checking for
    duplicate fingerprints (both within this import and against the
    library).  Any error aborts before the import phase.  Phase 2
    re-walks the dropbox and commits albums in size-limited
    transactions.  Yields once per album scanned, then once per album
    imported.
    """
    inbox = dropbox.Dropbox()
    prescan_timestamp = timestamp.now()
    error_count = 0
    album_count = 0
    # fingerprint -> au_file, to detect duplicates within this import.
    seen_fp = {}

    db = database.Database(LIBRARY_DB)

    try:
        for alb in inbox.albums():
            alb.drop_payloads()
            album_count += 1
            cprint(u'#{num} "{title}"'.format(num=album_count,
                                              title=alb.title()))
            if alb.tags():
                cprint(u"(%s)" % ", ".join(alb.tags()))
            else:
                print
            duration_ms = sum(au.duration_ms for au in alb.all_au_files)
            if alb.is_compilation():
                cprint("Compilation")
                for i, au in enumerate(alb.all_au_files):
                    artist = au.mutagen_id3["TPE1"]
                    cprint(u"  {:02d}: {}".format(i + 1, artist))
            else:
                cprint(alb.artist_name())
            cprint(u"{} tracks / {} minutes".format(len(alb.all_au_files),
                                                    int(duration_ms / 60000)))
            cprint(u"ID=%015x" % alb.album_id)
            sys.stdout.flush()

            # Check that the album isn't already in library.
            collision = False
            for au in alb.all_au_files:
                if au.fingerprint in seen_fp:
                    cprint(u"***** ERROR: DUPLICATE TRACK WITHIN IMPORT",
                           type='error')
                    cprint(u"This one is at %s" % au.path)
                    cprint(u"Other one is at %s" %
                           seen_fp[au.fingerprint].path)
                    collision = True
                    break
                fp_au_file = db.get_by_fingerprint(au.fingerprint)
                if fp_au_file is not None:
                    cprint(u"***** ERROR: TRACK ALREADY IN LIBRARY",
                           type='error')
                    cprint(fp_au_file.mutagen_id3)
                    collision = True
                    break
                seen_fp[au.fingerprint] = au

            if collision:
                sys.stdout.flush()
                error_count += 1

            # Attach a dummy volume # and timestamp
            alb.set_volume_and_import_timestamp(0xff, prescan_timestamp)
            try:
                alb.standardize()
                cprint("OK!\n")
            except (import_file.ImportFileError, album.AlbumError), ex:
                cprint("***** IMPORT ERROR")
                cprint("*****   %s\n" % str(ex))
                error_count += 1

            sys.stdout.flush()
            yield  # scanned an album
    except analyzer.InvalidFileError as ex:
        cprint("***** INVALID FILE ERROR", type='error')
        cprint("*****   %s\n" % str(ex), type='error')
        error_count += 1

    cprint("-" * 40)
    cprint("Found %d albums" % album_count)
    if error_count > 0:
        cprint("Saw %d errors" % error_count, type='failure')
        return
    cprint("No errors found")

    if dry_run:
        cprint("Dry run --- terminating", type='success')
        return

    txn = None
    for alb in inbox.albums():
        if txn is None:
            txn = import_transaction.ImportTransaction(db,
                                                       VOLUME_NUMBER,
                                                       timestamp.now(),
                                                       LIBRARY_TMP_PREFIX,
                                                       dry_run=dry_run)
        txn.add_album(alb)
        # If the transaction has grown too large, commit it.
        if txn.total_size_in_bytes > IMPORT_SIZE_LIMIT:
            txn.commit(LIBRARY_PREFIX)
            txn = None
        yield  # import an album
    # Flush out any remaining tracks.
    if txn:
        txn.commit(LIBRARY_PREFIX)
    return
def main():
    """Entry point: scan (and optionally import) the dropbox albums.

    A real import happens only when --actually-do-import is passed on
    the command line; otherwise this is a dry run that just scans for
    errors.
    """
    dry_run = "--actually-do-import" not in sys.argv
    cprint()
    if not dry_run:
        cprint("*" * 70)
        cprint("***")
        cprint("*** WARNING!  This is a real import!")
        cprint(
            "*** If no errors are found, the music library will be updated!")
        cprint("***")
        cprint("*" * 70)
    else:
        cprint("+++ This is only a dry run.  No actual import will occur.")
        cprint("+++ We will only scan the dropbox looking for errors.")
    cprint()
    # Drain the generator; all the work happens inside import_albums.
    for _ in import_albums(dry_run):
        pass
def import_music():
    """Stub task: immediately report success, then yield once."""
    cprint('Finished!', type='success')
    yield
def push_artist_whitelist():
    """Stub task: pretend to push the whitelist to git, then yield once."""
    cprint('Simulated pushing artist whitelist to git', type='success')
    yield
def upload(date):
    """Simulated upload task: log the steps for pushing metadata since *date*."""
    yield
    cprint('Date: {}'.format(date))
    since = datetime.datetime.strptime(date, '%m/%d/%Y')

    cprint('Pushing artists metadata...')

    cprint('Uploading track changes made since: {:%m/%d/%Y %H:%M}'.format(since))
    epoch_seconds = time.mktime(since.timetuple())
    cprint('Using timestamp: {}'.format(epoch_seconds))

    cprint('Pushing track metadata...')

    cprint('Done!', type='success')
def main():
    """Entry point: scan (and optionally import) the dropbox albums.

    NOTE(review): near-duplicate of another main() in this file.  A
    real import happens only when --actually-do-import is passed.
    """
    dry_run = "--actually-do-import" not in sys.argv
    cprint()
    if dry_run:
        cprint("+++ This is only a dry run.  No actual import will occur.")
        cprint("+++ We will only scan the dropbox looking for errors.")
    else:
        cprint("*" * 70)
        cprint("***")
        cprint("*** WARNING!  This is a real import!")
        cprint("*** If no errors are found, the music library will be updated!")
        cprint("***")
        cprint("*" * 70)
    cprint()
    # Drain the generator; all the work happens inside import_albums.
    for _ in import_albums(dry_run):
        pass
# Example #33
# 0
def exec_and_print(cmd, cwd):
    """Run *cmd* (an argv list) in *cwd*, echoing the command and output.

    Raises subprocess.CalledProcessError on a non-zero exit status.
    """
    cprint(' '.join(cmd), type='highlight')
    output = subprocess.check_output(cmd, cwd=cwd)
    cprint(output)
# Example #34
# 0
def import_music():
    """Run a real (non-dry-run) dropbox import, yielding once per album."""
    from chirp.library.do_periodic_import import import_albums
    for _ in import_albums(dry_run=False):
        yield
    cprint('Finished!', type='success')
# Example #35
# 0
 def test_print_bytes(self):
     # cprint should accept a UTF-8 encoded byte string without raising.
     cprint(u'Ivan Krsti\u0107'.encode('utf8'))
# Example #36
# 0
 def test_print_numbers(self):
     # cprint should accept non-string values such as ints.
     cprint(1000)
def import_albums(dry_run):
    """Scan the dropbox for albums and, unless *dry_run*, import them.

    NOTE(review): near-duplicate of another import_albums in this file;
    differs only in formatting and unicode() wrapping of some output.
    Phase 1 scans and validates every album (aborting on any error);
    phase 2 commits them in size-limited transactions.  Yields once per
    album scanned, then once per album imported.
    """
    inbox = dropbox.Dropbox()
    prescan_timestamp = timestamp.now()
    error_count = 0
    album_count = 0
    # fingerprint -> au_file, to detect duplicates within this import.
    seen_fp = {}

    db = database.Database(LIBRARY_DB)

    try:
        for alb in inbox.albums():
            alb.drop_payloads()
            album_count += 1
            cprint(u'#{num} "{title}"'.format(num=album_count, title=alb.title()))
            if alb.tags():
                cprint("(%s)" % ", ".join(alb.tags()))
            else:
                print
            duration_ms = sum(au.duration_ms for au in alb.all_au_files)
            if alb.is_compilation():
                cprint("Compilation")
                for i, au in enumerate(alb.all_au_files):
                    artist = unicode(au.mutagen_id3["TPE1"])
                    cprint("  {:02d}: {}".format(i+1, artist))
            else:
                cprint(alb.artist_name())
            cprint("{} tracks / {} minutes".format(
                len(alb.all_au_files), int(duration_ms / 60000)))
            cprint("ID=%015x" % alb.album_id)
            sys.stdout.flush()

            # Check that the album isn't already in library.
            collision = False
            for au in alb.all_au_files:
                if au.fingerprint in seen_fp:
                    cprint("***** ERROR: DUPLICATE TRACK WITHIN IMPORT", type='error')
                    cprint("This one is at %s" % au.path)
                    cprint("Other one is at %s" % seen_fp[au.fingerprint].path)
                    collision = True
                    break
                fp_au_file = db.get_by_fingerprint(au.fingerprint)
                if fp_au_file is not None:
                    cprint("***** ERROR: TRACK ALREADY IN LIBRARY", type='error')
                    cprint(unicode(fp_au_file.mutagen_id3))
                    collision = True
                    break
                seen_fp[au.fingerprint] = au

            if collision:
                sys.stdout.flush()
                error_count += 1

            # Attach a dummy volume # and timestamp
            alb.set_volume_and_import_timestamp(0xff, prescan_timestamp)
            try:
                alb.standardize()
                cprint("OK!\n")
            except (import_file.ImportFileError, album.AlbumError), ex:
                cprint("***** IMPORT ERROR")
                cprint("*****   %s\n" % str(ex))
                error_count += 1

            sys.stdout.flush()
            yield # scanned an album
    except analyzer.InvalidFileError as ex:
        cprint("***** INVALID FILE ERROR", type='error')
        cprint("*****   %s\n" % str(ex), type='error')
        error_count += 1

    cprint("-" * 40)
    cprint("Found %d albums" % album_count)
    if error_count > 0:
        cprint("Saw %d errors" % error_count, type='failure')
        return
    cprint("No errors found")

    if dry_run:
        cprint("Dry run --- terminating", type='success')
        return

    txn = None
    for alb in inbox.albums():
        if txn is None:
            txn = import_transaction.ImportTransaction(db, VOLUME_NUMBER,
                                                       timestamp.now(),
                                                       LIBRARY_TMP_PREFIX,
                                                       dry_run=dry_run)
        txn.add_album(alb)
        # If the transaction has grown too large, commit it.
        if txn.total_size_in_bytes > IMPORT_SIZE_LIMIT:
            txn.commit(LIBRARY_PREFIX)
            txn = None
        yield # import an album
    # Flush out any remaining tracks.
    if txn:
        txn.commit(LIBRARY_PREFIX)
    return
# Example #38
# 0
 def test_print_unicode(self):
     # cprint should accept a unicode string without raising.
     cprint(u'Ivan Krsti\u0107')
# Example #39
# 0
 def new_artists():
     """Stubbed new-artist scan: two short pauses, then a summary line."""
     time.sleep(0.25)
     yield
     time.sleep(0.25)
     yield
     cprint('123 new artists found')