def new_txn():
    """Create a fresh ImportTransaction for testing.

    Builds the transaction against the module-level test database,
    using volume 1, the current timestamp, and a fixed /tmp prefix,
    with dry_run disabled.
    """
    # TODO(trow): Use a better /tmp directory.
    tmp_prefix = "/tmp/import"
    volume = 1
    return import_transaction.ImportTransaction(
        db, volume, timestamp.now(), tmp_prefix, dry_run=False)
def test_basics(self):
    """Exercise the timestamp module end-to-end.

    Covers: validity of now(), round-tripping through the
    human-readable format, a known fixed example, the no-argument
    form of get_human_readable(), rejection of out-of-range
    timestamps, and ValueError on malformed inputs.
    """
    now = timestamp.now()
    self.assertTrue(timestamp.is_valid(now))
    # Check that we can round-trip the timestamp produced by now()
    # through the human-readable format.
    human_readable = timestamp.get_human_readable(now)
    parsed = timestamp.parse_human_readable(human_readable)
    self.assertEqual(now, parsed)
    # Now check that a known example encodes correctly.
    ts = 1228080954
    self.assertTrue(timestamp.is_valid(ts))
    human_readable = timestamp.get_human_readable(ts)
    self.assertEqual("20081130-153554", human_readable)
    parsed = timestamp.parse_human_readable(human_readable)
    self.assertEqual(ts, parsed)
    # Make sure that calling timestamp.get_human_readable w/o an
    # argument returns a value for now.  We retry a few times just
    # in case we are very unlucky and call timestamp.now() for the
    # second time after the second has incremented.
    for _ in range(3):
        now_str = timestamp.get_human_readable(timestamp.now())
        no_arg_str = timestamp.get_human_readable()
        if no_arg_str == now_str:
            break
    else:
        # Was assertTrue(False); fail() states intent and gives a
        # useful message instead of "False is not true".
        self.fail("get_human_readable() never matched an explicit now()")
    # Check that is_valid will reject bad timestamps.
    self.assertFalse(timestamp.is_valid(-1))
    self.assertFalse(timestamp.is_valid(0))
    self.assertFalse(timestamp.is_valid(1000))  # The distant past
    self.assertFalse(timestamp.is_valid(1000000000000))  # The far future
    self.assertFalse(
        timestamp.is_valid(timestamp._MIN_REASONABLE_TIMESTAMP - 1))
    self.assertFalse(
        timestamp.is_valid(timestamp._MAX_REASONABLE_TIMESTAMP + 1))
    # Should raise ValueError on bad inputs.
    self.assertRaises(ValueError, timestamp.get_human_readable, 0)
    self.assertRaises(ValueError, timestamp.parse_human_readable,
                      "malformed")
    self.assertRaises(ValueError, timestamp.parse_human_readable,
                      "20081356-999999")
def test_basics(self):
    """Exercise the timestamp module end-to-end.

    Covers: validity of now(), round-tripping through the
    human-readable format, a known fixed example, the no-argument
    form of get_human_readable(), rejection of out-of-range
    timestamps, and ValueError on malformed inputs.
    """
    now = timestamp.now()
    self.assertTrue(timestamp.is_valid(now))
    # Check that we can round-trip the timestamp produced by now()
    # through the human-readable format.
    human_readable = timestamp.get_human_readable(now)
    parsed = timestamp.parse_human_readable(human_readable)
    self.assertEqual(now, parsed)
    # Now check that a known example encodes correctly.
    ts = 1228080954
    self.assertTrue(timestamp.is_valid(ts))
    human_readable = timestamp.get_human_readable(ts)
    self.assertEqual("20081130-153554", human_readable)
    parsed = timestamp.parse_human_readable(human_readable)
    self.assertEqual(ts, parsed)
    # Make sure that calling timestamp.get_human_readable w/o an
    # argument returns a value for now.  We retry a few times just
    # in case we are very unlucky and call timestamp.now() for the
    # second time after the second has incremented.
    for _ in range(3):
        now_str = timestamp.get_human_readable(timestamp.now())
        no_arg_str = timestamp.get_human_readable()
        if no_arg_str == now_str:
            break
    else:
        # Was assertTrue(False); fail() states intent and gives a
        # useful message instead of "False is not true".
        self.fail("get_human_readable() never matched an explicit now()")
    # Check that is_valid will reject bad timestamps.
    self.assertFalse(timestamp.is_valid(-1))
    self.assertFalse(timestamp.is_valid(0))
    self.assertFalse(timestamp.is_valid(1000))  # The distant past
    self.assertFalse(timestamp.is_valid(1000000000000))  # The far future
    self.assertFalse(
        timestamp.is_valid(timestamp._MIN_REASONABLE_TIMESTAMP - 1))
    self.assertFalse(
        timestamp.is_valid(timestamp._MAX_REASONABLE_TIMESTAMP + 1))
    # Should raise ValueError on bad inputs.
    self.assertRaises(ValueError, timestamp.get_human_readable, 0)
    self.assertRaises(ValueError, timestamp.parse_human_readable,
                      "malformed")
    self.assertRaises(ValueError, timestamp.parse_human_readable,
                      "20081356-999999")
def import_albums(dry_run):
    # Generator: scans every album in the Dropbox inbox, printing a
    # per-album report via cprint and counting errors; then, unless
    # errors were seen or dry_run is set, commits the albums to the
    # library in size-limited ImportTransaction batches.  Yields once
    # per album scanned and once per album imported so a caller can
    # drive progress incrementally.
    inbox = dropbox.Dropbox()
    prescan_timestamp = timestamp.now()
    error_count = 0
    album_count = 0
    # fingerprint -> au_file seen earlier in this import run; used to
    # catch duplicate tracks within the import itself.
    seen_fp = {}
    db = database.Database(LIBRARY_DB)
    try:
        for alb in inbox.albums():
            alb.drop_payloads()
            album_count += 1
            cprint(u'#{num} "{title}"'.format(num=album_count,
                                              title=alb.title()))
            if alb.tags():
                cprint(u"(%s)" % ", ".join(alb.tags()))
            else:
                print
            duration_ms = sum(au.duration_ms for au in alb.all_au_files)
            if alb.is_compilation():
                cprint("Compilation")
                for i, au in enumerate(alb.all_au_files):
                    artist = au.mutagen_id3["TPE1"]
                    cprint(u" {:02d}: {}".format(i + 1, artist))
            else:
                cprint(alb.artist_name())
            cprint(u"{} tracks / {} minutes".format(
                len(alb.all_au_files), int(duration_ms / 60000)))
            cprint(u"ID=%015x" % alb.album_id)
            sys.stdout.flush()
            # Check that the album isn't already in library.
            collision = False
            for au in alb.all_au_files:
                if au.fingerprint in seen_fp:
                    cprint(u"***** ERROR: DUPLICATE TRACK WITHIN IMPORT",
                           type='error')
                    cprint(u"This one is at %s" % au.path)
                    cprint(u"Other one is at %s" % seen_fp[au.fingerprint].path)
                    collision = True
                    break
                fp_au_file = db.get_by_fingerprint(au.fingerprint)
                if fp_au_file is not None:
                    cprint(u"***** ERROR: TRACK ALREADY IN LIBRARY",
                           type='error')
                    cprint(fp_au_file.mutagen_id3)
                    collision = True
                    break
                seen_fp[au.fingerprint] = au
            if collision:
                sys.stdout.flush()
                error_count += 1
            # Attach a dummy volume # and timestamp
            alb.set_volume_and_import_timestamp(0xff, prescan_timestamp)
            try:
                alb.standardize()
                cprint("OK!\n")
            except (import_file.ImportFileError, album.AlbumError), ex:
                cprint("***** IMPORT ERROR")
                cprint("***** %s\n" % str(ex))
                error_count += 1
            sys.stdout.flush()
            yield  # scanned an album
    except analyzer.InvalidFileError as ex:
        # NOTE(review): this function mixes `except ... as ex` (here)
        # with the Python 2-only `except ..., ex` form above.
        cprint("***** INVALID FILE ERROR", type='error')
        cprint("***** %s\n" % str(ex), type='error')
        error_count += 1
    cprint("-" * 40)
    cprint("Found %d albums" % album_count)
    if error_count > 0:
        cprint("Saw %d errors" % error_count, type='failure')
        return
    cprint("No errors found")
    if dry_run:
        cprint("Dry run --- terminating", type='success')
        return
    # Second pass: actually import, batching albums into transactions.
    txn = None
    for alb in inbox.albums():
        if txn is None:
            txn = import_transaction.ImportTransaction(db, VOLUME_NUMBER,
                                                       timestamp.now(),
                                                       LIBRARY_TMP_PREFIX,
                                                       dry_run=dry_run)
        txn.add_album(alb)
        # If the transaction has grown too large, commit it.
        if txn.total_size_in_bytes > IMPORT_SIZE_LIMIT:
            txn.commit(LIBRARY_PREFIX)
            txn = None
        yield  # import an album
    # Flush out any remaining tracks.
    if txn:
        txn.commit(LIBRARY_PREFIX)
    return
def import_albums(self, inbox):
    # Web-UI variant of the album import: walks the inbox directories,
    # accumulates per-album HTML report strings posted through
    # Messages.add_message, and — when no errors were seen — commits
    # albums to the library in size-limited ImportTransaction batches.
    # On any error, returns the `albums` response list with error
    # entries attached and performs no import.
    prescan_timestamp = timestamp.now()
    # timestamp to be referenced by push step
    ImportTimeStamp.import_time_stamp = timestamp.now()
    Messages.add_message('Import time stamp set: %s'
                         % ImportTimeStamp.import_time_stamp, 'warning')
    error_count = 0
    album_count = 0
    seen_fp = {}  # fingerprint -> au_file, for duplicate detection
    albums = []
    transaction = []
    db = database.Database(LIBRARY_DB)
    # NOTE(review): reaches into a private attribute of inbox — confirm
    # there is no public accessor.
    dirs = inbox._dirs
    for path in sorted(dirs):
        try:
            albs = album.from_directory(path)
        except analyzer.InvalidFileError, ex:
            album_message = "<br>***** INVALID FILE ERROR<br>"
            album_message += "<br>%s" % str(ex)
            Messages.add_message(album_message, 'error')
            error_count += 1
            albums.append({'path': path,
                           'title': 'There was an error at %s' % path,
                           'error': True})
            continue
        for alb in albs:
            # generate response
            album_path = path
            album_response = album_to_json(alb, album_path)
            # initialize error state
            # import process will halt if an error is seen
            album_error = False
            alb.drop_payloads()
            album_count += 1
            # start album_message
            album_message = (u'"%s"<br>' % alb.title()).encode("utf-8")
            if alb.tags():
                album_message += "(%s)" % ", ".join(alb.tags())
            duration_ms = sum(au.duration_ms for au in alb.all_au_files)
            if alb.is_compilation():
                album_message += "Compilation<br>"
                for i, au in enumerate(alb.all_au_files):
                    album_message += " %02d:" % (i+1,)
                    try:
                        album_message += unicode(au.mutagen_id3["TPE1"]).encode("utf-8")
                    except UnicodeDecodeError, e:
                        # NOTE(review): reports `ex` (bound by an earlier
                        # except), not the `e` caught here — looks like a
                        # bug; confirm intended variable.
                        album_message += "<br>***** Encoding ERROR<br>"
                        album_message += "<br>%s" % str(ex)
                        error_count += 1
                        album_error = True
            else:
                album_message += alb.artist_name().encode("utf-8")
            album_message += "<br>%d tracks / %d minutes<br>" % (
                len(alb.all_au_files), int(duration_ms / 60000))
            album_message += "ID=%015x<br>" % alb.album_id
            # Check that the album isn't already in library.
            collision = False
            for au in alb.all_au_files:
                if au.fingerprint in seen_fp:
                    album_message += "<br>***** ERROR: DUPLICATE TRACK WITHIN IMPORT<br>"
                    collision = True
                    break
                fp_au_file = db.get_by_fingerprint(au.fingerprint)
                if fp_au_file is not None:
                    album_message += "<br>***** ERROR: TRACK ALREADY IN LIBRARY"
                    collision = True
                    break
                seen_fp[au.fingerprint] = au
            if collision:
                album_error = True
                error_count += 1
            # Attach a dummy volume # and timestamp
            alb.set_volume_and_import_timestamp(0xff, prescan_timestamp)
            try:
                alb.standardize()
            except (import_file.ImportFileError, album.AlbumError), ex:
                album_message += "<br>***** IMPORT ERROR<br>"
                album_message += "<br>%s" % str(ex)
                error_count += 1
                album_error = True
    # NOTE(review): the remainder below appears discontinuous with the
    # loop above — lines that build `message` and the error-count guard
    # (presumably "if error_count > 0:") are missing from this view of
    # the file.
        message += "Saw %d errors" % error_count
        Messages.add_message(message, 'error')
        # return albums with errors attached
        # halt import before data is commited
        return albums
    message += "No errors found."
    Messages.add_message(message, 'success')
    Messages.add_message("Beginning import.", 'success')
    # Import pass: batch accumulated albums into transactions.
    txn = None
    for alb in transaction:
        if txn is None:
            txn = import_transaction.ImportTransaction(db, VOLUME_NUMBER,
                                                       timestamp.now(),
                                                       LIBRARY_TMP_PREFIX,
                                                       dry_run=False)
        txn.add_album(alb)
        # If the transaction has grown too large, commit it.
        if txn.total_size_in_bytes > IMPORT_SIZE_LIMIT:
            txn.commit(LIBRARY_PREFIX)
            txn = None
        message = "%s OK!" % alb.title()
        Messages.add_message(message, 'success')
    # Flush out any remaining tracks.
    if txn:
        txn.commit(LIBRARY_PREFIX)
def import_albums(inbox):
    # Console (Python 2, print-statement) variant of the album import:
    # prints a per-album report while scanning the inbox, then commits
    # the albums in size-limited ImportTransaction batches unless
    # errors were seen or dry_run is set.  Returns False when errors
    # occurred, True otherwise.
    prescan_timestamp = timestamp.now()
    error_count = 0
    album_count = 0
    seen_fp = {}  # fingerprint -> au_file seen earlier in this import
    db = database.Database(LIBRARY_DB)
    try:
        for alb in inbox.albums():
            alb.drop_payloads()
            album_count += 1
            print "#%d" % album_count,
            print (u'"%s"' % alb.title()).encode("utf-8"),
            if alb.tags():
                print "(%s)" % ", ".join(alb.tags())
            else:
                print
            duration_ms = sum(au.duration_ms for au in alb.all_au_files)
            if alb.is_compilation():
                print "Compilation"
                for i, au in enumerate(alb.all_au_files):
                    print " %02d:" % (i + 1,),
                    print unicode(au.mutagen_id3["TPE1"]).encode("utf-8")
            else:
                print alb.artist_name().encode("utf-8")
            print "%d tracks / %d minutes" % (len(alb.all_au_files),
                                              int(duration_ms / 60000))
            print "ID=%015x" % alb.album_id
            sys.stdout.flush()
            # Check that the album isn't already in library.
            collision = False
            for au in alb.all_au_files:
                if au.fingerprint in seen_fp:
                    print "***** ERROR: DUPLICATE TRACK WITHIN IMPORT"
                    print "This one is at %s" % au.path
                    print "Other one is at %s" % seen_fp[au.fingerprint].path
                    collision = True
                    break
                fp_au_file = db.get_by_fingerprint(au.fingerprint)
                if fp_au_file is not None:
                    print "***** ERROR: TRACK ALREADY IN LIBRARY"
                    print unicode(fp_au_file.mutagen_id3).encode("utf-8")
                    collision = True
                    break
                seen_fp[au.fingerprint] = au
            if collision:
                sys.stdout.flush()
                error_count += 1
            # Attach a dummy volume # and timestamp
            alb.set_volume_and_import_timestamp(0xFF, prescan_timestamp)
            try:
                alb.standardize()
                print "OK!\n"
            except (import_file.ImportFileError, album.AlbumError), ex:
                print "***** IMPORT ERROR"
                print "***** %s\n" % str(ex)
                error_count += 1
            sys.stdout.flush()
    except analyzer.InvalidFileError, ex:
        print "***** INVALID FILE ERROR"
        print "***** %s\n" % str(ex)
        error_count += 1
    print "-" * 40
    print "Found %d albums" % album_count
    if error_count > 0:
        print "Saw %d errors" % error_count
        return False
    print "No errors found"
    # NOTE(review): `dry_run` is neither a parameter nor a local here —
    # presumably a module-level flag; confirm against the rest of the file.
    if dry_run:
        print "Dry run --- terminating"
        return True
    txn = None
    for alb in inbox.albums():
        if txn is None:
            txn = import_transaction.ImportTransaction(
                db, VOLUME_NUMBER, timestamp.now(), LIBRARY_TMP_PREFIX,
                dry_run=dry_run
            )
        txn.add_album(alb)
        # If the transaction has grown too large, commit it.
        if txn.total_size_in_bytes > IMPORT_SIZE_LIMIT:
            txn.commit(LIBRARY_PREFIX)
            txn = None
    # Flush out any remaining tracks.
    if txn:
        txn.commit(LIBRARY_PREFIX)
    return True


def main():
    # NOTE(review): truncated — the body of main() continues past the
    # end of this chunk of the file.
    print
    if dry_run:
def import_albums(self, inbox):
    # Web-UI variant of the album import (duplicate formatting pass of
    # the version elsewhere in this file): walks the inbox directories,
    # accumulates per-album HTML report strings posted through
    # Messages.add_message, and — when no errors were seen — commits
    # albums to the library in size-limited ImportTransaction batches.
    prescan_timestamp = timestamp.now()
    # timestamp to be referenced by push step
    ImportTimeStamp.import_time_stamp = timestamp.now()
    Messages.add_message(
        'Import time stamp set: %s' % ImportTimeStamp.import_time_stamp,
        'warning')
    error_count = 0
    album_count = 0
    seen_fp = {}  # fingerprint -> au_file, for duplicate detection
    albums = []
    transaction = []
    db = database.Database(LIBRARY_DB)
    # NOTE(review): reaches into a private attribute of inbox — confirm
    # there is no public accessor.
    dirs = inbox._dirs
    for path in sorted(dirs):
        try:
            albs = album.from_directory(path)
        except analyzer.InvalidFileError, ex:
            album_message = "<br>***** INVALID FILE ERROR<br>"
            album_message += "<br>%s" % str(ex)
            Messages.add_message(album_message, 'error')
            error_count += 1
            albums.append({
                'path': path,
                'title': 'There was an error at %s' % path,
                'error': True
            })
            continue
        for alb in albs:
            # generate response
            album_path = path
            album_response = album_to_json(alb, album_path)
            # initialize error state
            # import process will halt if an error is seen
            album_error = False
            alb.drop_payloads()
            album_count += 1
            # start album_message
            album_message = (u'"%s"<br>' % alb.title()).encode("utf-8")
            if alb.tags():
                album_message += "(%s)" % ", ".join(alb.tags())
            duration_ms = sum(au.duration_ms for au in alb.all_au_files)
            if alb.is_compilation():
                album_message += "Compilation<br>"
                for i, au in enumerate(alb.all_au_files):
                    album_message += " %02d:" % (i + 1, )
                    try:
                        album_message += unicode(
                            au.mutagen_id3["TPE1"]).encode("utf-8")
                    except UnicodeDecodeError, e:
                        # NOTE(review): reports `ex` (bound by an earlier
                        # except), not the `e` caught here — looks like a
                        # bug; confirm intended variable.
                        album_message += "<br>***** Encoding ERROR<br>"
                        album_message += "<br>%s" % str(ex)
                        error_count += 1
                        album_error = True
            else:
                album_message += alb.artist_name().encode("utf-8")
            album_message += "<br>%d tracks / %d minutes<br>" % (len(
                alb.all_au_files), int(duration_ms / 60000))
            album_message += "ID=%015x<br>" % alb.album_id
            # Check that the album isn't already in library.
            collision = False
            for au in alb.all_au_files:
                if au.fingerprint in seen_fp:
                    album_message += "<br>***** ERROR: DUPLICATE TRACK WITHIN IMPORT<br>"
                    collision = True
                    break
                fp_au_file = db.get_by_fingerprint(au.fingerprint)
                if fp_au_file is not None:
                    album_message += "<br>***** ERROR: TRACK ALREADY IN LIBRARY"
                    collision = True
                    break
                seen_fp[au.fingerprint] = au
            if collision:
                album_error = True
                error_count += 1
            # Attach a dummy volume # and timestamp
            alb.set_volume_and_import_timestamp(0xff, prescan_timestamp)
            try:
                alb.standardize()
            except (import_file.ImportFileError, album.AlbumError), ex:
                album_message += "<br>***** IMPORT ERROR<br>"
                album_message += "<br>%s" % str(ex)
                error_count += 1
                album_error = True
    # NOTE(review): the remainder below appears discontinuous with the
    # loop above — the lines that initialize/extend `message` and the
    # error-count guard (presumably "if error_count > 0:") are missing
    # from this view of the file.
        Messages.add_message(message, 'error')
        # return albums with errors attached
        # halt import before data is commited
        return albums
    message += "No errors found."
    Messages.add_message(message, 'success')
    Messages.add_message("Beginning import.", 'success')
    # Import pass: batch accumulated albums into transactions.
    txn = None
    for alb in transaction:
        if txn is None:
            txn = import_transaction.ImportTransaction(db, VOLUME_NUMBER,
                                                       timestamp.now(),
                                                       LIBRARY_TMP_PREFIX,
                                                       dry_run=False)
        txn.add_album(alb)
        # If the transaction has grown too large, commit it.
        if txn.total_size_in_bytes > IMPORT_SIZE_LIMIT:
            txn.commit(LIBRARY_PREFIX)
            txn = None
        message = "%s OK!" % alb.title()
        Messages.add_message(message, 'success')
    # Flush out any remaining tracks.
    if txn:
        txn.commit(LIBRARY_PREFIX)
def import_albums(inbox):
    # Console (Python 2, print-statement) scan pass of the album import:
    # prints a per-album report, detects duplicate/already-imported
    # tracks by fingerprint, and tags each album with a dummy volume and
    # the prescan timestamp.  NOTE(review): unlike the sibling variant
    # in this file, this copy ends after the outer exception handler —
    # the summary/commit tail is not present in this view; confirm
    # whether it lives elsewhere.
    prescan_timestamp = timestamp.now()
    error_count = 0
    album_count = 0
    seen_fp = {}  # fingerprint -> au_file seen earlier in this import
    db = database.Database(LIBRARY_DB)
    try:
        for alb in inbox.albums():
            alb.drop_payloads()
            album_count += 1
            print "#%d" % album_count,
            print (u'"%s"' % alb.title()).encode("utf-8"),
            if alb.tags():
                print "(%s)" % ", ".join(alb.tags())
            else:
                print
            duration_ms = sum(au.duration_ms for au in alb.all_au_files)
            if alb.is_compilation():
                print "Compilation"
                for i, au in enumerate(alb.all_au_files):
                    print " %02d:" % (i+1,),
                    print unicode(au.mutagen_id3["TPE1"]).encode("utf-8")
            else:
                print alb.artist_name().encode("utf-8")
            print "%d tracks / %d minutes" % (
                len(alb.all_au_files), int(duration_ms / 60000))
            print "ID=%015x" % alb.album_id
            sys.stdout.flush()
            # Check that the album isn't already in library.
            collision = False
            for au in alb.all_au_files:
                if au.fingerprint in seen_fp:
                    print "***** ERROR: DUPLICATE TRACK WITHIN IMPORT"
                    print "This one is at %s" % au.path
                    print "Other one is at %s" % seen_fp[au.fingerprint].path
                    collision = True
                    break
                fp_au_file = db.get_by_fingerprint(au.fingerprint)
                if fp_au_file is not None:
                    print "***** ERROR: TRACK ALREADY IN LIBRARY"
                    print unicode(fp_au_file.mutagen_id3).encode("utf-8")
                    collision = True
                    break
                seen_fp[au.fingerprint] = au
            if collision:
                sys.stdout.flush()
                error_count += 1
            # Attach a dummy volume # and timestamp
            alb.set_volume_and_import_timestamp(0xff, prescan_timestamp)
            try:
                alb.standardize()
                print "OK!\n"
            except (import_file.ImportFileError, album.AlbumError), ex:
                print "***** IMPORT ERROR"
                print "***** %s\n" % str(ex)
                error_count += 1
            sys.stdout.flush()
    except analyzer.InvalidFileError, ex:
        print "***** INVALID FILE ERROR"
        print "***** %s\n" % str(ex)
        error_count += 1
def import_albums(dry_run):
    # Generator: scans every album in the Dropbox inbox, printing a
    # per-album report via cprint and counting errors; then, unless
    # errors were seen or dry_run is set, commits the albums to the
    # library in size-limited ImportTransaction batches.  Yields once
    # per album scanned and once per album imported so a caller can
    # drive progress incrementally.
    inbox = dropbox.Dropbox()
    prescan_timestamp = timestamp.now()
    error_count = 0
    album_count = 0
    # fingerprint -> au_file seen earlier in this import run; used to
    # catch duplicate tracks within the import itself.
    seen_fp = {}
    db = database.Database(LIBRARY_DB)
    try:
        for alb in inbox.albums():
            alb.drop_payloads()
            album_count += 1
            cprint(u'#{num} "{title}"'.format(num=album_count,
                                              title=alb.title()))
            if alb.tags():
                cprint("(%s)" % ", ".join(alb.tags()))
            else:
                print
            duration_ms = sum(au.duration_ms for au in alb.all_au_files)
            if alb.is_compilation():
                cprint("Compilation")
                for i, au in enumerate(alb.all_au_files):
                    artist = unicode(au.mutagen_id3["TPE1"])
                    cprint(" {:02d}: {}".format(i+1, artist))
            else:
                cprint(alb.artist_name())
            cprint("{} tracks / {} minutes".format(
                len(alb.all_au_files), int(duration_ms / 60000)))
            cprint("ID=%015x" % alb.album_id)
            sys.stdout.flush()
            # Check that the album isn't already in library.
            collision = False
            for au in alb.all_au_files:
                if au.fingerprint in seen_fp:
                    cprint("***** ERROR: DUPLICATE TRACK WITHIN IMPORT",
                           type='error')
                    cprint("This one is at %s" % au.path)
                    cprint("Other one is at %s" % seen_fp[au.fingerprint].path)
                    collision = True
                    break
                fp_au_file = db.get_by_fingerprint(au.fingerprint)
                if fp_au_file is not None:
                    cprint("***** ERROR: TRACK ALREADY IN LIBRARY",
                           type='error')
                    cprint(unicode(fp_au_file.mutagen_id3))
                    collision = True
                    break
                seen_fp[au.fingerprint] = au
            if collision:
                sys.stdout.flush()
                error_count += 1
            # Attach a dummy volume # and timestamp
            alb.set_volume_and_import_timestamp(0xff, prescan_timestamp)
            try:
                alb.standardize()
                cprint("OK!\n")
            except (import_file.ImportFileError, album.AlbumError), ex:
                cprint("***** IMPORT ERROR")
                cprint("***** %s\n" % str(ex))
                error_count += 1
            sys.stdout.flush()
            yield  # scanned an album
    except analyzer.InvalidFileError as ex:
        # NOTE(review): this function mixes `except ... as ex` (here)
        # with the Python 2-only `except ..., ex` form above.
        cprint("***** INVALID FILE ERROR", type='error')
        cprint("***** %s\n" % str(ex), type='error')
        error_count += 1
    cprint("-" * 40)
    cprint("Found %d albums" % album_count)
    if error_count > 0:
        cprint("Saw %d errors" % error_count, type='failure')
        return
    cprint("No errors found")
    if dry_run:
        cprint("Dry run --- terminating", type='success')
        return
    # Second pass: actually import, batching albums into transactions.
    txn = None
    for alb in inbox.albums():
        if txn is None:
            txn = import_transaction.ImportTransaction(db, VOLUME_NUMBER,
                                                       timestamp.now(),
                                                       LIBRARY_TMP_PREFIX,
                                                       dry_run=dry_run)
        txn.add_album(alb)
        # If the transaction has grown too large, commit it.
        if txn.total_size_in_bytes > IMPORT_SIZE_LIMIT:
            txn.commit(LIBRARY_PREFIX)
            txn = None
        yield  # import an album
    # Flush out any remaining tracks.
    if txn:
        txn.commit(LIBRARY_PREFIX)
    return