def push_to_github(self):
    """
    Push artist-whitelist changes to the CHIRP GitHub repository.

    git-dir and work-tree must be specified because we are operating
    outside of the repo directory.

    TODO: remove abs paths
    """
    git_dir = '/home/musiclib/chirpradio-machine/.git'
    work_tree = '/home/musiclib/chirpradio-machine'

    # commit changes
    commit_command = 'git --git-dir=%s --work-tree=%s commit %s -m "Adding new artists"' % (
        git_dir,
        work_tree,
        artists._WHITELIST_FILE,
    )
    commit_output = subprocess.check_output(
        commit_command, shell=True, stderr=subprocess.STDOUT)
    Messages.add_message(commit_output, 'success')

    # push changes
    push_command = 'git --git-dir=%s --work-tree=%s push' % (git_dir, work_tree)
    push_output = subprocess.check_output(
        push_command, shell=True, stderr=subprocess.STDOUT)
    Messages.add_message(push_output, 'success')
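# The hard-coded paths above are what the TODO flags. A minimal sketch of the
# same commit-and-push flow with the repo location passed in and argument
# lists instead of shell=True (the names below are illustrative, not part of
# this module):
#
#   def push_whitelist(repo_dir, whitelist_path, message="Adding new artists"):
#       git = ['git', '--git-dir=%s/.git' % repo_dir, '--work-tree=%s' % repo_dir]
#       # check_output raises CalledProcessError on a non-zero exit status.
#       commit_out = subprocess.check_output(
#           git + ['commit', whitelist_path, '-m', message],
#           stderr=subprocess.STDOUT)
#       push_out = subprocess.check_output(git + ['push'],
#                                          stderr=subprocess.STDOUT)
#       return commit_out, push_out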
def get(self):
    generate_nml_out = subprocess.check_output(
        'do_generate_collection_nml', shell=True, stderr=subprocess.STDOUT)
    Messages.add_message(generate_nml_out, 'success')

    install = ('install -m 0775 -g traktor output.nml '
               '/mnt/disk_array/traktor/TraktorProRootDirectory/new-collection.nml')
    subprocess.call(install, shell=True, stderr=subprocess.STDOUT)

    current_route.CURRENT_ROUTE = 'push'
def do_push_artists(self):
    # patch credentials
    if not request.headers.get('Authorization'):
        abort(401)
    else:
        # Slice off the 'Basic ' prefix rather than lstrip('Basic '), which
        # strips *characters* and can eat the start of the base64 value.
        auth = request.headers['Authorization'][len('Basic '):]
        username, password = base64.b64decode(auth).split(':')
        if username and password:
            conf.CHIRPRADIO_AUTH = '%s %s' % (username, password)
            chirpradio.connect()
        else:
            abort(401)

    dry_run = False

    # reload artists from file
    artists._init()

    # Find all of the library artists
    all_library_artists = set(artists.all())

    # Find all of the artists in the cloud.
    all_chirpradio_artists = set()
    mapped = 0
    t1 = time.time()
    for art in models.Artist.fetch_all():
        if art.revoked:
            continue
        std_name = artists.standardize(art.name)
        if std_name != art.name:
            #print "Mapping %d: %s => %s" % (mapped, art.name, std_name)
            mapped += 1
            art.name = std_name
            idx = search.Indexer()
            idx._transaction = art.parent_key()
            idx.add_artist(art)
            if not dry_run:
                idx.save()
        all_chirpradio_artists.add(art.name)

    to_push = list(all_library_artists.difference(all_chirpradio_artists))
    Messages.add_message("Pushing %d artists" % len(to_push), 'warning')

    while to_push:
        # Push the artists in batches of 50
        this_push = to_push[:50]
        to_push = to_push[50:]
        idx = search.Indexer()
        for name in this_push:
            #print name
            art = models.Artist.create(parent=idx.transaction, name=name)
            idx.add_artist(art)
        if not dry_run:
            idx.save()
            #print "+++++ Indexer saved"

    Messages.add_message("Artist push complete. OK!", 'success')
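# For reference, a sketch of how a client could authenticate against this
# handler with HTTP Basic auth. The URL and credentials are hypothetical;
# use whatever route this Resource is actually registered under:
#
#   import base64, urllib2
#   creds = base64.b64encode('someuser:somepassword')
#   req = urllib2.Request('http://localhost:5000/push_artists')
#   req.add_header('Authorization', 'Basic %s' % creds)
#   urllib2.urlopen(req, data='')  # empty body makes this a POST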
def add_artists(self):
    error = False
    drop = dropbox.Dropbox()
    new_artists = set()
    for au_file in drop.tracks():
        try:
            tpe1 = au_file.mutagen_id3["TPE1"].text[0]
        except:
            Messages.add_message('** file: %r' % au_file.path, 'error')
            error = True
            # TODO propagate error to client
            raise
        if artists.standardize(tpe1) is None:
            new_artists.add(tpe1)

    # do not write if errors
    if not error and new_artists:
        to_print = list(new_artists)
        to_print.extend(artists.all())
        to_print.sort(key=artists.sort_key)
        output = codecs.open(artists._WHITELIST_FILE, "w", "utf-8")
        for tpe1 in to_print:
            output.write(tpe1)
            output.write("\n")
        output.close()

        # reload whitelist from file
        artists._init()

        message = "Artist whitelist updated.<br>New artists added:<br>"
        message += "<br>".join(list(new_artists))
        Messages.add_message(message, 'success')

        # push to github
        self.push_to_github()
class ScanDropbox(Resource):

    def dump_dropbox(self):
        drop = chirp.library.dropbox.Dropbox()
        result = []
        for path in sorted(drop._dirs):
            try:
                chirp_albums = chirp.library.album.from_directory(path, fast=True)
            except (IOError, chirp.library.album.AlbumError), e:
                Messages.add_message('There was an error at %s.' % path, 'error')
                # propagate error to ui so the album may be removed
                result.append({
                    'path': path,
                    'title': 'There was an error at %s' % path,
                    'error': True
                })
                continue

            # build albums
            for album in chirp_albums:
                json = album_to_json(album, path)
                result.append(json)

        # check for new artists
        new_artists = []
        for data in result:
            if not data.get('error'):
                if chirp.library.artists.standardize(data['artist']) is None:
                    new_artists.append(data['artist'])
                    data['warning'] = True

        if new_artists:
            Messages.add_message(
                'New artists in dropbox: %s<br>' %
                '<br>'.join(sorted(set(new_artists))), 'warning')

        # only progress import process if there are albums in the dropbox
        if len(result) > 0:
            current_route.CURRENT_ROUTE = 'import'

        return result
def post(self):
    album_path = parser.parse_args()['path']
    if not os.path.exists(album_path):
        Messages.add_message(
            'Album at path %s not in dropbox.' % album_path, 'warning')
        # return True to remove the album detail from the UI
        return {'success': True}

    remove_from_dropbox = "/home/musiclib/.virtualenvs/chirpradio-machine/bin/remove_from_dropbox"
    exit_status = subprocess.call(
        "sudo %s %s" % (remove_from_dropbox, album_path), shell=True)
    if exit_status == 0:
        Messages.add_message(
            'Successfully removed album %s from dropbox.' % album_path,
            'success')
        return {'success': True}
    else:
        Messages.add_message(
            'Failed to remove album %s from dropbox, please remove manually'
            % album_path, 'error')
        return {'success': False}
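# Note: album_path comes straight from the request and is interpolated into a
# shell command above. A sketch of the same call with an argument list, which
# keeps the shell out of the picture (paths unchanged):
#
#   exit_status = subprocess.call(['sudo', remove_from_dropbox, album_path])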
def import_albums(self, inbox):
    prescan_timestamp = timestamp.now()
    # timestamp to be referenced by push step
    ImportTimeStamp.import_time_stamp = timestamp.now()
    Messages.add_message(
        'Import time stamp set: %s' % ImportTimeStamp.import_time_stamp,
        'warning')
    error_count = 0
    album_count = 0
    seen_fp = {}
    albums = []
    transaction = []
    db = database.Database(LIBRARY_DB)
    dirs = inbox._dirs
    for path in sorted(dirs):
        try:
            albs = album.from_directory(path)
        except analyzer.InvalidFileError, ex:
            album_message = "<br>***** INVALID FILE ERROR<br>"
            album_message += "<br>%s" % str(ex)
            Messages.add_message(album_message, 'error')
            error_count += 1
            albums.append({'path': path,
                           'title': 'There was an error at %s' % path,
                           'error': True})
            continue
        for alb in albs:
            # generate response
            album_path = path
            album_response = album_to_json(alb, album_path)

            # initialize error state
            # import process will halt if an error is seen
            album_error = False

            alb.drop_payloads()
            album_count += 1

            # start album_message
            album_message = (u'"%s"<br>' % alb.title()).encode("utf-8")
            if alb.tags():
                album_message += "(%s)" % ", ".join(alb.tags())
            duration_ms = sum(au.duration_ms for au in alb.all_au_files)
            if alb.is_compilation():
                album_message += "Compilation<br>"
                for i, au in enumerate(alb.all_au_files):
                    album_message += " %02d:" % (i + 1,)
                    try:
                        album_message += unicode(
                            au.mutagen_id3["TPE1"]).encode("utf-8")
                    except UnicodeDecodeError, ex:
                        album_message += "<br>***** ENCODING ERROR<br>"
                        album_message += "<br>%s" % str(ex)
                        error_count += 1
                        album_error = True
            else:
                album_message += alb.artist_name().encode("utf-8")
            album_message += "<br>%d tracks / %d minutes<br>" % (
                len(alb.all_au_files), int(duration_ms / 60000))
            album_message += "ID=%015x<br>" % alb.album_id

            # Check that the album isn't already in the library.
            collision = False
            for au in alb.all_au_files:
                if au.fingerprint in seen_fp:
                    album_message += "<br>***** ERROR: DUPLICATE TRACK WITHIN IMPORT<br>"
                    collision = True
                    break
                fp_au_file = db.get_by_fingerprint(au.fingerprint)
                if fp_au_file is not None:
                    album_message += "<br>***** ERROR: TRACK ALREADY IN LIBRARY"
                    collision = True
                    break
                seen_fp[au.fingerprint] = au
            if collision:
                album_error = True
                error_count += 1

            # Attach a dummy volume and timestamp.
            alb.set_volume_and_import_timestamp(0xff, prescan_timestamp)
            try:
                alb.standardize()
            except (import_file.ImportFileError, album.AlbumError), ex:
                album_message += "<br>***** IMPORT ERROR<br>"
                album_message += "<br>%s" % str(ex)
                error_count += 1
                album_error = True

            if album_error:
                Messages.add_message(album_message, 'error')
            else:
                Messages.add_message(album_message, 'success')
            albums.append(album_response)
            transaction.append(alb)

    if len(albums) == 0:
        current_route.CURRENT_ROUTE = 'dropbox'
        return None

    message = "----------<br>Found %d albums.<br>" % album_count
    if error_count > 0:
        message += "Saw %d errors" % error_count
        Messages.add_message(message, 'error')
        # return albums with errors attached
        # halt import before data is committed
        return albums
    message += "No errors found."
    Messages.add_message(message, 'success')

    Messages.add_message("Beginning import.", 'success')
    txn = None
    for alb in transaction:
        if txn is None:
            txn = import_transaction.ImportTransaction(
                db, VOLUME_NUMBER, timestamp.now(), LIBRARY_TMP_PREFIX,
def do_push(self):
    # IMPORT_TIME_STAMP from import step
    START_TIMESTAMP = ImportTimeStamp.import_time_stamp

    # TODO(trow): Is this optimal?
    _NUM_ALBUMS_PER_FLUSH = 3

    _DISC_NUM_RE = re.compile(r"disc\s+(\d+)", re.IGNORECASE)

    class UnknownArtistError(Exception):
        pass

    def get_artist_by_name(name):
        global _artist_cache
        if name in _artist_cache:
            return _artist_cache[name]
        while True:
            try:
                art = models.Artist.fetch_by_name(name)
                if art is None:
                    raise UnknownArtistError("Unknown artist: %s" % name)
                _artist_cache[name] = art
                return art
            except urllib2.URLError:
                #print "Retrying fetch_by_name for '%s'" % name
                pass

    def seen_album(album_id):
        while True:
            try:
                for alb in models.Album.all().filter("album_id =", album_id):
                    if not alb.revoked:
                        return True
                return False
            except urllib2.URLError:
                #print "Retrying fetch of album_id=%s" % album_id
                pass

    def process_one_album(idx, alb):
        # Build up an Album entity.
        kwargs = {}
        kwargs["parent"] = idx.transaction
        kwargs["title"] = alb.title()
        kwargs["album_id"] = alb.album_id
        kwargs["import_timestamp"] = datetime.datetime.utcfromtimestamp(
            alb.import_timestamp())
        kwargs["num_tracks"] = len(alb.all_au_files)
        kwargs["import_tags"] = alb.tags()
        if alb.is_compilation():
            kwargs["is_compilation"] = True
        else:
            kwargs["is_compilation"] = False
            kwargs["album_artist"] = get_artist_by_name(alb.artist_name())
        #for key, val in sorted(kwargs.iteritems()):
        #    print "%s: %s" % (key, val)
        if seen_album(alb.album_id):
            #print "  Skipping"
            return
        album = models.Album(**kwargs)

        # Look for a disc number in the tags.
        for tag in kwargs["import_tags"]:
            m = _DISC_NUM_RE.search(tag)
            if m:
                album.disc_number = int(m.group(1))
                break
        idx.add_album(album)

        for au_file in alb.all_au_files:
            track_title, import_tags = titles.split_tags(au_file.tit2())
            track_num, _ = order.decode(unicode(au_file.mutagen_id3["TRCK"]))
            kwargs = {}
            if alb.is_compilation():
                kwargs["track_artist"] = get_artist_by_name(au_file.tpe1())
            track = models.Track(
                parent=idx.transaction,
                ufid=au_file.ufid(),
                album=album,
                title=track_title,
                import_tags=import_tags,
                track_num=track_num,
                sampling_rate_hz=au_file.mp3_header.sampling_rate_hz,
                bit_rate_kbps=int(au_file.mp3_header.bit_rate_kbps),
                channels=au_file.mp3_header.channels_str,
                duration_ms=au_file.duration_ms,
                **kwargs)
            idx.add_track(track)

    def flush(list_of_pending_albums):
        if not list_of_pending_albums:
            return
        idx = search.Indexer()
        for alb in list_of_pending_albums:
            process_one_album(idx, alb)
        # This runs as a batch job, so set a very long deadline.
        while True:
            try:
                rpc = db.create_rpc(deadline=120)
                idx.save(rpc=rpc)
                return
            except urllib2.URLError:
                #print "Retrying indexer flush"
                pass

    def maybe_flush(list_of_pending_albums):
        if len(list_of_pending_albums) < _NUM_ALBUMS_PER_FLUSH:
            return list_of_pending_albums
        flush(list_of_pending_albums)
        return []

    # main
    #chirpradio.connect("10.0.1.98:8000")
    chirpradio.connect()

    Messages.add_message('Beginning to push albums.', 'warning')

    sql_db = database.Database(conf.LIBRARY_DB)

    pending_albums = []
    this_album = []
    # TODO(trow): Select the albums to import in a saner way.
    for vol, import_timestamp in sql_db.get_all_imports():
        if START_TIMESTAMP is not None and import_timestamp < START_TIMESTAMP:
            continue
        #print "***"
        #print "*** import_timestamp = %s" % timestamp.get_human_readable(
        #    import_timestamp)
        #print "***"
        for au_file in sql_db.get_by_import(vol, import_timestamp):
            if this_album and this_album[0].album_id != au_file.album_id:
                alb = album.Album(this_album)
                pending_albums.append(alb)
                Messages.add_message('Adding "%s"' % alb.title(), 'success')
                pending_albums = maybe_flush(pending_albums)
                this_album = []
            this_album.append(au_file)

    # Add the last album to the list of pending albums, then do the
    # final flush.
    if this_album:
        alb = album.Album(this_album)
        Messages.add_message('Adding "%s"' % alb.title(), 'success')
        pending_albums.append(alb)
        this_album = []
    flush(pending_albums)

    Messages.add_message('Album push complete. OK!', 'success')
    Messages.add_message('Import process complete. OK!', 'success')

    current_route.CURRENT_ROUTE = 'import'
def album_to_json(album, path):
    """
    Takes a chirp library album and path.
    Returns a dict of album attributes.
    """
    error = False
    result = {}
    result['path'] = path

    try:
        result['title'] = (u'%s' % album.title()).encode('utf-8')
        # ensure the tags decode cleanly
        album.tags()
    except UnicodeDecodeError:
        error = True

    try:
        result['compilation'] = album.is_compilation()
    except KeyError:
        error = True
        # if the album has an error, compilation status doesn't matter
        result['compilation'] = False

    if result['compilation']:
        result['artist'] = 'Various Artists'
    else:
        try:
            result['artist'] = album.artist_name().encode('utf-8')
            # check encoding before data is committed
            unicode(result['artist'])
        except (KeyError, UnicodeDecodeError):
            error = True

    # build tracks
    result['tracks'] = []
    for au_file in album.all_au_files:
        track = {}
        # extract track number
        try:
            track['number'] = re.search(
                '^[0-9]*', au_file.mutagen_id3['TRCK'].text[0]).group(0)
        except KeyError:
            error = True
        try:
            track['title'] = au_file.tit2().encode('utf-8')
        except (UnicodeDecodeError, AttributeError):
            error = True
        if result['compilation']:
            try:
                track['artist'] = unicode(
                    au_file.mutagen_id3['TPE1']).encode('utf-8')
            except UnicodeDecodeError:
                error = True
        result['tracks'].append(track)

    if error:
        result['error'] = True
        Messages.add_message('There was an error at %s' % path, 'error')

    return result
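# For reference, a sketch of the dict this returns for a well-formed,
# non-compilation album (the values are made up, not from a real album):
#
#   {
#       'path': '/path/to/dropbox/Some Artist - Some Album',
#       'title': 'Some Album',
#       'compilation': False,
#       'artist': 'Some Artist',
#       'tracks': [
#           {'number': '1', 'title': 'First Song'},
#           {'number': '2', 'title': 'Second Song'},
#       ],
#   }
#
# 'error': True (plus a Messages entry) is added only when a tag is missing
# or fails to decode.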