Example #1
 def process_wikidata(self, wikidata_url, item_id):
     item = wikidata_url.split('/')[4]
     path = "/wiki/Special:EntityData/" + item + ".rdf"
     log.info('WIKIDATA: fetching the following url wikidata.org%s' % path)
     self.xmlws.get('www.wikidata.org', 443, path,
                    partial(self.parse_wikidata_response, item, item_id),
                    xml=True, priority=False, important=False)
Example #2
 def add_files(self, filenames, target=None):
     """Add files to the tagger."""
     ignoreregex = None
     pattern = config.setting['ignore_regex']
     if pattern:
         ignoreregex = re.compile(pattern)
     ignore_hidden = config.setting["ignore_hidden_files"]
     new_files = []
     for filename in filenames:
         filename = os.path.normpath(os.path.realpath(filename))
         if ignore_hidden and is_hidden(filename):
             log.debug("File ignored (hidden): %r" % (filename))
             continue
         # Ignore .smbdelete* files which Apple iOS SMB creates by renaming a file when it cannot delete it
         if os.path.basename(filename).startswith(".smbdelete"):
             log.debug("File ignored (.smbdelete): %r", filename)
             continue
         if ignoreregex is not None and ignoreregex.search(filename):
             log.info("File ignored (matching %r): %r" %
                      (pattern, filename))
             continue
         if filename not in self.files:
             file = open_file(filename)
             if file:
                 self.files[filename] = file
                 new_files.append(file)
     if new_files:
         log.debug("Adding files %r", new_files)
         new_files.sort(key=lambda x: x.filename)
         if target is None or target is self.unclustered_files:
             self.unclustered_files.add_files(new_files)
             target = None
         for file in new_files:
             file.load(partial(self._file_loaded, target=target))
Example #3
    def process_album_tags(self):
        """
        this is called after all last.fm data is received to process the
        collected data for album tags.
        """
        log.info(
            '>>> process album tags: %s - %s',
            self.metadata['albumartist'],
            self.metadata['album'],
        )
        if settings.DEBUG_STATS_ALBUM:
            self.print_toptag_stats('album', 'album', len(self.tracks))
            self.print_toptag_stats('album', 'all_artist')
            self.print_toptag_stats('album', 'all_track')

        # get complete, balanced, sorted list (high first) of tags:
        album_weight = settings.CONFIG['album']['weight']['album'] * len(
            self.tracks)
        all_track_weight = settings.CONFIG['album']['weight']['all_track']
        all_artist_weight = settings.CONFIG['album']['weight']['all_artist']
        # album tag score gets multiplied by the total number of tracks
        # in the release to even out weight of all_* tags before merging
        all_tags = apply_tag_weight(
            (self.toptags['album'], album_weight),
            (self.toptags['all_track'], all_track_weight),
            (self.toptags['all_artist'], all_artist_weight))

        self.filter_and_set_metadata('album', all_tags)
Example #4
File: plugin.py Project: zas/picard
 def load_plugindir(self, plugindir):
     plugindir = os.path.normpath(plugindir)
     if not os.path.isdir(plugindir):
         log.info("Plugin directory %r doesn't exist", plugindir)
         return
     # first, handle eventual plugin updates
     for updatepath in [os.path.join(plugindir, file) for file in
                        os.listdir(plugindir) if file.endswith('.update')]:
         path = os.path.splitext(updatepath)[0]
         name = is_zip(path)
         if not name:
             name = _plugin_name_from_path(path)
         if name:
             self._remove_plugin(name)
             os.rename(updatepath, path)
             log.debug('Updating plugin %r (%r)', name, path)
         else:
             log.error('Cannot get plugin name from %r', updatepath)
     # now load found plugins
     names = set()
     for path in [os.path.join(plugindir, file) for file in os.listdir(plugindir)]:
         name = is_zip(path)
         if not name:
             name = _plugin_name_from_path(path)
         if name:
             names.add(name)
     log.debug("Looking for plugins in directory %r, %d names found",
               plugindir,
               len(names))
     for name in sorted(names):
         try:
             self.load_plugin(name, plugindir)
         except Exception as e:
             log.error('Unable to load plugin: %s.\nError occurred: %s', name, e)
Example #5
 def load_plugindir(self, plugindir):
     plugindir = os.path.normpath(plugindir)
     if not os.path.isdir(plugindir):
         log.info("Plugin directory %r doesn't exist", plugindir)
         return
     # first, handle eventual plugin updates
     for updatepath in [
             os.path.join(plugindir, file) for file in os.listdir(plugindir)
             if file.endswith('.update')
     ]:
         path = os.path.splitext(updatepath)[0]
         name = is_zip(path)
         if not name:
             name = _plugin_name_from_path(path)
         if name:
             self.remove_plugin(name)
             os.rename(updatepath, path)
             log.debug('Updating plugin %r (%r)', name, path)
         else:
             log.error('Cannot get plugin name from %r', updatepath)
     # now load found plugins
     names = set()
     for path in [
             os.path.join(plugindir, file) for file in os.listdir(plugindir)
     ]:
         name = is_zip(path)
         if not name:
             name = _plugin_name_from_path(path)
         if name:
             names.add(name)
     log.debug("Looking for plugins in directory %r, %d names found",
               plugindir, len(names))
     for name in sorted(names):
         self.load_plugin(name, plugindir)
Example #6
 def add_files(self, filenames, target=None):
     """Add files to the tagger."""
     ignoreregex = None
     pattern = config.setting['ignore_regex']
     if pattern:
         ignoreregex = re.compile(pattern)
     ignore_hidden = not config.persist["show_hidden_files"]
     new_files = []
     for filename in filenames:
         filename = os.path.normpath(os.path.realpath(filename))
         if ignore_hidden and is_hidden_path(filename):
             log.debug("File ignored (hidden): %s" % (filename))
             continue
         if ignoreregex is not None and ignoreregex.search(filename):
             log.info("File ignored (matching %s): %s" % (pattern, filename))
             continue
         if filename not in self.files:
             file = open_file(filename)
             if file:
                 self.files[filename] = file
                 new_files.append(file)
     if new_files:
         log.debug("Adding files %r", new_files)
         new_files.sort(key=lambda x: x.filename)
         if target is None or target is self.unmatched_files:
             self.unmatched_files.add_files(new_files)
             target = None
         for file in new_files:
             file.load(partial(self._file_loaded, target=target))
Example #7
 def load(self):
     if self._requests:
         log.info("Not reloading, some requests are still active.")
         return
     self.tagger.window.set_statusbar_message('Loading album %s...', self.id)
     self.loaded = False
     if self.release_group:
         self.release_group.loaded = False
         self.release_group.folksonomy_tags.clear()
     self.metadata.clear()
     self.folksonomy_tags.clear()
     self.metadata['album'] = _("[loading album information]")
     self.update()
     self._new_metadata = Metadata()
     self._new_tracks = []
     self._requests = 1
     require_authentication = False
     inc = ['release-groups', 'media', 'recordings', 'artist-credits',
            'artists', 'aliases', 'labels', 'isrcs', 'collections']
     if config.setting['release_ars'] or config.setting['track_ars']:
         inc += ['artist-rels', 'release-rels', 'url-rels', 'recording-rels', 'work-rels']
         if config.setting['track_ars']:
             inc += ['recording-level-rels', 'work-level-rels']
     if config.setting['folksonomy_tags']:
         if config.setting['only_my_tags']:
             require_authentication = True
             inc += ['user-tags']
         else:
             inc += ['tags']
     if config.setting['enable_ratings']:
         require_authentication = True
         inc += ['user-ratings']
     self.load_task = self.tagger.xmlws.get_release_by_id(
         self.id, self._release_request_finished, inc=inc,
         mblogin=require_authentication)
Example #8
    def start(self):
        if self.server:
            self.stop()

        config = get_config()
        if config.setting["browser_integration_localhost_only"]:
            host_address = '127.0.0.1'
        else:
            host_address = '0.0.0.0'  # nosec

        for port in range(config.setting["browser_integration_port"], 65535):
            try:
                self.server = ThreadingHTTPServer((host_address, port),
                                                  RequestHandler)
            except OSError:
                continue
            log.info("Starting the browser integration (%s:%d)", host_address,
                     port)
            self.listen_port_changed.emit(port)
            threading.Thread(target=self.server.serve_forever).start()
            break
        else:
            log.error(
                "Failed finding an available port for the browser integration."
            )
            self.stop()
Example #9
 def load_plugins_from_directory(self, plugindir):
     plugindir = os.path.normpath(plugindir)
     if not os.path.isdir(plugindir):
         log.info("Plugin directory %r doesn't exist", plugindir)
         return
     if plugindir == self.plugins_directory:
         # .update trick is only for plugins installed through the Picard UI
         # and only for plugins in plugins_directory (USER_PLUGIN_DIR by default)
         self.handle_plugin_updates()
     # now load found plugins
     names = set()
     for path in [
             os.path.join(plugindir, file) for file in os.listdir(plugindir)
     ]:
         name = _plugin_name_from_path(path)
         if name:
             names.add(name)
     log.debug("Looking for plugins in directory %r, %d names found",
               plugindir, len(names))
     for name in sorted(names):
         try:
             self._load_plugin_from_directory(name, plugindir)
         except Exception:
             self.plugin_error(name,
                               _("Unable to load plugin '%s'"),
                               name,
                               log_func=log.exception)
Example #10
File: tagger.py Project: phw/picard
 def add_files(self, filenames, target=None):
     """Add files to the tagger."""
     ignoreregex = None
     pattern = config.setting['ignore_regex']
     if pattern:
         ignoreregex = re.compile(pattern)
     ignore_hidden = config.setting["ignore_hidden_files"]
     new_files = []
     for filename in filenames:
         filename = os.path.normpath(os.path.realpath(filename))
         if ignore_hidden and is_hidden(filename):
             log.debug("File ignored (hidden): %r" % (filename))
             continue
         # Ignore .smbdelete* files which Apple iOS SMB creates by renaming a file when it cannot delete it
         if os.path.basename(filename).startswith(".smbdelete"):
             log.debug("File ignored (.smbdelete): %r", filename)
             continue
         if ignoreregex is not None and ignoreregex.search(filename):
             log.info("File ignored (matching %r): %r" % (pattern, filename))
             continue
         if filename not in self.files:
             file = open_file(filename)
             if file:
                 self.files[filename] = file
                 new_files.append(file)
     if new_files:
         log.debug("Adding files %r", new_files)
         new_files.sort(key=lambda x: x.filename)
         if target is None or target is self.unclustered_files:
             self.unclustered_files.add_files(new_files)
             target = None
         for file in new_files:
             file.load(partial(self._file_loaded, target=target))
Example #11
 def add_files(self, filenames, target=None):
     """Add files to the tagger."""
     ignoreregex = None
     pattern = config.setting['ignore_regex']
     if pattern:
         ignoreregex = re.compile(pattern)
     ignore_hidden = config.setting["ignore_hidden_files"]
     new_files = []
     for filename in filenames:
         filename = os.path.normpath(os.path.realpath(filename))
         if ignore_hidden and is_hidden(filename):
             log.debug("File ignored (hidden): %r" % (filename))
             continue
         if ignoreregex is not None and ignoreregex.search(filename):
             log.info("File ignored (matching %r): %r" %
                      (pattern, filename))
             continue
         if filename not in self.files:
             file = open_file(filename)
             if file:
                 self.files[filename] = file
                 new_files.append(file)
     if new_files:
         log.debug("Adding files %r", new_files)
         new_files.sort(key=lambda x: x.filename)
         if target is None or target is self.unmatched_files:
             self.unmatched_files.add_files(new_files)
             target = None
         for file in new_files:
             file.load(partial(self._file_loaded, target=target))
Example #12
    def load(self, priority=False, refresh=False):
        if self._requests:
            log.info("Not reloading, some requests are still active.")
            return
        self.tagger.window.set_statusbar_message(
            N_('Loading album %(id)s ...'), {'id': self.id})
        self.loaded = False
        self.status = AlbumStatus.LOADING
        if self.release_group:
            self.release_group.loaded = False
            self.release_group.genres.clear()
        self.metadata.clear()
        self.genres.clear()
        self.update(update_selection=False)
        self._new_metadata = Metadata()
        self._new_tracks = []
        self._requests = 1
        self.clear_errors()
        config = get_config()
        require_authentication = False
        inc = {
            'aliases',
            'annotation',
            'artist-credits',
            'artists',
            'collections',
            'discids',
            'isrcs',
            'labels',
            'media',
            'recordings',
            'release-groups',
        }
        if self.tagger.webservice.oauth_manager.is_authorized():
            require_authentication = True
            inc |= {'user-collections'}
        if config.setting['release_ars'] or config.setting['track_ars']:
            inc |= {
                'artist-rels', 'recording-rels', 'release-rels', 'url-rels',
                'work-rels'
            }
            if config.setting['track_ars']:
                inc |= {
                    'recording-level-rels',
                    'work-level-rels',
                }
        require_authentication = self.set_genre_inc_params(
            inc, config) or require_authentication
        if config.setting['enable_ratings']:
            require_authentication = True
            inc |= {'user-ratings'}

        self.load_task = self.tagger.mb_api.get_release_by_id(
            self.id,
            self._release_request_finished,
            inc=tuple(inc),
            mblogin=require_authentication,
            priority=priority,
            refresh=refresh)
Example #13
    def extract_discnumber(self, metadata):
        """
        >>> from picard.metadata import Metadata
        >>> m = Metadata()
        >>> AddClusterAsRelease().extract_discnumber(m)
        0
        >>> m["discnumber"] = "boop"
        >>> AddClusterAsRelease().extract_discnumber(m)
        0
        >>> m["discnumber"] = "1"
        >>> AddClusterAsRelease().extract_discnumber(m)
        0
        >>> m["discnumber"] = 1
        >>> AddClusterAsRelease().extract_discnumber(m)
        0
        >>> m["discnumber"] = -1
        >>> AddClusterAsRelease().extract_discnumber(m)
        0
        >>> m["discnumber"] = "1/1"
        >>> AddClusterAsRelease().extract_discnumber(m)
        0
        >>> m["discnumber"] = "2/2"
        >>> AddClusterAsRelease().extract_discnumber(m)
        1
        >>> a = AddClusterAsRelease()
        >>> m["discnumber"] = "-2/2"
        >>> a.extract_discnumber(m)
        0
        >>> m["discnumber"] = "-1/4"
        >>> a.extract_discnumber(m)
        1
        >>> m["discnumber"] = "1/4"
        >>> a.extract_discnumber(m)
        3

        """
        # As per https://musicbrainz.org/doc/Development/Release_Editor_Seeding#Tracklists_data
        # the medium numbers ("m") must start with 0.
        # Maybe the existing tags don't have disc numbers in them or
        # they're starting with something smaller than or equal to 0, so try
        # to produce a sane disc number.
        try:
            discnumber = metadata.get("discnumber", "1")
            # Split off any totaldiscs information
            discnumber = discnumber.split("/", 1)[0]
            m = int(discnumber)
            if m <= 0:
                # A disc number was smaller than or equal to 0 - all other
                # disc numbers need to be changed to accommodate that.
                self.discnumber_shift = max(self.discnumber_shift, 0 - m)
            m = m + self.discnumber_shift
        except ValueError as e:
            # The most likely reason for an exception at this point is because
            # the disc number in the tags was not a number. Just log the
            # exception and assume the medium number is 0.
            log.info("Trying to get the disc number of %s caused the following error: %s; assuming 0",
                     metadata["~filename"], e)
            m = 0
        return m
Example #14
 def debug(self, debug):
     self._debug = debug
     if debug:
         log.debug_mode(True)
         log.debug("Debug mode on")
     else:
         log.info("Debug mode off")
         log.debug_mode(False)
Example #15
 def print_toptag_stats(self, scope, name, correction=1):
     toptags = self.toptags[name]
     weight = settings.CONFIG[scope]['weight'][name]
     log.info("got %s %s tags", len(toptags), name)
     if toptags:
         log.info("applying weight x%s:", weight)
         merged = apply_tag_weight((toptags, correction))[:10]
         self.print_toplist(merged)
Example #16
def extra_relationships(tagger, metadata, release, track):
    recording_id = metadata['musicbrainz_recordingid']
    recording_live_string = get_recording_live_string(recording_id)
    if recording_live_string:
        old_comment = metadata.get('~recordingcomment', None)
        log.info("%s: recording id %s -> %s (%s)", PLUGIN_NAME, recording_id,
                 recording_live_string, old_comment)
        metadata.add_unique('~recordinglivecomment', recording_live_string)
Example #17
 def _backup_settings(self):
     if Version(0, 0, 0) < self._version < PICARD_VERSION:
         backup_path = self._versioned_config_filename()
         log.info('Backing up config file to %s', backup_path)
         try:
             shutil.copyfile(self.fileName(), backup_path)
         except OSError:
             log.error('Failed backing up config file to %s', backup_path)
Example #18
 def _log_tags(self, tags, message, limit=5):
     log.info('%s: %s tag(s) %s:', self, len(tags), message)
     log.info(
         '%s: %s%s',
         self,
         ', '.join(['{} ({})'.format(t, s) for t, s in tags][:limit]),
         ', ...' if len(tags) > limit else '',
     )
Example #19
 def stop(self):
     if self.server:
         log.info("Stopping the browser integration")
         self.server.shutdown()
         self.server.server_close()
         self.server = None
         self.listen_port_changed.emit(self.port)
     else:
         log.debug("Browser integration inactive, no need to stop")
 def make_album_vars(self, mbz_tagger, metadata, release):
     try:
         mbz_id = release['id']
     except (KeyError, TypeError, ValueError, AttributeError):
         mbz_id = 'N/A'
     if metadata['script'].lower() == 'jpan':
         self.make_vars(mbz_tagger, metadata, release, 'album')
     else:
         log.info('%s: Script is not Japanese, skipping release ID "%s"',
                  PLUGIN_NAME, mbz_id)
Example #21
def get_release_url(release):
    try:
        for relation_list in release.relation_list:
            if relation_list.target_type == 'Url':
                for relation in relation_list.relation:
                    if relation.target.startswith(discogs_release_url):
                        return relation.target
    except AttributeError:
        log.info('Error retrieving release discogs url')
        pass
Example #22
def track_metadata_processor(album, metadata, track_node, release_node):
    """
    Determine track metadata using track and artist last.fm tags
    """
    log.info('received track metadata trigger')
    lfmws = LastFMTagger(album, metadata, release_node)
    lfmws.before_finalize.append(lfmws.process_track_tags)

    lfmws.request_track_toptags()
    lfmws.request_artist_toptags()
Example #23
    def process_release(self, album, metadata, release):
        self.ws = album.tagger.webservice
        self.log = album.log
        item_id = dict.get(metadata, 'musicbrainz_releasegroupid')[0]

        log.info('WIKIDATA: Processing release group %s ' % item_id)
        self.process_request(metadata, album, item_id, type='release-group')
        for artist in dict.get(metadata, 'musicbrainz_albumartistid'):
            item_id = artist
            log.info('WIKIDATA: Processing release artist %s' % item_id)
            self.process_request(metadata, album, item_id, type='artist')
Example #24
 def process_wikidata(self, wikidata_url, item_id):
     item = wikidata_url.split('/')[4]
     path = "/wiki/Special:EntityData/" + item + ".rdf"
     log.info('WIKIDATA: fetching the following url wikidata.org%s' % path)
     self.xmlws.get('www.wikidata.org',
                    443,
                    path,
                    partial(self.parse_wikidata_response, item, item_id),
                    xml=True,
                    priority=False,
                    important=False)
Example #25
    def process_release(self, tagger, metadata, release):
        self.xmlws = tagger.tagger.xmlws
        self.log = tagger.log
        item_id = dict.get(metadata, 'musicbrainz_releasegroupid')[0]

        log.info('WIKIDATA: processing release group %s ' % item_id)
        self.process_request(metadata, tagger, item_id, type='release-group')
        for artist in dict.get(metadata, 'musicbrainz_albumartistid'):
            item_id = artist
            log.info('WIKIDATA: processing release artist %s' % item_id)
            self.process_request(metadata, tagger, item_id, type='artist')
Example #26
    def print_toplist(merged):
        def p(score):
            return int(float(score) / float(topscore) * 100.0)

        try:
            topscore = merged[0][1]
            toplist = [
                "{0}: {1} ({2}%)".format(n, s, p(s)) for n, s in merged[:10]
            ]
            log.info(", ".join(toplist))
        except:
            log.info("None")
Example #27
def album_metadata_processor(album, metadata, release_node):
    """
    Determine album metadata using album and all artist and all track last.fm
    tags in the album.
    """
    log.info('received album metadata trigger')
    lfmws = LastFMTagger(album, metadata, release_node)
    lfmws.before_finalize.append(lfmws.process_album_tags)

    lfmws.request_album_toptags()
    lfmws.request_all_track_toptags()
    lfmws.request_all_artist_toptags()
Example #28
    def process_release(self, tagger, metadata, release):

        self.xmlws = tagger.tagger.xmlws
        self.log = tagger.log
        item_id = dict.get(metadata, 'musicbrainz_releasegroupid')[0]

        log.info('WIKIDATA: processing release group %s ' % item_id)
        self.process_request(metadata, tagger, item_id, type='release-group')
        for artist in dict.get(metadata, 'musicbrainz_albumartistid'):
            item_id = artist
            log.info('WIKIDATA: processing release artist %s' % item_id)
            self.process_request(metadata, tagger, item_id, type='artist')
Example #29
 def stop(self):
     if self.server:
         try:
             log.info("Stopping the browser integration")
             self.server.shutdown()
             self.server.server_close()
             self.server = None
             self.listen_port_changed.emit(self.port)
         except Exception:
             log.error("Failed stopping the browser integration",
                       exc_info=True)
     else:
         log.debug("Browser integration inactive, no need to stop")
Example #30
    def filter_tags(self, all_tags):
        # exclude tags not relevant for this category
        tags = self._filter_by_searchlist(all_tags)

        # exclude tags below the threshold
        if tags:
            self._log_tags(tags, 'before threshold filter')
            tags = self._filter_by_threshold(tags)
            self._log_tags(tags, 'filtered')
        else:
            log.info('%s: no tags', self)

        return tags
Example #31
    def process_request(self, metadata, tagger, item_id, type):
        self.lock.acquire()
        log.debug('WIKIDATA: Looking up cache for item  %s' % item_id)
        log.debug('WIKIDATA: requests %s' % tagger._requests)
        log.debug('WIKIDATA: TYPE %s' % type)
        if item_id in self.cache.keys():
            log.info('WIKIDATA: found in cache')
            genre_list = self.cache.get(item_id)
            new_genre = set(metadata.getall("genre"))
            new_genre.update(genre_list)
            metadata["genre"] = list(new_genre)

            if tagger._requests == 0:
                tagger._finalize_loading(None)
            self.lock.release()
            return
        else:
            # pending requests are handled by adding the metadata object to a list of things to be updated when the genre is found
            if item_id in self.requests.keys():
                log.debug(
                    'WIKIDATA: request already pending, add it to the list of items to update once this has been found'
                )
                self.requests[item_id].append(metadata)

                tagger._requests += 1
                self.taggers[item_id].append(tagger)
                self.lock.release()
                return
            self.requests[item_id] = [metadata]
            tagger._requests += 1
            self.taggers[item_id] = [tagger]
            log.debug('WIKIDATA: first request for this item')

            self.lock.release()
            log.info('WIKIDATA: about to call musicbrainz to look up %s ' %
                     item_id)
            # find the wikidata url if this exists
            host = config.setting["server_host"]
            port = config.setting["server_port"]

            path = '/ws/2/%s/%s' % (type, item_id)
            queryargs = {"inc": "url-rels"}
            self.xmlws.get(host,
                           port,
                           path,
                           partial(self.musicbrainz_release_lookup, item_id,
                                   metadata),
                           xml=True,
                           priority=False,
                           important=False,
                           queryargs=queryargs)
Example #32
 def add_files(self, filenames, target=None):
     """Add files to the tagger."""
     ignoreregex = None
     config = get_config()
     pattern = config.setting['ignore_regex']
     if pattern:
         try:
             ignoreregex = re.compile(pattern)
         except re.error as e:
             log.error(
                 "Failed evaluating regular expression for ignore_regex: %s",
                 e)
     ignore_hidden = config.setting["ignore_hidden_files"]
     new_files = []
     for filename in filenames:
         filename = normpath(filename)
         if ignore_hidden and is_hidden(filename):
             log.debug("File ignored (hidden): %r" % (filename))
             continue
         # Ignore .smbdelete* files which Apple iOS SMB creates by renaming a file when it cannot delete it
         if os.path.basename(filename).startswith(".smbdelete"):
             log.debug("File ignored (.smbdelete): %r", filename)
             continue
         if ignoreregex is not None and ignoreregex.search(filename):
             log.info("File ignored (matching %r): %r" %
                      (pattern, filename))
             continue
         if filename not in self.files:
             file = open_file(filename)
             if file:
                 self.files[filename] = file
                 new_files.append(file)
             QtCore.QCoreApplication.processEvents()
     if new_files:
         log.debug("Adding files %r", new_files)
         new_files.sort(key=lambda x: x.filename)
         self.window.set_sorting(False)
         self._pending_files_count += len(new_files)
         unmatched_files = []
         for i, file in enumerate(new_files):
             file.load(
                 partial(self._file_loaded,
                         target=target,
                         unmatched_files=unmatched_files))
             # Calling processEvents helps processing the _file_loaded
             # callbacks in between, which keeps the UI more responsive.
             # Avoid calling it too often to not slow down the loading too much
             # Using an uneven number to have the unclustered file counter
             # not look stuck in certain digits.
             if i % 17 == 0:
                 QtCore.QCoreApplication.processEvents()
Example #33
def open(url):
    try:
        webbrowser.open(url)
    except webbrowser.Error as e:
        QtWidgets.QMessageBox.critical(None, _("Web Browser Error"), _("Error while launching a web browser:\n\n%s") % (e,))
    except TypeError as e:
        if version_info.major == 3 and version_info.minor == 7 and version_info.micro == 0:
            # See https://bugs.python.org/issue31014, webbrowser.open doesn't
            # work on 3.7.0 the first time it's called. The initialization code
            # in it will be skipped after the first call, making it possible to
            # use it, although it might not accurately identify the user's
            # preferred browser.
            log.info("Working around https://bugs.python.org/issue31014 - URLs might not be opened in the correct browser")
            webbrowser.open(url)
Example #34
 def send2beets(self, album):
     """
     Locates path for album and send to `beet import` command.
     """
     res = True
     log.info('beet import "{}"'.format(album))
     album_filenames = album.tagger.get_files_from_objects([album])
     albumpaths = set()
     for track in album.tracks:
         trackno = track.metadata['tracknumber']
         discno = track.metadata['discnumber']
         track_file = None
         for album_file in album_filenames:
             if (str(album_file.tracknumber) == trackno
                     and str(album_file.discnumber) == discno):
                 track_file = album_file.filename
                 break
         # log.info(u'  track "{}"'.format(track_file))
         path = os.path.dirname(track_file)
         # log.info(u'  path "{}"'.format(path))
         albumpaths.add(os.path.abspath(path))
     for path in albumpaths:
         log.info(u'album path: {}'.format(path))
         commandlist = [u'gnome-terminal', u'--', u'beet', 'import', path]
         log.info(u'Launching: {}'.format(u" ".join(commandlist)))
         try:
             res = res and (subprocess.run(commandlist).returncode == 0)
         except AttributeError:  # Python2
             res = res and (subprocess.call(commandlist) == 0)
         log.info(u'Result: {}'.format(res))
     return res
Example #35
 def load(self, priority=False, refresh=False):
     if self._requests:
         log.info("Not reloading, some requests are still active.")
         return
     self.tagger.window.set_statusbar_message(N_("Loading album %(id)s ..."), {"id": self.id})
     self.loaded = False
     self.status = _("[loading album information]")
     if self.release_group:
         self.release_group.loaded = False
         self.release_group.folksonomy_tags.clear()
     self.metadata.clear()
     self.folksonomy_tags.clear()
     self.update()
     self._new_metadata = Metadata()
     self._new_tracks = []
     self._requests = 1
     self.errors = []
     require_authentication = False
     inc = [
         "release-groups",
         "media",
         "recordings",
         "artist-credits",
         "artists",
         "aliases",
         "labels",
         "isrcs",
         "collections",
     ]
     if config.setting["release_ars"] or config.setting["track_ars"]:
         inc += ["artist-rels", "release-rels", "url-rels", "recording-rels", "work-rels"]
         if config.setting["track_ars"]:
             inc += ["recording-level-rels", "work-level-rels"]
     if config.setting["folksonomy_tags"]:
         if config.setting["only_my_tags"]:
             require_authentication = True
             inc += ["user-tags"]
         else:
             inc += ["tags"]
     if config.setting["enable_ratings"]:
         require_authentication = True
         inc += ["user-ratings"]
     self.load_task = self.tagger.xmlws.get_release_by_id(
         self.id,
         self._release_request_finished,
         inc=inc,
         mblogin=require_authentication,
         priority=priority,
         refresh=refresh,
     )
Example #36
 def process_wikidata(self, wikidata_url, item_id):
     with self.lock:
         for album in self.albums[item_id]:
             album._requests += 1
     item = wikidata_url.split('/')[4]
     path = "/wiki/Special:EntityData/" + item + ".rdf"
     log.info('WIKIDATA: fetching the following url wikidata.org%s' % path)
     self.ws.get('www.wikidata.org',
                 443,
                 path,
                 partial(self.parse_wikidata_response, item, item_id),
                 parse_response_type="xml",
                 priority=False,
                 important=False)
Example #37
    def musicbrainz_release_lookup(self, item_id, metadata, response, reply,
                                   error):
        found = False
        if error:
            log.info('WIKIDATA: error retrieving release group info')
        else:
            if 'metadata' in response.children:
                if 'release_group' in response.metadata[0].children:
                    if 'relation_list' in response.metadata[0].release_group[
                            0].children:
                        for relation in response.metadata[0].release_group[
                                0].relation_list[0].relation:
                            if relation.type == 'wikidata' and 'target' in relation.children:
                                found = True
                                wikidata_url = relation.target[0].text
                                item_id = item_id
                                self.process_wikidata(wikidata_url, item_id)
                if 'artist' in response.metadata[0].children:
                    if 'relation_list' in response.metadata[0].artist[
                            0].children:
                        for relation in response.metadata[0].artist[
                                0].relation_list[0].relation:
                            if relation.type == 'wikidata' and 'target' in relation.children:
                                found = True
                                wikidata_url = relation.target[0].text
                                item_id = item_id
                                self.process_wikidata(wikidata_url, item_id)

                if 'work' in response.metadata[0].children:
                    if 'relation_list' in response.metadata[0].work[
                            0].children:
                        for relation in response.metadata[0].work[
                                0].relation_list[0].relation:
                            if relation.type == 'wikidata' and 'target' in relation.children:
                                found = True
                                wikidata_url = relation.target[0].text
                                item_id = item_id
                                self.process_wikidata(wikidata_url, item_id)
        if not found:
            log.info('WIKIDATA: no wikidata url')
            self.lock.acquire()
            for tagger in self.taggers[item_id]:
                tagger._requests -= 1
                if tagger._requests == 0:
                    tagger._finalize_loading(None)
                log.debug('WIKIDATA:  TOTAL REMAINING REQUESTS %s' %
                          tagger._requests)
            del self.requests[item_id]
            self.lock.release()
Example #38
 def load(self, priority=False, refresh=False):
     if self._requests:
         log.info("Not reloading, some requests are still active.")
         return
     self.tagger.window.set_statusbar_message(
         N_('Loading album %(id)s ...'), {'id': self.id})
     self.loaded = False
     self.status = _("[loading album information]")
     if self.release_group:
         self.release_group.loaded = False
         self.release_group.folksonomy_tags.clear()
     self.metadata.clear()
     self.folksonomy_tags.clear()
     self.update()
     self._new_metadata = Metadata()
     self._new_tracks = []
     self._requests = 1
     self.errors = []
     require_authentication = False
     inc = [
         'release-groups', 'media', 'recordings', 'artist-credits',
         'artists', 'aliases', 'labels', 'isrcs', 'collections'
     ]
     if config.setting['release_ars'] or config.setting['track_ars']:
         inc += [
             'artist-rels', 'release-rels', 'url-rels', 'recording-rels',
             'work-rels'
         ]
         if config.setting['track_ars']:
             inc += ['recording-level-rels', 'work-level-rels']
     if config.setting['folksonomy_tags']:
         if config.setting['only_my_tags']:
             require_authentication = True
             inc += ['user-tags']
         else:
             inc += ['tags']
     if config.setting['enable_ratings']:
         require_authentication = True
         inc += ['user-ratings']
     self.load_task = self.tagger.xmlws.get_release_by_id(
         self.id,
         self._release_request_finished,
         inc=inc,
         mblogin=require_authentication,
         priority=priority,
         refresh=refresh)
Example #39
 def musicbrainz_release_lookup(self, item_id, metadata, response, reply,
                                error):
     found = False
     if error:
         log.info('WIKIDATA: error retrieving release group info')
     else:
         if 'metadata' in response.children:
             if 'release_group' in response.metadata[0].children:
                 if 'relation_list' in response.metadata[0].release_group[
                         0].children:
                     for relation in response.metadata[0].release_group[
                             0].relation_list[0].relation:
                         if relation.type == 'wikidata' and 'target' in relation.children:
                             found = True
                             wikidata_url = relation.target[0].text
                             self.process_wikidata(wikidata_url, item_id)
             if 'artist' in response.metadata[0].children:
                 if 'relation_list' in response.metadata[0].artist[
                         0].children:
                     for relation in response.metadata[0].artist[
                             0].relation_list[0].relation:
                         if relation.type == 'wikidata' and 'target' in relation.children:
                             found = True
                             wikidata_url = relation.target[0].text
                             self.process_wikidata(wikidata_url, item_id)
             if 'work' in response.metadata[0].children:
                 if 'relation_list' in response.metadata[0].work[
                         0].children:
                     for relation in response.metadata[0].work[
                             0].relation_list[0].relation:
                         if relation.type == 'wikidata' and 'target' in relation.children:
                             found = True
                             wikidata_url = relation.target[0].text
                             self.process_wikidata(wikidata_url, item_id)
     if not found:
         log.info('WIKIDATA: no wikidata url found for item_id: %s ',
                  item_id)
     with self.lock:
         for album in self.albums[item_id]:
             album._requests -= 1
             log.debug('WIKIDATA:  TOTAL REMAINING REQUESTS %s' %
                       album._requests)
             if not album._requests:
                 self.albums[item_id].remove(album)
                 album._finalize_loading(None)
Example #40
 def process_request(self, metadata, tagger, item_id, type):
     self.lock.acquire()
     log.debug('WIKIDATA: Looking up cache for item  %s' % item_id)
     log.debug('WIKIDATA: requests %s' % tagger._requests)
     log.debug('WIKIDATA: TYPE %s' % type)
     if item_id in self.cache.keys():
         log.info('WIKIDATA: found in cache')
         genre_list = self.cache.get(item_id)
         metadata["genre"] = genre_list

         if tagger._requests == 0:
             tagger._finalize_loading(None)
         self.lock.release()
         return
     else:
         # pending requests are handled by adding the metadata object to a list of things to be updated when the genre is found
         if item_id in self.requests.keys():
             log.debug('WIKIDATA: request already pending, add it to the list of items to update once this has been found')
             self.requests[item_id].append(metadata)

             tagger._requests += 1
             self.taggers[item_id].append(tagger)
             self.lock.release()
             return
         self.requests[item_id] = [metadata]
         tagger._requests += 1
         self.taggers[item_id] = [tagger]
         log.debug('WIKIDATA: first request for this item')

         self.lock.release()
         log.info('WIKIDATA: about to call musicbrainz to look up %s ' % item_id)
         # find the wikidata url if this exists
         host = config.setting["server_host"]
         port = config.setting["server_port"]

         path = '/ws/2/%s/%s' % (type, item_id)
         queryargs = {"inc": "url-rels"}
         self.xmlws.get(host, port, path,
                        partial(self.musicbrainz_release_lookup, item_id, metadata),
                        xml=True, priority=False, important=False, queryargs=queryargs)
Example #41
 def musicbrainz_release_lookup(self, item_id, metadata, response, reply, error):
     found = False
     if error:
         log.info('WIKIDATA: error retrieving release group info')
     else:
         if 'metadata' in response.children:
             if 'release_group' in response.metadata[0].children:
                 if 'relation_list' in response.metadata[0].release_group[0].children:
                     for relation in response.metadata[0].release_group[0].relation_list[0].relation:
                         if relation.type == 'wikidata' and 'target' in relation.children:
                             found = True
                             wikidata_url = relation.target[0].text
                             self.process_wikidata(wikidata_url, item_id)
             if 'artist' in response.metadata[0].children:
                 if 'relation_list' in response.metadata[0].artist[0].children:
                     for relation in response.metadata[0].artist[0].relation_list[0].relation:
                         if relation.type == 'wikidata' and 'target' in relation.children:
                             found = True
                             wikidata_url = relation.target[0].text
                             self.process_wikidata(wikidata_url, item_id)

             if 'work' in response.metadata[0].children:
                 if 'relation_list' in response.metadata[0].work[0].children:
                     for relation in response.metadata[0].work[0].relation_list[0].relation:
                         if relation.type == 'wikidata' and 'target' in relation.children:
                             found = True
                             wikidata_url = relation.target[0].text
                             self.process_wikidata(wikidata_url, item_id)
     if not found:
         log.info('WIKIDATA: no wikidata url')
         self.lock.acquire()
         for tagger in self.taggers[item_id]:
             tagger._requests -= 1
             if tagger._requests == 0:
                 tagger._finalize_loading(None)
             log.debug('WIKIDATA:  TOTAL REMAINING REQUESTS %s' % tagger._requests)
         self.lock.release()
Example #42
    def load_plugin(self, name, plugindir):
        self.log.debug("Loading plugin %r", name)
        try:
            info = imp.find_module(name, [plugindir])
        except ImportError:
            log.error("Failed loading plugin %r", name)
            return None

        plugin = None
        try:
            index = None
            for i, p in enumerate(self.plugins):
                if name == p.module_name:
                    _unregister_module_extensions(name)
                    index = i
                    break
            plugin_module = imp.load_module("picard.plugins." + name, *info)
            plugin = PluginWrapper(plugin_module, plugindir)
            for version in list(plugin.api_versions):
                for api_version in picard.api_versions:
                    if api_version.startswith(version):
                        plugin.compatible = True
                        setattr(picard.plugins, name, plugin_module)
                        if index is not None:
                            self.plugins[index] = plugin
                        else:
                            self.plugins.append(plugin)
                        break
                else:
                    continue
                break
            else:
                log.info("Plugin '%s' from '%s' is not compatible"
                    " with this version of Picard." % (plugin.name, plugin.file))
        except:
            log.error(traceback.format_exc())
        if info[0] is not None:
            info[0].close()
        return plugin
Example #43
 def load_plugins_from_directory(self, plugindir):
     plugindir = os.path.normpath(plugindir)
     if not os.path.isdir(plugindir):
         log.info("Plugin directory %r doesn't exist", plugindir)
         return
     if plugindir == self.plugins_directory:
         # .update trick is only for plugins installed through the Picard UI
         # and only for plugins in plugins_directory (USER_PLUGIN_DIR by default)
         self.handle_plugin_updates()
     # now load found plugins
     names = set()
     for path in [os.path.join(plugindir, file) for file in os.listdir(plugindir)]:
         name = _plugin_name_from_path(path)
         if name:
             names.add(name)
     log.debug("Looking for plugins in directory %r, %d names found",
               plugindir,
               len(names))
     for name in sorted(names):
         try:
             self._load_plugin_from_directory(name, plugindir)
         except Exception as e:
             log.error("Unable to load plugin '%s': %s", name, e)
Example #44
 def load(self, priority=False, refresh=False):
     if self._requests:
         log.info("Not reloading, some requests are still active.")
         return
     self.tagger.window.set_statusbar_message(
         N_('Loading album %(id)s ...'),
         {'id': self.id}
     )
     self.loaded = False
     self.status = _("[loading album information]")
     if self.release_group:
         self.release_group.loaded = False
         self.release_group.genres.clear()
     self.metadata.clear()
     self.genres.clear()
     self.update()
     self._new_metadata = Metadata()
     self._new_tracks = []
     self._requests = 1
     self.errors = []
     require_authentication = False
     inc = ['release-groups', 'media', 'discids', 'recordings', 'artist-credits',
            'artists', 'aliases', 'labels', 'isrcs', 'collections']
     if self.tagger.webservice.oauth_manager.is_authorized():
         require_authentication = True
         inc += ['user-collections']
     if config.setting['release_ars'] or config.setting['track_ars']:
         inc += ['artist-rels', 'release-rels', 'url-rels', 'recording-rels', 'work-rels']
         if config.setting['track_ars']:
             inc += ['recording-level-rels', 'work-level-rels']
     require_authentication = self.set_genre_inc_params(inc) or require_authentication
     if config.setting['enable_ratings']:
         require_authentication = True
         inc += ['user-ratings']
     self.load_task = self.tagger.mb_api.get_release_by_id(
         self.id, self._release_request_finished, inc=inc,
         mblogin=require_authentication, priority=priority, refresh=refresh)
Example #45
 def process_track(self, album, metadata, trackXmlNode, releaseXmlNode):
     self.xmlws = album.tagger.xmlws
     self.log = album.log
     tagger = album

     item_id = dict.get(metadata, 'musicbrainz_releasegroupid')[0]
     log.debug('WIKIDATA: looking up release metadata for %s ' % item_id)
     self.process_request(metadata, tagger, item_id, type='release-group')

     for artist in dict.get(metadata, 'musicbrainz_albumartistid'):
         item_id = artist
         log.info('WIKIDATA: processing release artist %s' % item_id)
         self.process_request(metadata, tagger, item_id, type='artist')

     for artist in dict.get(metadata, 'musicbrainz_artistid'):
         item_id = artist
         log.info('WIKIDATA: processing track artist %s' % item_id)
         self.process_request(metadata, tagger, item_id, type='artist')

     if 'musicbrainz_workid' in metadata:
         for workid in dict.get(metadata, 'musicbrainz_workid'):
             item_id = workid
             log.info('WIKIDATA: processing work %s' % item_id)
             self.process_request(metadata, tagger, item_id, type='work')
Example #46
def upgrade_to_v1_3_0_dev_2():
    if "preserved_tags" in _s:
        _s["preserved_tags"] = re.sub(r"\s+", ",", _s["preserved_tags"].strip())
        log.info(_('Config upgrade: option "preserved_tags" is now using '
                    'comma instead of spaces as tag separator (PICARD-536).'))
Example #47
    def __init__(self, args, localedir, autoupdate, debug=False):
        QtGui.QApplication.__init__(self, args)
        self.__class__.__instance = self

        self._args = args
        self._autoupdate = autoupdate

        # Initialize threading and allocate threads
        self.thread_pool = thread.ThreadPool(self)

        self.load_queue = queue.Queue()
        self.save_queue = queue.Queue()
        self.analyze_queue = queue.Queue()
        self.other_queue = queue.Queue()

        threads = self.thread_pool.threads
        for i in range(4):
            threads.append(thread.Thread(self.thread_pool, self.load_queue))
        threads.append(thread.Thread(self.thread_pool, self.save_queue))
        threads.append(thread.Thread(self.thread_pool, self.other_queue))
        threads.append(thread.Thread(self.thread_pool, self.other_queue))
        threads.append(thread.Thread(self.thread_pool, self.analyze_queue))

        self.thread_pool.start()
        self.stopping = False

        # Setup logging
        log._log_debug_messages = debug or "PICARD_DEBUG" in os.environ
        log.debug("Starting Picard %s from %r", picard.__version__, os.path.abspath(__file__))

        # TODO remove this before the final release
        if sys.platform == "win32":
            olduserdir = "~\\Local Settings\\Application Data\\MusicBrainz Picard"
        else:
            olduserdir = "~/.picard"
        olduserdir = os.path.expanduser(olduserdir)
        if os.path.isdir(olduserdir):
            log.info("Moving %s to %s", olduserdir, USER_DIR)
            try:
                shutil.move(olduserdir, USER_DIR)
            except:
                pass

        # for compatibility with pre-1.3 plugins
        QtCore.QObject.tagger = self
        QtCore.QObject.config = config
        QtCore.QObject.log = log

        check_io_encoding()

        setup_gettext(localedir, config.setting["ui_language"], log.debug)

        self.xmlws = XmlWebService()

        load_user_collections()

        # Initialize fingerprinting
        self._acoustid = acoustid.AcoustIDClient()
        self._acoustid.init()

        # Load plugins
        self.pluginmanager = PluginManager()
        if hasattr(sys, "frozen"):
            self.pluginmanager.load_plugindir(os.path.join(os.path.dirname(sys.argv[0]), "plugins"))
        else:
            self.pluginmanager.load_plugindir(os.path.join(os.path.dirname(__file__), "plugins"))

        if not os.path.exists(USER_PLUGIN_DIR):
            os.makedirs(USER_PLUGIN_DIR)
        self.pluginmanager.load_plugindir(USER_PLUGIN_DIR)

        self.acoustidmanager = AcoustIDManager()
        self.browser_integration = BrowserIntegration()

        self.files = {}
        self.clusters = ClusterList()
        self.albums = {}
        self.release_groups = {}
        self.mbid_redirects = {}
        self.unmatched_files = UnmatchedFiles()
        self.nats = None
        self.window = MainWindow()

        def remove_va_file_naming_format(merge=True):
            if merge:
                config.setting["file_naming_format"] = \
                    "$if($eq(%compilation%,1),\n$noop(Various Artist albums)\n"+\
                    "%s,\n$noop(Single Artist Albums)\n%s)" %\
                    (config.setting["va_file_naming_format"].toString(),
                     config.setting["file_naming_format"])
            config.setting.remove("va_file_naming_format")
            config.setting.remove("use_va_format")

        if "va_file_naming_format" in config.setting\
                and "use_va_format" in config.setting:
            if config.setting["use_va_format"].toBool():
                remove_va_file_naming_format()
                self.window.show_va_removal_notice()
            elif config.setting["va_file_naming_format"].toString() !=\
                r"$if2(%albumartist%,%artist%)/%album%/$if($gt(%totaldiscs%,1),%discnumber%-,)$num(%tracknumber%,2) %artist% - %title%":
                    if self.window.confirm_va_removal():
                        remove_va_file_naming_format(merge=False)
                    else:
                        remove_va_file_naming_format()
            else:
                # default format, disabled
                remove_va_file_naming_format(merge=False)
Example #48
    def __init__(self, args, localedir, autoupdate, debug=False):
        QtGui.QApplication.__init__(self, args)
        self.__class__.__instance = self

        self._args = args
        self._autoupdate = autoupdate
        self._debug = False

        # FIXME: Figure out what's wrong with QThreadPool.globalInstance().
        # It's a valid reference, but its start() method doesn't work.
        self.thread_pool = QtCore.QThreadPool(self)

        # Use a separate thread pool for file saving, with a thread count of 1,
        # to avoid race conditions in File._save_and_rename.
        self.save_thread_pool = QtCore.QThreadPool(self)
        self.save_thread_pool.setMaxThreadCount(1)

        if not sys.platform == "win32":
            # Set up signal handling
            # It's not possible to call all available functions from signal
            # handlers, therefore we need to set up a QSocketNotifier to listen
            # on a socket. Sending data through a socket can be done in a
            # signal handler, so we use the socket to notify the application of
            # the signal.
            # This code is adapted from
            # https://qt-project.org/doc/qt-4.8/unix-signals.html

            # To not make the socket module a requirement for the Windows
            # installer, import it here and not globally
            import socket
            self.signalfd = socket.socketpair(socket.AF_UNIX, socket.SOCK_STREAM, 0)

            self.signalnotifier = QtCore.QSocketNotifier(self.signalfd[1].fileno(),
                                                         QtCore.QSocketNotifier.Read, self)
            self.signalnotifier.activated.connect(self.sighandler)

            signal.signal(signal.SIGHUP, self.signal)
            signal.signal(signal.SIGINT, self.signal)
            signal.signal(signal.SIGTERM, self.signal)

        # Setup logging
        self.debug(debug or "PICARD_DEBUG" in os.environ)
        log.debug("Starting Picard %s from %r", picard.__version__, os.path.abspath(__file__))
        log.debug("Platform: %s %s %s", platform.platform(),
                  platform.python_implementation(), platform.python_version())
        if config.storage_type == config.REGISTRY_PATH:
            log.debug("Configuration registry path: %s", config.storage)
        else:
            log.debug("Configuration file path: %s", config.storage)

        # TODO remove this before the final release
        if sys.platform == "win32":
            olduserdir = "~\\Local Settings\\Application Data\\MusicBrainz Picard"
        else:
            olduserdir = "~/.picard"
        olduserdir = os.path.expanduser(olduserdir)
        if os.path.isdir(olduserdir):
            log.info("Moving %s to %s", olduserdir, USER_DIR)
            try:
                shutil.move(olduserdir, USER_DIR)
            except:
                pass
        log.debug("User directory: %s", os.path.abspath(USER_DIR))

        # for compatibility with pre-1.3 plugins
        QtCore.QObject.tagger = self
        QtCore.QObject.config = config
        QtCore.QObject.log = log

        check_io_encoding()

        # Must be before config upgrade because upgrade dialogs need to be
        # translated
        setup_gettext(localedir, config.setting["ui_language"], log.debug)

        upgrade_config()

        self.xmlws = XmlWebService()

        load_user_collections()

        # Initialize fingerprinting
        self._acoustid = acoustid.AcoustIDClient()
        self._acoustid.init()

        # Load plugins
        self.pluginmanager = PluginManager()
        if hasattr(sys, "frozen"):
            self.pluginmanager.load_plugindir(os.path.join(os.path.dirname(sys.argv[0]), "plugins"))
        else:
            mydir = os.path.dirname(os.path.abspath(__file__))
            self.pluginmanager.load_plugindir(os.path.join(mydir, "plugins"))
            self.pluginmanager.load_plugindir(os.path.join(mydir, os.pardir, "contrib", "plugins"))

        if not os.path.exists(USER_PLUGIN_DIR):
            os.makedirs(USER_PLUGIN_DIR)
        self.pluginmanager.load_plugindir(USER_PLUGIN_DIR)

        self.acoustidmanager = AcoustIDManager()
        self.browser_integration = BrowserIntegration()

        self.files = {}
        self.clusters = ClusterList()
        self.albums = {}
        self.release_groups = {}
        self.mbid_redirects = {}
        self.unmatched_files = UnmatchedFiles()
        self.nats = None
        self.window = MainWindow()
        self.exit_cleanup = []
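
The socket-pair setup above forwards POSIX signals into the Qt event loop, but the self.signal and self.sighandler callbacks it connects are not part of this listing. A hedged sketch of those two methods on the application object, following the Qt unix-signals recipe linked in the comment (the bodies and the written byte are assumptions, not necessarily Picard's implementation):

    def signal(self, signum, frame):
        # Runs inside the OS signal handler: only write a byte to the
        # socket pair; Qt objects must not be touched here.
        self.signalfd[0].send(b"a")

    def sighandler(self):
        # Runs in the Qt main loop once the QSocketNotifier fires:
        # drain the socket, then shut the application down cleanly.
        self.signalnotifier.setEnabled(False)
        self.signalfd[1].recv(1)
        self.quit()
        self.signalnotifier.setEnabled(True)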
Example No. 49
0
def upgrade_to_v1_3_0_dev_1():
    if "windows_compatible_filenames" in _s:
        _s["windows_compatibility"] = _s["windows_compatible_filenames"]
        _s.remove("windows_compatible_filenames")
        log.info(_('Config upgrade: option "windows_compatible_filenames" '
                   'was renamed "windows_compatibility" (PICARD-110).'))
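
A rename hook like upgrade_to_v1_3_0_dev_1 only takes effect when something runs it against the stored settings in version order, as upgrade_config() does in example no. 48. A hedged sketch of such a driver loop (the registry, the version tuples and the run_upgrades name are illustrative assumptions, not Picard's actual implementation):

# Assumed registry mapping a target config version to its upgrade hook.
UPGRADE_HOOKS = [
    ((1, 3, 0, 'dev', 1), upgrade_to_v1_3_0_dev_1),
]

def run_upgrades(stored_version):
    # Run every hook whose target version is newer than the stored config.
    for version, hook in UPGRADE_HOOKS:
        if stored_version < version:
            hook()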
Example No. 50
0
    def __init__(self, args, localedir, autoupdate, debug=False):
        QtGui.QApplication.__init__(self, args)
        self.__class__.__instance = self

        self._args = args
        self._autoupdate = autoupdate

        # Initialize threading and allocate threads
        self.thread_pool = thread.ThreadPool(self)

        self.load_queue = queue.Queue()
        self.save_queue = queue.Queue()
        self.analyze_queue = queue.Queue()
        self.other_queue = queue.Queue()

        threads = self.thread_pool.threads
        for i in range(4):
            threads.append(thread.Thread(self.thread_pool, self.load_queue))
        threads.append(thread.Thread(self.thread_pool, self.save_queue))
        threads.append(thread.Thread(self.thread_pool, self.other_queue))
        threads.append(thread.Thread(self.thread_pool, self.other_queue))
        threads.append(thread.Thread(self.thread_pool, self.analyze_queue))

        self.thread_pool.start()
        self.stopping = False

        # Setup logging
        if debug or "PICARD_DEBUG" in os.environ:
            log.log_levels = log.log_levels|log.LOG_DEBUG
        log.debug("Starting Picard %s from %r", picard.__version__, os.path.abspath(__file__))

        # TODO remove this before the final release
        if sys.platform == "win32":
            olduserdir = "~\\Local Settings\\Application Data\\MusicBrainz Picard"
        else:
            olduserdir = "~/.picard"
        olduserdir = os.path.expanduser(olduserdir)
        if os.path.isdir(olduserdir):
            log.info("Moving %s to %s", olduserdir, USER_DIR)
            try:
                shutil.move(olduserdir, USER_DIR)
            except:
                pass

        # for compatibility with pre-1.3 plugins
        QtCore.QObject.tagger = self
        QtCore.QObject.config = config
        QtCore.QObject.log = log

        check_io_encoding()

        self._upgrade_config()

        setup_gettext(localedir, config.setting["ui_language"], log.debug)

        self.xmlws = XmlWebService()

        load_user_collections()

        # Initialize fingerprinting
        self._acoustid = acoustid.AcoustIDClient()
        self._acoustid.init()

        # Load plugins
        self.pluginmanager = PluginManager()
        if hasattr(sys, "frozen"):
            self.pluginmanager.load_plugindir(os.path.join(os.path.dirname(sys.argv[0]), "plugins"))
        else:
            self.pluginmanager.load_plugindir(os.path.join(os.path.dirname(__file__), "plugins"))

        if not os.path.exists(USER_PLUGIN_DIR):
            os.makedirs(USER_PLUGIN_DIR)
        self.pluginmanager.load_plugindir(USER_PLUGIN_DIR)

        self.acoustidmanager = AcoustIDManager()
        self.browser_integration = BrowserIntegration()

        self.files = {}
        self.clusters = ClusterList()
        self.albums = {}
        self.release_groups = {}
        self.mbid_redirects = {}
        self.unmatched_files = UnmatchedFiles()
        self.nats = None
        self.window = MainWindow()
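
This older constructor dedicates fixed worker threads to specific queues: four for loading, one for saving, two general-purpose and one for analysis. A minimal sketch of that allocation pattern using only the standard library (thread.ThreadPool and thread.Thread are Picard-internal classes not shown in the listing; the worker function here is an assumption):

import queue
import threading

def worker(q):
    # Each worker is bound to one queue and runs the callables put on it.
    while True:
        func = q.get()
        if func is None:
            break
        func()
        q.task_done()

load_queue, save_queue, analyze_queue, other_queue = (queue.Queue() for _ in range(4))

threads = []
for q, count in ((load_queue, 4), (save_queue, 1), (other_queue, 2), (analyze_queue, 1)):
    for _ in range(count):
        t = threading.Thread(target=worker, args=(q,))
        t.daemon = True
        t.start()
        threads.append(t)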
Example No. 51
0
    def __init__(self, args, localedir, autoupdate, debug=False):
        QtGui.QApplication.__init__(self, args)
        self.__class__.__instance = self

        self._args = args
        self._autoupdate = autoupdate

        # FIXME: Figure out what's wrong with QThreadPool.globalInstance().
        # It's a valid reference, but its start() method doesn't work.
        self.thread_pool = QtCore.QThreadPool(self)

        # Use a separate thread pool for file saving, with a thread count of 1,
        # to avoid race conditions in File._save_and_rename.
        self.save_thread_pool = QtCore.QThreadPool(self)
        self.save_thread_pool.setMaxThreadCount(1)

        # Setup logging
        if debug or "PICARD_DEBUG" in os.environ:
            log.log_levels = log.log_levels|log.LOG_DEBUG
        log.debug("Starting Picard %s from %r", picard.__version__, os.path.abspath(__file__))

        # TODO remove this before the final release
        if sys.platform == "win32":
            olduserdir = "~\\Local Settings\\Application Data\\MusicBrainz Picard"
        else:
            olduserdir = "~/.picard"
        olduserdir = os.path.expanduser(olduserdir)
        if os.path.isdir(olduserdir):
            log.info("Moving %s to %s", olduserdir, USER_DIR)
            try:
                shutil.move(olduserdir, USER_DIR)
            except:
                pass

        # for compatibility with pre-1.3 plugins
        QtCore.QObject.tagger = self
        QtCore.QObject.config = config
        QtCore.QObject.log = log

        check_io_encoding()

        self._upgrade_config()

        setup_gettext(localedir, config.setting["ui_language"], log.debug)

        self.xmlws = XmlWebService()

        load_user_collections()

        # Initialize fingerprinting
        self._acoustid = acoustid.AcoustIDClient()
        self._acoustid.init()

        # Load plugins
        self.pluginmanager = PluginManager()
        if hasattr(sys, "frozen"):
            self.pluginmanager.load_plugindir(os.path.join(os.path.dirname(sys.argv[0]), "plugins"))
        else:
            self.pluginmanager.load_plugindir(os.path.join(os.path.dirname(__file__), "plugins"))

        if not os.path.exists(USER_PLUGIN_DIR):
            os.makedirs(USER_PLUGIN_DIR)
        self.pluginmanager.load_plugindir(USER_PLUGIN_DIR)

        self.acoustidmanager = AcoustIDManager()
        self.browser_integration = BrowserIntegration()

        self.files = {}
        self.clusters = ClusterList()
        self.albums = {}
        self.release_groups = {}
        self.mbid_redirects = {}
        self.unmatched_files = UnmatchedFiles()
        self.nats = None
        self.window = MainWindow()
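
Capping save_thread_pool at a single thread means the File._save_and_rename operations queued on it always run one after another, never in parallel. A hedged sketch of that serialisation with QThreadPool and QRunnable (SaveTask is illustrative, not Picard's actual runnable class):

from PyQt4 import QtCore

class SaveTask(QtCore.QRunnable):
    """Illustrative runnable wrapping work that must not run concurrently."""

    def __init__(self, func):
        QtCore.QRunnable.__init__(self)
        self.func = func

    def run(self):
        self.func()

save_thread_pool = QtCore.QThreadPool()
save_thread_pool.setMaxThreadCount(1)  # queued tasks execute strictly one at a time

save_thread_pool.start(SaveTask(lambda: None))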
Example No. 52
0
 def parse_wikidata_response(self,item,item_id, response, reply, error):
     genre_entries=[]
     genre_list=[]
     if error:
         log.error('WIKIDATA: error getting data from wikidata.org')
     else:
         if 'RDF' in response.children:
             node = response.RDF[0]
             for node1 in node.Description:
                 if 'about' in node1.attribs:
                     if node1.attribs.get('about') == 'http://www.wikidata.org/entity/%s' % item:
                         for key,val in node1.children.items():
                             if key=='P136':
                                 for i in val:
                                     if 'resource' in i.attribs:
                                         tmp=i.attribs.get('resource')
                                         if 'entity' ==tmp.split('/')[3] and len(tmp.split('/'))== 5:
                                             genre_id=tmp.split('/')[4]
                                             log.info('WIKIDATA: Found the wikidata id for the genre: %s' % genre_id)
                                             genre_entries.append(tmp)
                     else:
                         for tmp in genre_entries:
                             if tmp == node1.attribs.get('about'):
                                 list1=node1.children.get('name')
                                 for node2 in list1:
                                     if node2.attribs.get('lang')=='en':
                                         genre=node2.text
                                         genre_list.append(genre)
                                         log.debug('Our genre is: %s' % genre)
                                         
     self.lock.acquire()
     if len(genre_list) > 0:
         log.info('WIKIDATA: final list of wikidata ids found: %s' % genre_entries)
         log.info('WIKIDATA: final list of genre: %s' % genre_list)
         
         log.debug('WIKIDATA: total items to update: %s ' % len(self.requests[item_id]))
         for metadata in self.requests[item_id]:
             new_genre=[]
             new_genre.append(metadata["genre"])
             for genre_name in genre_list:
                 if genre_name not in new_genre:
                     new_genre.append(genre_name)
                     log.debug('WIKIDATA: appending genre %s' % genre_name)
             metadata["genre"] = new_genre
             self.cache[item_id]=genre_list
             log.debug('WIKIDATA: setting genre : %s ' % genre_list)
             
     else:
         log.info('WIKIDATA: Genre not found in wikidata')
     
     log.info('WIKIDATA: Seeing if we can finalize tags %s  ' % len(self.taggers[item_id]))
     
     for tagger in self.taggers[item_id]:
         tagger._requests -= 1
         if tagger._requests==0:
             tagger._finalize_loading(None)
         log.info('WIKIDATA:  TOTAL REMAINING REQUESTS %s' % tagger._requests)
     self.lock.release()
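
parse_wikidata_response relies on per-item bookkeeping that is set up elsewhere in the plugin: self.requests[item_id] holds the Metadata objects waiting for genres, self.cache[item_id] keeps already-resolved genres, self.taggers[item_id] tracks the album loaders whose pending request count must reach zero before _finalize_loading fires, and self.lock guards the updates. A hedged sketch of that state (the container class and its initialisation are assumptions, not copied from the plugin):

from collections import defaultdict
from threading import Lock

class WikidataGenreState(object):
    """Illustrative container for the state the callback above expects."""

    def __init__(self):
        self.lock = Lock()                 # serialises metadata updates across replies
        self.cache = {}                    # item_id -> resolved genre list
        self.requests = defaultdict(list)  # item_id -> Metadata objects awaiting genres
        self.taggers = defaultdict(list)   # item_id -> album objects with open requests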