def getFilesStreamsUnicode(self, mtimes, oldmtimes, oldstreams, newsharedfiles, yieldcall=None):
    """Build a {virtualdir: byte-stream} dict for unicode share paths.

    Reuses the cached stream from `oldstreams` when a directory's mtime
    is unchanged and the directory still exists on disk; otherwise the
    stream is rebuilt from `newsharedfiles`.  `yieldcall`, if given, is
    invoked after each rebuilt directory to keep the UI responsive.

    Fixes: removed unused locals (`shared`, `virtual_dirs`,
    `str_directory`) and replaced the stdout debug `print` with a
    logger call, consistent with the rebuild-aware variant.
    """
    streams = {}
    for directory in mtimes.keys():
        virtualdir = self.real2virtual(directory)
        # Force unicode for reading from disk.
        u_directory = u"%s" % directory
        if self.hiddenCheck(directory):
            continue
        if directory in oldmtimes and directory not in oldstreams:
            # Partial information, happened with unicode paths that
            # N+ couldn't handle properly -- force a rescan.
            del oldmtimes[directory]
        if directory in oldmtimes:
            if mtimes[directory] == oldmtimes[directory]:
                if os.path.exists(u_directory):
                    # No change: reuse the cached stream.
                    try:
                        streams[virtualdir] = oldstreams[virtualdir]
                        continue
                    except KeyError:
                        log.addwarning("Inconsistent cache for '%s', rebuilding '%s'" % (virtualdir, directory))
                else:
                    # Directory vanished from disk; log (not print) the drop.
                    log.adddebug("2U. Dropping missing directory %s %s" % (type(u_directory), repr(u_directory)))
                    continue
        streams[virtualdir] = self.getDirStream(newsharedfiles[virtualdir])
        if yieldcall is not None:
            yieldcall()
    return streams
def getFilesList(self, mtimes, oldmtimes, oldlist, yieldcall = None, progress=None, rebuild=False):
    """Get a list of files with their filelength and (if mp3) bitrate
    and track length in seconds.

    NOTE(review): this chunk appears truncated -- the final
    'return list' is not visible here.
    """
    # Maps virtual directory name -> list of per-file metadata entries.
    list = {}
    count = 0
    for directory in mtimes:
        directory = os.path.expanduser(directory)
        virtualdir = self.real2virtual(directory)
        count += 1
        # Report scan progress to the UI, when a progress bar was supplied.
        if progress:
            percent = float(count)/len(mtimes)
            if percent <= 1.0:
                gobject.idle_add(progress.set_fraction, percent)
        if self.hiddenCheck(directory):
            continue
        # Reuse the cached entry when the directory's mtime is unchanged.
        if not rebuild and directory in oldmtimes:
            if mtimes[directory] == oldmtimes[directory]:
                if os.path.exists(directory):
                    try:
                        list[virtualdir] = oldlist[virtualdir]
                        continue
                    except KeyError:
                        # mtime cache and list cache disagree; rescan this dir.
                        log.addwarning("Inconsistent cache for '%s', rebuilding '%s'" % (virtualdir, directory))
                else:
                    print "Dropping removed directory %s" % directory
                    continue
        list[virtualdir] = []
        try:
            contents = os.listdir(directory)
        except OSError, errtuple:
            print str(errtuple)
            self.logMessage(str(errtuple))
            continue
        contents.sort()
        for filename in contents:
            if self.hiddenCheck(filename):
                continue
            path = os.path.join(directory, filename)
            try:
                isfile = os.path.isfile(path)
            except OSError, errtuple:
                message = _("Scanning Error: %(error)s Path: %(path)s") % {'error':errtuple, 'path':path}
                print str(message)
                self.logMessage(message)
                displayTraceback(sys.exc_info()[2])
                continue
            else:
                if isfile:
                    # It's a file, check if it is mp3 or ogg
                    data = self.getFileInfo(filename, path)
                    if data is not None:
                        list[virtualdir].append(data)
            # Hand control back to the caller between files.
            if yieldcall is not None:
                yieldcall()
def writeConfiguration(self):
    # Serialize the in-memory config sections back into the ConfigParser
    # and make sure the target directory exists.  The shelve-backed
    # "external" sections are kept OUT of the ini file.
    # NOTE(review): this chunk looks truncated -- the actual file write
    # and the release of config_lock are not visible here.
    self.config_lock.acquire()
    external_sections = [
        "sharedfiles", "sharedfilesstreams", "wordindex", "fileindex",
        "sharedmtimes", "bsharedfiles", "bsharedfilesstreams",
        "bwordindex", "bfileindex", "bsharedmtimes", "downloads"
    ]
    for i in self.sections.keys():
        if not self.parser.has_section(i):
            self.parser.add_section(i)
        for j in self.sections[i].keys():
            if j not in external_sections:
                self.parser.set(i, j, self.sections[i][j])
            else:
                # Externally-stored data must not leak into the ini file.
                self.parser.remove_option(i, j)
    path, fn = os.path.split(self.filename)
    try:
        if not os.path.isdir(path):
            os.makedirs(path)
    except OSError, msg:
        log.addwarning(_("Can't create directory '%(path)s', reported error: %(error)s") % {'path': path, 'error': msg})
def getFilesStreams(self, mtimes, oldmtimes, oldstreams, newsharedfiles, rebuild=False, yieldcall=None):
    """Return {virtualdir: stream}, reusing cached streams for directories
    whose mtimes are unchanged (unless a full rebuild was requested)."""
    streams = {}
    shared = self.config.sections["transfers"]["shared"]
    for real_dir in mtimes.keys():
        vdir = self.real2virtual(real_dir)
        if self.hiddenCheck({'dir': real_dir}):
            continue
        cache_usable = (not rebuild
                        and real_dir in oldmtimes
                        and mtimes[real_dir] == oldmtimes[real_dir])
        if cache_usable:
            if not os.path.exists(real_dir):
                # Shared directory no longer on disk; skip it entirely.
                log.adddebug(_("Dropping missing directory %(dir)s") % {'dir': real_dir})
                continue
            try:
                # Unchanged: reuse the previously built stream.
                streams[vdir] = oldstreams[vdir]
                continue
            except KeyError:
                log.addwarning(_("Inconsistent cache for '%(vdir)s', rebuilding '%(dir)s'") % {
                    'vdir': vdir,
                    'dir': real_dir
                })
        streams[vdir] = self.getDirStream(newsharedfiles[vdir])
        if yieldcall is not None:
            yieldcall()
    return streams
def getFilesStreams(self, mtimes, oldmtimes, oldstreams, newsharedfiles, yieldcall=None):
    """Return {virtualdir: stream}, reusing cached streams for
    directories whose mtimes are unchanged.

    Fixes: replaced the stdout debug `print` with a logger call
    (consistent with the rebuild-aware variant) and removed the unused
    `shared` local.
    """
    streams = {}
    for directory in mtimes.keys():
        virtualdir = self.real2virtual(directory)
        if self.hiddenCheck(directory):
            continue
        if directory in oldmtimes:
            if mtimes[directory] == oldmtimes[directory]:
                if os.path.exists(directory):
                    # No change: reuse the cached stream.
                    try:
                        streams[virtualdir] = oldstreams[virtualdir]
                        continue
                    except KeyError:
                        log.addwarning("Inconsistent cache for '%s', rebuilding '%s'" % (virtualdir, directory))
                else:
                    # Directory vanished from disk; log (not print) the drop.
                    log.adddebug("2S. Dropping missing directory %s" % directory)
                    continue
        streams[virtualdir] = self.getDirStream(newsharedfiles[virtualdir])
        if yieldcall is not None:
            yieldcall()
    return streams
def getByteStream(self, fileinfo):
    """Pack one file's info into the binary share-stream wire format.

    fileinfo is [filename, size, (bitrate, vbr) or None, length_seconds].
    Files without usable metadata are encoded with an empty extension
    and zero attributes.

    Fix: the '%' interpolation was applied INSIDE _( ), so the
    already-formatted text was looked up in the translation catalog
    (which can never match); interpolation now happens after translation.
    """
    message = slskmessages.SlskMessage()
    stream = chr(1) + message.packObject(fileinfo[0]) + message.packObject(NetworkLongLongType(fileinfo[1]))
    if fileinfo[2] is not None:
        try:
            msgbytes = ''
            msgbytes += message.packObject('mp3') + message.packObject(3)
            msgbytes += (
                message.packObject(0) +
                message.packObject(NetworkIntType(fileinfo[2][0])) +
                message.packObject(1) +
                message.packObject(NetworkIntType(fileinfo[3])) +
                message.packObject(2) +
                message.packObject(NetworkIntType(fileinfo[2][1]))
            )
            stream += msgbytes
        except Exception:
            # Corrupt/out-of-range metadata: fall back to the no-meta encoding.
            log.addwarning(_("Found meta data that couldn't be encoded, possible corrupt file: '%(file)s' has a bitrate of %(bitrate)s kbs, a length of %(length)s seconds and a VBR of %(vbr)s") % {
                'file': fileinfo[0],
                'bitrate': fileinfo[2][0],
                'length': fileinfo[3],
                'vbr': fileinfo[2][1]
            })
            stream += message.packObject('') + message.packObject(0)
    else:
        stream += message.packObject('') + message.packObject(0)
    return stream
def getByteStream(self, fileinfo):
    """Pack one file's info into the binary share-stream wire format.

    fileinfo is [filename, size, (bitrate, vbr) or None, length_seconds].

    Fix: the '%' interpolation was applied INSIDE _( ), translating the
    already-formatted text; it is now applied to the translated format
    string instead.
    """
    message = slskmessages.SlskMessage()
    stream = chr(1) + message.packObject(fileinfo[0]) + message.packObject(
        NetworkLongLongType(fileinfo[1]))
    if fileinfo[2] is not None:
        try:
            msgbytes = ''
            msgbytes += message.packObject('mp3') + message.packObject(3)
            msgbytes += (
                message.packObject(0) +
                message.packObject(NetworkIntType(fileinfo[2][0])) +
                message.packObject(1) +
                message.packObject(NetworkIntType(fileinfo[3])) +
                message.packObject(2) +
                message.packObject(NetworkIntType(fileinfo[2][1])))
            stream += msgbytes
        except Exception:
            # Corrupt/out-of-range metadata: fall back to the no-meta encoding.
            log.addwarning(
                _(
                    "Found meta data that couldn't be encoded, possible corrupt file: '%(file)s' has a bitrate of %(bitrate)s kbs, a length of %(length)s seconds and a VBR of %(vbr)s"
                ) % {
                    'file': fileinfo[0],
                    'bitrate': fileinfo[2][0],
                    'length': fileinfo[3],
                    'vbr': fileinfo[2][1]
                })
            stream += message.packObject('') + message.packObject(0)
    else:
        stream += message.packObject('') + message.packObject(0)
    return stream
def RescanShares(self, msg, rebuild=False):
    """Rescan the normal shares and hand the result to the frontend.

    Fix: the failure message is now wrapped in _( ) for translation,
    consistent with the other RescanShares variant in this file.
    """
    try:
        files, streams, wordindex, fileindex, mtimes, lowercase = self.rescandirs(
            msg.shared,
            self.config.sections["transfers"]["sharedmtimes"],
            self.config.sections["transfers"]["sharedfiles"],
            self.config.sections["transfers"]["sharedfilesstreams"],
            msg.yieldfunction,
            self.np.frame.SharesProgress,
            name=_("Shares"),
            rebuild=rebuild)
        # Give the UI a moment to settle before delivering the result.
        time.sleep(0.5)
        self.np.frame.RescanFinished([files, streams, wordindex, fileindex, mtimes, lowercase], "normal")
    except Exception as ex:
        log.addwarning(_("Failed to rebuild share, serious error occurred. If this problem persists delete ~/.nicotine/*.db and try again. If that doesn't help please file a bug report with the stack trace included (see terminal output after this message). Technical details: %s") % ex)
        # Re-raise so the stack trace reaches the terminal.
        raise
def _convert_to_virtual(x): if isinstance(x, tuple): return x virtual = x.replace('/', '_').replace('\\', '_').strip('_') log.addwarning( "Renaming shared folder '%s' to '%s'. A rescan of your share is required." % (x, virtual)) return (virtual, x)
def detect(path):
    # Probe a file with mutagen, returning None when it can't be read or
    # when mutagen itself blows up on the file.
    # NOTE(review): this chunk looks truncated -- the success-path
    # return (presumably `audio` or data derived from it) is not visible.
    try:
        audio = mutagen.File(path)
    except IOError:
        return None
    except Exception, e:
        # mutagen can raise arbitrary exceptions on malformed files.
        log.addwarning("Mutagen crashed on '%s': %s" % (path, e))
        return None
def run(self):
    """ Actual networking loop is here.

    NOTE(review): this chunk appears truncated -- the remainder of the
    while-loop body (processing of the selected input/output sockets)
    is not visible here.
    """
    # @var p Peer / Listen Port
    p = self._p
    # @var s Server Port
    self._server_socket = server_socket = None
    conns = self._conns
    connsinprogress = self._connsinprogress
    queue = self._queue
    while not self._want_abort:
        # Apply queued requests (connect/close/send, ...) first.
        if not queue.empty():
            conns, connsinprogress, server_socket = self.process_queue(queue, conns, connsinprogress, server_socket)
            self._server_socket = server_socket
        # Drop any stale server connection that isn't the current one.
        for i in conns.keys()[:]:
            if conns[i].__class__ is ServerConnection and i is not server_socket:
                del conns[i]
        # Sockets with pending output: buffered bytes, or an upload in flight.
        outsocks = [i for i in conns.keys() if len(conns[i].obuf) > 0 or (i is not server_socket and conns[i].fileupl is not None and conns[i].fileupl.offset is not None)]
        outsock = []
        self._limits = {}
        self._dlimits = {}
        for i in outsocks:
            if self._isUpload(conns[i]):
                # Ask the rate limiter; only schedule the socket for
                # writing when some bandwidth is available.
                limit = self._uploadlimit[0](conns, conns[i])
                if limit is None or limit > 0:
                    self._limits[i] = limit
                    outsock.append(i)
            else:
                outsock.append(i)
        try:
            # Select Networking Input and Output sockets
            if sys.platform == "win32":
                input, output, exc = multiselect(conns.keys() + connsinprogress.keys() + [p], connsinprogress.keys() + outsock, [], 0.5)
            else:
                input, output, exc = select.select(conns.keys() + connsinprogress.keys() + [p], connsinprogress.keys() + outsock, [], 0.5)
            # Report the open-socket count to the UI.
            numsockets = 0
            if p is not None:
                numsockets += 1
            numsockets += len(conns) + len(connsinprogress)
            self._ui_callback([InternalData(numsockets)])
        except select.error, error:
            if len(error.args) == 2 and error.args[0] == EINTR:
                # Error received, but we don't care :)
                continue
            # Error received; terminate networking loop
            print time.strftime("%H:%M:%S"), "select.error", error
            self._want_abort = 1
            # NOTE(review): the '%' interpolation happens INSIDE _( ) here,
            # so the already-formatted text is looked up in the catalog;
            # the usual fix is _("...") % str(error).
            message = _("Major Socket Error: Networking terminated! \n%s" % str(error))
            log.addwarning(message)
        except ValueError, error:
            # Possibly opened too many sockets
            print time.strftime("%H:%M:%S"), "select ValueError:", error
            if not self.killOverflowConnection(connsinprogress):
                self.killOverflowConnection(conns)
            continue
def writeDownloadQueue(self):
    # Persist the transfer list to <config>.transfers.pickle, writing to
    # a .tmp file first.
    # NOTE(review): this chunk looks truncated -- the pickle dump, the
    # tmp->real rename and the release of config_lock are not visible.
    self.config_lock.acquire()
    realfile = self.filename + '.transfers.pickle'
    tmpfile = realfile + '.tmp'
    # NOTE(review): the leading space in ' .backup' looks suspicious --
    # confirm the backup filename is really meant to contain a space.
    backupfile = realfile + ' .backup'
    try:
        handle = open(tmpfile, 'w')
    except Exception, inst:
        log.addwarning(_("Something went wrong while opening your transfer list: %(error)s") % {'error': str(inst)})
def miniupnpcbinary(internallanport, externallanport):
    """Create the UPnP port mapping by invoking the external `upnpc` binary.

    `upnpc -r` maps identical internal/external ports, hence the warning
    when they differ.  Returns None when running the binary fails.

    Fix: `log.addWarning` (capital W) raised AttributeError -- the logger
    method used everywhere else in this file is `addwarning`.

    NOTE(review): this chunk looks truncated -- parsing of the upnpc
    output is not visible here.  Also note the ports-differ branch only
    warns and then proceeds anyway; confirm whether an early return was
    intended.
    """
    if internallanport != externallanport:
        log.addwarning(_('UPnPc binary cannot be used since the internal port (%s) is not identical to the external port (%s)') % (internallanport, externallanport))
    # '$' is substituted with the external port by executeCommand.
    command = 'upnpc -r $ tcp'
    try:
        output = executeCommand(command, replacement=str(externallanport), returnoutput=True)
    except RuntimeError as e:
        log.addwarning('Failed to use UPnPc binary: %s' % (str(e),))
        return
def fixportmapping(internallanport, externallanport=None):
    """Set up the port mapping with whichever UPnP backend is available.

    Falls back from the miniupnpc python module to the upnpc binary.
    When neither works, logs the collected errors and returns None.

    Fix: the format string used the placeholder %(errors)s while the
    mapping key was 'error', so the failure path itself raised KeyError
    instead of logging -- the keys now match.
    """
    if not upnppossible:
        log.addwarning(_('Both MiniUPnPc python module and MiniUPnPc binary failed - automatic portmapping is not possible. Errors: %(errors)s') % {'errors': "\n".join(miniupnpc_errors)})
        return
    # Default the external port to the internal one.
    if not externallanport:
        externallanport = internallanport
    if miniupnpc:
        return miniupnpcmodule(internallanport, externallanport)
    else:
        return miniupnpcbinary(internallanport, externallanport)
def disable_plugin(self, pluginname):
    """Unregister and disable an enabled plugin.

    Returns True on success; logs and returns False on any failure
    (including an unknown plugin name).
    """
    try:
        # pop() removes the registration and raises KeyError when the
        # plugin isn't enabled, matching the lookup-then-delete original.
        plugin = self.enabled_plugins.pop(pluginname)
        plugin.disable(self)
    except:
        traceback.print_exc()
        log.addwarning(_("Unable to fully disable plugin %s") % pluginname)
        # common.log_exception(logger)
        return False
    return True
def getFilesStreamsUnicode(self, mtimes, oldmtimes, oldstreams, newsharedfiles, rebuild=False, yieldcall=None):
    """Build a {virtualdir: byte-stream} dict for unicode share paths,
    reusing cached streams for directories whose mtimes are unchanged
    (unless a full rebuild was requested)."""
    streams = {}
    shared = self.config.sections["transfers"]["shared"]
    for real_dir in mtimes.keys():
        vdir = self.real2virtual(real_dir)
        # Force unicode for disk access.
        unicode_dir = u"%s" % real_dir
        str_directory = str(real_dir)
        if self.hiddenCheck({'dir': real_dir}):
            continue
        if real_dir in oldmtimes and real_dir not in oldstreams:
            # Partial information, happened with unicode paths that
            # N+ couldn't handle properly -- force a rescan.
            del oldmtimes[real_dir]
        cache_usable = (not rebuild
                        and real_dir in oldmtimes
                        and mtimes[real_dir] == oldmtimes[real_dir])
        if cache_usable:
            if os.path.exists(unicode_dir):
                try:
                    # No change: reuse the cached stream.
                    streams[vdir] = oldstreams[vdir]
                    continue
                except KeyError:
                    log.addwarning(
                        _("Inconsistent cache for '%(vdir)s', rebuilding '%(dir)s'") % {
                            'vdir': vdir,
                            'dir': real_dir
                        })
            else:
                log.adddebug(
                    _("Dropping missing directory %(dir)s") % {'dir': real_dir})
                continue
        streams[vdir] = self.getDirStream(newsharedfiles[vdir])
        if yieldcall is not None:
            yieldcall()
    return streams
def disable_plugin(self, pluginname):
    """Unregister and disable an enabled plugin.

    No-op when the plugin isn't enabled.  Returns True on success,
    False when disabling fails (the failure is logged).

    Fix: the log line was _("...{}".format(...)) -- formatting BEFORE
    translation, so the formatted text could never match the message
    catalog.  It now translates the format string and interpolates
    afterwards, matching enable_plugin's style.
    """
    if pluginname not in self.enabled_plugins:
        return
    try:
        plugin = self.enabled_plugins[pluginname]
        log.add(_("Disabled plugin %s") % plugin.PLUGIN.__name__)
        del self.enabled_plugins[pluginname]
        plugin.disable(self)
    except:
        print_exc()
        log.addwarning(_("Unable to fully disable plugin %s") % pluginname)
        # common.log_exception(logger)
        return False
    return True
def enable_plugin(self, pluginname):
    """Load a plugin by name and enable it.

    No-op when the plugin is already enabled.  Returns True on success,
    False when loading or enabling fails (the failure is logged).
    """
    if pluginname in self.enabled_plugins:
        return
    try:
        loaded = self.load_plugin(pluginname)
        if not loaded:
            raise Exception("Error loading plugin '%s'" % pluginname)
        loaded.enable(self)
        self.enabled_plugins[pluginname] = loaded
        log.add(_("Loaded plugin %s") % loaded.PLUGIN.__name__)
    except:
        traceback.print_exc()
        log.addwarning(_("Unable to enable plugin %s") % pluginname)
        # common.log_exception(logger)
        return False
    return True
def readConfig(self):
    # Load the configuration; migrates the pre-1.2.13 pickled transfer
    # list, which used to live next to the main config file.
    # NOTE(review): this chunk looks truncated -- the rest of the
    # parsing and the release of config_lock are not visible here.
    self.config_lock.acquire()
    self.sections['transfers']['downloads'] = []
    if exists(self.filename+'.transfers.pickle'):
        # <1.2.13 stored transfers inside the main config
        try:
            handle = open(self.filename+'.transfers.pickle')
        except IOError, inst:
            log.addwarning(_("Something went wrong while opening your transfer list: %(error)s") % {'error':str(inst)})
        else:
            try:
                # NOTE: unpickling a local file; would be unsafe on
                # untrusted input.
                self.sections['transfers']['downloads'] = cPickle.load(handle)
            except (IOError, EOFError, ValueError), inst:
                log.addwarning(_("Something went wrong while reading your transfer list: %(error)s") % {'error':str(inst)})
def enable_plugin(self, pluginname):
    """Load a plugin by name and enable it.

    No-op when the plugin is already enabled.  Returns True on success,
    False when loading or enabling fails (the failure is logged).
    """
    if pluginname in self.enabled_plugins:
        return
    try:
        loaded = self.load_plugin(pluginname)
        if not loaded:
            raise Exception("Error loading plugin '%s'" % pluginname)
        loaded.enable(self)
        self.enabled_plugins[pluginname] = loaded
        log.add(_("Enabled plugin %s") % loaded.PLUGIN.__name__)
    except:
        print_exc()
        log.addwarning(_("Unable to enable plugin %s") % pluginname)
        # common.log_exception(logger)
        return False
    return True
def readConfig(self):
    # Load the configuration; migrates the pre-1.2.13 pickled transfer
    # list, which used to live next to the main config file.
    # NOTE(review): this chunk looks truncated -- the rest of the
    # parsing and the release of config_lock are not visible here.
    self.config_lock.acquire()
    self.sections['transfers']['downloads'] = []
    if exists(self.filename+'.transfers.pickle'):
        # <1.2.13 stored transfers inside the main config
        try:
            handle = open(self.filename+'.transfers.pickle')
        except IOError, inst:
            log.addwarning(_("Something went wrong while opening your transfer list: %(error)s") % {'error': str(inst)})
        else:
            try:
                # NOTE: unpickling a local file; would be unsafe on
                # untrusted input.
                self.sections['transfers']['downloads'] = cPickle.load(handle)
            except (IOError, EOFError, ValueError), inst:
                log.addwarning(_("Something went wrong while reading your transfer list: %(error)s") % {'error': str(inst)})
def getFilesStreams(self, mtimes, oldmtimes, oldstreams, newsharedfiles, rebuild=False, yieldcall=None):
    """Return {virtualdir: stream} for the scanned directories.

    A directory whose mtime is unchanged since the previous scan keeps
    its cached stream; everything else is rebuilt from newsharedfiles.
    """
    streams = {}
    shared = self.config.sections["transfers"]["shared"]
    for dirname in mtimes.keys():
        vdir = self.real2virtual(dirname)
        if self.hiddenCheck({'dir': dirname}):
            continue
        unchanged = (not rebuild
                     and dirname in oldmtimes
                     and mtimes[dirname] == oldmtimes[dirname])
        if unchanged:
            if not os.path.exists(dirname):
                # Directory disappeared from disk; drop it from the result.
                log.adddebug(
                    _("Dropping missing directory %(dir)s") % {'dir': dirname})
                continue
            try:
                streams[vdir] = oldstreams[vdir]
                continue
            except KeyError:
                log.addwarning(
                    _("Inconsistent cache for '%(vdir)s', rebuilding '%(dir)s'") % {
                        'vdir': vdir,
                        'dir': dirname
                    })
        streams[vdir] = self.getDirStream(newsharedfiles[vdir])
        if yieldcall is not None:
            yieldcall()
    return streams
def RescanShares(self, msg, rebuild=False):
    """Rescan the normal shares and hand the result to the frontend.

    Any failure is logged with recovery advice and then re-raised so
    the stack trace still reaches the terminal.
    """
    try:
        scan_result = self.rescandirs(
            msg.shared,
            self.config.sections["transfers"]["sharedmtimes"],
            self.config.sections["transfers"]["sharedfiles"],
            self.config.sections["transfers"]["sharedfilesstreams"],
            msg.yieldfunction,
            self.np.frame.SharesProgress,
            name=_("Shares"),
            rebuild=rebuild)
        files, streams, wordindex, fileindex, mtimes = scan_result
        # Give the UI a moment to settle before delivering the result.
        time.sleep(0.5)
        self.np.frame.RescanFinished(
            [files, streams, wordindex, fileindex, mtimes], "normal")
    except Exception as ex:
        log.addwarning(
            _("Failed to rebuild share, serious error occurred. If this problem persists delete ~/.nicotine/*.db and try again. If that doesn't help please file a bug report with the stack trace included (see terminal output after this message). Technical details: %s"
              ) % ex)
        raise
def getFilesStreams(self, mtimes, oldmtimes, oldstreams, newsharedfiles, yieldcall=None): streams = {} shared = self.config.sections["transfers"]["shared"] for directory in mtimes.keys(): virtualdir = self.real2virtual(directory) if self.hiddenCheck(directory): continue if directory in oldmtimes: if mtimes[directory] == oldmtimes[directory]: if os.path.exists(directory): # No change try: streams[virtualdir] = oldstreams[virtualdir] continue except KeyError: log.addwarning( "Inconsistent cache for '%s', rebuilding '%s'" % (virtualdir, directory)) else: print "2S. Dropping missing directory %s" % directory continue streams[virtualdir] = self.getDirStream(newsharedfiles[virtualdir]) if yieldcall is not None: yieldcall() return streams
def AddPortMapping(self, frame, np):
    """Wrapper that creates the UPnP Port Mapping via either:

    - the MiniUPnPc binary (upnpc), or
    - the python binding to MiniUPnPc (miniupnpc),

    as selected by self.mode.  Both support IGDv1 and IGDv2.

    Needs `frame` (NicotineFrame) to publish the chosen external WAN
    port and trigger the connect, and `np` to read the internal LAN
    port from the protothread socket.

    From the UPnP IGD reference
    (http://upnp.org/specs/gw/UPnP-gw-WANIPConnection-v2-Service.pdf):
    AddPortMapping creates or overwrites a mapping; an error is
    returned when the (ExternalPort, Protocol) pair is already mapped
    to another internal client.  With IGDv1 a NewLeaseDuration of 0
    means a static mapping; with IGDv2 a duration of 0 is replaced by
    the default of 604800 s.  Since nicotine+ never refreshes the
    mapping while running, IGDv2 routers will expire it after 7 days
    and the client won't be able to send/receive files anymore.
    """
    log.add(_('Creating Port Mapping rule via UPnP...'))
    # Hack to find the "primary" local LAN IP: connect a UDP socket
    # toward a broadcast address (no packet is actually sent) and read
    # back the source address the OS picked.
    # See https://stackoverflow.com/questions/166506/finding-local-ip-addresses-using-pythons-stdlib/28950776#28950776
    # NOTE(review): the common form of this trick connects to port 1;
    # port 0 is rejected on some platforms -- confirm.
    # Create a UDP socket
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    # Send a broadcast packet on a local address (doesn't need to be reachable)
    s.connect(('10.255.255.255', 0))
    # This returns the "primary" IP on the local box, even if that IP is a NAT/private/internal IP.
    self.internalipaddress = s.getsockname()[0]
    # Close the socket
    s.close()
    # Store the Local LAN port
    self.internallanport = np.protothread._p.getsockname()[1]
    # The function depends on what method of configuring port mapping is
    # available
    functiontocall = getattr(self, 'AddPortMapping' + self.mode)
    try:
        functiontocall()
    except Exception as e:
        log.addwarning(_('UPnP exception: %(error)s') % {'error': str(e)})
        log.addwarning(
            _('Failed to automate the creation of ' +
              'UPnP Port Mapping rule.'))
        return
    log.add(
        _('Managed to map external WAN port %(externalwanport)s ' +
          'on your external IP %(externalipaddress)s ' +
          'to your local host %(internalipaddress)s ' +
          'port %(internallanport)s.') % {
              'externalwanport': self.externalwanport,
              'externalipaddress': self.externalipaddress,
              'internalipaddress': self.internalipaddress,
              'internallanport': self.internallanport
          })
    # Set the external WAN port in the GUI
    frame.networkcallback([slskmessages.IncPort(self.externalwanport)])
    # Establish the connection to the slsk network
    frame.OnConnect(-1)
def AddPortMapping(self, frame, np):
    """Wrapper that creates the UPnP Port Mapping via either:

    - the MiniUPnPc binary (upnpc), or
    - the python binding to MiniUPnPc (miniupnpc),

    as selected by self.mode.  Both support IGDv1 and IGDv2.

    Needs `frame` (NicotineFrame) to publish the chosen external WAN
    port and trigger the connect, and `np` to read the internal LAN
    port from the protothread socket.

    From the UPnP IGD reference
    (http://upnp.org/specs/gw/UPnP-gw-WANIPConnection-v2-Service.pdf):
    AddPortMapping creates or overwrites a mapping; an error is
    returned when the (ExternalPort, Protocol) pair is already mapped
    to another internal client.  With IGDv1 a NewLeaseDuration of 0
    means a static mapping; with IGDv2 a duration of 0 is replaced by
    the default of 604800 s.  Since nicotine+ never refreshes the
    mapping while running, IGDv2 routers will expire it after 7 days
    and the client won't be able to send/receive files anymore.
    """
    log.add(_('Creating Port Mapping rule via UPnP...'))
    # Local LAN IP
    # NOTE(review): gethostbyname(gethostname()) may resolve to
    # 127.0.0.1 depending on /etc/hosts -- confirm on target systems.
    self.internalipaddress = gethostbyname(gethostname())
    # Store the Local LAN port
    self.internallanport = np.protothread._p.getsockname()[1]
    # The function depends on what method of configuring port mapping is
    # available
    functiontocall = getattr(self, 'AddPortMapping' + self.mode)
    try:
        functiontocall()
    except Exception as e:
        log.addwarning(_('UPnP exception: %(error)s') % {'error': str(e)})
        log.addwarning(
            _('Failed to automate the creation of ' +
              'UPnP Port Mapping rule.'))
        return
    log.add(
        _('Managed to map external WAN port %(externalwanport)s ' +
          'on your external IP %(externalipaddress)s ' +
          'to your local host %(internalipaddress)s ' +
          'port %(internallanport)s.') % {
              'externalwanport': self.externalwanport,
              'externalipaddress': self.externalipaddress,
              'internalipaddress': self.internalipaddress,
              'internallanport': self.internallanport
          }
    )
    # Set the external WAN port in the GUI
    frame.networkcallback([slskmessages.IncPort(self.externalwanport)])
    # Establish the connection to the slsk network
    frame.OnConnect(-1)
def clearShares(self, sharedfiles, bsharedfiles, sharedfilesstreams, bsharedfilesstreams, wordindex, bwordindex, fileindex, bfileindex, sharedmtimes, bsharedmtimes):
    # Close, delete and recreate every share database (normal + buddy).
    # Each shelve is reopened with flag='n', i.e. truncated to empty.
    # NOTE(review): this chunk looks truncated -- the reopened handles
    # are presumably returned after this span on success.
    try:
        if sharedfiles:
            sharedfiles.close()
        try:
            os.unlink(os.path.join(self.data_dir, 'files.db'))
        except:
            pass
        sharedfiles = shelve.open(os.path.join(self.data_dir, "files.db"), flag='n')
        if bsharedfiles:
            bsharedfiles.close()
        try:
            os.unlink(os.path.join(self.data_dir, 'buddyfiles.db'))
        except:
            pass
        bsharedfiles = shelve.open(os.path.join(self.data_dir, "buddyfiles.db"), flag='n')
        if sharedfilesstreams:
            sharedfilesstreams.close()
        try:
            os.unlink(os.path.join(self.data_dir, 'streams.db'))
        except:
            pass
        sharedfilesstreams = shelve.open(os.path.join(self.data_dir, "streams.db"), flag='n')
        if bsharedfilesstreams:
            bsharedfilesstreams.close()
        try:
            os.unlink(os.path.join(self.data_dir, 'buddystreams.db'))
        except:
            pass
        bsharedfilesstreams = shelve.open(os.path.join(self.data_dir, "buddystreams.db"), flag='n')
        if wordindex:
            wordindex.close()
        try:
            os.unlink(os.path.join(self.data_dir, 'wordindex.db'))
        except:
            pass
        wordindex = shelve.open(os.path.join(self.data_dir, "wordindex.db"), flag='n')
        if bwordindex:
            bwordindex.close()
        try:
            os.unlink(os.path.join(self.data_dir, 'buddywordindex.db'))
        except:
            pass
        bwordindex = shelve.open(os.path.join(self.data_dir, "buddywordindex.db"), flag='n')
        if fileindex:
            fileindex.close()
        try:
            os.unlink(os.path.join(self.data_dir, 'fileindex.db'))
        except:
            pass
        fileindex = shelve.open(os.path.join(self.data_dir, "fileindex.db"), flag='n')
        if bfileindex:
            bfileindex.close()
        try:
            os.unlink(os.path.join(self.data_dir, 'buddyfileindex.db'))
        except:
            pass
        bfileindex = shelve.open(os.path.join(self.data_dir, "buddyfileindex.db"), flag='n')
        if sharedmtimes:
            sharedmtimes.close()
        try:
            os.unlink(os.path.join(self.data_dir, 'mtimes.db'))
        except:
            pass
        sharedmtimes = shelve.open(os.path.join(self.data_dir, "mtimes.db"), flag='n')
        if bsharedmtimes:
            bsharedmtimes.close()
        try:
            os.unlink(os.path.join(self.data_dir, 'buddymtimes.db'))
        except:
            pass
        bsharedmtimes = shelve.open(os.path.join(self.data_dir, "buddymtimes.db"), flag='n')
    except Exception, error:
        log.addwarning(_("Error while writing database files: %s") % error)
        return None
else: try: self.sections['transfers']['downloads'] = cPickle.load(handle) except (IOError, EOFError, ValueError), inst: log.addwarning(_("Something went wrong while reading your transfer list: %(error)s") % {'error': str(inst)}) try: handle.close() except: pass path, fn = os.path.split(self.filename) try: if not os.path.isdir(path): os.makedirs(path) except OSError, msg: log.addwarning("Can't create directory '%s', reported error: %s" % (path, msg)) # Transition from 1.2.16 -> 1.2.16+ # Do the cleanup early so we don't get the annoying # 'Unknown config option ...' message self.removeOldOption("transfers", "pmqueueddir") self.removeOldOption("server", "lastportstatuscheck") self.removeOldOption("userinfo", "descrutf8") # Checking for nnknown section/options unknown1 = [ 'login', 'passw', 'enc', 'downloaddir', 'uploaddir', 'customban', 'descr', 'pic', 'logsdir', 'roomlogsdir', 'privatelogsdir', 'incompletedir', 'autoreply', 'afterfinish', 'downloadregexp', 'afterfolder', 'default', 'chatfont', 'npothercommand', 'npplayer', 'npformat', 'private_timestamp', 'rooms_timestamp', 'log_timestamp'
def getFilesListUnicode(self, mtimes, oldmtimes, oldlist, yieldcall=None, progress=None, rebuild=False):
    """Get a list of files with their filelength, bitrate and track
    length in seconds.

    NOTE(review): this chunk appears truncated -- the final
    'return list' is not visible here.
    """
    # Maps virtual directory name -> list of per-file metadata entries.
    list = {}
    count = 0
    for directory in mtimes:
        directory = os.path.expanduser(directory)
        virtualdir = self.real2virtual(directory)
        count += 1
        # Report scan progress to the UI, when a progress bar was supplied.
        if progress:
            percent = float(count)/len(mtimes)
            if percent <= 1.0:
                gobject.idle_add(progress.set_fraction, percent)
        # force Unicode for reading from disk
        u_directory = u"%s" % directory
        str_directory = str(directory)
        if self.hiddenCheck({'dir': directory}):
            continue
        if directory in oldmtimes and directory not in oldlist:
            # Partial information, happened with unicode paths that N+
            # couldn't handle properly -- force a rescan.
            del oldmtimes[directory]
        # Reuse the cached entry when the directory's mtime is unchanged.
        if not rebuild and directory in oldmtimes:
            if mtimes[directory] == oldmtimes[directory]:
                if os.path.exists(directory):
                    try:
                        list[virtualdir] = oldlist[virtualdir]
                        continue
                    except KeyError:
                        log.addwarning(_("Inconsistent cache for '%(vdir)s', rebuilding '%(dir)s'") % {
                            'vdir': virtualdir,
                            'dir': directory
                        })
                else:
                    log.adddebug(_("Dropping missing directory %(dir)s") % {'dir': directory})
                    continue
        list[virtualdir] = []
        try:
            contents = os.listdir(u_directory)
        except OSError, errtuple:
            print str(errtuple)
            self.logMessage(str(errtuple))
            continue
        contents.sort()
        for filename in contents:
            if self.hiddenCheck({'dir': directory, 'file': filename}):
                continue
            path = os.path.join(directory, filename)
            s_path = str(path)
            ppath = unicode(path)
            s_filename = str(filename)
            try:
                # try to force Unicode for reading from disk
                isfile = os.path.isfile(ppath)
            except OSError, errtuple:
                message = _("Scanning Error: %(error)s Path: %(path)s") % {'error': errtuple, 'path': path}
                print str(message)
                self.logMessage(message)
                displayTraceback(sys.exc_info()[2])
                continue
            else:
                if isfile:
                    # Get the metadata of the file via mutagen
                    data = self.getFileInfoUnicode(s_filename, s_path)
                    if data is not None:
                        list[virtualdir].append(data)
            # Hand control back to the caller between files.
            if yieldcall is not None:
                yieldcall()
def clearShares(self, sharedfiles, bsharedfiles, sharedfilesstreams, bsharedfilesstreams, wordindex, bwordindex, fileindex, bfileindex, sharedmtimes, bsharedmtimes):
    # Close, delete and recreate every share database (normal + buddy),
    # stored as <configfile>.<name>.db.  Each shelve is reopened with
    # flag='n', i.e. truncated to empty.
    # NOTE(review): this chunk looks truncated -- the reopened handles
    # are presumably returned after this span on success.
    try:
        if sharedfiles:
            sharedfiles.close()
        try:
            os.unlink(self.filename+'.files.db')
        except:
            pass
        sharedfiles = shelve.open(self.filename+".files.db", flag='n')
        if bsharedfiles:
            bsharedfiles.close()
        try:
            os.unlink(self.filename+'.buddyfiles.db')
        except:
            pass
        bsharedfiles = shelve.open(self.filename+".buddyfiles.db", flag='n')
        if sharedfilesstreams:
            sharedfilesstreams.close()
        try:
            os.unlink(self.filename+'.streams.db')
        except:
            pass
        sharedfilesstreams = shelve.open(self.filename+".streams.db", flag='n')
        if bsharedfilesstreams:
            bsharedfilesstreams.close()
        try:
            os.unlink(self.filename+'.buddystreams.db')
        except:
            pass
        bsharedfilesstreams = shelve.open(self.filename+".buddystreams.db", flag='n')
        if wordindex:
            wordindex.close()
        try:
            os.unlink(self.filename+'.wordindex.db')
        except:
            pass
        wordindex = shelve.open(self.filename+".wordindex.db", flag='n')
        if bwordindex:
            bwordindex.close()
        try:
            os.unlink(self.filename+'.buddywordindex.db')
        except:
            pass
        bwordindex = shelve.open(self.filename+".buddywordindex.db", flag='n')
        if fileindex:
            fileindex.close()
        try:
            os.unlink(self.filename+'.fileindex.db')
        except:
            pass
        fileindex = shelve.open(self.filename+".fileindex.db", flag='n')
        if bfileindex:
            bfileindex.close()
        try:
            os.unlink(self.filename+'.buddyfileindex.db')
        except:
            pass
        bfileindex = shelve.open(self.filename+".buddyfileindex.db", flag='n')
        if sharedmtimes:
            sharedmtimes.close()
        try:
            os.unlink(self.filename+'.mtimes.db')
        except:
            pass
        sharedmtimes = shelve.open(self.filename+".mtimes.db", flag='n')
        if bsharedmtimes:
            bsharedmtimes.close()
        try:
            os.unlink(self.filename+'.buddymtimes.db')
        except:
            pass
        bsharedmtimes = shelve.open(self.filename+".buddymtimes.db", flag='n')
    except Exception, error:
        log.addwarning(_("Error while writing database files: %s") % error)
        return None
def getFilesList(self, mtimes, oldmtimes, oldlist, yieldcall=None, progress=None, rebuild=False):
    """ Get a list of files with their filelength and (if mp3) bitrate and track length in seconds """
    # mtimes    -- {real_dir: mtime} of directories to scan
    # oldmtimes -- mtimes from the previous scan (cache validation)
    # oldlist   -- previous results, reused for unchanged directories
    # yieldcall -- optional callable invoked per file to keep the UI alive
    # progress  -- optional progress bar, updated via gobject.idle_add
    # rebuild   -- when True, ignore the cache and rescan everything
    #
    # Returns {virtual_dir: [file_data, ...]}.
    files = {}  # renamed from 'list' to stop shadowing the builtin
    count = 0

    for directory in mtimes:

        directory = os.path.expanduser(directory)
        virtualdir = self.real2virtual(directory)
        count += 1

        if progress:
            # Progress updates must run on the GTK main loop.
            percent = float(count) / len(mtimes)
            if percent <= 1.0:
                gobject.idle_add(progress.set_fraction, percent)

        if self.hiddenCheck(directory):
            continue

        if not rebuild and directory in oldmtimes:
            if mtimes[directory] == oldmtimes[directory]:
                if os.path.exists(directory):
                    try:
                        # Unchanged directory: reuse the cached file list.
                        files[virtualdir] = oldlist[virtualdir]
                        continue
                    except KeyError:
                        log.addwarning("Inconsistent cache for '%s', rebuilding '%s'" % (virtualdir, directory))
                else:
                    print("Dropping removed directory %s" % directory)
                    continue

        files[virtualdir] = []

        try:
            contents = os.listdir(directory)
        except OSError as errtuple:
            print(str(errtuple))
            self.logMessage(str(errtuple))
            continue

        contents.sort()

        for filename in contents:

            if self.hiddenCheck(filename):
                continue

            path = os.path.join(directory, filename)

            try:
                isfile = os.path.isfile(path)
            except OSError as errtuple:
                message = _("Scanning Error: %(error)s Path: %(path)s") % {
                    'error': errtuple,
                    'path': path
                }
                print(str(message))
                self.logMessage(message)
                displayTraceback(sys.exc_info()[2])
                continue
            else:
                if isfile:
                    # Get the metadata of the file via mutagen
                    data = self.getFileInfo(filename, path)
                    if data is not None:
                        files[virtualdir].append(data)

            if yieldcall is not None:
                yieldcall()

    # Fix: the scan result was built but never returned (compare
    # getFilesStreamsUnicode in this file, which returns its accumulator).
    return files
command = 'upnpc -r $ tcp' try: output = executeCommand(command, replacement=str(externallanport), returnoutput=True) except RuntimeError, e: log.addwarning('Failed to use UPnPc binary: %s' % (str(e),)) return for line in output.split('\n'): # "external %s:%s %s is redirected to internal %s:%s\n" if line.startswith("external ") and line.find(" is redirected to internal ") > -1: lst = line.split() external = lst[1].split(':') #internal = lst[7].split(':') try: return (external[0], int(external[1])) except (ValueError, IndexError): log.addwarning(_('UPnPc binary failed, could not decompose %s into IP and port.') % (external)) return None log.addwarning('UPnPc binary failed, could not parse output: %s' % (output,)) return None def miniupnpcmodule(internallanport, externallanport): u = miniupnpc.UPnP() u.discoverdelay = 2000 try: print "Discovering... delay=%ums" % u.discoverdelay ndevices = u.discover() print "%s device(s) detected" % ndevices u.selectigd() lanaddr = u.lanaddr externalipaddress = u.externalipaddress() print "Selecting one of the IGD. Local address: %s External address: %s" % (lanaddr, externalipaddress) print "Misc: %s\n%s" % (u.statusinfo(), u.connectiontype())
def clearShares(
    self,
    sharedfiles, bsharedfiles,
    sharedfilesstreams, bsharedfilesstreams,
    wordindex, bwordindex,
    fileindex, bfileindex,
    sharedmtimes, bsharedmtimes
):
    """Close every share database, delete its backing file and recreate
    it empty.

    Each argument is a shelve handle (or a false value when the shelf
    was never opened).  On any unexpected failure a warning is logged
    and None is returned; on success None is returned as well.

    NOTE(review): the fresh shelves are bound to locals only and are
    never handed back to the caller -- looks like a latent bug, confirm
    against the call sites.
    """

    def wipe_and_reopen(handle, suffix):
        # Close the old shelf (if open), best-effort delete its backing
        # file, then open a fresh empty shelf at the same path.
        if handle:
            handle.close()
        try:
            os.unlink(self.filename + suffix + '.db')
        except OSError:
            # The backing file may never have been created; ignore.
            pass
        return shelve.open(self.filename + suffix + ".db", flag='n')

    try:
        sharedfiles = wipe_and_reopen(sharedfiles, '.files')
        bsharedfiles = wipe_and_reopen(bsharedfiles, '.buddyfiles')
        sharedfilesstreams = wipe_and_reopen(sharedfilesstreams, '.streams')
        bsharedfilesstreams = wipe_and_reopen(bsharedfilesstreams, '.buddystreams')
        wordindex = wipe_and_reopen(wordindex, '.wordindex')
        bwordindex = wipe_and_reopen(bwordindex, '.buddywordindex')
        fileindex = wipe_and_reopen(fileindex, '.fileindex')
        bfileindex = wipe_and_reopen(bfileindex, '.buddyfileindex')
        sharedmtimes = wipe_and_reopen(sharedmtimes, '.mtimes')
        bsharedmtimes = wipe_and_reopen(bsharedmtimes, '.buddymtimes')
    except Exception as error:
        log.addwarning(_("Error while writing database files: %s") % error)
        return None
else: try: self.sections['transfers']['downloads'] = cPickle.load(handle) except (IOError, EOFError, ValueError), inst: log.addwarning(_("Something went wrong while reading your transfer list: %(error)s") % {'error': str(inst)}) try: handle.close() except: pass path, fn = os.path.split(self.filename) try: if not os.path.isdir(path): os.makedirs(path) except OSError, msg: log.addwarning("Can't create directory '%s', reported error: %s" % (path, msg)) # Transition from 1.2.16 -> 1.4.0 # Do the cleanup early so we don't get the annoying # 'Unknown config option ...' message self.removeOldOption("transfers", "pmqueueddir") self.removeOldOption("server", "lastportstatuscheck") self.removeOldOption("server", "serverlist") self.removeOldOption("userinfo", "descrutf8") self.removeOldOption("ui", "enabletrans") self.removeOldOption("ui", "mozembed") self.removeOldOption("ui", "open_in_mozembed") self.removeOldOption("ui", "tooltips") self.removeOldOption("ui", "transalpha") self.removeOldOption("ui", "transfilter") self.removeOldOption("ui", "transtint")
# System imports import dircache import gobject import string, sys, os import time import struct # N+ imports import slskmessages from slskmessages import NetworkIntType, NetworkLongLongType from logfacility import log from utils import _, displayTraceback, strace try: import metadata_mutagen as metadata except ImportError: log.addwarning("Failed to import the Mutagen library, falling back to old library. To improve meta data please install Mutagen.") import mp3 as metadata win32 = sys.platform.startswith("win") class Shares: def __init__(self, np): self.np = np self.config = self.np.config self.queue = self.np.queue self.LogMessage = self.np.logMessage self.CompressedSharesBuddy = self.CompressedSharesNormal = None self.CompressShares("normal") self.CompressShares("buddy") self.requestedShares = {}
self._resetCounters(conns) elif msgObj.__class__ is SetGeoBlock: self._geoip = msgObj.config elif msgObj.__class__ is SetUploadLimit: if msgObj.uselimit: if msgObj.limitby: cb = self._calcLimitByTotal else: cb = self._calcLimitByTransfer else: cb = self._calcLimitNone self._resetCounters(conns) self._uploadlimit = (cb, msgObj.limit) elif msgObj.__class__ is SetDownloadLimit: self._downloadlimit = (self._calcDLimitByTotal, msgObj.limit) if socketwarning and time.time() - self.lastsocketwarning > 60: self.lastsocketwarning = time.time() log.addwarning(_("You have just hit your connection limit of %(limit)s. Nicotine+ will drop connections for your protection. If you get this message often you should search for less generic terms, or increase your per-process file descriptor limit.") % {'limit':maxsockets}) if needsleep: time.sleep(1) return conns, connsinprogress, server_socket def abort(self): """ Call this to abort the thread""" self._want_abort = 1 def stopped(self): """ returns true if thread has stopped """ return self._stopped
def ApplyTranslation(lang=None):
    """Set up gettext translations and the locale.

    If no language is specified by the user we try to autodetect it and
    fix the locale.  If a language is specified we try to load the
    corresponding translation file and fix the locale.  In both cases,
    if something goes wrong, we fall back to no translation.

    Translation files are searched for in the project path first:

        $(PROJECT_PATH)/languages/$(LANG)/LC_MESSAGES/nicotine.mo

    If no translation is found there we fall back to the system path
    for locales:

        GNU/Linux: /usr/share/locale/$(LANG)/LC_MESSAGES
        Windows:   %PYTHONHOME%\share\locale\$(LANG)\LC_MESSAGES

    Note: to the best of my knowledge, falling back to the system path
    does not work when running inside a Python venv."""

    # Package name for gettext
    PACKAGE = 'nicotine'

    # Local path where to find translation (mo) files
    LOCAL_MO_PATH = 'languages'

    # If no lang is provided we try to autodetect it
    if lang is None:

        # Setting up environment variables for locale handling
        SetLocaleEnv()

        # Current language derived from the locale
        currentlang = locale.getlocale()[0].split('_')[0]

        if currentlang == "en":
            # If the current locale is english we don't translate.
            # Either the locale of the user really is english, or we
            # are in the fallback mode of the locale handling.
            gettext.install(PACKAGE)
        else:
            try:
                # We try to find the translation file in the project path first
                tr = gettext.translation(PACKAGE, localedir=LOCAL_MO_PATH)
                tr.install()
            except IOError as e1:
                try:
                    # We try to find the translation file in the global path
                    tr = gettext.translation(PACKAGE)
                    tr.install()
                except IOError as e2:
                    logfacility.addwarning("Translation for '%s' not found, "
                                           "falling back to english" % (currentlang))
                    # If we can't find the translation for the current locale
                    # we fall back to no translation at all...
                    gettext.install(PACKAGE)
                    # ...and we reset the locale to english
                    SetLocaleEnv("en")
    else:
        # The user has forced a language to be used.
        # Setting up environment variables for the specified language.
        SetLocaleEnv(lang)

        if lang == "en":
            # We don't translate since the user wants the english language
            gettext.install(PACKAGE)
        else:
            try:
                # We try to find the translation file in the project path first
                tr = gettext.translation(PACKAGE,
                                         localedir=LOCAL_MO_PATH,
                                         languages=[lang])
                tr.install()
            except IOError as e1:
                try:
                    # We try to find the translation file in the global path
                    tr = gettext.translation(PACKAGE, languages=[lang])
                    tr.install()
                except IOError as e2:
                    logfacility.addwarning("Translation for '%s' not found, "
                                           "falling back to english" % (lang))
                    # If we can't find the translation for the specified
                    # language we fall back to no translation at all...
                    gettext.install(PACKAGE)
                    # ...and we reset the locale to english
                    SetLocaleEnv("en")

    # Now we bind glade to the nicotine domain.
    # Same search order as before: try the local path, then the global one.
    if gettext.find(PACKAGE, localedir=LOCAL_MO_PATH) is None:
        gtk.glade.bindtextdomain(PACKAGE)
    else:
        gtk.glade.bindtextdomain(PACKAGE, LOCAL_MO_PATH)

    gtk.glade.textdomain(PACKAGE)
def _convert_to_virtual(x):
    """Coerce a shared-folder entry into (virtual_name, real_path) form.

    Entries that are already tuples are returned untouched.  A bare
    path string gets a virtual name derived from it, and a warning is
    logged because the share must be rescanned afterwards.
    """
    if isinstance(x, tuple):
        # Already in the (virtual, real) representation.
        return x

    # Derive the virtual name: every path separator becomes an
    # underscore, then leading/trailing underscores are trimmed.
    virtual = x
    for separator in ('/', '\\'):
        virtual = virtual.replace(separator, '_')
    virtual = virtual.strip('_')

    log.addwarning("Renaming shared folder '%s' to '%s'. A rescan of your share is required." % (x, virtual))
    return (virtual, x)
def SetLocaleEnv(lang=None): """Function to set locale used by gettext and glade. We try to autodetect the locale if the user don't specify a language we can derived the locale from. In any case if something goes bad we fall back to english (C).""" # Detect if we're running on Windows win32 = platform.system().startswith("Win") if lang is None: # If no lang is provided we just fix LC_ALL to be sure if win32: # On windows python can get a normalize tuple # (language code, encoding) locale_win = locale.getdefaultlocale() # Build a locale name compatible with gettext locale_win_gettext = _build_localename_win(locale_win) # Fix environnement variables os.environ['LC_ALL'] = locale_win_gettext _putenv_win('LC_ALL', locale_win_gettext) # Fix the locale locale.setlocale(locale.LC_ALL, '') else: # On UNIX we simply set the current locale # or fallback to C.UTF-8 try: locale.setlocale(locale.LC_ALL, '') except Exception as e: log.addwarning("Cannot set the locale: %s, " "falling back to english" % str(e)) # Falling back to english locale.setlocale(locale.LC_ALL, 'C.UTF-8') os.environ['LC_ALL'] = 'C' else: # If a lang is provided if win32: # On Windows we normalize the language in unix format # for env variables used by glade wanted_lang = locale.normalize(lang).split('.')[0] # Fix environnement variables os.environ['LC_ALL'] = wanted_lang _putenv_win('LC_ALL', wanted_lang) # We fix the locale using the locales conversion table # since VS2010 doesn't support unix format locales locale.setlocale(locale.LC_ALL, UNIX_TO_WINDOWS_LOCALES[lang]) else: # On Unix we nomalize it and add UTF-8 encoding try: locale.setlocale(locale.LC_ALL, locale.normalize(lang).split('.')[0]+'.UTF-8') except Exception as e: log.addwarning("Cannot set the locale: %s, " "falling back to english" % str(e)) # Falling back to english locale.setlocale(locale.LC_ALL, 'C.UTF-8') os.environ['LC_ALL'] = 'C'
def getFilesListUnicode(self, mtimes, oldmtimes, oldlist, yieldcall=None, progress=None, rebuild=False):
    """ Get a list of files with their filelength, bitrate and track length in seconds """
    # Unicode-aware variant of getFilesList: directory names are coerced
    # to Unicode before touching the filesystem.
    #
    # Returns {virtual_dir: [file_data, ...]}.
    files = {}  # renamed from 'list' to stop shadowing the builtin
    count = 0

    for directory in mtimes:

        directory = os.path.expanduser(directory)
        virtualdir = self.real2virtual(directory)
        count += 1

        if progress:
            # Progress updates must run on the GTK main loop.
            percent = float(count) / len(mtimes)
            if percent <= 1.0:
                gobject.idle_add(progress.set_fraction, percent)

        # force Unicode for reading from disk
        u_directory = u"%s" % directory

        if self.hiddenCheck({'dir': directory}):
            continue

        if directory in oldmtimes and directory not in oldlist:
            # Partial information, happened with unicode paths that N+ couldn't handle properly
            del oldmtimes[directory]

        if not rebuild and directory in oldmtimes:
            if mtimes[directory] == oldmtimes[directory]:
                if os.path.exists(directory):
                    try:
                        # Unchanged directory: reuse the cached file list.
                        files[virtualdir] = oldlist[virtualdir]
                        continue
                    except KeyError:
                        log.addwarning(_("Inconsistent cache for '%(vdir)s', rebuilding '%(dir)s'") % {
                            'vdir': virtualdir,
                            'dir': directory
                        })
                else:
                    log.adddebug(_("Dropping missing directory %(dir)s") % {'dir': directory})
                    continue

        files[virtualdir] = []

        try:
            contents = os.listdir(u_directory)
        except OSError as errtuple:
            print(str(errtuple))
            self.logMessage(str(errtuple))
            continue

        contents.sort()

        for filename in contents:

            if self.hiddenCheck({'dir': directory, 'file': filename}):
                continue

            path = os.path.join(directory, filename)
            s_path = str(path)
            ppath = unicode(path)
            s_filename = str(filename)

            try:
                # try to force Unicode for reading from disk
                isfile = os.path.isfile(ppath)
            except OSError as errtuple:
                message = _("Scanning Error: %(error)s Path: %(path)s") % {
                    'error': errtuple,
                    'path': path
                }
                print(str(message))
                self.logMessage(message)
                displayTraceback(sys.exc_info()[2])
                continue
            else:
                if isfile:
                    # Get the metadata of the file via mutagen
                    data = self.getFileInfoUnicode(s_filename, s_path)
                    if data is not None:
                        files[virtualdir].append(data)

            if yieldcall is not None:
                yieldcall()

    # Fix: the scan result was built but never returned (compare
    # getFilesStreamsUnicode, which returns its accumulator).
    return files