def _check_cert_directory(self):
    """
    Normalise the ``cert-directory`` option to a ``FilePath`` and verify
    that it names an existing directory.

    :raise UsageError: if the path is missing or is not a directory.
    """
    directory = FilePath(self['cert-directory'])
    self['cert-directory'] = directory
    if not directory.exists():
        raise UsageError("{} does not exist".format(directory.path))
    if not directory.isdir():
        raise UsageError("{} is not a directory".format(directory.path))
def inspect(doc):
    """
    Inspect the filesystem entry named by a JSON document.

    :param doc: a JSON string containing a ``'path'`` key naming the
        filesystem entry to inspect.
    :returns: a dict describing the entry: always ``kind``, ``path`` and
        ``exists``; when the entry exists, also ``filetype``, ownership,
        permissions and timestamps, plus ``size`` and ``sha1`` for
        regular files.
    """
    request = json.loads(doc)
    path = FilePath(request['path'])
    ret = {'kind': 'file', 'path': path.path, 'exists': path.exists()}
    if not ret['exists']:
        return ret
    if path.isdir():
        ret['filetype'] = 'dir'
    elif path.isfile():
        ret['filetype'] = 'file'
        ret['size'] = path.statinfo.st_size
        h = sha1()
        # FIX: hash in binary mode (the original opened with 'r', which can
        # mangle bytes via newline/encoding translation) and close the
        # handle deterministically via the context manager.
        with open(path.path, 'rb') as fh:
            while True:
                chunk = fh.read(4096)
                if not chunk:
                    break
                h.update(chunk)
        ret['sha1'] = h.hexdigest()
    ret['owner'] = pwd.getpwuid(path.getUserID()).pw_name
    ret['group'] = grp.getgrgid(path.getGroupID()).gr_name
    ret['perms'] = permsString(path.getPermissions())
    ret['ctime'] = int(path.statinfo.st_ctime)
    ret['mtime'] = int(path.statinfo.st_mtime)
    ret['atime'] = int(path.statinfo.st_atime)
    return ret
def test_directory_is_created(self):
    """
    If a directory does not exist in given path, it is created.
    """
    target = FilePath(self.mktemp())
    self.service(target)
    self.assertTrue(target.isdir())
def _backup_pcap(self, username, ip_addr):
    """
    Backup existing pcap file. Used when restarting traffic capture for
    ACTIVE accounts and after the account expires.

    :param username (str): account username
    :param ip_addr (IPv4Address): IP address allocated for the account.
    """
    log.debug("ACCOUNTS:: Backing up pcap for {} with IP {}.".format(
        username, str(ip_addr)))
    # Timestamp (MMDDhhmm) makes the backup filename unique per capture run.
    day_month_str = datetime.now().strftime("%m%d%H%M")
    cur_pcap_file = "{}_{}.pcap".format(username, str(ip_addr))
    # Backups live in a per-account subdirectory of the pcaps directory,
    # named "<username>_<ip>".
    new_path = os.path.join(self.path['pcaps'],
                            "{}_{}".format(username, str(ip_addr)))
    new_path_fp = FilePath(new_path)
    if not new_path_fp.isdir():
        log.debug("ACCOUNTS:: Creating directory {}".format(new_path))
        new_path_fp.createDirectory()
    new_pcap_file = "{}_{}_{}.pcap".format(username, str(ip_addr),
                                           day_month_str)
    # Resolve both names to absolute paths before the move.
    cur_pcap_file = os.path.join(self.path['pcaps'], cur_pcap_file)
    new_pcap_file = os.path.join(new_path, new_pcap_file)
    log.debug("ACCOUNTS:: Current pcap file {}".format(cur_pcap_file))
    log.debug("ACCOUNTS:: New pcap file {}".format(new_pcap_file))
    fp = FilePath(cur_pcap_file)
    backup_fp = FilePath(new_pcap_file)
    # Move the live capture file into the backup directory, then restrict
    # its permissions (0654 is a Python 2 octal literal).
    fp.moveTo(backup_fp)
    backup_fp.chmod(0654)
class JoinOptions(usage.Options):
    """
    Command-line options for joining an existing magic-folder from an
    invite code, rooted at a local directory.
    """
    synopsis = "INVITE_CODE LOCAL_DIR"
    dmd_write_cap = ""
    magic_readonly_cap = ""
    optParameters = [
        ("poll-interval", "p", "60", "How often to ask for updates"),
        ("name", "n", None, "Name for the new magic-folder"),
        ("author", "A", None,
         "Author name for Snapshots in this magic-folder"),
    ]

    def parseArgs(self, invite_code, local_dir):
        # Validate positional arguments; the base class is called with no
        # extras so it can enforce its own argument handling.
        super(JoinOptions, self).parseArgs()
        try:
            # int() raises ValueError for non-numeric input; the explicit
            # raise funnels non-positive values into the same error path.
            if int(self['poll-interval']) <= 0:
                raise ValueError("should be positive")
        except ValueError:
            raise usage.UsageError(
                "--poll-interval must be a positive integer")
        self.local_dir = FilePath(local_dir)
        if not self.local_dir.exists():
            raise usage.UsageError("'{}' doesn't exist".format(local_dir))
        if not self.local_dir.isdir():
            raise usage.UsageError("'{}' isn't a directory".format(local_dir))
        self.invite_code = invite_code

    def postOptions(self):
        # Runs after all options are parsed: fill in the author from the
        # environment and require an explicit --name.
        super(JoinOptions, self).postOptions()
        _fill_author_from_environment(self)
        if self["name"] is None:
            raise usage.UsageError("Must specify the --name option")
class AddOptions(usage.Options):
    """
    Command-line options for creating a new magic-folder rooted at an
    existing local directory.
    """
    local_dir = None
    synopsis = "LOCAL_DIR"
    optParameters = [
        ("poll-interval", "p", "60", "How often to ask for updates"),
        ("name", "n", None, "The name of this magic-folder"),
        ("author", "A", None, "Our name for Snapshots authored here"),
    ]
    description = ("Create a new magic-folder.")

    def parseArgs(self, local_dir=None):
        # Exactly one positional argument: the local directory to share.
        if local_dir is None:
            raise usage.UsageError(
                "Must specify a single argument: the local directory")
        self.local_dir = FilePath(local_dir)
        if not self.local_dir.exists():
            raise usage.UsageError("'{}' doesn't exist".format(local_dir))
        if not self.local_dir.isdir():
            raise usage.UsageError("'{}' isn't a directory".format(local_dir))

    def postOptions(self):
        super(AddOptions, self).postOptions()
        _fill_author_from_environment(self)
        if self["name"] is None:
            raise usage.UsageError("Must specify the --name option")
        try:
            # int() raises ValueError for non-numeric input; non-positive
            # values are routed into the same error path.
            if int(self['poll-interval']) <= 0:
                raise ValueError("should be positive")
        except ValueError:
            raise usage.UsageError(
                "--poll-interval must be a positive integer")
def test_starts_persistence_service(self):
    """
    ``ControlScript.main`` starts a configuration persistence service.
    """
    data_path = FilePath(self.mktemp())
    opts = ControlOptions()
    opts.parseOptions([b"--data-path", data_path.path])
    ControlScript().main(MemoryCoreReactor(), opts)
    self.assertTrue(data_path.isdir())
def gotStatus(status):
    """
    For every file the svn status output marks as added ('A'), collect a
    deferred diff of that file against /dev/null; directories are skipped.
    Returns a gathered Deferred of all diff outputs.
    """
    diffs = []
    for line in status.splitlines():
        if not line.startswith('A'):
            continue
        _, _, fileName = line.split(None, 2)
        target = FilePath(self.projectTrunk).preauthChild(fileName)
        if not target.isdir():
            diffs.append(
                getProcessOutput(
                    "diff", ("-u", "/dev/null", target.path),
                    env=environ))
    return gatherResults(diffs)
def _find_suite():
    """
    Locate the JSON-Schema-Test-Suite directory, preferring the
    ``JSON_SCHEMA_TEST_SUITE`` environment variable over the checkout
    sitting next to the ``jsonschema`` package.
    """
    env_root = os.environ.get("JSON_SCHEMA_TEST_SUITE")
    if env_root is not None:
        return FilePath(env_root)
    root = FilePath(jsonschema.__file__).parent().sibling("json")
    if root.isdir():
        return root
    raise ValueError(  # pragma: no cover
        ("Can't find the JSON-Schema-Test-Suite directory. "
         "Set the 'JSON_SCHEMA_TEST_SUITE' environment "
         "variable or run the tests from alongside a checkout "
         "of the suite."),
    )
class ControlScriptTests(TestCase):
    """
    Tests for ``ControlScript``.
    """
    def setUp(self):
        """
        Create some certificates to use when creating the control service.
        """
        super(ControlScriptTests, self).setUp()
        ca_set, _ = get_credential_sets()
        # Certificates directory populated with a control-service credential.
        self.certificate_path = FilePath(self.mktemp())
        self.certificate_path.makedirs()
        ca_set.copy_to(self.certificate_path, control=True)
        self.script = ControlScript()
        self.options = ControlOptions()
        # Data path is deliberately not created here; tests assert the
        # script creates it.
        self.data_path = FilePath(self.mktemp())
        self.options.parseOptions([
            b"--port", b"tcp:8001", b"--agent-port", b"tcp:8002",
            b"--data-path", self.data_path.path,
            b"--certificates-directory", self.certificate_path.path])

    def test_no_immediate_stop(self):
        """
        The ``Deferred`` returned from ``ControlScript`` is not fired.
        """
        self.assertNoResult(self.script.main(MemoryCoreReactor(),
                                             self.options))

    def test_starts_persistence_service(self):
        """
        ``ControlScript.main`` starts a configuration persistence service.
        """
        reactor = MemoryCoreReactor()
        self.script.main(reactor, self.options)
        self.assertTrue(self.data_path.isdir())

    def test_starts_cluster_state_service(self):
        """
        ``ControlScript.main`` starts a cluster state service.
        """
        reactor = MemoryCoreReactor()
        self.script.main(reactor, self.options)
        # Dig the state service out of the resource tree served by the
        # first TCP server the script registered with the reactor.
        server = reactor.tcpServers[0]
        control_resource = server[1].wrappedFactory.resource
        service = control_resource._v1_user.cluster_state_service
        self.assertEqual((service.__class__, service.running),
                         (ClusterStateService, True))
class ControlScriptTests(TestCase):
    """
    Tests for ``ControlScript``.
    """
    def setUp(self):
        """
        Create some certificates to use when creating the control service.
        """
        super(ControlScriptTests, self).setUp()
        ca_set, _ = get_credential_sets()
        # Certificates directory populated with a control-service credential.
        self.certificate_path = FilePath(self.mktemp())
        self.certificate_path.makedirs()
        ca_set.copy_to(self.certificate_path, control=True)
        self.script = ControlScript()
        self.options = ControlOptions()
        # Data path is deliberately not created here; tests assert the
        # script creates it.
        self.data_path = FilePath(self.mktemp())
        self.options.parseOptions([
            b"--port", b"tcp:8001", b"--agent-port", b"tcp:8002",
            b"--data-path", self.data_path.path,
            b"--certificates-directory", self.certificate_path.path])

    def test_no_immediate_stop(self):
        """
        The ``Deferred`` returned from ``ControlScript`` is not fired.
        """
        self.assertNoResult(
            self.script.main(MemoryCoreReactor(), self.options))

    def test_starts_persistence_service(self):
        """
        ``ControlScript.main`` starts a configuration persistence service.
        """
        reactor = MemoryCoreReactor()
        self.script.main(reactor, self.options)
        self.assertTrue(self.data_path.isdir())

    def test_starts_cluster_state_service(self):
        """
        ``ControlScript.main`` starts a cluster state service.
        """
        reactor = MemoryCoreReactor()
        self.script.main(reactor, self.options)
        # Dig the state service out of the resource tree served by the
        # first TCP server the script registered with the reactor.
        server = reactor.tcpServers[0]
        control_resource = server[1].wrappedFactory.resource
        service = control_resource._v1_user.cluster_state_service
        self.assertEqual((service.__class__, service.running),
                         (ClusterStateService, True))
def _find_suite():
    """
    Locate the JSON-Schema-Test-Suite directory: prefer the
    ``JSON_SCHEMA_TEST_SUITE`` environment variable, falling back to a
    ``json`` checkout next to the installed ``jsonschema`` package.
    """
    root = os.environ.get("JSON_SCHEMA_TEST_SUITE")
    if root is not None:
        return FilePath(root)

    root = FilePath(jsonschema.__file__).parent().sibling("json")
    if not root.isdir():  # pragma: no cover
        raise ValueError(
            (
                "Can't find the JSON-Schema-Test-Suite directory. "
                "Set the 'JSON_SCHEMA_TEST_SUITE' environment "
                "variable or run the tests from alongside a checkout "
                "of the suite."
            ),
        )
    return root
class AddOptions(usage.Options):
    """
    Command-line options for creating a new magic-folder rooted at an
    existing local directory, with configurable poll and scan intervals.
    """
    local_dir = None
    synopsis = "LOCAL_DIR"
    optParameters = [
        ("poll-interval", "p", "60", "How often to ask for updates"),
        ("scan-interval", "s", "60", "Seconds between scans of local changes"),
        ("name", "n", None, "The name of this magic-folder", str),
        ("author", "A", None, "Our name for Snapshots authored here", str),
    ]
    optFlags = [
        ["disable-scanning", None, "Disable scanning for local changes."],
    ]
    description = ("Create a new magic-folder.")

    def parseArgs(self, local_dir=None):
        # Exactly one positional argument: the local directory to share.
        if local_dir is None:
            raise usage.UsageError(
                "Must specify a single argument: the local directory")
        self.local_dir = FilePath(local_dir)
        if not self.local_dir.exists():
            raise usage.UsageError("'{}' doesn't exist".format(local_dir))
        if not self.local_dir.isdir():
            raise usage.UsageError("'{}' isn't a directory".format(local_dir))

    def postOptions(self):
        super(AddOptions, self).postOptions()
        _fill_author_from_environment(self)
        if self["name"] is None:
            raise usage.UsageError("Must specify the --name option")
        try:
            valid_magic_folder_name(self['name'])
        except InvalidMagicFolderName as e:
            raise usage.UsageError(str(e))
        # Both intervals are normalised in place to int; int() raises
        # ValueError for non-numeric input and the explicit raise routes
        # non-positive values into the same error path.
        try:
            self['poll-interval'] = int(self['poll-interval'])
            if self['poll-interval'] <= 0:
                raise ValueError("should be positive")
        except ValueError:
            raise usage.UsageError(
                "--poll-interval must be a positive integer")
        try:
            self['scan-interval'] = int(self['scan-interval'])
            if self['scan-interval'] <= 0:
                raise ValueError("should be positive")
        except ValueError:
            raise usage.UsageError(
                "--scan-interval must be a positive integer")
def walk(self, path):
    """
    Recursively collect image files found under *path* into ``self.items``.

    A plain file argument is appended directly; a directory is traversed
    depth-first with an explicit stack, keeping files (and symlinks) whose
    guessed MIME type starts with ``image/``.
    """
    root = FilePath(path)
    pending = []
    if root.isdir():
        pending.append(root)
    elif root.isfile():
        self.items.append(FilePath(path))
    while pending:
        directory = pending.pop()
        try:
            for entry in directory.children():
                if entry.isdir():
                    pending.append(entry)
                elif entry.isfile() or entry.islink():
                    guessed, _ = mimetypes.guess_type(entry.path,
                                                      strict=False)
                    if guessed and guessed.startswith("image/"):
                        self.items.append(entry)
        except UnicodeDecodeError:
            self.warning("UnicodeDecodeError - there is something wrong with a file located in %r", directory.get_path())
def do_urlextract(dest, url):
    """
    Download the tarball at *url* and extract it into *dest*, interpreted
    inside a chroot of the root disk's mountpoint.

    Returns False when the root disk is not mounted or *dest* is not a
    directory; otherwise returns None (both on success and after a
    printed-and-swallowed extraction error).
    """
    global dsklst
    dest=FilePath(dest)
    # Don't do this if not mounted!
    mntpnt=dsklst['/'].real_mountpoint()
    if not os.path.ismount(mntpnt):
        return False
    if not dest.isdir():
        return False
    try:
        # Stream the tarball straight from the URL handle ('r|*' detects
        # compression on a non-seekable stream).
        uh=urllib2.urlopen(url)
        tf=tarfile.open(mode='r|*',fileobj=uh)
        # After chroot, all paths are relative to the new root, so chdir
        # into dest before extracting.
        os.chroot(mntpnt)
        os.chdir(os.path.join(dest.dirname(),dest.basename()))
        tf.extractall()
    except:
        # HACK: bare except swallows every error (including
        # KeyboardInterrupt) and only prints the traceback — kept as-is.
        traceback.print_exc()
    # NOTE(review): the original collapsed formatting is ambiguous about
    # whether this chdir runs only on error or always; treated here as
    # unconditional cleanup — confirm against the original file.
    os.chdir('/')
def getDirectory(self, path='/'):
    """
    List the entries directly under *path* and return a list of
    ``DiscoveredFile`` rows (or False when the path is unreadable).

    NOTE(review): the ``yield``/``defer.returnValue`` pattern implies this
    is meant to run under ``defer.inlineCallbacks`` — confirm the caller
    decorates it; ``yield FilePath(path)`` yields a plain object, not a
    Deferred.
    """
    self.fs = yield FilePath(path)
    if not self.fs.getPermissions():
        defer.returnValue(False)
    files = []
    for f in self.fs.listdir():
        if f == '/':
            continue
        fp = path+f
        fs = FilePath(fp)
        # dont follow symlinks: skip entries whose resolved path differs
        # from the constructed one.
        if fs.realpath().path != fp:
            continue
        # NOTE(review): perm is never populated — file_perm is always None.
        perm = None
        isdir = fs.isdir()
        size = fs.getsize()
        modified = datetime.utcfromtimestamp(fs.getModificationTime())
        df = DiscoveredFile(
            resource_id=self.data['resource_id'],
            file_path=path,
            file_name=f,
            file_isdir=isdir,
            file_size=size,
            file_modified=modified,
            file_perm=perm
        )
        # NOTE(review): the conditional expression yields fp either way —
        # the endswith('.') test is a no-op.
        print '[%s] LIST %s.' % (self.data['resource_name'], fp if not fp.endswith('.') else fp)
        files.append(df)
    defer.returnValue(files)
# Log to a rotating file under /tmp.
logFile = logfile.LogFile("ip_streamer.log", "/tmp")
log.startLogging(logFile)

#--------------------------------------------------------------------------------
# Read config, exit if no config is found
#--------------------------------------------------------------------------------
config = ConfigParser.ConfigParser()
path = FilePath('/etc/vice/config.ini')
# NOTE(review): FilePath.isfile(path) is the unbound-method call form
# (Python 2); equivalent to path.isfile().
if(FilePath.isfile(path)):
    config.read('/etc/vice/config.ini')
    viceIp = config.get('settings','server')
    vicePort = config.get('settings','port')
    viceServer = 'http://' + viceIp + ':' + vicePort
    updatetime = config.get('settings_mumudude','updatetime')
    # Ensure each configured working directory exists, creating it if not.
    tmpdir = FilePath(config.get('settings_mumudude','tmpdir'))
    if not FilePath.isdir(tmpdir):
        FilePath.createDirectory(tmpdir)
    mumudvblogdir = FilePath(config.get('settings_mumudude','mumudvblogdir'))
    if not FilePath.isdir(mumudvblogdir):
        FilePath.createDirectory(mumudvblogdir)
    mumudvbbindir = FilePath(config.get('settings_mumudude','mumudvbbindir'))
    if not FilePath.isdir(mumudvbbindir):
        FilePath.createDirectory(mumudvbbindir)
else:
    sys.exit('No config file found, please install /etc/vice/config.ini')

#--------------------------------------------------------------------------------
# Calculate uptime
#--------------------------------------------------------------------------------
def getUptime():
    # Seconds elapsed since the module-level startTime was recorded.
    return time.time() - startTime
class FSItem(BackendItem):
    """
    A filesystem-backed UPnP content-directory item: wraps a path (or a
    virtual 'root'/'item' entry) and maintains the corresponding DIDL
    item, its resources, cover art and parent/child bookkeeping.
    """
    logCategory = 'fs_item'

    def __init__(self, object_id, parent, path, mimetype, urlbase, UPnPClass,update=False):
        self.id = object_id
        self.parent = parent
        if parent:
            parent.add_child(self,update=update)
        if mimetype == 'root':
            # The root is identified by its plain path string, not a FilePath.
            self.location = unicode(path)
        else:
            if mimetype == 'item' and path is None:
                path = os.path.join(parent.get_path(),unicode(self.id))
            #self.location = FilePath(unicode(path))
            self.location = FilePath(path)
        self.mimetype = mimetype
        if urlbase[-1] != '/':
            urlbase += '/'
        self.url = urlbase + str(self.id)

        if parent == None:
            parent_id = -1
        else:
            parent_id = parent.get_id()

        self.item = UPnPClass(object_id, parent_id, self.get_name())
        if isinstance(self.item, Container):
            self.item.childCount = 0
        self.child_count = 0
        self.children = []

        if mimetype in ['directory','root']:
            self.update_id = 0
            self.get_url = lambda : self.url
            # Directories expose no filesystem path to clients.
            self.get_path = lambda : None
            #self.item.searchable = True
            #self.item.searchClass = 'object'
            if(isinstance(self.location,FilePath) and self.location.isdir() == True):
                self.check_for_cover_art()
                if hasattr(self, 'cover'):
                    _,ext = os.path.splitext(self.cover)
                    """ add the cover image extension to help clients
                        not reacting on the mimetype """
                    self.item.albumArtURI = ''.join((urlbase,str(self.id),'?cover',ext))
        else:
            self.get_url = lambda : self.url

            if self.mimetype.startswith('audio/'):
                if hasattr(parent, 'cover'):
                    _,ext = os.path.splitext(parent.cover)
                    """ add the cover image extension to help clients
                        not reacting on the mimetype """
                    self.item.albumArtURI = ''.join((urlbase,str(self.id),'?cover',ext))

            _,host_port,_,_,_ = urlsplit(urlbase)
            if host_port.find(':') != -1:
                host,port = tuple(host_port.split(':'))
            else:
                host = host_port

            try:
                size = self.location.getsize()
            except:
                size = 0

            # Real files get an 'internal' resource plus an http-get one;
            # virtual 'item' entries only get a wildcard http-get resource.
            if mimetype != 'item':
                res = Resource('file://'+ urllib.quote(self.get_path()), 'internal:%s:%s:*' % (host,self.mimetype))
                res.size = size
                self.item.res.append(res)

            if mimetype != 'item':
                res = Resource(self.url, 'http-get:*:%s:*' % self.mimetype)
            else:
                res = Resource(self.url, 'http-get:*:*:*')
            res.size = size
            self.item.res.append(res)

            """ if this item is an image and we want to add a thumbnail for it
                we have to follow these rules:

                create a new Resource object, at least a 'http-get'
                and maybe an 'internal' one too

                for an JPG this looks like that

                res = Resource(url_for_thumbnail,
                        'http-get:*:image/jpg:%s'% ';'.join(simple_dlna_tags+('DLNA.ORG_PN=JPEG_TN',)))
                res.size = size_of_thumbnail
                self.item.res.append(res)

                and for a PNG the Resource creation is like that

                res = Resource(url_for_thumbnail,
                        'http-get:*:image/png:%s'% ';'.join(simple_dlna_tags+('DLNA.ORG_PN=PNG_TN',)))

                if not hasattr(self.item, 'attachments'):
                    self.item.attachments = {}
                self.item.attachments[key] = utils.StaticFile(filename_of_thumbnail)
            """
            if self.mimetype in ('image/jpeg', 'image/png'):
                path = self.get_path()
                # Thumbnails are expected in a sibling '.thumbs' directory
                # under the same filename.
                thumbnail = os.path.join(os.path.dirname(path),'.thumbs',os.path.basename(path))
                if os.path.exists(thumbnail):
                    mimetype,_ = mimetypes.guess_type(thumbnail, strict=False)
                    if mimetype in ('image/jpeg','image/png'):
                        if mimetype == 'image/jpeg':
                            dlna_pn = 'DLNA.ORG_PN=JPEG_TN'
                        else:
                            dlna_pn = 'DLNA.ORG_PN=PNG_TN'
                        # id() of the path string serves as the attachment key.
                        hash_from_path = str(id(thumbnail))
                        new_res = Resource(self.url+'?attachment='+hash_from_path,
                                           'http-get:*:%s:%s' % (mimetype, ';'.join(simple_dlna_tags+(dlna_pn,))))
                        new_res.size = os.path.getsize(thumbnail)
                        self.item.res.append(new_res)
                        if not hasattr(self.item, 'attachments'):
                            self.item.attachments = {}
                        self.item.attachments[hash_from_path] = utils.StaticFile(urllib.quote(thumbnail))

        try:
            # FIXME: getmtime is deprecated in Twisted 2.6
            self.item.date = datetime.fromtimestamp(self.location.getmtime())
        except:
            self.item.date = None

    def rebuild(self, urlbase):
        # Re-derive the mimetype of a virtual 'item' from its path and
        # rebuild the DIDL item and its resources from scratch.
        #print "rebuild", self.mimetype
        if self.mimetype != 'item':
            return
        #print "rebuild for", self.get_path()
        mimetype,_ = mimetypes.guess_type(self.get_path(),strict=False)
        if mimetype == None:
            return
        self.mimetype = mimetype
        #print "rebuild", self.mimetype
        UPnPClass = classChooser(self.mimetype)
        self.item = UPnPClass(self.id, self.parent.id, self.get_name())
        if hasattr(self.parent, 'cover'):
            _,ext = os.path.splitext(self.parent.cover)
            """ add the cover image extension to help clients
                not reacting on the mimetype """
            self.item.albumArtURI = ''.join((urlbase,str(self.id),'?cover',ext))

        _,host_port,_,_,_ = urlsplit(urlbase)
        if host_port.find(':') != -1:
            host,port = tuple(host_port.split(':'))
        else:
            host = host_port

        res = Resource('file://'+urllib.quote(self.get_path()), 'internal:%s:%s:*' % (host,self.mimetype))
        try:
            res.size = self.location.getsize()
        except:
            res.size = 0
        self.item.res.append(res)

        res = Resource(self.url, 'http-get:*:%s:*' % self.mimetype)
        try:
            res.size = self.location.getsize()
        except:
            res.size = 0
        self.item.res.append(res)
        try:
            # FIXME: getmtime is deprecated in Twisted 2.6
            self.item.date = datetime.fromtimestamp(self.location.getmtime())
        except:
            self.item.date = None
        self.parent.update_id += 1

    def check_for_cover_art(self):
        """ let's try to find in the current directory some jpg file,
            or png if the jpg search fails, and take the first one
            that comes around
        """
        try:
            jpgs = [i.path for i in self.location.children() if i.splitext()[1] in ('.jpg', '.JPG')]
            try:
                self.cover = jpgs[0]
            except IndexError:
                pngs = [i.path for i in self.location.children() if i.splitext()[1] in ('.png', '.PNG')]
                try:
                    self.cover = pngs[0]
                except IndexError:
                    return
        except UnicodeDecodeError:
            self.warning("UnicodeDecodeError - there is something wrong with a file located in %r", self.location.path)

    def remove(self):
        # Detach from the parent and drop the DIDL item.
        #print "FSItem remove", self.id, self.get_name(), self.parent
        if self.parent:
            self.parent.remove_child(self)
        del self.item

    def add_child(self, child, update=False):
        self.children.append(child)
        self.child_count += 1
        if isinstance(self.item, Container):
            self.item.childCount += 1
        if update == True:
            self.update_id += 1

    def remove_child(self, child):
        #print "remove_from %d (%s) child %d (%s)" % (self.id, self.get_name(), child.id, child.get_name())
        if child in self.children:
            self.child_count -= 1
            if isinstance(self.item, Container):
                self.item.childCount -= 1
            self.children.remove(child)
            self.update_id += 1

    def get_children(self,start=0,request_count=0):
        # NOTE(review): request_count is used as an absolute end index, not
        # a count — confirm callers expect slice semantics.
        if request_count == 0:
            return self.children[start:]
        else:
            return self.children[start:request_count]

    def get_child_count(self):
        return self.child_count

    def get_id(self):
        return self.id

    def get_update_id(self):
        # Only containers carry an update_id; leaf items return None.
        if hasattr(self, 'update_id'):
            return self.update_id
        else:
            return None

    def get_path(self):
        if isinstance( self.location,FilePath):
            return self.location.path
        else:
            # NOTE(review): missing 'return' — this branch evaluates the
            # attribute and returns None.
            self.location

    def set_path(self,path=None,extension=None):
        # Optionally swap the file extension before storing the new path.
        if path is None:
            path = self.get_path()
        if extension is not None:
            path,old_ext = os.path.splitext(path)
            path = ''.join((path,extension))
        if isinstance( self.location,FilePath):
            self.location = FilePath(path)
        else:
            self.location = path

    def get_name(self):
        if isinstance( self.location,FilePath):
            name = self.location.basename().decode("utf-8", "replace")
        else:
            name = self.location.decode("utf-8", "replace")
        return name

    def get_cover(self):
        # Fall back to the parent's cover, then to the empty string.
        try:
            return self.cover
        except:
            try:
                return self.parent.cover
            except:
                return ''

    def get_parent(self):
        return self.parent

    def get_item(self):
        return self.item

    def get_xml(self):
        return self.item.toString()

    def __repr__(self):
        return 'id: ' + str(self.id) + ' @ ' + self.get_name().encode('ascii','xmlcharrefreplace')
class DirectoryDeadProperties(object): """ An implementation of a DeadPropertyStore (i.e. store for persistent properties). We store the dead properties in a directory tree _parallel_ to the resource tree. The path to a resource's metadata relative to the root of the metadata directory tree is the same as the path of the resource content relative to the repository root. Every resource's metadata is represented as a directory: every metadata entry is represented as a file pickle in that directory. """ implements(IDeadPropertyStore) def __init__(self, _resource): self.resource = _resource self.metadataPath = FilePath(metadata + os.sep + self.resource.relativePath()) self.__sanitize() def _fileNameFor(self, qname): """ @return: a file name for a property of this resource """ return self.metadataPath.path + os.sep + qname[1] def __sanitize(self): """ The problem we're trying to address here (rather than simply putting it into the constructor) is that seemingly, in twisted, an object's method may be called _before_ the constructor has returned (if the constructor is doing some non-blocking shizzle, that is). So we provide a separate file system sanity check which we call before every call to this that hits the file system, to e.g. ensure that the containing directory exists. """ # perform sanity check: if os.path.exists(self.metadataPath.path): assert self.metadataPath.isdir(), \ "metadata store must be a directory: " + self.metaDataPath.path else: log.info("Creating metadata store for: %s", self.metadataPath.path) os.mkdir(self.metadataPath.path) def get(self, qname): """ @param qname (see twisted.web2.dav.davxml) of the property to look for. 
""" self.__sanitize() obj = None try: obj = cPickle.load(open(self._fileNameFor(qname))) except (IOError, EOFError, cPickle.PickleError): return None return obj def set(self, property): """ @param property -- an instance of twisted.web2.dav.davxml.WebDAVElement """ self.__sanitize() transaction = SingleFileTransaction() try: f = transaction.open(self._fileNameFor(property.qname()), 'wb') cPickle.dump(property, f) f.close() except cPickle.PickleError, e: transaction.cleanup() log.warn("A problem occured while saving property %s:", property.qname(), exc_info = e) raise else:
class DirDBM:
    """
    A directory with a DBM interface.

    This class presents a hash-like interface to a directory of small,
    flat files. It can only use strings as keys or values.
    """

    def __init__(self, name):
        """
        @type name: str
        @param name: Base path to use for the directory storage.
        """
        self.dname = os.path.abspath(name)
        self._dnamePath = FilePath(name)
        if not self._dnamePath.isdir():
            self._dnamePath.createDirectory()
        else:
            # Run recovery, in case we crashed. we delete all files ending
            # with ".new". Then we find all files who end with ".rpl". If a
            # corresponding file exists without ".rpl", we assume the write
            # failed and delete the ".rpl" file. If only a ".rpl" exist we
            # assume the program crashed right after deleting the old entry
            # but before renaming the replacement entry.
            #
            # NOTE: '.' is NOT in the base64 alphabet!
            for f in glob.glob(self._dnamePath.child("*.new").path):
                os.remove(f)
            replacements = glob.glob(self._dnamePath.child("*.rpl").path)
            for f in replacements:
                old = f[:-4]
                if os.path.exists(old):
                    os.remove(f)
                else:
                    os.rename(f, old)

    def _encode(self, k):
        """
        Encode a key so it can be used as a filename.
        """
        # NOTE: '_' is NOT in the base64 alphabet!
        return base64.encodestring(k).replace(b'\n', b'_').replace(b"/", b"-")

    def _decode(self, k):
        """
        Decode a filename to get the key.
        """
        return base64.decodestring(k.replace(b'_', b'\n').replace(b"-", b"/"))

    def _readFile(self, path):
        """
        Read in the contents of a file.

        Override in subclasses to e.g. provide transparently
        encrypted dirdbm.
        """
        with _open(path.path, "rb") as f:
            s = f.read()
        return s

    def _writeFile(self, path, data):
        """
        Write data to a file.

        Override in subclasses to e.g. provide transparently
        encrypted dirdbm.
        """
        with _open(path.path, "wb") as f:
            f.write(data)
            f.flush()

    def __len__(self):
        """
        @return: The number of key/value pairs in this Shelf
        """
        return len(self._dnamePath.listdir())

    def __setitem__(self, k, v):
        """
        C{dirdbm[k] = v}
        Create or modify a textfile in this directory

        @type k: bytes
        @param k: key to set

        @type v: bytes
        @param v: value to associate with C{k}
        """
        if not type(k) == bytes:
            raise TypeError("DirDBM key must be bytes")
        if not type(v) == bytes:
            raise TypeError("DirDBM value must be bytes")
        k = self._encode(k)

        # We create a new file with extension .new, write the data to it, and
        # if the write succeeds delete the old file and rename the new one.
        old = self._dnamePath.child(k)
        if old.exists():
            new = old.siblingExtension(".rpl")  # Replacement entry
        else:
            new = old.siblingExtension(".new")  # New entry
        try:
            self._writeFile(new, v)
        except:
            new.remove()
            raise
        else:
            if (old.exists()):
                old.remove()
            new.moveTo(old)

    def __getitem__(self, k):
        """
        C{dirdbm[k]}
        Get the contents of a file in this directory as a string.

        @type k: bytes
        @param k: key to lookup

        @return: The value associated with C{k}
        @raise KeyError: Raised when there is no such key
        """
        if not type(k) == bytes:
            raise TypeError("DirDBM key must be bytes")
        path = self._dnamePath.child(self._encode(k))
        try:
            return self._readFile(path)
        except (EnvironmentError):
            raise KeyError(k)

    def __delitem__(self, k):
        """
        C{del dirdbm[foo]}
        Delete a file in this directory.

        @type k: bytes
        @param k: key to delete

        @raise KeyError: Raised when there is no such key
        """
        if not type(k) == bytes:
            raise TypeError("DirDBM key must be bytes")
        k = self._encode(k)
        try:
            self._dnamePath.child(k).remove()
        except (EnvironmentError):
            raise KeyError(self._decode(k))

    def keys(self):
        """
        @return: a L{list} of filenames (keys).
        """
        return list(map(self._decode, self._dnamePath.asBytesMode().listdir()))

    def values(self):
        """
        @return: a L{list} of file-contents (values).
        """
        vals = []
        keys = self.keys()
        for key in keys:
            vals.append(self[key])
        return vals

    def items(self):
        """
        @return: a L{list} of 2-tuples containing key/value pairs.
        """
        items = []
        keys = self.keys()
        for key in keys:
            items.append((key, self[key]))
        return items

    def has_key(self, key):
        """
        @type key: bytes
        @param key: The key to test

        @return: A true value if this dirdbm has the specified key, a false
            value otherwise.
        """
        if not type(key) == bytes:
            raise TypeError("DirDBM key must be bytes")
        key = self._encode(key)
        return self._dnamePath.child(key).isfile()

    def setdefault(self, key, value):
        """
        @type key: bytes
        @param key: The key to lookup

        @param value: The value to associate with key if key is not already
            associated with a value.
        """
        if key not in self:
            self[key] = value
            return value
        return self[key]

    def get(self, key, default=None):
        """
        @type key: bytes
        @param key: The key to lookup

        @param default: The value to return if the given key does not exist

        @return: The value associated with C{key} or C{default} if not
            L{DirDBM.has_key(key)}
        """
        if key in self:
            return self[key]
        else:
            return default

    def __contains__(self, key):
        """
        @see: L{DirDBM.has_key}
        """
        return self.has_key(key)

    def update(self, dict):
        """
        Add all the key/value pairs in L{dict} to this dirdbm. Any conflicting
        keys will be overwritten with the values from L{dict}.

        @type dict: mapping
        @param dict: A mapping of key/value pairs to add to this dirdbm.
        """
        for key, val in dict.items():
            self[key] = val

    def copyTo(self, path):
        """
        Copy the contents of this dirdbm to the dirdbm at C{path}.

        @type path: L{str}
        @param path: The path of the dirdbm to copy to. If a dirdbm exists
            at the destination path, it is cleared first.

        @rtype: C{DirDBM}
        @return: The dirdbm this dirdbm was copied to.
        """
        path = FilePath(path)
        assert path != self._dnamePath

        d = self.__class__(path.path)
        d.clear()
        for k in self.keys():
            d[k] = self[k]
        return d

    def clear(self):
        """
        Delete all key/value pairs in this dirdbm.
        """
        for k in self.keys():
            del self[k]

    def close(self):
        """
        Close this dbm: no-op, for dbm-style interface compliance.
        """

    def getModificationTime(self, key):
        """
        Returns modification time of an entry.

        @return: Last modification date (seconds since epoch) of entry C{key}
        @raise KeyError: Raised when there is no such key
        """
        if not type(key) == bytes:
            raise TypeError("DirDBM key must be bytes")
        path = self._dnamePath.child(self._encode(key))
        if path.isfile():
            return path.getModificationTime()
        else:
            raise KeyError(key)
d = defer.succeed(None) d.addCallback(lambda ign: activate_subscribed_service(customer_email, customer_pgpinfo, customer_id, subscription_id, plan_id, signup_stdout, signup_stderr, SSEC2_secretsfile, signup_log_fp.path) ) d.addErrback(errhandler) d.addBoth(lambda ign: signup_logfile.close()) if __name__ == '__main__': defer.setDebugging(True) stdin = sys.stdin logDir = FilePath('../secrets/flappserver_logs') if not logDir.isdir(): make_dirs(logDir.path) flapp_stdout = logDir.child('stdout') flapp_stderr = logDir.child('stderr') d = defer.succeed(None) d.addCallback(lambda ign: main(stdin, flapp_stdout, flapp_stderr)) def _print_except(f): fh = flapp_stderr.open('a+') f.print_stack(file=fh) fh.close() d.addErrback(_print_except) d.addCallbacks(lambda ign: sys.exit(0), lambda ign: sys.exit(8)) try: reactor.run()
def logsCoerce(directory):
    """
    Coerce *directory* into a ``FilePath``, requiring that it name an
    existing directory.

    :raise ValueError: if the path is not a directory.
    """
    result = FilePath(directory)
    if result.isdir():
        return result
    raise ValueError("%r is not a directory" % (result.path, ))
class FSItem(BackendItem):
    """
    A filesystem-backed UPnP/DLNA backend item.

    Wraps a path (usually a L{FilePath}) plus the UPnP DIDL item built for
    it, manages child bookkeeping for containers, and attaches the
    C{Resource} entries (direct, internal, transcoded, thumbnail, subtitle)
    that clients use to fetch the content.
    """
    logCategory = 'fs_item'

    def __init__(
            self,
            object_id,
            parent,
            path,
            mimetype,
            urlbase,
            UPnPClass,
            update=False,
            store=None,
    ):
        """
        @param object_id: id assigned by the store for this item.
        @param parent: parent FSItem (or None for the root).
        @param path: filesystem path backing this item; may be None for
            mimetype 'item' (then derived from the parent's real path).
        @param mimetype: mimetype string, or the special values 'root',
            'directory' or 'item'.
        @param urlbase: base URL under which this item is served.
        @param UPnPClass: DIDL class used to build self.item.
        @param update: if true, bump the parent's update_id on insertion.
        @param store: the owning backend store.
        """
        BackendItem.__init__(self)
        self.id = object_id
        self.parent = parent
        if parent:
            parent.add_child(self, update=update)
        if mimetype == 'root':
            # The root has no real backing path; keep the plain string.
            self.location = str(path)
        else:
            if mimetype == 'item' and path is None:
                path = os.path.join(parent.get_realpath(), str(self.id))
            # self.location = FilePath(unicode(path))
            self.location = FilePath(path)
            self.mimetype = mimetype
            # NOTE(review): self.mimetype is only assigned in this branch,
            # yet get_path()/rebuild() read it unconditionally -- a 'root'
            # item would raise AttributeError there. Confirm intended.
        if urlbase[-1] != '/':
            urlbase += '/'
        self.url = urlbase + str(self.id)

        self.store = store

        if parent is None:
            parent_id = -1
        else:
            parent_id = parent.get_id()

        self.item = UPnPClass(object_id, parent_id, self.get_name())
        if isinstance(self.item, Container):
            self.item.childCount = 0
        self.child_count = 0
        self.children = []
        self.sorted = False
        self.caption = None

        if mimetype in ['directory', 'root']:
            self.update_id = 0
            self.get_url = lambda: self.url
            # self.item.searchable = True
            # self.item.searchClass = 'object'
            if (isinstance(self.location, FilePath) and
                    self.location.isdir() is True):
                self.check_for_cover_art()
                if getattr(self, 'cover', None):
                    _, ext = os.path.splitext(self.cover)
                    ''' add the cover image extension to help clients
                        not reacting on the mimetype '''
                    self.item.albumArtURI = ''.join(
                        (urlbase, str(self.id), '?cover', str(ext)))
        else:
            self.get_url = lambda: self.url

            if self.mimetype.startswith('audio/'):
                # Audio tracks inherit their directory's cover art, if any.
                if getattr(parent, 'cover', None):
                    _, ext = os.path.splitext(parent.cover)
                    ''' add the cover image extension to help clients
                        not reacting on the mimetype '''
                    self.item.albumArtURI = ''.join(
                        (urlbase, str(self.id), '?cover', ext))

            _, host_port, _, _, _ = urlsplit(urlbase)
            if host_port.find(':') != -1:
                host, port = tuple(host_port.split(':'))
            else:
                host = host_port

            try:
                size = self.location.getsize()
            except Exception:
                # Size is advisory only; fall back to 0 on any stat failure.
                size = 0

            if (self.store.server and
                    self.store.server.coherence.config.get(
                        'transcoding', 'no') == 'yes'):
                if self.mimetype in (
                        'application/ogg',
                        'audio/ogg',
                        'audio/x-wav',
                        'audio/x-m4a',
                        'application/x-flac',
                ):
                    # Transcoded-to-mp3 resource; currently built but not
                    # appended (see commented line below).
                    new_res = Resource(
                        self.url + '/transcoded.mp3',
                        f'http-get:*:{"audio/mpeg"}:*',
                    )
                    new_res.size = None
                    # self.item.res.append(new_res)

            if mimetype != 'item':
                # 'internal' resource: direct file:// access on the same host.
                res = Resource(
                    'file://' + quote(self.get_path(), encoding='utf-8'),
                    f'internal:{host}:{self.mimetype}:*',
                )
                res.size = size
                self.item.res.append(res)

            if mimetype != 'item':
                res = Resource(self.url, f'http-get:*:{self.mimetype}:*')
            else:
                res = Resource(self.url, 'http-get:*:*:*')
            res.size = size
            self.item.res.append(res)

            '''
            if this item is of type audio and we want to add a transcoding
            rule for it, this is the way to do it:

            create a new Resource object, at least a 'http-get'
            and maybe an 'internal' one too

            for transcoding to wav this looks like that

            res = Resource(
                url_for_transcoded audio,
                'http-get:*:audio/x-wav:%s'% ';'.join(
                    ['DLNA.ORG_PN=JPEG_TN']+simple_dlna_tags))
            res.size = None
            self.item.res.append(res)
            '''

            if (self.store.server and
                    self.store.server.coherence.config.get(
                        'transcoding', 'no') == 'yes'):
                if self.mimetype in (
                        'audio/mpeg',
                        'application/ogg',
                        'audio/ogg',
                        'audio/x-wav',
                        'audio/x-m4a',
                        'audio/flac',
                        'application/x-flac',
                ):
                    # LPCM transcode resource (disabled: not appended).
                    dlna_pn = 'DLNA.ORG_PN=LPCM'
                    dlna_tags = simple_dlna_tags[:]
                    # dlna_tags[1] = 'DLNA.ORG_OP=00'
                    dlna_tags[2] = 'DLNA.ORG_CI=1'
                    new_res = Resource(
                        self.url + '?transcoded=lpcm',
                        f'http-get:*:{"audio/L16;rate=44100;channels=2"}:'
                        f'{";".join([dlna_pn] + dlna_tags)}',
                    )
                    new_res.size = None
                    # self.item.res.append(new_res)

                    if self.mimetype != 'audio/mpeg':
                        # mp3 transcode resource (disabled: not appended).
                        new_res = Resource(
                            self.url + '?transcoded=mp3',
                            f'http-get:*:{"audio/mpeg"}:*',
                        )
                        new_res.size = None
                        # self.item.res.append(new_res)

            '''
            if this item is an image and we want to add a thumbnail for it
            we have to follow these rules:

            create a new Resource object, at least a 'http-get'
            and maybe an 'internal' one too

            for an JPG this looks like that

            res = Resource(url_for_thumbnail,
                'http-get:*:image/jpg:%s'% ';'.join(
                    ['DLNA.ORG_PN=JPEG_TN']+simple_dlna_tags))
            res.size = size_of_thumbnail
            self.item.res.append(res)

            and for a PNG the Resource creation is like that

            res = Resource(url_for_thumbnail,
                'http-get:*:image/png:%s'% ';'.join(
                    simple_dlna_tags+['DLNA.ORG_PN=PNG_TN']))

            if not hasattr(self.item, 'attachments'):
                self.item.attachments = {}
            self.item.attachments[key] = utils.StaticFile(
                filename_of_thumbnail)
            '''

            if (self.mimetype in ('image/jpeg', 'image/png') or
                    self.mimetype.startswith('video/')):
                try:
                    filename, mimetype, dlna_pn = _find_thumbnail(
                        self.get_path())
                except NoThumbnailFound:
                    # No thumbnail is fine; simply skip the extra resource.
                    pass
                except Exception:
                    self.warning(traceback.format_exc())
                else:
                    dlna_tags = simple_dlna_tags[:]
                    dlna_tags[
                        3] = 'DLNA.ORG_FLAGS=00f00000000000000000000000000000'
                    # id() of the filename string serves as an opaque,
                    # per-process attachment key.
                    hash_from_path = str(id(filename))
                    new_res = Resource(
                        self.url + '?attachment=' + hash_from_path,
                        f'http-get:*:{mimetype}:'
                        f'{";".join([dlna_pn] + dlna_tags)}',
                    )
                    new_res.size = os.path.getsize(filename)
                    self.item.res.append(new_res)
                    if not hasattr(self.item, 'attachments'):
                        self.item.attachments = {}
                    self.item.attachments[hash_from_path] = utils.StaticFile(
                        filename)

            if self.mimetype.startswith('video/'):
                # check for a subtitles file
                caption, _ = os.path.splitext(self.get_path())
                caption = caption + '.srt'
                if os.path.exists(caption):
                    hash_from_path = str(id(caption))
                    mimetype = 'smi/caption'
                    new_res = Resource(
                        self.url + '?attachment=' + hash_from_path,
                        f'http-get:*:{mimetype}:{"*"}',
                    )
                    new_res.size = os.path.getsize(caption)
                    # NOTE(review): reads new_res.data, which is not set
                    # above -- presumably provided by Resource itself;
                    # confirm against the Resource class.
                    self.caption = new_res.data
                    self.item.res.append(new_res)
                    if not hasattr(self.item, 'attachments'):
                        self.item.attachments = {}
                    self.item.attachments[hash_from_path] = utils.StaticFile(
                        caption,
                        defaultType=mimetype,
                    )

        try:
            # FIXME: getmtime is deprecated in Twisted 2.6
            self.item.date = datetime.fromtimestamp(
                self.location.getmtime())
        except Exception:
            self.item.date = None

    def rebuild(self, urlbase):
        """
        Re-derive the mimetype of a generic 'item' from its path and
        rebuild self.item and its resources accordingly.
        """
        # print('rebuild', self.mimetype)
        if self.mimetype != 'item':
            return
        # print('rebuild for', self.get_path())
        mimetype, _ = mimetypes.guess_type(self.get_path(), strict=False)
        if mimetype is None:
            return
        self.mimetype = mimetype
        # print('rebuild', self.mimetype)
        UPnPClass = classChooser(self.mimetype)
        self.item = UPnPClass(self.id, self.parent.id, self.get_name())
        if getattr(self.parent, 'cover', None):
            _, ext = os.path.splitext(self.parent.cover)
            # add the cover image extension to help
            # clients not reacting on the mimetype
            self.item.albumArtURI = ''.join(
                (urlbase, str(self.id), '?cover', ext))
        _, host_port, _, _, _ = urlsplit(urlbase)
        if host_port.find(':') != -1:
            host, port = tuple(host_port.split(':'))
        else:
            host = host_port
        res = Resource(
            'file://' + quote(self.get_path()),
            f'internal:{host}:{self.mimetype}:*',
        )
        try:
            res.size = self.location.getsize()
        except Exception:
            res.size = 0
        self.item.res.append(res)
        res = Resource(self.url, f'http-get:*:{self.mimetype}:*')
        try:
            res.size = self.location.getsize()
        except Exception:
            res.size = 0
        self.item.res.append(res)
        try:
            # FIXME: getmtime is deprecated in Twisted 2.6
            self.item.date = datetime.fromtimestamp(self.location.getmtime())
        except Exception:
            self.item.date = None
        # Signal content change to subscribed control points.
        self.parent.update_id += 1

    def check_for_cover_art(self):
        '''
        let's try to find in the current directory some jpg file,
        or png if the jpg search fails, and take the first one
        that comes around
        '''
        try:
            jpgs = [
                i.path for i in self.location.children()
                if i.splitext()[1] in ('.jpg', '.JPG')
            ]
            try:
                self.cover = jpgs[0]
            except IndexError:
                pngs = [
                    i.path for i in self.location.children()
                    if i.splitext()[1] in ('.png', '.PNG')
                ]
                try:
                    self.cover = pngs[0]
                except IndexError:
                    return
        except UnicodeDecodeError:
            self.warning(
                f'UnicodeDecodeError - there is something wrong with a ' +
                f'file located in {self.location.path}')

    def remove(self):
        """Detach this item from its parent and drop its DIDL item."""
        # print('FSItem remove', self.id, self.get_name(), self.parent)
        if self.parent:
            self.parent.remove_child(self)
        del self.item

    def add_child(self, child, update=False):
        """
        Register a child item; bumps child counters and, when C{update} is
        true, this container's update_id.
        """
        self.children.append(child)
        self.child_count += 1
        if isinstance(self.item, Container):
            self.item.childCount += 1
        if update:
            self.update_id += 1
        # New child invalidates the sorted order.
        self.sorted = False

    def remove_child(self, child):
        """Unregister a child item and bump this container's update_id."""
        # print(f'remove_from {self.id:d} ({self.get_name()}) '
        #       f'child {child.id:d} ({child.get_name()})')
        if child in self.children:
            self.child_count -= 1
            if isinstance(self.item, Container):
                self.item.childCount -= 1
            self.children.remove(child)
            self.update_id += 1
        self.sorted = False

    def get_children(self, start=0, request_count=0):
        """
        Return children from index C{start}; C{request_count} of 0 means
        "all remaining".  Children are lazily sorted on first access.
        """
        if not self.sorted:
            self.children.sort(key=_natural_key)
            self.sorted = True
        if request_count == 0:
            return self.children[start:]
        else:
            # NOTE(review): slice end is request_count, not
            # start + request_count -- looks like an upstream quirk; confirm.
            return self.children[start:request_count]

    def get_child_count(self):
        """Return the number of registered children."""
        return self.child_count

    def get_id(self):
        """Return this item's object id."""
        return self.id

    def get_update_id(self):
        """Return the container update id, or None for non-containers."""
        if hasattr(self, 'update_id'):
            return self.update_id
        else:
            return None

    def get_path(self):
        """Return the backing filesystem path, or None for containers."""
        if self.mimetype in ['directory', 'root']:
            return None
        if isinstance(self.location, FilePath):
            return self.location.path
        else:
            return self.location

    def get_realpath(self):
        """Return the backing path regardless of mimetype."""
        if isinstance(self.location, FilePath):
            return self.location.path
        else:
            return self.location

    def set_path(self, path=None, extension=None):
        """
        Re-point this item at C{path} (default: current path), optionally
        swapping the file extension for C{extension}.
        """
        if path is None:
            path = self.get_path()
        if extension is not None:
            path, old_ext = os.path.splitext(path)
            path = ''.join((path, extension))
        if isinstance(self.location, FilePath):
            self.location = FilePath(path)
        else:
            self.location = path

    def get_name(self):
        """Return the display name (basename of the backing path)."""
        if isinstance(self.location, FilePath):
            name = self.location.basename()
        else:
            name = self.location
        return name

    def get_cover(self):
        """Return this item's cover art path, falling back to the parent's."""
        if self.cover:
            return self.cover
        try:
            return self.parent.cover
        except AttributeError:
            return None

    def get_parent(self):
        """Return the parent FSItem (or None)."""
        return self.parent

    def get_item(self):
        """Return the wrapped UPnP DIDL item."""
        return self.item

    def get_xml(self):
        """Return the DIDL item serialized to XML."""
        return self.item.toString()

    def __repr__(self):
        return ('id: ' + str(self.id) + ' @ ' +
                str(self.get_name().encode('ascii', 'xmlcharrefreplace')))
class DirDBM:
    """
    A directory with a DBM interface.

    This class presents a hash-like interface to a directory of small,
    flat files.  Keys and values must both be L{bytes}; keys are base64
    encoded (with filesystem-safe substitutions) to form filenames.
    """

    def __init__(self, name):
        """
        @type name: str
        @param name: Base path to use for the directory storage.
        """
        self.dname = os.path.abspath(name)
        self._dnamePath = FilePath(name)
        if not self._dnamePath.isdir():
            self._dnamePath.createDirectory()
        else:
            # Run recovery, in case we crashed. we delete all files ending
            # with ".new". Then we find all files who end with ".rpl". If a
            # corresponding file exists without ".rpl", we assume the write
            # failed and delete the ".rpl" file. If only a ".rpl" exist we
            # assume the program crashed right after deleting the old entry
            # but before renaming the replacement entry.
            #
            # NOTE: '.' is NOT in the base64 alphabet!
            for f in glob.glob(self._dnamePath.child("*.new").path):
                os.remove(f)

            replacements = glob.glob(self._dnamePath.child("*.rpl").path)
            for f in replacements:
                old = f[:-4]
                if os.path.exists(old):
                    os.remove(f)
                else:
                    os.rename(f, old)

    def _encode(self, k):
        """
        Encode a key so it can be used as a filename.
        """
        # NOTE: '_' is NOT in the base64 alphabet!
        # base64.encodestring() was removed in Python 3.9;
        # base64.encodebytes() is the drop-in replacement with
        # byte-identical output.
        return base64.encodebytes(k).replace(b'\n', b'_').replace(b"/", b"-")

    def _decode(self, k):
        """
        Decode a filename to get the key.
        """
        # Inverse of _encode(); decodestring() was likewise removed in 3.9.
        return base64.decodebytes(k.replace(b'_', b'\n').replace(b"-", b"/"))

    def _readFile(self, path):
        """
        Read in the contents of a file.

        Override in subclasses to e.g. provide transparently encrypted
        dirdbm.
        """
        with _open(path.path, "rb") as f:
            s = f.read()
        return s

    def _writeFile(self, path, data):
        """
        Write data to a file.

        Override in subclasses to e.g. provide transparently encrypted
        dirdbm.
        """
        with _open(path.path, "wb") as f:
            f.write(data)
            f.flush()

    def __len__(self):
        """
        @return: The number of key/value pairs in this Shelf
        """
        return len(self._dnamePath.listdir())

    def __setitem__(self, k, v):
        """
        C{dirdbm[k] = v}
        Create or modify a textfile in this directory

        @type k: bytes
        @param k: key to set

        @type v: bytes
        @param v: value to associate with C{k}
        """
        if not type(k) == bytes:
            raise TypeError("DirDBM key must be bytes")
        if not type(v) == bytes:
            raise TypeError("DirDBM value must be bytes")
        k = self._encode(k)

        # We create a new file with extension .new, write the data to it, and
        # if the write succeeds delete the old file and rename the new one.
        old = self._dnamePath.child(k)
        if old.exists():
            new = old.siblingExtension(".rpl")  # Replacement entry
        else:
            new = old.siblingExtension(".new")  # New entry
        try:
            self._writeFile(new, v)
        except BaseException:
            # Roll back the partial temp file on any failure, then re-raise.
            # (Explicit BaseException keeps the original bare-except
            # semantics without hiding the catch-all.)
            new.remove()
            raise
        else:
            if (old.exists()):
                old.remove()
            new.moveTo(old)

    def __getitem__(self, k):
        """
        C{dirdbm[k]}
        Get the contents of a file in this directory as a string.

        @type k: bytes
        @param k: key to lookup

        @return: The value associated with C{k}
        @raise KeyError: Raised when there is no such key
        """
        if not type(k) == bytes:
            raise TypeError("DirDBM key must be bytes")
        path = self._dnamePath.child(self._encode(k))
        try:
            return self._readFile(path)
        except (EnvironmentError):
            raise KeyError(k)

    def __delitem__(self, k):
        """
        C{del dirdbm[foo]}
        Delete a file in this directory.

        @type k: bytes
        @param k: key to delete

        @raise KeyError: Raised when there is no such key
        """
        if not type(k) == bytes:
            raise TypeError("DirDBM key must be bytes")
        k = self._encode(k)
        try:
            self._dnamePath.child(k).remove()
        except (EnvironmentError):
            raise KeyError(self._decode(k))

    def keys(self):
        """
        @return: a L{list} of filenames (keys).
        """
        return list(map(self._decode, self._dnamePath.asBytesMode().listdir()))

    def values(self):
        """
        @return: a L{list} of file-contents (values).
        """
        vals = []
        keys = self.keys()
        for key in keys:
            vals.append(self[key])
        return vals

    def items(self):
        """
        @return: a L{list} of 2-tuples containing key/value pairs.
        """
        items = []
        keys = self.keys()
        for key in keys:
            items.append((key, self[key]))
        return items

    def has_key(self, key):
        """
        @type key: bytes
        @param key: The key to test

        @return: A true value if this dirdbm has the specified key, a false
            value otherwise.
        """
        if not type(key) == bytes:
            raise TypeError("DirDBM key must be bytes")
        key = self._encode(key)
        return self._dnamePath.child(key).isfile()

    def setdefault(self, key, value):
        """
        @type key: bytes
        @param key: The key to lookup

        @param value: The value to associate with key if key is not already
            associated with a value.
        """
        if key not in self:
            self[key] = value
            return value
        return self[key]

    def get(self, key, default=None):
        """
        @type key: bytes
        @param key: The key to lookup

        @param default: The value to return if the given key does not exist

        @return: The value associated with C{key} or C{default} if not
            L{DirDBM.has_key(key)}
        """
        if key in self:
            return self[key]
        else:
            return default

    def __contains__(self, key):
        """
        @see: L{DirDBM.has_key}
        """
        return self.has_key(key)

    def update(self, dict):
        """
        Add all the key/value pairs in L{dict} to this dirdbm.  Any
        conflicting keys will be overwritten with the values from L{dict}.

        @type dict: mapping
        @param dict: A mapping of key/value pairs to add to this dirdbm.
        """
        for key, val in dict.items():
            self[key] = val

    def copyTo(self, path):
        """
        Copy the contents of this dirdbm to the dirdbm at C{path}.

        @type path: L{str}
        @param path: The path of the dirdbm to copy to.  If a dirdbm
            exists at the destination path, it is cleared first.

        @rtype: C{DirDBM}
        @return: The dirdbm this dirdbm was copied to.
        """
        path = FilePath(path)
        assert path != self._dnamePath

        d = self.__class__(path.path)
        d.clear()
        for k in self.keys():
            d[k] = self[k]
        return d

    def clear(self):
        """
        Delete all key/value pairs in this dirdbm.
        """
        for k in self.keys():
            del self[k]

    def close(self):
        """
        Close this dbm: no-op, for dbm-style interface compliance.
        """

    def getModificationTime(self, key):
        """
        Returns modification time of an entry.

        @return: Last modification date (seconds since epoch) of entry C{key}
        @raise KeyError: Raised when there is no such key
        """
        if not type(key) == bytes:
            raise TypeError("DirDBM key must be bytes")
        path = self._dnamePath.child(self._encode(key))
        if path.isfile():
            return path.getModificationTime()
        else:
            raise KeyError(key)
    def makeService(self, options):
        """
        Build the top-level 'txconnect' MultiService from tap options.

        Creates the data directory if needed, points the database
        environment variables at it, initializes the db thread, then wires
        every subsystem (config, memory, download queue, share store,
        search/hub/peer herders, traffic logger, web service, ext utils,
        optional manhole, plugins) into one service collection registered
        on a component locator.

        @param options: parsed tap options; options['data'] is the data
            directory path (~ is expanded).
        @return: the configured L{service.MultiService}.
        """
        dataDir = FilePath(os.path.expanduser(options['data']))
        if not dataDir.isdir():
            dataDir.makedirs()
        # The django/trafficlog layers read their sqlite paths from the
        # environment, so these must be set before importing dbthread.
        os.environ['TXCONNECT_DATABASE_NAME'] = dataDir.child('dj.sqlite3').path
        os.environ['TXCONNECT_TRAFFICLOG_DATABASE_NAME'] = dataDir.child('trafficlog.sqlite3').path

        from txconnect import dbthread
        dbthread.setup()

        # have to import these after dbthread is setup
        from txconnect.directconnect import search, hub_herder, peer_herder, interfaces
        from txconnect import plugin_service, queuestore, sharestore, memory, web_service, config
        from txconnect import downloader_manager, trafficlog, extutils

        # The locator doubles as a service registry: each subsystem is
        # registered under its interface so others can look it up.
        locator = components.ReprableComponentized()

        txconnectService = service.MultiService()
        txconnectService.name = 'txconnect'
        locator.setComponent(service.IServiceCollection, txconnectService)

        configObj = config.Config(dataDir)
        locator.setComponent(interfaces.IConfig, configObj)

        memoryService = memory.MemoryService(locator)
        memoryService.setServiceParent(txconnectService)
        locator.setComponent(interfaces.IMessageMemory, memoryService)

        # Download queue is a plain component (not a child service).
        downloadQueue = queuestore.QueueStore(locator)
        locator.setComponent(interfaces.IDownloadQueue, downloadQueue)

        downloaderManager = downloader_manager.DownloaderManager(locator)
        downloaderManager.setServiceParent(txconnectService)
        locator.setComponent(interfaces.IDownloaderManager, downloaderManager)

        fileSource = sharestore.ShareStore(locator)
        fileSource.setServiceParent(txconnectService)
        locator.setComponent(interfaces.IFileSource, fileSource)

        searchHerder = search.SearchHerder(locator)
        searchHerder.setServiceParent(txconnectService)
        locator.setComponent(interfaces.ISearchHerder, searchHerder)

        hubHerder = hub_herder.HubHerder(locator)
        hubHerder.setServiceParent(txconnectService)
        locator.setComponent(interfaces.IHubHerder, hubHerder)

        peerHerder = peer_herder.PeerHerder(locator)
        peerHerder.setServiceParent(txconnectService)
        locator.setComponent(interfaces.IPeerHerder, peerHerder)

        trafficLogger = trafficlog.TrafficLogger(locator)
        trafficLogger.setServiceParent(txconnectService)

        webService = web_service.make_service(locator)
        webService.setServiceParent(txconnectService)

        # One service provides both hashing and directory listing.
        extUtils = extutils.ExtUtilsService()
        extUtils.setServiceParent(txconnectService)
        locator.setComponent(interfaces.IHasher, extUtils)
        locator.setComponent(interfaces.IDirLister, extUtils)

        # Optional telnet manhole, enabled by a [manhole] port in the config.
        # NOTE(review): has_key() suggests a py2-era mapping API on
        # configObj -- confirm before modernizing to `in`.
        if configObj.has_key('manhole') and configObj['manhole'].has_key('port'):
            from twisted.conch import manhole_tap
            manholeService = manhole_tap.makeService({
                'telnetPort': str(configObj['manhole']['port']),
                'namespace': {'locator': locator},
                'passwd': 'passwd',
                'sshPort': None,
            })
            manholeService.setName('manhole')
            manholeService.setServiceParent(txconnectService)

        plugins = plugin_service.PluginService(locator)
        plugins.setServiceParent(txconnectService)

        return txconnectService
class DropUploader(service.MultiService):
    """
    Watch a local directory with inotify and upload each file written
    there into a Tahoe directory (given as a writecap URI).

    Note: this variant is Python 2 era code (it references C{unicode}).
    """
    name = 'drop-upload'

    def __init__(self, client, upload_dircap, local_dir_utf8, inotify=None):
        """
        @param client: the Tahoe client node; supplies stats, convergence
            secret and node creation.
        @param upload_dircap: writecap URI of the destination directory.
        @param local_dir_utf8: UTF-8 encoded path of the directory to watch.
        @param inotify: optional inotify module substitute (for tests);
            defaults to twisted.internet.inotify.
        @raise AssertionError: on any misconfiguration (bad encoding,
            missing directory, non-directory dircap, read-only dircap).
        """
        service.MultiService.__init__(self)
        try:
            local_dir_u = abspath_expanduser_unicode(local_dir_utf8.decode('utf-8'))
            if sys.platform == "win32":
                local_dir = local_dir_u
            else:
                # POSIX APIs here want the filesystem-encoded byte string.
                local_dir = local_dir_u.encode(get_filesystem_encoding())
        except (UnicodeEncodeError, UnicodeDecodeError):
            raise AssertionError("The '[drop_upload] local.directory' parameter %s was not valid UTF-8 or "
                                 "could not be represented in the filesystem encoding." % quote_output(local_dir_utf8))

        self._client = client
        self._stats_provider = client.stats_provider
        self._convergence = client.convergence
        self._local_path = FilePath(local_dir)
        if inotify is None:
            from twisted.internet import inotify
        self._inotify = inotify

        if not self._local_path.exists():
            raise AssertionError("The '[drop_upload] local.directory' parameter was %s but there is no directory at that location." % quote_output(local_dir_u))
        if not self._local_path.isdir():
            raise AssertionError("The '[drop_upload] local.directory' parameter was %s but the thing at that location is not a directory." % quote_output(local_dir_u))

        # TODO: allow a path rather than a cap URI.
        self._parent = self._client.create_node_from_uri(upload_dircap)
        if not IDirectoryNode.providedBy(self._parent):
            raise AssertionError("The '[drop_upload] upload.dircap' parameter does not refer to a directory.")
        if self._parent.is_unknown() or self._parent.is_readonly():
            raise AssertionError("The '[drop_upload] upload.dircap' parameter is not a writecap to a directory.")

        # No-op until set_uploaded_callback() replaces it.
        self._uploaded_callback = lambda ign: None

        self._notifier = inotify.INotify()
        # We don't watch for IN_CREATE, because that would cause us to read and upload a
        # possibly-incomplete file before the application has closed it. There should always
        # be an IN_CLOSE_WRITE after an IN_CREATE (I think).
        # TODO: what about IN_MOVE_SELF or IN_UNMOUNT?
        mask = inotify.IN_CLOSE_WRITE | inotify.IN_MOVED_TO | inotify.IN_ONLYDIR
        self._notifier.watch(self._local_path, mask=mask, callbacks=[self._notify])

    def startService(self):
        """Start the service and begin consuming inotify events."""
        service.MultiService.startService(self)
        d = self._notifier.startReading()
        self._stats_provider.count('drop_upload.dirs_monitored', 1)
        return d

    def _notify(self, opaque, path, events_mask):
        """inotify callback: log the event and queue an upload."""
        self._log("inotify event %r, %r, %r\n" % (opaque, path, ', '.join(self._inotify.humanReadableMask(events_mask))))
        self._stats_provider.count('drop_upload.files_queued', 1)
        # Defer actual processing out of the notification callback.
        eventually(self._process, opaque, path, events_mask)

    def _process(self, opaque, path, events_mask):
        """
        Upload the notified file into the target directory; update the
        queued/uploaded/failed/disappeared stats counters accordingly.
        """
        d = defer.succeed(None)

        # FIXME: if this already exists as a mutable file, we replace the directory entry,
        # but we should probably modify the file (as the SFTP frontend does).
        def _add_file(ign):
            name = path.basename()
            # on Windows the name is already Unicode
            if not isinstance(name, unicode):
                name = name.decode(get_filesystem_encoding())

            u = FileName(path.path, self._convergence)
            return self._parent.add_file(name, u)
        d.addCallback(_add_file)

        def _succeeded(ign):
            self._stats_provider.count('drop_upload.files_queued', -1)
            self._stats_provider.count('drop_upload.files_uploaded', 1)

        def _failed(f):
            self._stats_provider.count('drop_upload.files_queued', -1)
            if path.exists():
                # Real failure: the file is still there but didn't upload.
                self._log("drop-upload: %r failed to upload due to %r" % (path.path, f))
                self._stats_provider.count('drop_upload.files_failed', 1)
                return f
            else:
                # The file vanished before we could read it; treat as benign.
                self._log("drop-upload: notified file %r disappeared "
                          "(this is normal for temporary files): %r" % (path.path, f))
                self._stats_provider.count('drop_upload.files_disappeared', 1)
                return None
        d.addCallbacks(_succeeded, _failed)
        d.addBoth(self._uploaded_callback)
        return d

    def set_uploaded_callback(self, callback):
        """This sets a function that will be called after a file has been uploaded."""
        self._uploaded_callback = callback

    def finish(self, for_tests=False):
        """
        Stop watching.  With C{for_tests}, also wait for the notifier to
        fully stop when it supports that.
        """
        self._notifier.stopReading()
        self._stats_provider.count('drop_upload.dirs_monitored', -1)
        if for_tests and hasattr(self._notifier, 'wait_until_stopped'):
            return self._notifier.wait_until_stopped()
        else:
            return defer.succeed(None)

    def _log(self, msg):
        # Route all messages through the client node's logger.
        self._client.log(msg)
class DropUploader(service.MultiService):
    """
    Watch a local directory with inotify and upload each file written
    there into a Tahoe directory (given as a writecap URI).

    This is a reformatted duplicate of the DropUploader above; Python 2
    era code (it references C{unicode}).
    """
    name = 'drop-upload'

    def __init__(self, client, upload_dircap, local_dir_utf8, inotify=None):
        """
        @param client: the Tahoe client node; supplies stats, convergence
            secret and node creation.
        @param upload_dircap: writecap URI of the destination directory.
        @param local_dir_utf8: UTF-8 encoded path of the directory to watch.
        @param inotify: optional inotify module substitute (for tests);
            defaults to twisted.internet.inotify.
        @raise AssertionError: on any misconfiguration (bad encoding,
            missing directory, non-directory dircap, read-only dircap).
        """
        service.MultiService.__init__(self)
        try:
            local_dir_u = abspath_expanduser_unicode(
                local_dir_utf8.decode('utf-8'))
            if sys.platform == "win32":
                local_dir = local_dir_u
            else:
                # POSIX APIs here want the filesystem-encoded byte string.
                local_dir = local_dir_u.encode(get_filesystem_encoding())
        except (UnicodeEncodeError, UnicodeDecodeError):
            raise AssertionError(
                "The '[drop_upload] local.directory' parameter %s was not valid UTF-8 or "
                "could not be represented in the filesystem encoding."
                % quote_output(local_dir_utf8))

        self._client = client
        self._stats_provider = client.stats_provider
        self._convergence = client.convergence
        self._local_path = FilePath(local_dir)
        if inotify is None:
            from twisted.internet import inotify
        self._inotify = inotify

        if not self._local_path.exists():
            raise AssertionError(
                "The '[drop_upload] local.directory' parameter was %s but there is no directory at that location."
                % quote_output(local_dir_u))
        if not self._local_path.isdir():
            raise AssertionError(
                "The '[drop_upload] local.directory' parameter was %s but the thing at that location is not a directory."
                % quote_output(local_dir_u))

        # TODO: allow a path rather than a cap URI.
        self._parent = self._client.create_node_from_uri(upload_dircap)
        if not IDirectoryNode.providedBy(self._parent):
            raise AssertionError(
                "The '[drop_upload] upload.dircap' parameter does not refer to a directory."
            )
        if self._parent.is_unknown() or self._parent.is_readonly():
            raise AssertionError(
                "The '[drop_upload] upload.dircap' parameter is not a writecap to a directory."
            )

        # No-op until set_uploaded_callback() replaces it.
        self._uploaded_callback = lambda ign: None

        self._notifier = inotify.INotify()
        # We don't watch for IN_CREATE, because that would cause us to read and upload a
        # possibly-incomplete file before the application has closed it. There should always
        # be an IN_CLOSE_WRITE after an IN_CREATE (I think).
        # TODO: what about IN_MOVE_SELF or IN_UNMOUNT?
        mask = inotify.IN_CLOSE_WRITE | inotify.IN_MOVED_TO | inotify.IN_ONLYDIR
        self._notifier.watch(self._local_path, mask=mask, callbacks=[self._notify])

    def startService(self):
        """Start the service and begin consuming inotify events."""
        service.MultiService.startService(self)
        d = self._notifier.startReading()
        self._stats_provider.count('drop_upload.dirs_monitored', 1)
        return d

    def _notify(self, opaque, path, events_mask):
        """inotify callback: log the event and queue an upload."""
        self._log("inotify event %r, %r, %r\n" % (opaque, path, ', '.join(
            self._inotify.humanReadableMask(events_mask))))
        self._stats_provider.count('drop_upload.files_queued', 1)
        # Defer actual processing out of the notification callback.
        eventually(self._process, opaque, path, events_mask)

    def _process(self, opaque, path, events_mask):
        """
        Upload the notified file into the target directory; update the
        queued/uploaded/failed/disappeared stats counters accordingly.
        """
        d = defer.succeed(None)

        # FIXME: if this already exists as a mutable file, we replace the directory entry,
        # but we should probably modify the file (as the SFTP frontend does).
        def _add_file(ign):
            name = path.basename()
            # on Windows the name is already Unicode
            if not isinstance(name, unicode):
                name = name.decode(get_filesystem_encoding())

            u = FileName(path.path, self._convergence)
            return self._parent.add_file(name, u)
        d.addCallback(_add_file)

        def _succeeded(ign):
            self._stats_provider.count('drop_upload.files_queued', -1)
            self._stats_provider.count('drop_upload.files_uploaded', 1)

        def _failed(f):
            self._stats_provider.count('drop_upload.files_queued', -1)
            if path.exists():
                # Real failure: the file is still there but didn't upload.
                self._log("drop-upload: %r failed to upload due to %r" %
                          (path.path, f))
                self._stats_provider.count('drop_upload.files_failed', 1)
                return f
            else:
                # The file vanished before we could read it; treat as benign.
                self._log("drop-upload: notified file %r disappeared "
                          "(this is normal for temporary files): %r" %
                          (path.path, f))
                self._stats_provider.count('drop_upload.files_disappeared', 1)
                return None
        d.addCallbacks(_succeeded, _failed)
        d.addBoth(self._uploaded_callback)
        return d

    def set_uploaded_callback(self, callback):
        """This sets a function that will be called after a file has been uploaded."""
        self._uploaded_callback = callback

    def finish(self, for_tests=False):
        """
        Stop watching.  With C{for_tests}, also wait for the notifier to
        fully stop when it supports that.
        """
        self._notifier.stopReading()
        self._stats_provider.count('drop_upload.dirs_monitored', -1)
        if for_tests and hasattr(self._notifier, 'wait_until_stopped'):
            return self._notifier.wait_until_stopped()
        else:
            return defer.succeed(None)

    def _log(self, msg):
        # Route all messages through the client node's logger.
        self._client.log(msg)
def logsCoerce(directory): path = FilePath(directory) if not path.isdir(): raise ValueError("%r is not a directory" % (path.path,)) return path