Ejemplo n.º 1
0
    def __init__(self, username, bucket, obj, mode):
        """Open a cloud-storage object as a file-like descriptor.

        username -- FTP account owning the object (must be non-empty)
        bucket   -- bucket name, resolved to a bucket object through
                    operations.connection
        obj      -- key name inside the bucket
        mode     -- file mode string; 'r' opens for read, anything else
                    opens for write through a local temporary spool file

        Raises IOError(1) when any identifier is missing, IOError(2)
        when the bucket (or, for reads, the key) cannot be fetched.
        """
        self.username = username
        self.bucket = bucket
        self.name = obj
        self.mode = mode
        self.closed = False
        self.total_size = 0
        self.temp_file_path = None
        self.temp_file = None
        ftpserver.log("Creating FaetusFD(%s,%s,%s,%s)" %(username, bucket, obj, mode))

        if not all([username, bucket, obj]):
            self.closed = True
            raise IOError(1, 'Operation not permitted')

        try:
            self.bucket = operations.connection.get_bucket(self.bucket)
        except Exception:
            # narrowed from a bare except: don't swallow SystemExit /
            # KeyboardInterrupt while translating lookup failures
            raise IOError(2, 'No such file or directory')

        if 'r' in self.mode:
            try:
                self.obj = self.bucket.get_key(self.name)
            except Exception:
                raise IOError(2, 'No such file or directory')
        else: #write
            self.obj = self.bucket.get_key(self.name)
            if not self.obj:
                # key does not exist, create it
                self.obj = self.bucket.new_key(self.name)
            # spool writes into a temporary file; wrap mkstemp's open fd
            # instead of discarding it (mkstemp()[1] leaked the descriptor)
            import os
            fd, self.temp_file_path = tempfile.mkstemp()
            self.temp_file = os.fdopen(fd, 'w')
Ejemplo n.º 2
0
    def __init__(self, username, bucket, obj, mode):
        """Create a file-like handle onto a cloud-storage key.

        username -- FTP account name; required
        bucket   -- bucket name, looked up via operations.connection
        obj      -- key name within the bucket; required
        mode     -- 'r' for read; any other mode writes through a
                    temporary local file

        Raises IOError(1) for missing identifiers, IOError(2) when the
        bucket (or the key, for reads) cannot be retrieved.
        """
        self.username = username
        self.bucket = bucket
        self.name = obj
        self.mode = mode
        self.closed = False
        self.total_size = 0
        self.temp_file_path = None
        self.temp_file = None
        ftpserver.log("Creating FaetusFD(%s,%s,%s,%s)" %
                      (username, bucket, obj, mode))

        if not all([username, bucket, obj]):
            self.closed = True
            raise IOError(1, 'Operation not permitted')

        try:
            self.bucket = operations.connection.get_bucket(self.bucket)
        except Exception:
            # was a bare except: keep SystemExit/KeyboardInterrupt alive
            raise IOError(2, 'No such file or directory')

        if 'r' in self.mode:
            try:
                self.obj = self.bucket.get_key(self.name)
            except Exception:
                raise IOError(2, 'No such file or directory')
        else:  #write
            self.obj = self.bucket.get_key(self.name)
            if not self.obj:
                # key does not exist, create it
                self.obj = self.bucket.new_key(self.name)
            # create a temporary spool file; reuse the fd that mkstemp
            # returns rather than leaking it as the original did
            import os
            fd, self.temp_file_path = tempfile.mkstemp()
            self.temp_file = os.fdopen(fd, 'w')
Ejemplo n.º 3
0
 def parse_fspath(self, path):
     """Split *path* into a (username, site, filename) tuple.

     Components missing from a short path come back as empty strings;
     components beyond the third are folded into the filename.
     """
     ftpserver.log("parse_fspath(%s)" % (path))
     if not path.startswith(ftp_sep):
         raise ValueError('parse_fspath: You have to provide a full path, not %s' % path)
     pieces = path.split(ftp_sep)[1:]
     if len(pieces) > 3:
         # pyftpdlib unfortunately splits the key on os.sep; glue the
         # surplus 'directories' back together with the cloud separator
         pieces = [pieces[0], pieces[1], cloud_sep.join(pieces[2:])]
     while len(pieces) < 3:
         pieces.append('')
     return tuple(pieces)
Ejemplo n.º 4
0
 def parse_fspath(self, path):
     """Decompose an FTP path into (username, site, filename).

     Any component absent from *path* is returned as an empty string;
     extra trailing components are rejoined into the key name.
     """
     ftpserver.log("parse_fspath(%s)" % (path))
     if not path.startswith(ftp_sep):
         raise ValueError(
             'parse_fspath: You have to provide a full path, not %s' % path)
     parts = path.split(ftp_sep)[1:]
     if len(parts) > 3:
         # pyftpdlib introduces os.sep between nested 'directories';
         # rebuild the tail as one cloud_sep-joined key
         user, site, key = parts[0], parts[1], cloud_sep.join(parts[2:])
         parts = [user, site, key]
     padded = parts + [''] * (3 - len(parts))
     return tuple(padded)
Ejemplo n.º 5
0
 def transform_username(self, username):
     """Map *username* through username_transform_map.

     Names with no mapping pass through unchanged.
     """
     ftpserver.log("transforming username %s" % (username))
     # 'in' replaces dict.has_key(), which was removed in Python 3
     if username in self.username_transform_map:
         username = self.username_transform_map[username]
     ftpserver.log("transformed username to %s" % (username))
     return username
Ejemplo n.º 6
0
 def get_home_dir(self, username):
     """Return the FTP home directory for *username*: /<transformed name>."""
     # log format fixed: closing ')' was mistyped as '(' in the original
     ftpserver.log("get_home_dir(%s)" % (username))
     return ftp_sep + self.transform_username(username)
Ejemplo n.º 7
0
 def transform_username(self, username):
     """Translate *username* via username_transform_map, if a mapping exists.

     Unmapped usernames are returned unchanged.
     """
     ftpserver.log("transforming username %s" % (username))
     # membership test with 'in' instead of has_key() (gone in Python 3)
     if username in self.username_transform_map:
         username = self.username_transform_map[username]
     ftpserver.log("transformed username to %s" % (username))
     return username
Ejemplo n.º 8
0
 def get_home_dir(self, username):
     """Home directory for *username*: ftp_sep + the transformed name."""
     # fixed the log message's mismatched paren ("(%s(" -> "(%s)")
     ftpserver.log("get_home_dir(%s)" % (username))
     return ftp_sep + self.transform_username(username)
Ejemplo n.º 9
0
            if addr is None:
                return

        handler = None
        ip = None
        try:
            """
            *********************
            handler = StreamHandler, which specifies stream_rate for the overall
            tcp connection.
            *********************
            """
            handler = self.handler(sock, self, len(self.handlers), self.stream_rate)
            if not handler.connected:
                return
            ftpserver.log("[]%s:%s Connected." % addr[:2])
            ip = addr[0]
            self.ip_map.append(ip)

            # For performance and security reasons we should always set a
            # limit for the number of file descriptors that socket_map
            # should contain.  When we're running out of such limit we'll
            # use the last available channel for sending a 421 response
            # to the client before disconnecting it.
            if self.max_cons and (len(asyncore.socket_map) > self.max_cons):
                print "Connection accepted for max_cons"
                sys.stderr.write('ERROR: Connection accepted for max_cons')
                handler.handle_max_cons()
                return

            # accept only a limited number of connections from the same