Example #1
    def check_src_path(self, paths):
        """
        This checks the source paths to deem if they are valid.  The check
        performed in S3 is first it lists the objects using the source path.
        If there is an error like the bucket does not exist, the error will be
        caught with ``check_error()`` funciton.  If the operation is on a
        single object in s3, it checks that a list of object was returned and
        that the first object listed is the name of the specified in the
        command line.  If the operation is on objects under a common prefix,
        it will check that there are common prefixes and objects under
        the specified prefix.
        For local files, it first checks that the path exists.  Then it checks
        that the path is a directory if it is a directory operation or that
        the path is a file if the operation is on a single file.
        """
        src_path = paths[0]
        dir_op = self.parameters['dir_op']
        if src_path.startswith('s3://'):
            if self.cmd in ['ls', 'mb', 'rb']:
                return
            session = self.session
            service = session.get_service('s3')
            endpoint = service.get_endpoint(self.parameters['region'])
            src_path = src_path[5:]
            if dir_op:
                if not src_path.endswith('/'):
                    src_path += '/'  # all prefixes must end with a /
            bucket, key = find_bucket_key(src_path)
            operation = service.get_operation('ListObjects')
            html_response, response_data = operation.call(endpoint,
                                                          bucket=bucket,
                                                          prefix=key,
                                                          delimiter='/')
            check_error(response_data)
            contents = response_data['Contents']
            common_prefixes = response_data['CommonPrefixes']
            if not dir_op:
                if not contents or contents[0]['Key'] != key:
                    raise Exception("Error: S3 Object does not exist")
            else:
                if not contents and not common_prefixes:
                    raise Exception('Error: S3 Prefix does not exist')

        else:
            src_path = os.path.abspath(src_path)
            if os.path.exists(src_path):
                if os.path.isdir(src_path) and not dir_op:
                    raise Exception("Error: Requires a local file")
                elif os.path.isfile(src_path) and dir_op:
                    raise Exception("Error: Requires a local directory")
            else:
                raise Exception("Error: Local path does not exist")
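
A rough modern equivalent of the check above, for comparison: the
service.get_operation()/operation.call() interface used in this example comes
from an old botocore release and has since been removed.  The sketch below is
an illustration against the boto3 client API, not the awscli implementation;
the standalone function signature and the inline bucket/key split are
assumptions made for the example.

import os
import boto3

def check_src_path(src_path, dir_op, region=None):
    # Minimal sketch: validate an s3:// URI or a local path, mirroring the
    # branches of the example above.
    if src_path.startswith('s3://'):
        s3 = boto3.client('s3', region_name=region)
        src_path = src_path[5:]
        if dir_op and not src_path.endswith('/'):
            src_path += '/'  # all prefixes must end with a /
        bucket, _, key = src_path.partition('/')
        # boto3 raises ClientError itself (e.g. NoSuchBucket), so there is
        # no separate check_error() step here.
        response = s3.list_objects(Bucket=bucket, Prefix=key, Delimiter='/')
        contents = response.get('Contents', [])
        common_prefixes = response.get('CommonPrefixes', [])
        if not dir_op:
            if not contents or contents[0]['Key'] != key:
                raise Exception("Error: S3 Object does not exist")
        elif not contents and not common_prefixes:
            raise Exception("Error: S3 Prefix does not exist")
    else:
        src_path = os.path.abspath(src_path)
        if not os.path.exists(src_path):
            raise Exception("Error: Local path does not exist")
        if os.path.isdir(src_path) and not dir_op:
            raise Exception("Error: Requires a local file")
        if os.path.isfile(src_path) and dir_op:
            raise Exception("Error: Requires a local directory")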
Example #2
    def list_objects(self):
        """
        List all of the buckets if no bucket is specified.  List the objects
        and common prefixes under a specified prefix.
        """
        bucket, key = find_bucket_key(self.src)
        if bucket == '':
            operation = self.service.get_operation('ListBuckets')
            html_response, response_data = operation.call(self.endpoint)
            header_str = "CreationTime".rjust(19, ' ')
            header_str = header_str + ' ' + "Bucket"
            underline_str = "------------".rjust(19, ' ')
            underline_str = underline_str + ' ' + "------"
            sys.stdout.write("\n%s\n" % header_str)
            sys.stdout.write("%s\n" % underline_str)
            buckets = response_data['Buckets']
            for bucket in buckets:
                last_mod_str = make_last_mod_str(bucket['CreationDate'])
                print_str = last_mod_str + ' ' + bucket['Name'] + '\n'
                uni_print(print_str)
                sys.stdout.flush()
        else:
            operation = self.service.get_operation('ListObjects')
            iterator = operation.paginate(self.endpoint,
                                          bucket=bucket,
                                          prefix=key,
                                          delimiter='/')
            sys.stdout.write("\nBucket: %s\n" % bucket)
            sys.stdout.write("Prefix: %s\n\n" % key)
            header_str = "LastWriteTime".rjust(19, ' ')
            header_str = header_str + ' ' + "Length".rjust(10, ' ')
            header_str = header_str + ' ' + "Name"
            underline_str = "-------------".rjust(19, ' ')
            underline_str = underline_str + ' ' + "------".rjust(10, ' ')
            underline_str = underline_str + ' ' + "----"
            sys.stdout.write("%s\n" % header_str)
            sys.stdout.write("%s\n" % underline_str)
            for html_response, response_data in iterator:
                check_error(response_data)
                common_prefixes = response_data['CommonPrefixes']
                contents = response_data['Contents']
                for common_prefix in common_prefixes:
                    prefix_components = common_prefix['Prefix'].split('/')
                    prefix = prefix_components[-2]
                    pre_string = "PRE".rjust(30, " ")
                    print_str = pre_string + ' ' + prefix + '/\n'
                    uni_print(print_str)
                    sys.stdout.flush()
                for content in contents:
                    last_mod_str = make_last_mod_str(content['LastModified'])
                    size_str = make_size_str(content['Size'])
                    filename_components = content['Key'].split('/')
                    filename = filename_components[-1]
                    print_str = last_mod_str + ' ' + size_str + ' ' + \
                        filename + '\n'
                    uni_print(print_str)
                    sys.stdout.flush()
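
The same listing logic maps onto boto3's built-in paginator.  The sketch
below is an illustration under that assumption, not the awscli code: the
make_last_mod_str/make_size_str helpers are replaced with inline strftime
formatting, and uni_print is dropped in favour of plain writes.

import sys
import boto3

def list_objects(bucket, prefix=''):
    # Minimal sketch of the same listing on top of boto3's paginator.
    s3 = boto3.client('s3')
    if not bucket:
        # No bucket specified: list all buckets, as in the first branch above.
        for b in s3.list_buckets()['Buckets']:
            created = b['CreationDate'].strftime('%Y-%m-%d %H:%M:%S')
            sys.stdout.write('%19s %s\n' % (created, b['Name']))
        return
    paginator = s3.get_paginator('list_objects')
    for page in paginator.paginate(Bucket=bucket, Prefix=prefix, Delimiter='/'):
        # Sub-"directories" under the prefix come back as CommonPrefixes.
        for common_prefix in page.get('CommonPrefixes', []):
            name = common_prefix['Prefix'].split('/')[-2]
            sys.stdout.write('%30s %s/\n' % ('PRE', name))
        for content in page.get('Contents', []):
            last_mod = content['LastModified'].strftime('%Y-%m-%d %H:%M:%S')
            sys.stdout.write('%19s %10d %s\n' % (last_mod, content['Size'],
                                                 content['Key'].split('/')[-1]))

Calling list_objects on a hypothetical bucket, e.g. list_objects('my-bucket',
'photos/'), would print one PRE line per sub-prefix and one date/size/name row
per object directly under the prefix, roughly matching the layout produced by
the example above.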