import os

# These imports assume s3cmd's S3 package is on the path.
from S3.Config import Config
from S3.S3 import S3
from S3.S3Uri import S3Uri
from S3.SortedDict import SortedDict

# base_uri (e.g. "s3://bucket/prefix/") is expected to be defined elsewhere in this module.


def upload(fileobj, filename):
    # Load the user's s3cmd configuration and force a quiet, public upload.
    cfg = Config()
    cfg.read_config_file(os.path.join(os.getenv("HOME"), ".s3cfg"))
    cfg.progress_meter = False
    cfg.acl_public = True
    s3 = S3(cfg)

    # Request a public-read ACL and reduced-redundancy storage for the object.
    headers = SortedDict(ignore_case=True)
    headers["x-amz-acl"] = "public-read"
    headers["x-amz-storage-class"] = "REDUCED_REDUNDANCY"

    remote_uri = S3Uri(base_uri + filename)

    # Determine the file size, then rewind before streaming the upload.
    fileobj.seek(0, 2)  # seek to end
    size = fileobj.tell()
    fileobj.seek(0)     # seek to start

    response = s3.send_file_multipart(fileobj, headers, remote_uri, size)
    assert response['status'] == 200
    return remote_uri.public_url()
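# A minimal usage sketch for upload() above, assuming base_uri points at an
# existing bucket/prefix and ~/.s3cfg holds valid credentials; the file name
# "example.bin" is purely illustrative.
if __name__ == "__main__":
    with open("example.bin", "rb") as fh:
        print(upload(fh, "example.bin"))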
def parse_plugin_definition(self, context, plugindef):
    '''
    Parses the plugin arguments and reads the s3cmd configuration from the
    file given by the 'config' option.
    '''
    super(BareosFdPluginS3, self).parse_plugin_definition(context, plugindef)
    if 'config' not in self.options:
        bareosfd.DebugMessage(context, 100, "Option 'config' not defined.\n")
        return bRCs['bRC_Error']
    if 'bucket' not in self.options:
        bareosfd.DebugMessage(context, 100, "Option 'bucket' not defined.\n")
        return bRCs['bRC_Error']
    bareosfd.DebugMessage(
        context, 100,
        "Using %s to look up plugin config\n" % (self.options['config']))
    if os.path.exists(self.options['config']):
        try:
            cfg = Config(self.options['config'])
            # config_file = open(self.options['config'], 'rb')
        except Exception:
            bareosfd.DebugMessage(
                context, 100,
                "Could not open file %s\n" % (self.options['config']))
            return bRCs['bRC_Error']
    else:
        bareosfd.DebugMessage(
            context, 100,
            "File %s does not exist\n" % (self.options['config']))
        return bRCs['bRC_Error']
    if 'prefix' not in self.options:
        self.options['prefix'] = None
    # An optional regex pattern limits which objects get backed up.
    if 'pattern' in self.options:
        self.pattern = re.compile(self.options['pattern'])
    else:
        self.pattern = None
    self.files_to_backup = []
    self.s3 = S3(cfg)
    self.prefix_list = [None]
    self.file_iterator = {}
    self.file_iterator['uri_params'] = None
    if self.pattern:
        self.make_prefix_list()
    self.iterate_files()
    return bRCs['bRC_OK']
def connect(self):
    """
    Establishes the actual connection to the referred RSE, using the
    credentials stored in self.rse['credentials'].

    :raises RSEAccessDenied: if no connection could be established.
    """
    try:
        cfg = Config()
        # Copy every credential entry into the s3cmd configuration object.
        for k in self.rse['credentials']:
            cfg.update_option(k.encode('utf-8'),
                              self.rse['credentials'][k].encode('utf-8'))
        self.__s3 = S3(cfg)
    except Exception as e:
        raise exception.RSEAccessDenied(e)
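# Hedged sketch of the credential layout connect() above expects: each key in
# self.rse['credentials'] is passed straight to s3cmd's Config.update_option(),
# so the keys must be valid s3cmd option names. All values are placeholders.
example_rse = {
    'credentials': {
        'access_key': 'AKIA...',
        'secret_key': '...',
        'host_base': 's3.example.com',                # hypothetical endpoint
        'host_bucket': '%(bucket)s.s3.example.com',
    },
}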
from S3.Config import Config
from S3.Exceptions import ParameterError, S3Error
from S3.S3 import S3
from S3.S3Uri import S3Uri

# output() and error() are expected to be provided by the surrounding script,
# as they are in s3cmd itself.


def cmd_bucket_create(args):
    # bucket, object = check_uri(args)
    print('entering cmd_bucket_create')
    s3 = S3(Config())
    for arg in args:
        uri = S3Uri(arg)
        if not uri.type == "s3" or not uri.has_bucket() or uri.has_object():
            raise ParameterError(
                "Expecting S3 URI with just the bucket name set instead of '%s'" % arg)
        try:
            print('inside the try, attempting the create')
            response = s3.bucket_create(uri.bucket(), Config().bucket_location)
            print('got the response')
            output(u"Bucket '%s' created" % uri.uri())
        except S3Error as e:
            if e.info["Code"] in S3.codes:
                error(S3.codes[e.info["Code"]] % uri.bucket())
                return
            else:
                raise
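# Illustrative call to cmd_bucket_create() above; the bucket name is made up.
# The function only accepts bare-bucket S3 URIs and raises ParameterError otherwise.
cmd_bucket_create(['s3://my-new-bucket'])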
def __init__(self, config):
    self._config = config
    self._s3config = S3Config()

    access_key = config.get('access_key', False)
    if access_key:
        self._s3config.update_option('access_key', access_key)

    secret_key = config.get('secret_key', False)
    if secret_key:
        self._s3config.update_option('secret_key', secret_key)

    host_base = config.get('host_base', False)
    if host_base:
        self._s3config.update_option('host_base', host_base)

    host_bucket = config.get('host_bucket', False)
    if host_bucket:
        self._s3config.update_option('host_bucket', host_bucket)

    use_https = config.get('use_https', True)
    self._s3config.update_option('use_https', use_https)

    signature = config.get('signature_v2', True)
    self._s3config.update_option('signature_v2', signature)

    self._s3 = S3(self._s3config)
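# Hedged example of the config dict this constructor reads; the keys mirror the
# options copied into the s3cmd Config above, and every value is a placeholder.
example_config = {
    'access_key': 'AKIA...',
    'secret_key': '...',
    'host_base': 's3.example.com',
    'host_bucket': '%(bucket)s.s3.example.com',
    'use_https': True,        # defaults to True when omitted
    'signature_v2': True,     # defaults to True when omitted
}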