import os
import time
import mimetypes
from socket import gethostname

import boto
from boto.s3.key import Key
from boto.utils import get_ts


def for_key(self, key, params=None, bucket_name=None):
    if params:
        self.update(params)
    if key.path:
        # Local file: derive location, name, and MIME type from the path.
        t = os.path.split(key.path)
        self['OriginalLocation'] = t[0]
        self['OriginalFileName'] = t[1]
        mime_type = mimetypes.guess_type(t[1])[0]
        if mime_type is None:
            mime_type = 'application/octet-stream'
        self['Content-Type'] = mime_type
        s = os.stat(key.path)
        self['FileAccessedDate'] = get_ts(time.gmtime(s.st_atime))
        self['FileModifiedDate'] = get_ts(time.gmtime(s.st_mtime))
        self['FileCreateDate'] = get_ts(time.gmtime(s.st_ctime))
    else:
        # S3 key: take metadata from the key itself.  Note the key name here
        # is 'ContentType', while the local-file branch uses 'Content-Type'.
        self['OriginalFileName'] = key.name
        self['OriginalLocation'] = key.bucket.name
        self['ContentType'] = key.content_type
    self['Host'] = gethostname()
    if bucket_name:
        self['Bucket'] = bucket_name
    else:
        self['Bucket'] = key.bucket.name
    self['InputKey'] = key.name
    self['Size'] = key.size
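
# A minimal usage sketch, assuming this method lives on boto's dict-like
# ServiceMessage and that an S3 connection already exists; the bucket and
# key names are illustrative:
#
#   from boto.services.message import ServiceMessage
#
#   conn = boto.connect_s3()
#   key = conn.get_bucket('input-bucket').get_key('photo.jpg')
#   msg = ServiceMessage()
#   msg.for_key(key)  # fills OriginalFileName, ContentType, Bucket, InputKey, Size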

def __init__(self, config_file=None):
    self.instance_id = boto.config.get('Instance', 'instance-id', 'default')
    self.name = self.__class__.__name__
    self.ts = get_ts()
    if config_file:
        boto.config.read(config_file)
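
# boto.config is an INI-style ConfigParser, so a config_file passed here
# would be expected to contain something like the following section, which
# the constructor above reads (the value is illustrative):
#
#   [Instance]
#   instance-id = i-0123456789abcdef0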

def _save(self, name, content):
    cleaned_name = self._clean_name(name)
    name = self._normalize_name(cleaned_name)
    headers = self.headers.copy()
    content_type = getattr(content, 'content_type',
                           mimetypes.guess_type(name)[0] or Key.DefaultContentType)
    if self.gzip and content_type in self.gzip_content_types:
        content = self._compress_content(content)
        headers.update({'Content-Encoding': 'gzip'})
    content.name = cleaned_name
    encoded_name = self._encode_name(name)
    key = self.bucket.get_key(encoded_name)
    if not key:
        key = self.bucket.new_key(encoded_name)
    key.set_metadata('Content-Type', content_type)
    key.set_contents_from_file(content, headers=headers, policy=self.acl,
                               reduced_redundancy=self.reduced_redundancy)
    if self.preload_metadata:
        # self._cache_set(encoded_name, key)
        # The above doesn't work because 'last_modified' doesn't get updated
        # when boto does an S3 PUT request, so instead we fake it:
        self._cache_set(encoded_name, CachedKey(key.size, get_ts()))
    return cleaned_name
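
# CachedKey is not defined in this excerpt.  Judging from how it is
# constructed above (a size plus a write timestamp standing in for
# 'last_modified'), a minimal stand-in could be a namedtuple.  This is an
# assumption, not the project's actual definition:
from collections import namedtuple

CachedKey = namedtuple('CachedKey', ['size', 'last_modified'])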

def read_message(self):
    boto.log.info('read_message')
    # processing_time is passed as the SQS visibility timeout for this read,
    # so the message stays hidden from other workers while it is processed.
    message = self.input_queue.read(self.processing_time)
    if message:
        boto.log.info(message.get_body())
        message['Service-Read'] = get_ts()
    return message

def write_message(self, message):
    message['Service-Write'] = get_ts()
    message['Server'] = self.name
    # dict.has_key() was removed in Python 3; os.environ.get() with a
    # default is the equivalent, idiomatic form.
    message['Host'] = os.environ.get('HOSTNAME', 'unknown')
    message['Instance-ID'] = self.instance_id
    if self.output_queue:
        boto.log.info('Writing message to SQS queue: %s' % self.output_queue.id)
        self.output_queue.write(message)
    if self.output_domain:
        boto.log.info('Writing message to SDB domain: %s' % self.output_domain.name)
        item_name = '/'.join([message['Service-Write'], message['Bucket'],
                              message['InputKey']])
        self.output_domain.put_attributes(item_name, message)
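
# A sketch of how read_message and write_message might be driven together.
# The loop and the process() hook are illustrative assumptions, not the
# service class's actual run() implementation:
def run(self):
    while True:
        message = self.read_message()
        if message is None:
            continue
        self.process(message)                     # hypothetical per-message handler
        self.write_message(message)
        self.input_queue.delete_message(message)  # ack only after the write succeeds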