def connect(self):
    """Open self.conn, a MySQL connection, using the host/db/user read
    from the petabox XML config file.

    The connect is retried because the MySQL server at IA sometimes
    raises:
        OperationalError: (2013, "Lost connection to MySQL server at
        'reading authorization packet', system error: 0")
    This appears to be related to a config problem with mysqld and a
    loaded web server; see http://bugs.mysql.com/bug.php?id=28359
    """
    config_xml = s3path.s3path().pbconfig
    config = xml.etree.ElementTree.parse(config_xml)
    host = config.find('database-hostname').text
    db = config.find('database-name').text
    user = config.find('database-username').text
    # NOTE(review): password is hard-coded rather than read from the
    # config file -- confirm this is intentional.
    passwd = 'dummypassword'

    max_tries = 5
    attempt = 0
    self.conn = None
    while self.conn is None:
        attempt += 1
        try:
            self.conn = MySQLdb.connect(
                host=host,
                db=db,
                user=user,
                passwd=passwd,
            )
        except MySQLdb.OperationalError:
            # Give up only after max_tries failed attempts; a bare
            # `raise` preserves the original traceback (the old code's
            # `raise e` discarded it).
            if attempt >= max_tries:
                raise
def bucket_in_host_header(self):
    """Return the bucket name embedded in the request's Host header.

    The Host header is matched against each DNS-style bucket regex from
    the s3path config; each regex is expected to define a named group
    'bucket'.  Returns '' when there is no Host header or no regex
    matches.
    """
    # dict.has_key() was removed in Python 3; the `in` operator is the
    # equivalent (and Python-2-compatible) membership test.
    if 'host' in self.headers:
        host = self.headers['host']
        for regex in s3path.s3path().dns_bucket_regexs:
            m = re.match(regex, host)
            if m:
                return m.group('bucket')
    return ''
def __init__(self, uri, log_function=fshandler.FilesystemHandler._log_internal, verbose=False):
    """Initialize the handler with the candidate item directories and
    the petabox / ias3 install paths from the s3path config."""
    # Candidate item mount points: /0/items through /22/items.
    self.item_dirs = ['/%d/items' % n for n in range(0, 23)]
    fshandler.FilesystemHandler.__init__(self, self.item_dirs[0], uri, log_function, verbose)
    # For writes, should we sidestep contrib-submit and write directly
    # to the filesystem?
    self.use_fs_directly = False
    paths = s3path.s3path()
    self.petabox_path = paths.petabox
    self.ias3_path = paths.s3
def get_bucket_host(self, bucket):
    """Locate the storage node holding *bucket* via find_item.php and
    return a redirect target of the form "iaNNNNNN.s3dns.<domain>:<port>".

    Raises:
        S3_Error("NoSuchBucket", ...)  if find_item.php exits non-zero.
        S3_Error("InternalError", ...) if find_item.php cannot be run.
    """
    try:
        pipe = subprocess.Popen([
            self.petabox_path + "/sw/bin/find_item.php",
            bucket,
        ], stdout=subprocess.PIPE)
        output = pipe.communicate()[0]
        if pipe.wait() != 0:
            raise S3_Error("NoSuchBucket", ("%s not found by find_item.php" % bucket))
    except OSError:
        # BUG FIX: the original did a bare `raise S3_Error`, raising the
        # class with no (code, message) arguments -- inconsistent with the
        # NoSuchBucket raise above and likely a TypeError at raise time.
        raise S3_Error("InternalError", "could not execute find_item.php")

    locate_out = output.rstrip("\n")
    # find_item.php prints "host:path"; split only on the first ':' so a
    # path containing ':' cannot break the two-value unpack.
    (host, path) = locate_out.split(':', 1)
    # host is iaNNNNNN.us.archive.org; rewrite it to the s3dns alias on
    # the same node, with the configured s3 port.
    short_hostname = host.split('.')[0]
    domain = host.split('.')[1:]
    new_name = "%s.s3dns.%s:%d" % (short_hostname, '.'.join(domain), s3path.s3path().port)
    return new_name
def do_GET(self):
    """Serve a GET request.

    Dispatches on the parsed request: ?acl -> ACL XML, ?log -> access
    log dump, bucket+key -> object data, bucket only -> key listing,
    neither -> bucket listing for the access key.  S3_Error is reported
    via send_error.
    """
    dc=self.IFACE_CLASS
    lm="Sun, 01 Dec 2014 00:00:00 GMT" # dummy!
    headers={"Last-Modified":lm}
    data_head = {};
    # urlparse()[4] is the query string component of the request path.
    query = urlparse.urlparse(self.path)[4]
    try:
        # Non-local requests are redirected to the node that actually
        # holds the bucket.
        if not self.is_local_request():
            return self.send_location_redirect()
        (bucket, key) = self.get_bucket_key()
        (accesskey, signature) = self.get_accesskey_signature()
        if query == 'acl':
            # ACL sub-resource for the bucket/key.
            data = self.get_acl_xml()
            headers['content-type'] = 'application/xml'
        elif query == 'log':
            # Dump the access log for this access key via the external
            # dumplog helper shipped alongside the s3 install.
            p = s3path.s3path()
            data = subprocess.Popen([p.s3+"/dumplog", "s3accesskey-"+accesskey], stdout=subprocess.PIPE).communicate()[0]
        elif key:
            # GET object: headers plus body for bucket/key.
            data_head=dc.get_head(bucket, key)
            data=dc.get_data(bucket, key)
        elif bucket:
            # GET bucket: XML listing of the bucket's keys.
            key_list = dc.get_key_list(bucket)
            #dc.pp(key_list)
            data=self.get_keys_xml(bucket, key_list)
            # NOTE(review): key is empty ('') in this branch -- confirm
            # get_head is meant to be called with an empty key here.
            data_head=dc.get_head(bucket, key)
            headers['content-type'] = 'application/xml'
        else:
            # GET service: XML listing of buckets owned by accesskey.
            bucket_list = dc.get_bucket_list(accesskey)
            data = self.gen_bucket_list_xml(bucket_list)
            # NOTE(review): bucket and key are both empty here -- confirm
            # get_head('' , '') is intended.
            data_head=dc.get_head(bucket, key)
            headers['content-type'] = 'application/xml'
    # NOTE(review): the success path above computes data/headers but no
    # response emission is visible in this excerpt -- presumably the
    # send happens in code beyond this view; confirm.
    except S3_Error, err:
        self.send_error(err)
        return
def get_bucket_host(self, bucket):
    """Return "<local-hostname>:<s3-port>" as the host serving *bucket*.

    assert_bucket_exists raises when the bucket is unknown, so a value
    is only returned for an existing bucket.
    """
    self.assert_bucket_exists(bucket)
    hostname = socket.gethostname()
    port = s3path.s3path().port
    return "%s:%d" % (hostname, port)