def run_stress_test(thread_num):
    """Upload randomly-sized objects to S3 in an endless loop, reporting
    per-upload timing over UDP to the central log server.

    Never returns; intended to run as the body of one worker thread.

    Args:
        thread_num: integer identifier of this worker, used only in log lines.

    Relies on module-level configuration: NODE, ENDPOINT_HOSTNAME,
    ENDPOINT_PORT, IS_SECURE, BUCKET_NAME, OBJ_MEAN_KB, OBJ_STDDEV_KB,
    LOG_SERVER_ADDR, LOG_SERVER_PORT.
    """
    logging.info("Thread %d starting", thread_num)
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    s3conn = S3Connection(host=ENDPOINT_HOSTNAME,
                          port=ENDPOINT_PORT,
                          is_secure=IS_SECURE,
                          calling_format=boto.s3.connection.OrdinaryCallingFormat())
    logging.info("Thread %d made connection", thread_num)
    # The bucket is expected to already exist (created/ACL'd out of band).
    bucket = s3conn.get_bucket(BUCKET_NAME)

    def fake_should_retry(response, chunked_transfer=False):
        # Replaces boto's Key.should_retry: accept any 2xx response, raise on
        # anything else so the failure is recorded instead of silently retried.
        logging.info("Got response status %d", response.status)
        if 200 <= response.status <= 299:
            return True
        raise Exception("HTTP response %d %s" % (response.status, response.reason))

    logging.info("Thread %d starting loop", thread_num)
    while True:
        st = datetime.now()
        obj_name = uuid.uuid4().hex
        if OBJ_MEAN_KB == 0:
            size_in_kb = 0
            object_contents = b""
        else:
            # Clamp at zero: normalvariate can draw a negative size, which
            # previously produced an empty payload logged with a negative
            # byte count in the success record.
            size_in_kb = max(0, int(random.normalvariate(OBJ_MEAN_KB, OBJ_STDDEV_KB)))
            # Repeat a single 1 KiB random block rather than generating
            # size_in_kb KiB of entropy — keeps object creation cheap.
            object_contents = os.urandom(1024) * size_in_kb
        obj_create_time = (datetime.now() - st).total_seconds()

        key = Key(bucket, obj_name)
        key.should_retry = fake_should_retry

        start_time = datetime.now()
        try:
            key.set_contents_from_string(object_contents)
            elapsed = (datetime.now() - start_time).total_seconds()
            # CSV record: node, epoch ts, endpoint, bucket, bytes, seconds, error
            msg = [NODE, start_time.timestamp(), ENDPOINT_HOSTNAME, BUCKET_NAME,
                   size_in_kb * 1024, elapsed, ""]
            csv_data = ",".join(map(str, msg))
            logging.info("Thread %d: %s obj_create:%s",
                         thread_num, csv_data, str(obj_create_time))
            sock.sendto(csv_data.encode("utf-8"), (LOG_SERVER_ADDR, LOG_SERVER_PORT))
        except Exception as e:
            elapsed = (datetime.now() - start_time).total_seconds()
            logging.error("Thread %d: %s", thread_num, str(e))
            # Size -1 marks a failed upload; the error text goes in the last column.
            msg = [NODE, start_time.timestamp(), ENDPOINT_HOSTNAME, BUCKET_NAME,
                   -1, elapsed, str(e).strip("\n")]
            csv_data = ",".join(map(str, msg))
            sock.sendto(csv_data.encode("utf-8"), (LOG_SERVER_ADDR, LOG_SERVER_PORT))