def create_conn(self, sock, host, port): if not self.running: xlog.debug("session not running, try to connect") return None self.target_on_roads = max(g.config.min_on_road, self.target_on_roads) self.lock.acquire() self.last_conn_id += 2 conn_id = self.last_conn_id self.lock.release() if isinstance(host, str): host = host.encode("ascii") seq = 0 cmd_type = 0 # create connection sock_type = 0 # TCP data = struct.pack("<IBBH", seq, cmd_type, sock_type, len(host)) + host + struct.pack("<H", port) self.send_conn_data(conn_id, data) self.conn_list[conn_id] = base_container.Conn(self, conn_id, sock, host, port, g.config.windows_size, g.config.windows_ack, True, xlog) return conn_id
def serialize_body(self): first = struct.pack("!LHxB", self.max_age, self.port, len(self.protocol_id)) host_length = struct.pack("!B", len(self.host)) return b''.join([ first, self.protocol_id, host_length, self.host, self.serialize_origin() ])
def pack_callback(arg): output = "" for name, id, description, extra in arg: extra = dict(extra) output += pack('SIS', name, id, description) if ObjectParamsStructDesc.has_key(id): for substruct, name, description in ObjectParamsStructDesc[id]: output += pack(substruct, extra.pop(name)) if len(extra) > 0: raise TypeError("There was left over extra stuff...") return output
def get_ack(self, force=False): time_now = time.time() if force or \ (self.last_receive_time < self.last_send_time and time_now - self.last_send_time > self.ack_delay): buf = base_container.WriteBuffer() buf.append(struct.pack("<I", self.receive_process.next_sn - 1)) for sn in self.receive_process.block_list: buf.append(struct.pack("<I", sn)) return buf return ""
def get_ack(self, force=False): time_now = time.time() if force or \ (self.last_receive_time < self.last_send_time and time_now - self.last_send_time > self.ack_delay): buf = base_container.WriteBuffer() buf.append(struct.pack("<I", self.receive_process.next_sn-1)) for sn in self.receive_process.block_list: buf.append(struct.pack("<I", sn)) return buf return ""
def __str__(self):
    output1 = pack(self.struct, self.id, self.modify_time, self.categories, self.name,
                   self.desc, self.used, self.owner, self.components, self.feedback,
                   self.properties)
    self.length = len(output1)

    output2 = Processed.__str__(self)
    return output2 + output1
def __str__(self):
    args = []
    for name, type in self.names:
        struct, size = ARG_STRUCTMAP[type]
        attr = getattr(self, name)
        if size == 1:
            args.append(attr)
        else:
            args += list(attr)

    output = Order.__str__(self)
    try:
        output += pack(self.substruct, *args)
        return output
    except TypeError, e:
        s = str(e)
        causedby = '%s %s' % self.names[int(s[:s.find(' ')])]
        being = getattr(self, name)

        traceback = sys.exc_info()[2]
        while not traceback.tb_next is None:
            traceback = traceback.tb_next

        raise TypeError, '%s was %s\n%s' % (causedby, being, e), traceback
def pack(self, values): """\ pack() -> bytes Returns a packed version of values. """ return xstruct.pack(self.xstruct, values)
def __str__(self):
    output = Processed.__str__(self)
    output += pack(self.struct, self.key, self.start, self.amount, self.since)
    assert len(output) == Header.size + self.length, \
        "Output length (%s) did not match expected length (%s)" % (len(output), self.length)
    return output
def transfer_ack(self, position):
    with self.recv_notice:
        if self.transfered_close_to_peer:
            return

        cmd_position = struct.pack("<IBQ", self.next_recv_seq, 3, position)
        self.session.send_conn_data(self.conn_id, cmd_position)
        self.next_recv_seq += 1
def transfer_peer_close(self, reason=""): with self.recv_notice: if self.transfered_close_to_peer: return self.transfered_close_to_peer = True cmd = struct.pack("<IB", self.next_recv_seq, 2) self.session.send_conn_data(self.conn_id, cmd + reason) self.next_recv_seq += 1
def serialize_body(self): data = struct.pack( "!LL", self.last_stream_id & 0x7FFFFFFF, self.error_code ) data += self.additional_data return data
def create_conn(self, sock, host, port): if not self.running: xlog.debug("session not running, try to connect") return None self.lock.acquire() self.last_conn_id += 1 conn_id = self.last_conn_id self.lock.release() seq = 0 cmd_type = 0 # create connection sock_type = 0 # TCP data = struct.pack("<IBBH", seq, cmd_type, sock_type, len(host)) + host + struct.pack("<H", port) self.send_conn_data(conn_id, data) self.conn_list[conn_id] = base_container.Conn(self, conn_id, sock, host, port, g.config.windows_size, g.config.windows_ack, True, xlog) return conn_id
def __str__(self):
    output = Description.__str__(self)
    output += pack(self.struct,
                   self.id,
                   self._name,
                   self.description,
                   self.arguments,
                   self.modify_time)
    return output
def __str__(self):
    output = Processed.__str__(self)
    output += pack(self.struct,
                   self.name,
                   self.key, self.tp,
                   self.server, self.sertype,
                   self.rule, self.rulever,
                   self.locations,
                   self._optional)
    return output
def __str__(self):
    output = Describable.__str__(self)
    output += pack(self.struct,
                   self.id,
                   self._subtype,
                   self.name,
                   self.desc,
                   self.parent,
                   self.contains,
                   self.modify_time)
    return output
def transfer_peer_close(self, reason=""): with self.recv_notice: if self.transfered_close_to_peer: return self.transfered_close_to_peer = True cmd = struct.pack("<IB", self.next_recv_seq, 2) if isinstance(reason, str): reason = reason.encode("utf-8") self.session.send_conn_data(self.conn_id, cmd + reason) self.next_recv_seq += 1
def transfer_received_data(self, data): with self.recv_notice: if self.transfered_close_to_peer: return buf = WriteBuffer(struct.pack("<IB", self.next_recv_seq, 1)) buf.append(data) self.next_recv_seq += 1 self.received_position += len(data) self.session.send_conn_data(self.conn_id, buf)
def pack_request(method, url, headers, body, timeout):
    headers = dict(headers)
    if isinstance(body, bytes) and body:
        if len(body) < 10 * 1024 * 1024 and b'Content-Encoding' not in headers:
            # small enough to compress
            zbody = deflate(body)
            if len(zbody) < len(body):
                body = zbody
                headers[b'Content-Encoding'] = b'deflate'
        if len(body) > 10 * 1024 * 1024:
            xlog.warn("body len:%d %s %s", len(body), method, url)
        headers[b'Content-Length'] = utils.to_bytes(str(len(body)))

    # GAE doesn't allow setting the `Host` header
    if b'Host' in headers:
        del headers[b'Host']

    kwargs = {}
    # parameters passed to the GAE side
    if front.config.GAE_PASSWORD:
        kwargs[b'password'] = front.config.GAE_PASSWORD

    # kwargs['options'] =
    kwargs[b'validate'] = front.config.GAE_VALIDATE
    if url.endswith(b".js"):
        kwargs[b'maxsize'] = front.config.JS_MAXSIZE
    else:
        kwargs[b'maxsize'] = front.config.AUTORANGE_MAXSIZE
    kwargs[b'timeout'] = str(timeout)
    # end of GAE parameters

    payload = b'%s %s HTTP/1.1\r\n' % (method, url)
    payload += b''.join(b'%s: %s\r\n' % (k, v) for k, v in list(headers.items())
                        if k not in skip_request_headers)
    # for k, v in headers.items():
    #    xlog.debug("Send %s: %s", k, v)

    for k, v in kwargs.items():
        if isinstance(v, int):
            payload += b'X-URLFETCH-%s: %d\r\n' % (k, v)
        else:
            payload += b'X-URLFETCH-%s: %s\r\n' % (k, utils.to_bytes(v))

    payload = deflate(payload)

    body = b'%s%s%s' % (struct.pack('!h', len(payload)), payload, body)
    request_headers = {}
    request_headers[b'Content-Length'] = str(len(body))
    # request_headers contains only the entry above

    return request_headers, body
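A minimal sketch of the receiving side implied by the framing above: the first two bytes carry the length of the deflated request head ('!h'), followed by the deflated head and then the body. The helper below is hypothetical, and the assumption that `deflate` produced raw DEFLATE (decoded with negative wbits) is not confirmed by this code.

# Sketch only: splits a packed request back into head block and body.
import struct
import zlib

def unpack_request(blob):
    head_len = struct.unpack('!h', blob[:2])[0]              # 2-byte big-endian length prefix
    deflated_head = blob[2:2 + head_len]                      # deflated request line + headers
    body = blob[2 + head_len:]                                # remaining bytes are the (possibly deflated) body
    head = zlib.decompress(deflated_head, -zlib.MAX_WBITS)    # raw DEFLATE; assumption
    return head, body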
def send_conn_data(self, conn_id, data, no_delay=False):
    if not self.running:
        return

    # xlog.debug("upload conn_id:%d, len:%d", conn_id, len(data))
    buf = base_container.WriteBuffer()
    buf.append(struct.pack("<II", conn_id, len(data)))
    buf.append(data)
    self.send_buffer.put(buf)

    if self.send_buffer.pool_size > g.config.max_payload or \
            time.time() - self.last_send_time > self.send_delay:
        # xlog.debug("notify on send conn data")
        self.wait_queue.notify()
def pack_request(method, url, headers, body, timeout):
    headers = dict(headers)
    if isinstance(body, basestring) and body:
        if len(body) < 10 * 1024 * 1024 and 'Content-Encoding' not in headers:
            # small enough to compress
            zbody = deflate(body)
            if len(zbody) < len(body):
                body = zbody
                headers['Content-Encoding'] = 'deflate'
        if len(body) > 10 * 1024 * 1024:
            xlog.warn("body len:%d %s %s", len(body), method, url)
        headers['Content-Length'] = str(len(body))

    # GAE doesn't allow setting the `Host` header
    if 'Host' in headers:
        del headers['Host']

    kwargs = {}
    # parameters passed to the GAE side
    if front.config.GAE_PASSWORD:
        kwargs['password'] = front.config.GAE_PASSWORD

    # kwargs['options'] =
    kwargs['validate'] = front.config.GAE_VALIDATE
    if url.endswith(".js"):
        kwargs['maxsize'] = front.config.JS_MAXSIZE
    else:
        kwargs['maxsize'] = front.config.AUTORANGE_MAXSIZE
    kwargs['timeout'] = str(timeout)
    # end of GAE parameters

    payload = '%s %s HTTP/1.1\r\n' % (method, url)
    payload += ''.join('%s: %s\r\n' % (k, v) for k, v in headers.items()
                       if k not in skip_headers)
    # for k, v in headers.items():
    #    xlog.debug("Send %s: %s", k, v)

    payload += ''.join('X-URLFETCH-%s: %s\r\n' % (k, v) for k, v in kwargs.items() if v)
    payload = deflate(payload)
    # print("len: ", len(payload))

    body = '%s%s%s' % (struct.pack('!h', len(payload)), payload, body)
    request_headers = {}
    request_headers['Content-Length'] = str(len(body))
    # request_headers contains only the entry above

    return request_headers, body
def __str__(self):
    output = Processed.__str__(self)
    output += pack(
        self.struct,
        self.id,
        self.name_singular,
        self.name_plural,
        self.unit_singular,
        self.unit_plural,
        self.description,
        self.weight,
        self.size,
        self.modify_time,
    )
    return output
def send_conn_data(self, conn_id, data): if not self.running: xlog.warn("send_conn_data but not running") return # xlog.debug("upload conn:%d, len:%d", conn_id, len(data)) buf = base_container.WriteBuffer() buf.append(struct.pack("<II", conn_id, len(data))) buf.append(data) self.send_buffer.put(buf) if self.oldest_received_time == 0: self.oldest_received_time = time.time() elif self.send_buffer.pool_size > g.config.max_payload or \ time.time() - self.oldest_received_time > self.send_delay: # xlog.debug("notify on send conn data") self.wait_queue.notify()
def transfer_received_data(self, data): with self.recv_notice: if self.transfered_close_to_peer: return buf = WriteBuffer(struct.pack("<IB", self.next_recv_seq, 1)) buf.append(data) self.next_recv_seq += 1 self.received_position += len(data) if self.received_position < 16 * 1024: no_delay = True else: no_delay = False self.session.send_conn_data(self.conn_id, buf, no_delay)
def __str__(self):
    output = Describable.__str__(self)
    output += pack(self.struct,
                   self.id,
                   self._subtype,
                   self.name,
                   self.size,
                   self.pos[0],
                   self.pos[1],
                   self.pos[2],
                   self.vel[0],
                   self.vel[1],
                   self.vel[2],
                   self.contains,
                   self.order_types,
                   self.order_number,
                   self.modify_time)
    return output
def serialize(self):
    body = self.serialize_body()
    self.body_len = len(body)

    # Build the common frame header.
    # First, get the flags.
    flags = 0

    for flag, flag_bit in self.defined_flags:
        if flag in self.flags:
            flags |= flag_bit

    header = struct.pack(
        "!HBBBL",
        (self.body_len & 0xFFFF00) >> 8,  # Length is spread over top 24 bits
        self.body_len & 0x0000FF,
        self.type,
        flags,
        self.stream_id & 0x7FFFFFFF  # Stream ID is 32 bits.
    )

    return header + body
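The header packed above is 9 bytes: a 24-bit length split across an H and a B, then the frame type, flags, and a 31-bit stream id. A minimal sketch of the matching parse; the function name is illustrative, not part of the source.

# Sketch: inverse of the "!HBBBL" header built in serialize() above.
import struct

def parse_frame_header(header):
    length_high, length_low, frame_type, flags, stream_id = struct.unpack("!HBBBL", header[:9])
    body_len = (length_high << 8) | length_low   # recombine the 24-bit length
    stream_id &= 0x7FFFFFFF                      # the top bit is reserved
    return body_len, frame_type, flags, stream_id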
def serialize_body(self): return struct.pack("!L", self.error_code)
def login_session(self):
    if len(g.server_host) == 0:
        return False

    start_time = time.time()
    while time.time() - start_time < 30:
        try:
            magic = b"P"
            pack_type = 1
            upload_data_head = struct.pack("<cBB8sIHIIHH", magic, g.protocol_version, pack_type,
                                           bytes(self.session_id),
                                           g.config.max_payload, g.config.send_delay,
                                           g.config.windows_size, int(g.config.windows_ack),
                                           g.config.resend_timeout, g.config.ack_delay)
            upload_data_head += struct.pack("<H", len(g.config.login_account)) + \
                bytes(g.config.login_account, encoding='iso-8859-1')
            upload_data_head += struct.pack("<H", len(g.config.login_password)) + \
                bytes(g.config.login_password, encoding='iso-8859-1')

            upload_post_data = encrypt_data(upload_data_head)

            content, status, response = g.http_client.request(method="POST", host=g.server_host,
                                                              path="/data", data=upload_post_data,
                                                              timeout=g.config.network_timeout)

            time_cost = time.time() - start_time
            if status == 521:
                g.last_api_error = "session server is down."
                xlog.warn("login session server is down, try get new server.")
                g.server_host = None
                return False

            if status != 200:
                g.last_api_error = "session server login fail:%r" % status
                xlog.warn("login session fail, status:%r", status)
                continue

            if len(content) < 6:
                g.last_api_error = "session server protocol fail, login res len:%d" % len(content)
                xlog.error("login data len:%d fail", len(content))
                continue

            info = decrypt_data(content)
            magic, protocol_version, pack_type, res, message_len = struct.unpack("<cBBBH", info[:6])
            message = info[6:]
            if isinstance(message, memoryview):
                message = message.tobytes()

            if magic != b"P" or protocol_version != g.protocol_version or pack_type != 1:
                xlog.error("login_session time:%d head error:%s", 1000 * time_cost, utils.str2hex(info[:6]))
                return False

            if res != 0:
                g.last_api_error = "session server login fail, code:%d msg:%s" % (res, message)
                xlog.warn("login_session time:%d fail, res:%d msg:%s", 1000 * time_cost, res, message)
                return False

            g.last_api_error = ""
            xlog.info("login_session %s time:%d msg:%s", self.session_id, 1000 * time_cost, message)
            return True
        except Exception as e:
            xlog.exception("login_session e:%r", e)
            time.sleep(1)

    return False
def __str__(self):
    output = Processed.__str__(self)
    output += pack(self.struct, self.s)
    return output
def serialize_body(self): data = struct.pack("!LL", self.last_stream_id & 0x7FFFFFFF, self.error_code) data += self.additional_data return data
def __str__(self):
    output = Processed.__str__(self)
    output += pack(self.struct, self.id, self.name, self.description, self.number, self.modify_time)
    return output
def serialize_body(self):
    padding_data = self.serialize_padding_data()
    padding = b'\0' * self.total_padding
    data = struct.pack("!L", self.promised_stream_id)
    return b''.join([padding_data, data, self.data, padding])
def __str__(self):
    output = Describable.__str__(self)
    output += pack(self.struct, self.id, self.slot, self._subtype, self.turns, self.resources)
    return output
def serialize_padding_data(self):
    if b'PADDED' in self.flags:
        return struct.pack('!B', self.pad_length)
    return b''
def __str__(self):
    output = Object.__str__(self)
    output += pack(self.substruct, self.owner, self.ships, self.damage)
    return output
def sn_payload_head(sn, payload): return struct.pack("<II", sn, len(payload))
def serialize_body(self): settings = [ struct.pack("!HL", setting & 0xFF, value) for setting, value in list(self.settings.items()) ] return b''.join(settings)
def __str__(self):
    output = Processed.__str__(self)
    output += pack(self.struct, self.key, self.left, self.ids, self.since)
    return output
def __str__(self):
    output = Processed.__str__(self)
    output += pack(self.struct, self.id, self.modify_time, self.categories, self.name,
                   self.description, self.requirements, self.properties)
    return output
def normal_round_trip_worker(self, work_id):
    while self.running:
        data, ack = self.get_send_data(work_id)
        if not self.running:
            return

        send_data_len = len(data)
        send_ack_len = len(ack)
        transfer_no = self.get_transfer_no()
        # xlog.debug("trip:%d no:%d send data:%s", work_id, transfer_no, parse_data(data))

        magic = b"P"
        pack_type = 2

        if self.send_buffer.pool_size > g.config.max_payload or \
                (self.send_buffer.pool_size and len(self.wait_queue.waiters) < g.config.min_on_road):
            server_timeout = 0
        elif work_id > g.config.concurent_thread_num * 0.9:
            server_timeout = 1
        elif work_id > g.config.concurent_thread_num * 0.7:
            server_timeout = 3
        else:
            server_timeout = g.config.roundtrip_timeout

        request_session_id = self.session_id
        upload_data_head = struct.pack("<cBB8sIBIH", magic, g.protocol_version, pack_type,
                                       bytes(self.session_id),
                                       transfer_no, server_timeout, send_data_len, send_ack_len)

        upload_post_buf = base_container.WriteBuffer(upload_data_head)
        upload_post_buf.append(data)
        upload_post_buf.append(ack)
        upload_post_data = upload_post_buf.to_bytes()
        upload_post_data = encrypt_data(upload_post_data)

        self.last_send_time = time.time()
        sleep_time = 1

        start_time = time.time()

        with self.lock:
            self.on_road_num += 1
            self.transfer_list[transfer_no] = {}
            self.transfer_list[transfer_no]["stat"] = "request"
            self.transfer_list[transfer_no]["start"] = start_time

        # xlog.debug("start trip transfer_no:%d send_data_len:%d ack_len:%d timeout:%d",
        #            transfer_no, send_data_len, send_ack_len, server_timeout)

        try:
            content, status, response = g.http_client.request(method="POST", host=g.server_host,
                                                              path="/data?tid=%d" % transfer_no,
                                                              data=upload_post_data,
                                                              headers={"Content-Length": str(len(upload_post_data))},
                                                              timeout=server_timeout + g.config.network_timeout)

            traffic = len(upload_post_data) + len(content) + 645
            self.traffic += traffic
            g.quota -= traffic
            if g.quota < 0:
                g.quota = 0
        except Exception as e:
            xlog.exception("request except:%r ", e)
            time.sleep(sleep_time)
            continue
        finally:
            with self.lock:
                self.on_road_num -= 1
                try:
                    if transfer_no in self.transfer_list:
                        del self.transfer_list[transfer_no]
                except:
                    pass

        g.stat["roundtrip_num"] += 1
        roundtrip_time = (time.time() - start_time)

        if status == 521:
            xlog.warn("X-tunnel server is down, try get new server.")
            g.server_host = None
            self.stop()
            login_process()
            return

        if status != 200:
            xlog.warn("roundtrip time:%f transfer_no:%d send:%d status:%r ",
                      roundtrip_time, transfer_no, send_data_len, status)
            time.sleep(sleep_time)
            continue

        recv_len = len(content)
        if recv_len < 6:
            xlog.warn("roundtrip time:%f transfer_no:%d send:%d recv:%d Head",
                      roundtrip_time, transfer_no, send_data_len, recv_len)
            continue

        content = decrypt_data(content)
        payload = base_container.ReadBuffer(content)

        magic, version, pack_type = struct.unpack("<cBB", payload.get(3))
        if magic != b"P" or version != g.protocol_version:
            xlog.warn("get data head:%s", utils.str2hex(content[:2]))
            time.sleep(sleep_time)
            continue

        if pack_type == 3:  # error report
            error_code, message_len = struct.unpack("<BH", payload.get(3))
            message = payload.get(message_len)
            # xlog.warn("report code:%d, msg:%s", error_code, message)
            if error_code == 1:  # no quota
                xlog.warn("x_server error:no quota")
                self.stop()
                return
            elif error_code == 2:  # unpack error
                xlog.warn("roundtrip time:%f transfer_no:%d send:%d recv:%d unpack_error:%s",
                          roundtrip_time, transfer_no, send_data_len, len(content), message)
                continue
            elif error_code == 3:  # session not exist
                if self.session_id == request_session_id:
                    xlog.warn("server session_id:%s not exist, reset session.", request_session_id)
                    self.reset()
                    return
                else:
                    continue
            else:
                xlog.error("unknown error code:%d, message:%s", error_code, message)
                time.sleep(sleep_time)
                continue

        if pack_type != 2:  # normal download traffic pack
            xlog.error("pack type:%d", pack_type)
            time.sleep(100)
            continue

        time_cost, server_send_pool_size, data_len, ack_len = struct.unpack("<IIIH", payload.get(14))

        xlog.debug("trip:%d no:%d tc:%f cost:%f to:%d snd:%d rcv:%d s_pool:%d on_road:%d target:%d",
                   work_id, transfer_no, roundtrip_time, time_cost / 1000.0, server_timeout,
                   send_data_len, len(content), server_send_pool_size,
                   self.on_road_num, self.target_on_roads)

        if len(self.conn_list) == 0:
            self.target_on_roads = 0
        elif len(content) >= g.config.max_payload:
            self.target_on_roads = \
                min(g.config.concurent_thread_num - g.config.min_on_road, self.target_on_roads + 10)
        elif len(content) <= 21:
            self.target_on_roads = max(g.config.min_on_road, self.target_on_roads - 5)
        self.trigger_more()

        rtt = roundtrip_time * 1000 - time_cost
        rtt = max(100, rtt)
        speed = (send_data_len + len(content) + 400) / rtt
        response.worker.update_debug_data(rtt, send_data_len, len(content), speed)
        if rtt > 8000:
            xlog.debug("rtt:%d speed:%d trace:%s", rtt, speed, response.worker.get_trace())
            xlog.debug("task trace:%s", response.task.get_trace())
            g.stat["slow_roundtrip"] += 1

        try:
            data = payload.get_buf(data_len)
            ack = payload.get_buf(ack_len)
        except Exception as e:
            xlog.warn("trip:%d no:%d data not enough %r", work_id, transfer_no, e)
            continue

        # xlog.debug("trip:%d no:%d recv data:%s", work_id, transfer_no, parse_data(data))

        try:
            self.round_trip_process(data, ack)
            self.last_receive_time = time.time()
        except Exception as e:
            xlog.exception("data process:%r", e)

    xlog.info("roundtrip thread exit")
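The upload head packed in the worker above is 22 bytes ("<cBB8sIBIH"). A hedged sketch of how the server end might read it before slicing out the data and ack blocks; the function name is hypothetical and the field names simply mirror the client-side variables.

# Sketch: server-side parse of the round-trip upload head built above.
import struct

def parse_upload_head(blob):
    (magic, protocol_version, pack_type, session_id,
     transfer_no, server_timeout, send_data_len, send_ack_len) = struct.unpack("<cBB8sIBIH", blob[:22])
    data = blob[22:22 + send_data_len]                                 # uploaded connection data
    ack = blob[22 + send_data_len:22 + send_data_len + send_ack_len]   # uploaded ack block
    return magic, protocol_version, pack_type, session_id, transfer_no, server_timeout, data, ack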
def serialize_priority_data(self): return struct.pack("!LB", self.depends_on | (int(self.exclusive) << 31), self.stream_weight)
def __str__(self):
    output = Processed.__str__(self)
    output += pack(self.struct, self.time, self.reason, self.turn_num, self.turn_name)
    return output
def __str__(self):
    output = Processed.__str__(self)
    output += pack(self.struct, self.id, self.name, self.race_name)
    return output
def __str__(self):
    output = Processed.__str__(self)
    output += pack(self.struct, self.username, self.password, self.email, self.comment)
    return output
def __str__(self):
    output = Processed.__str__(self)
    output += pack(self.struct, self.errno, self.s, self.references)
    return output
def __str__(self): """\ Produce a string suitable to be send over the wire. """ output = pack(Header.struct, self.protocol, self.sequence, self._type, self.length) return output
def serialize_body(self): return struct.pack("!L", self.window_increment & 0x7FFFFFFF)