def get_request(soc: socket.socket,
                progress: Optional[Callable[[int, int, int], None]] = None
                ) -> Optional[bytes]:
    """Receive one length-prefixed message from *soc*.

    Reads a 4-byte unsigned-int header carrying the payload length, then
    the payload itself (forwarding *progress* to the receive helper).

    :return: the payload as ``bytes``, or ``None`` when either the header
             or the payload could not be received.
    """
    try:
        # Header: native-order unsigned int giving the payload size.
        (length,) = struct.unpack("I", notnone(recv_bytes(soc, 4)))
        payload = notnone(recv_bytes(soc, length, progress=progress))
    except (TypeError, AssertionError):
        # recv_bytes signalled failure (None result / assertion helper).
        return None
    return bytes(payload)
def flush_data(self):
    """Write all downloaded segments to the output file.

    Removes the on-disk resume file when resume data was loaded, then
    writes every segment of ``self.data`` into ``self.file_name``.
    NOTE(review): this changes the process working directory via
    ``os.chdir`` — callers relying on the CWD afterwards should beware.
    """
    if self.resume_data is not None:
        # A resume file from a previous partial run is no longer needed
        # once the complete data is being flushed.
        os.remove('resume.pyb')
    os.chdir(self.output)
    target = notnone(self.file_name)
    with open(target, 'wb') as out:
        # writelines accepts any iterable of bytes for a binary file.
        out.writelines(self.data)
async def verify(list_: MutableSequence[Tuple[Tuple[int, int], Optional[bytes]]]) \
        -> Sequence[bytes]:
    # Resolve any missing (None) segments by re-fetching their byte range.
    # NOTE(review): this is a nested closure of get_data — it relies on
    # `fetch` and `self` from the enclosing scope and is not callable
    # standalone.
    # Enumerate is required since we are updating the list contents
    # (compensating for pass-by-value of the unpacked tuple).
    for i, ((start, end), seg) in enumerate(list_):
        try:
            # A None segment means this chunk's transfer failed; retry it.
            if seg is None:
                list_[i] = (
                    (start, end),
                    bytes(b"".join(
                        # Assert that all the contents of the sequence are
                        # bytes; recurse until the content has been resolved
                        # or all the connections have been refused.
                        assertsequencetype(
                            bytes,
                            flatten_bytes(await verify(await fetch(start, end))),
                        ))))
        except AssertionError:
            # All connections have been closed — restart the whole download.
            await self.get_data()
            # This should never be reached since the self-call above
            # takes over (or quits the program).
            return []
    return [notnone(seg) for (_, _), seg in list_]
async def fetch(start: int, end: int) \
        -> MutableSequence[Tuple[Tuple[int, int], Optional[bytes]]]:
    # Fetch the byte range [start, end) concurrently across all ports.
    # NOTE(review): this is a nested closure of get_data — it relies on
    # `self`, `normalize` and `_print` from the enclosing scope and is not
    # callable standalone.
    # Regenerate the connections
    self.generate_connections()
    # Split the range into one sub-range per connection.
    # NOTE(review): `spilt` is presumably a misspelling of "split" defined
    # elsewhere in the project — keep the name as-is here.
    _split: Sequence[Tuple[int, int]] = \
        spilt(file_size=notnone(end - start), parts=len(self.ports))
    # Fetch the data for each connection
    _data: Sequence[Optional[bytes]] = [
        # Assert that each element is bytes or None (failed transfer)
        assertoptionaltype(bytes, data) for data in
        # Run all per-connection transfers concurrently
        await asyncio.gather(
            *(self._async_get(soc, Request.TRANSFER,
                              normalize(tp, start),
                              progress=_print, decode=False)
              for soc, tp in zip(notnone(self.conns), _split)))
    ]
    # Pair every sub-range with the data (or None) received for it.
    return list(zip(_split, _data))
def _start(self):
    """Serve client requests forever on ``self.port``.

    Accepts one connection at a time, decodes the request kind, answers
    CHECKSUM / FILE_NAME / FILE_SIZE / TRANSFER requests, and logs any
    ``OSError`` / ``AssertionError`` to ``log_server.log`` before
    continuing with the next connection.
    """
    with network.create_server_connection(network.get_local_ip(),
                                          self.port) as soc:
        # listen() only needs to be called once for the lifetime of the
        # server socket (the original re-called it every iteration).
        soc.listen()
        while True:
            self.request = None
            try:
                c_soc, _ = soc.accept()
                try:
                    request, *params = network.decode_parameter(
                        notnone(network.get_request(c_soc)))
                    self.request = Request(request)
                    self.update()
                    if self.request == Request.CHECKSUM:
                        network.send_request(
                            c_soc,
                            network.encode_parameter(
                                file.gen_checksum(self.src)))
                    elif self.request == Request.FILE_NAME:
                        network.send_request(
                            c_soc,
                            network.encode_parameter(
                                file.get_file_name(self.src)))
                    elif self.request == Request.FILE_SIZE:
                        network.send_request(
                            c_soc,
                            network.encode_parameter(
                                str(file.get_size(self.src))))
                    elif self.request == Request.TRANSFER:
                        # params carry the decoded [start, end) byte offsets
                        start, end = int(params[0]), int(params[1])
                        with open(self.src, 'rb') as f:
                            f.seek(start)
                            data = f.read(end - start)
                        network.send_request(c_soc, data)
                finally:
                    # Always release the client socket — the original
                    # leaked it whenever request handling raised.
                    c_soc.close()
            except (OSError, AssertionError) as e:
                con.error("Error occurred: {}".format(e))
                with open('log_server.log', 'a+') as f:
                    # Trailing newline added so appended log entries no
                    # longer run together on one line.
                    f.write('[{}] ERROR {}\n'.format(
                        datetime.datetime.now(), e))
async def get_data(self) -> None:
    """Download the whole file from the server pool into ``self.data``.

    Fetches the file in parallel chunks (one per configured port),
    retries any chunk that came back as ``None``, and stores the verified
    segments in ``self.data``.  Resume data, when present, seeds the
    unverified segment list instead of a fresh fetch.
    """
    # Running total of received bytes, shared with the progress callback.
    total: int = 0

    def _print(start: int, end: int, size: int) -> None:
        # Progress callback handed to _async_get: accumulates the byte
        # count and redraws a single in-place status line (\r + end='').
        nonlocal total
        total += size
        print(
            f'\rProgress: {con.pretty_size(total)} / {con.pretty_size(self.file_size)},'
            f'{con.pretty_size(size)}ps, Chunk[{start}/{end}]',
            sep=' ', end='')

    def normalize(_tuple: Tuple[int, int], start: int = 0) -> Sequence[str]:
        # Convert a (begin, end) tuple from the file-split helper into the
        # str offsets sent as request parameters, shifted by *start*.
        return [str(start + _tuple[0]), str(start + _tuple[1])]

    async def fetch(start: int, end: int) \
            -> MutableSequence[Tuple[Tuple[int, int], Optional[bytes]]]:
        # Fetch the byte range [start, end) concurrently across all ports.
        # Regenerate the connections first.
        self.generate_connections()
        # Split the range into one sub-range per connection.
        # NOTE(review): `spilt` is presumably a misspelling of "split"
        # defined elsewhere in the project — keep the name as-is here.
        _split: Sequence[Tuple[int, int]] = \
            spilt(file_size=notnone(end - start), parts=len(self.ports))
        # Fetch the data for each connection
        _data: Sequence[Optional[bytes]] = [
            # Assert that each element is bytes or None (failed transfer)
            assertoptionaltype(bytes, data) for data in
            # Run all per-connection transfers concurrently
            await asyncio.gather(
                *(self._async_get(soc, Request.TRANSFER,
                                  normalize(tp, start),
                                  progress=_print, decode=False)
                  for soc, tp in zip(notnone(self.conns), _split)))
        ]
        # Pair every sub-range with the data (or None) received for it.
        return list(zip(_split, _data))

    async def verify(list_: MutableSequence[Tuple[Tuple[int, int], Optional[bytes]]]) \
            -> Sequence[bytes]:
        # Resolve any missing (None) segments by re-fetching their range.
        # Enumerate is required since we are updating the list contents
        # (compensating for pass-by-value of the unpacked tuple).
        for i, ((start, end), seg) in enumerate(list_):
            try:
                # A None segment means the chunk's transfer failed; retry.
                if seg is None:
                    list_[i] = (
                        (start, end),
                        bytes(b"".join(
                            # Assert that all the contents of the sequence
                            # are bytes; recurse until the content has been
                            # resolved or all connections were refused.
                            assertsequencetype(
                                bytes,
                                flatten_bytes(await verify(await fetch(start, end))),
                            ))))
            except AssertionError:
                # All connections have been closed — restart the download.
                await self.get_data()
                # Never actually used: the self-call above takes over
                # (or quits the program).
                return []
        return [notnone(seg) for (_, _), seg in list_]

    # Check if there is resume data
    if self.resume_data is not None:
        # Seed the unverified segment list from the resume file.
        self.data_unfinished = self.resume_data.data
    else:
        # Otherwise fetch the whole file into the unverified list.
        self.data_unfinished = await fetch(0, notnone(self.file_size))
    # Verify the data integrity (re-fetching any missing segments).
    self.data = await verify(self.data_unfinished)
    # Finish the in-place progress line with a newline.
    print()
def get_file_size(self) -> None:
    """Query the server for the source file's size and cache it.

    Opens connection 0, sends a FILE_SIZE request and stores the reply
    (an integer byte count) in ``self.file_size``.
    """
    self.generate_connections(sockets=(0, ))
    # _get returns the decoded parameter sequence; notnone asserts each
    # step actually produced data, first takes the single reply value.
    response = notnone(self._get(self.conns[0], Request.FILE_SIZE))
    raw_size = notnone(first(response))
    self.file_size = int(raw_size)
def get_file_name(self) -> None:
    # Query the server (over connection 0) for the source file's name and
    # cache it on the instance; mirrors get_file_size().
    self.generate_connections(sockets=(0, ))
    # _get returns the decoded parameter sequence; first() takes the single
    # reply value, notnone() asserts each step actually produced data.
    self.file_name = str(
        notnone(first(notnone(self._get(self.conns[0], Request.FILE_NAME)))))