def _upload_slice_file(self, request):
    """Upload a local file to COS using slice (multipart) upload.

    Runs the control step first; on a hash hit the server already has the
    file and the control response is returned directly.  Otherwise the file
    is uploaded slice by slice, either serially or through a thread pool,
    and the finish step is issued to assemble the slices.

    :param request: an UploadSliceFileRequest describing the local file,
                    slice size, sha1 option and max concurrency.
    :return: dict response; u'code' == 0 on success, non-zero on failure.
    """
    assert isinstance(request, UploadSliceFileRequest)
    param_err = self._check_params(request)
    if param_err is not None:
        return param_err

    local_path = request.get_local_path()
    slice_size = request.get_slice_size()
    if request.enable_sha1 is True:
        # Per-slice sha1 list; the last entry's "datasha" is the whole-file hash.
        slice_sha1s = Sha1Util.get_sha1_by_slice(local_path, slice_size)
        request.sha1_list = slice_sha1s
        request.sha1_content = slice_sha1s[-1]["datasha"]
    else:
        request.sha1_list = None
        request.sha1_content = None

    control_ret = self._upload_slice_control(request)
    # Control step reported an error: propagate it unchanged.
    if control_ret[u'code'] != 0:
        return control_ret
    # Hash hit: the server already has this content, no data transfer needed.
    if u'access_url' in control_ret[u'data']:
        return control_ret

    local_path = request.get_local_path()
    file_size = os.path.getsize(local_path)
    # The server may override the client's slice size.
    if u'slice_size' in control_ret[u'data']:
        slice_size = control_ret[u'data'][u'slice_size']
    session = control_ret[u'data'][u'session']

    # Serial mode when concurrency is disabled or the server demands it.
    serial_mode = request._max_con <= 1 or (
        u'serial_upload' in control_ret[u'data']
        and control_ret[u'data'][u'serial_upload'] == 1)

    offset = 0
    if serial_mode:
        logger.info("upload file serially")
        slice_idx = 0
        with open(local_path, 'rb') as local_file:
            while offset < file_size:
                chunk = local_file.read(slice_size)
                data_ret = self._upload_slice_data(
                    request, chunk, session, offset)
                # Any per-slice error ends the upload immediately.
                if data_ret[u'code'] != 0:
                    return data_ret
                # access_url in a slice response means the upload completed.
                if u'access_url' in data_ret[u'data']:
                    return data_ret
                offset += slice_size
                slice_idx += 1
    else:
        logger.info('upload file concurrently')
        from threadpool import SimpleThreadPool
        pool = SimpleThreadPool(request._max_con)
        slice_idx = 0
        with open(local_path, 'rb') as local_file:
            while offset < file_size:
                chunk = local_file.read(slice_size)
                pool.add_task(self._upload_slice_data,
                              request, chunk, session, offset)
                offset += slice_size
                slice_idx += 1
        pool.wait_completion()
        result = pool.get_result()
        if not result['success_all']:
            return {u'code': 1, u'message': str(result)}

    # All slices sent: ask the server to assemble the file.
    return self._upload_slice_finish(request, session, file_size)
def _upload_slice_file_from_buffer(self, request):
    """Upload an in-memory buffer to COS using slice (multipart) upload.

    Mirrors _upload_slice_file but reads slices from the request's data
    buffer instead of a local file.  Sha1-by-slice is not supported for
    buffers, so sha1_list/sha1_content are always cleared.

    :param request: an UploadSliceFileFromBufferRequest carrying the bytes
                    to upload, slice size and max concurrency.
    :return: dict response; u'code' == 0 on success, non-zero on failure.
    """
    assert isinstance(request, UploadSliceFileFromBufferRequest)
    check_params_ret = self._check_params(request)
    if check_params_ret is not None:
        return check_params_ret

    data = request.get_data()
    slice_size = request.get_slice_size()
    # Fix: dropped the unused `enable_sha1` local — buffer uploads never
    # compute per-slice sha1, so the flag was read but never acted on.
    request.sha1_list = None
    request.sha1_content = None

    control_ret = self._upload_slice_control_from_buffer(request)
    # Control step reported an error: propagate it unchanged.
    if control_ret[u'code'] != 0:
        return control_ret
    # Hash hit: the server already has this content, no data transfer needed.
    if u'access_url' in control_ret[u'data']:
        return control_ret

    data = request.get_data()
    file_size = len(data)
    # The server may override the client's slice size.
    if u'slice_size' in control_ret[u'data']:
        slice_size = control_ret[u'data'][u'slice_size']
    offset = 0
    session = control_ret[u'data'][u'session']

    # Serial mode when concurrency is disabled or the server demands it.
    if request._max_con <= 1 or (
            u'serial_upload' in control_ret[u'data']
            and control_ret[u'data'][u'serial_upload'] == 1):
        logger.info("upload file serially")
        slice_idx = 0
        while offset < file_size:
            file_content = data[offset:offset + slice_size]
            data_ret = self._upload_slice_data(
                request, file_content, session, offset)
            if data_ret[u'code'] == 0:
                # access_url in a slice response means the upload completed.
                if u'access_url' in data_ret[u'data']:
                    return data_ret
            else:
                # Any per-slice error ends the upload immediately.
                return data_ret
            offset += slice_size
            slice_idx += 1
    else:
        logger.info('upload file concurrently')
        from threadpool import SimpleThreadPool
        pool = SimpleThreadPool(request._max_con)
        slice_idx = 0
        while offset < file_size:
            file_content = data[offset:offset + slice_size]
            pool.add_task(self._upload_slice_data,
                          request, file_content, session, offset)
            offset += slice_size
            slice_idx += 1
        pool.wait_completion()
        result = pool.get_result()
        if not result['success_all']:
            return {u'code': 1, u'message': str(result)}

    # All slices sent: ask the server to assemble the file.
    data_ret = self._upload_slice_finish_from_buffer(
        request, session, file_size)
    return data_ret
# NOTE(review): this chunk belongs to a different script (a Selenium-based
# ticket-purchase bot), not to the COS upload client surrounding it.  The
# first span below is the truncated tail of a page-automation method whose
# opening lines (including the `self.driver.find_element_by_xpath(` call and
# the enclosing loop/try headers) are outside this view; it is left
# byte-identical because it cannot be completed without guessing.
# NOTE(review): `raise ('购票人选择出错')` raises a plain string, which is a
# TypeError at runtime — presumably an exception class was intended; cannot
# be fixed here without seeing the method's head.
'//*[@id="confirmOrder_1"]/div[2]/div[2]/div[1]/div[2]/label/span[1]/input').click() except Exception as e: raise ('购票人选择出错') time.sleep(0.5) self.driver.find_element_by_xpath('//div[@class="submit-wrapper"]/button').click() break except Exception as e: print(e) continue


def get_config(section, key):
    """Return the value for *key* under *section* from config.ini.

    The file is re-read on every call; raises configparser errors if the
    file, section or key is missing.
    """
    config = configparser.ConfigParser()
    config.read('config.ini', encoding='UTF-8')
    return config.get(section, key)


def work(ticket_number, driver_path):
    """Run one bot instance: log in and auto-buy *ticket_number* tickets.

    :param ticket_number: number passed through to App.detail_page_auto.
    :param driver_path: path to the chromedriver binary for this instance.
    """
    # 'privilege_val' is read as the account key — TODO confirm against config.ini.
    dotakey = get_config('info', 'privilege_val')
    myapp = App(dotakey, driver_path)
    myapp.login()
    myapp.detail_page_auto(ticket_number)


if __name__ == '__main__':
    # Launch three independent browser sessions in parallel; each needs its
    # own chromedriver binary so the instances do not collide.
    _max_thread = 10
    pool = SimpleThreadPool(_max_thread)
    pool.add_task(work, 3, r"driver/chromedriver.exe")
    pool.add_task(work, 3, r"driver/chromedriver.1.exe")
    pool.add_task(work, 3, r"driver/chromedriver.2.exe")
    pool.wait_completion()
def _upload_slice_file(self, request):
    """Upload a local file to COS using slice (multipart) upload.

    Runs the control step first; on a hash hit the server already has the
    file and the control response is returned directly.  Otherwise the file
    is uploaded slice by slice, either serially or through a thread pool,
    and the finish step is issued to assemble the slices.

    :param request: an UploadSliceFileRequest describing the local file,
                    slice size, sha1 option and max concurrency.
    :return: dict response; u'code' == 0 on success, non-zero on failure.
    """
    assert isinstance(request, UploadSliceFileRequest)
    check_params_ret = self._check_params(request)
    if check_params_ret is not None:
        return check_params_ret

    local_path = request.get_local_path()
    slice_size = request.get_slice_size()
    enable_sha1 = request.enable_sha1
    if enable_sha1 is True:
        # Per-slice sha1 list; the last entry's "datasha" is the whole-file hash.
        sha1_by_slice_list = Sha1Util.get_sha1_by_slice(local_path, slice_size)
        request.sha1_list = sha1_by_slice_list
        request.sha1_content = sha1_by_slice_list[-1]["datasha"]
    else:
        request.sha1_list = None
        request.sha1_content = None

    control_ret = self._upload_slice_control(request)
    # Control step reported an error: propagate it unchanged.
    if control_ret[u'code'] != 0:
        return control_ret
    # Hash hit: the server already has this content, no data transfer needed.
    if u'access_url' in control_ret[u'data']:
        return control_ret

    local_path = request.get_local_path()
    file_size = os.path.getsize(local_path)
    # Fix: the server's slice_size override is optional; reading it
    # unconditionally raised KeyError when the control response omitted it.
    # Guarded lookup now matches the sibling implementation of this method.
    if u'slice_size' in control_ret[u'data']:
        slice_size = control_ret[u'data'][u'slice_size']
    offset = 0
    session = control_ret[u'data'][u'session']

    # Serial mode when concurrency is disabled or the server demands it.
    if request._max_con <= 1 or (
            u'serial_upload' in control_ret[u'data']
            and control_ret[u'data'][u'serial_upload'] == 1):
        logger.info("upload file serially")
        slice_idx = 0
        with open(local_path, 'rb') as local_file:
            while offset < file_size:
                file_content = local_file.read(slice_size)
                data_ret = self._upload_slice_data(
                    request, file_content, session, offset)
                if data_ret[u'code'] == 0:
                    # access_url in a slice response means the upload completed.
                    if u'access_url' in data_ret[u'data']:
                        return data_ret
                else:
                    # Any per-slice error ends the upload immediately.
                    return data_ret
                offset += slice_size
                slice_idx += 1
    else:
        logger.info('upload file concurrently')
        from threadpool import SimpleThreadPool
        pool = SimpleThreadPool(request._max_con)
        slice_idx = 0
        with open(local_path, 'rb') as local_file:
            while offset < file_size:
                file_content = local_file.read(slice_size)
                pool.add_task(self._upload_slice_data,
                              request, file_content, session, offset)
                offset += slice_size
                slice_idx += 1
        pool.wait_completion()
        result = pool.get_result()
        if not result['success_all']:
            return {u'code': 1, u'message': str(result)}

    # All slices sent: ask the server to assemble the file.
    data_ret = self._upload_slice_finish(request, session, file_size)
    return data_ret