def thread_way():
    """Multi-threaded way: fetch poetry from every address concurrently.

    One worker thread per address runs ``get_poetry``; the function returns
    once every download has finished.

    Bug fix: the original started *and* joined each thread inside the loop,
    so downloads ran one after another — no concurrency at all.  Start all
    threads first, then join them all.
    """
    addresses = parse_args()
    threads = []
    for address in addresses:
        t = Thread(target=get_poetry, args=(address,))
        t.start()
        threads.append(t)
    # Wait for every download to complete before returning.
    for t in threads:
        t.join()
def process_way():
    """Multi-process way: fetch poetry from every address in parallel.

    Fans the downloads out over a pool of four worker processes, then
    blocks until all of them have completed.
    """
    pool = Pool(4)
    for address in parse_args():
        # Fire-and-forget submit; results are not collected here.
        pool.apply_async(get_poetry, args=(address,))
    pool.close()   # no further tasks will be submitted
    pool.join()    # wait for all workers to drain the queue
def sync_way():
    """Synchronous way: fetch poems one after another, timing each one.

    Prints a per-task progress line, a per-task timing summary, and a final
    total.  Bug fix: the closing ``print`` used ``i + 1``, which raises
    ``NameError`` when there are no addresses; ``len(addresses)`` is
    identical for non-empty input and safe for empty input.
    """
    # Materialize so len() works even if parse_args returns an iterator.
    # Typical input: server ports such as 8000 8001 8002.
    addresses = list(parse_args())
    elapsed = datetime.timedelta()
    for i, address in enumerate(addresses):
        addr_fmt = format_address(address)
        print('Task %d: get poetry from: %s' % (i + 1, addr_fmt))
        start = datetime.datetime.now()
        poem = nonblocking_way(address)
        time = datetime.datetime.now() - start
        msg = 'Task %d: got %d bytes of poetry from %s in %s'
        print(msg % (i + 1, len(poem), addr_fmt, time))
        elapsed += time
    print('Got %d poems in %s' % (len(addresses), elapsed))
import asyncio
import datetime

from parser_util import parse_args

addresses = list(parse_args())


async def fetch(url):
    """Fetch one server's full response over a raw TCP connection.

    ``url`` is a (host, port) pair.  Sends a minimal HTTP/1.0 GET and reads
    until EOF — HTTP/1.0 servers close the connection after the response,
    so ``reader.read()`` returns the whole body.
    """
    reader, writer = await asyncio.open_connection(host=url[0], port=url[1])
    get = b'GET / HTTP/1.0\r\nHost: localhost\r\n\r\n'
    writer.write(get)
    response = await reader.read()
    writer.close()
    return response


def main():
    """Run all fetches concurrently on a single event loop."""
    loop = asyncio.get_event_loop()
    tasks = [fetch(address) for address in addresses]
    try:
        loop.run_until_complete(asyncio.gather(*tasks))
    finally:
        loop.close()  # fix: the loop was never closed (ResourceWarning on exit)


if __name__ == '__main__':
    start = datetime.datetime.now()
    main()
    elapsed = datetime.datetime.now() - start
    print('done in %s' % elapsed)
import torch.utils.data.sampler
import os
import glob
import random
import time
import configs
from convnet import Convnet
from data.datamgr import SetDataManager
from parser_util import parse_args, get_best_file
from utils import set_device, euclidean_dist

if __name__ == '__main__':
    # Few-shot evaluation setup.  NOTE(review): this chunk ends after the
    # checkpoint-directory construction — the actual evaluation loop that
    # fills acc_all presumably follows outside this view.
    params = parse_args('test')
    device = set_device(params)  # choose CPU/GPU device from params
    acc_all = []                 # to be filled with per-episode accuracies
    image_size = 84
    iter_num = 600               # presumably the number of test episodes — confirm downstream
    few_shot_params = dict(n_way=params.test_n_way, n_support=params.n_shot)
    model = Convnet()
    model = model.to(device)
    # Checkpoints live under e.g. <save_dir>/checkpoints/<dataset>_5way_1shot,
    # keyed by the *training* n_way, not the test n_way.
    checkpoint_dir = '%s/checkpoints/%s_%dway_%dshot' % (
        configs.save_dir, params.dataset, params.train_n_way, params.n_shot)
    if params.train_aug:
        # Models trained with data augmentation are stored in a separate dir.
        checkpoint_dir += '_aug'
outfile = os.path.join(params.checkpoint_dir, 'best_model.tar') torch.save({'epoch': epoch, 'state': model.state_dict()}, outfile) # torch.save(trlog, params.checkpoint_dir+'/trlog') if (epoch % params.save_freq == 0) or (epoch == stop_epoch - 1): outfile = os.path.join(params.checkpoint_dir, '{:d}.tar'.format(epoch)) torch.save({'epoch': epoch, 'state': model.state_dict()}, outfile) return model if __name__ == '__main__': params = parse_args('train') ###########################setting############################## set_seed(params) ###########################dataset################################ base_file = configs.data_dir[params.dataset] + 'base.json' val_file = configs.data_dir[params.dataset] + 'val.json' if params.dataset == 'omniglot': image_size = 28 else: image_size = 84 if params.stop_epoch == -1: if params.n_shot == 1: