Example #1
	def checkSeat(self):
		#130527200212051639
		# Query parameters: target month, province code, and a millisecond timestamp (likely a cache buster).
		body=(("queryMonths","2014-04"),("queryProvinces",11),("_",int(time.time()*1000)))
		url="http://ielts.etest.net.cn/myHome/1968462/queryTestSeats?%s" % urllib.urlencode(body)
		req = urllib2.Request(url)
		req.add_header('Accept','text/html,application/xhtml+xml,application/xml,application/json')
		req.add_header('X-Requested-With','XMLHttpRequest')
		try:
			raw = urllib2.urlopen(req)
		except urllib2.URLError:
			config.log('access %s failed' % url)
			sys.exit(1)
		res = raw.read()
		if self.debug:
			open('%s/seats.res' % config.dir_data(),'a').write(res)
		obj = json.loads(res)
		seats=[]
		seatNum=0
		if not isinstance(obj, dict):
			open('%s/bad.res' % config.dir_data(), 'a').write(res)
			config.log('check seats failed')
			return seats
		for day in obj.keys():
			for center in obj[day]:
				seatNum+=1
				if center['levelCode']=='A/G' and center['optStatus']==1:
					seats.append(center['seatGuid'])
		config.log('%d seats found from %d seats' % (len(seats), seatNum))
		if seats:
			self.sms('trying')
		return seats
Example #2
	def bookSeat(self, sid):
		# First request: createOrderConfirm for the selected seat.
		url='http://ielts.etest.net.cn/myHome/1968462/createOrderConfirm'
		body=(('seatGuid',sid),)
		res=self.req(url,body)
		if self.debug:
			open('%s/book.res' % config.dir_data(),'a').write(res)
		# Second request: newAppointment to complete the booking.
		url='http://ielts.etest.net.cn/myHome/1968462/newAppointment'
		body=(('seatGuid',sid),)
		res = self.req(url,body)
		if self.debug:
			open('%s/confirm.res' % config.dir_data(),'a').write(res)
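Examples #1 and #2 look like methods of the same booking class (both use self.req and self.debug). A minimal driver that ties them together is sketched below; the function name, the polling interval, and booking the first available seat are assumptions for illustration, not part of the original code.

import time

def poll_and_book(booker, interval_seconds=60):
    # 'booker' is assumed to be an instance of the class defining
    # checkSeat() and bookSeat() in the examples above.
    while True:
        seats = booker.checkSeat()      # list of seatGuid strings
        if seats:
            booker.bookSeat(seats[0])   # createOrderConfirm, then newAppointment
            return seats[0]
        time.sleep(interval_seconds)    # poll interval is an assumption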
Example #3
def import_op_dataset(operator, operand_digits, train_ratio, dev_ratio,
                      test_ratio):
    # Path of op_dataset
    import_path = '{}/{}-bit/{}/op_dataset.pickle'.format(
        config.dir_data(), operand_digits, operator)

    # Import the op_dataset
    with open(import_path, 'rb') as f:
        op_dataset = pickle.load(f)

    # Dataset size
    ds_size = op_dataset['input'].shape[0]

    # Make a training set.
    train_end_index = int(ds_size * train_ratio)
    input_train = op_dataset['input'][:train_end_index, :]
    target_train = op_dataset['output'][:train_end_index, :]

    # Make a development set.
    dev_end_index = int(ds_size * (train_ratio + dev_ratio))

    if dev_ratio != 0:
        input_dev = op_dataset['input'][train_end_index:dev_end_index, :]
        target_dev = op_dataset['output'][train_end_index:dev_end_index, :]
    else:
        input_dev = None
        target_dev = None

    # Make a test set from the remaining samples.
    input_test = op_dataset['input'][dev_end_index:, :]
    target_test = op_dataset['output'][dev_end_index:, :]

    return (input_train, input_dev, input_test, target_train, target_dev,
            target_test)
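A short usage sketch for import_op_dataset; the operator name, operand width, and 80/10/10 split are illustrative assumptions, and the pickled op_dataset file is assumed to already exist under config.dir_data().

(input_train, input_dev, input_test,
 target_train, target_dev, target_test) = import_op_dataset(
    operator='add', operand_digits=8,
    train_ratio=0.8, dev_ratio=0.1, test_ratio=0.1)

# Each piece is a 2-D array sliced along the first axis.
print(input_train.shape, input_dev.shape, input_test.shape)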
Example #4
def import_data(operator, input_bits, mode=''):
    '''
    operator: 'add', 'subtract', 'multiply', 'divide', 'modulo'
    '''
    dir_data_request = '{}/{}-bit/{}'.format(config.dir_data(), input_bits,
                                             operator)
    input_train_path = '{}/input_train.pickle'.format(dir_data_request)
    input_dev_path = '{}/input_dev.pickle'.format(dir_data_request)
    input_test_path = '{}/input_test.pickle'.format(dir_data_request)
    output_train_path = '{}/output_train.pickle'.format(dir_data_request)
    output_dev_path = '{}/output_dev.pickle'.format(dir_data_request)
    output_test_path = '{}/output_test.pickle'.format(dir_data_request)

    with open(input_train_path, 'rb') as f:
        input_train = pickle.load(f)
        #print('Imported from {}.'.format(input_train_path))
    with open(input_dev_path, 'rb') as f:
        input_dev = pickle.load(f)
        #print('Imported from {}.'.format(input_dev_path))
    with open(input_test_path, 'rb') as f:
        input_test = pickle.load(f)
        #print('Imported from {}.'.format(input_test_path))
    with open(output_train_path, 'rb') as f:
        output_train = pickle.load(f)
        #print('Imported from {}.'.format(output_train_path))
    with open(output_dev_path, 'rb') as f:
        output_dev = pickle.load(f)
        #print('Imported from {}.'.format(output_dev_path))
    with open(output_test_path, 'rb') as f:
        output_test = pickle.load(f)
        #print('Imported from {}.'.format(output_test_path))

    return (input_train, input_dev, input_test, output_train, output_dev,
            output_test)
Example #5
	def login(self, userId, userPwd):
		# Initial GET to the login page (likely to obtain session cookies).
		self.req('http://ielts.etest.net.cn/login')
		url="http://ielts.etest.net.cn/login"
		body=(("userId",userId),("userPwd",userPwd),("checkImageCode",""))
		res = self.req(url,body)
		if self.debug:
			open('%s/login.res' % config.dir_data(),'a').write(res)
Example #6
def save_random_datasets(random_datasets, operand_digits):
    (zero_output_dataset, one_output_dataset, fixed_random_output_dataset,
     random_output_dataset) = random_datasets

    # Save each dataset under '<data dir>/<operand_digits>-bit/<name>/op_dataset.pickle'.
    named_datasets = (
        ('zero', zero_output_dataset),
        ('one', one_output_dataset),
        ('fixed_random', fixed_random_output_dataset),
        ('random', random_output_dataset),
    )
    for name, dataset in named_datasets:
        save_dir = '{}/{}-bit/{}'.format(config.dir_data(), operand_digits, name)
        create_dir(save_dir)
        save_path = '{}/op_dataset.pickle'.format(save_dir)
        with open(save_path, 'wb') as f:
            pickle.dump(dataset, f)
        print("Saved in '{}'.".format(save_path))
Example #7
def write_carry_dataset_statistics():
    carry_dataset_info_list = list()
    csv_file_path = get_carry_ds_stat_path()
    create_dir(config.dir_data())

    for operator in config.operators_list():
        for operand_digits in config.operand_digits_list():
            carry_datasets = generate_datasets(operand_digits, operator)
            carry_dataset_info_list.extend(
                get_carry_dataset_info_list(carry_datasets, operator))

    with open(csv_file_path, mode='w') as csv_file:
        fieldnames = [
            'operator', 'operand digits', 'input dimension',
            'output dimension', 'total operations', 'carries',
            'carry operations', 'carry percentage'
        ]
        writer = csv.DictWriter(csv_file, fieldnames=fieldnames)

        writer.writeheader()
        for carry_dataset_info in carry_dataset_info_list:
            writer.writerow(carry_dataset_info)

    print('{} saved!'.format(csv_file_path))
Example #8
def get_carry_ds_stat_path():
    carry_ds_stat_path = '{}/{}'.format(config.dir_data(),
                                        config.carry_dataset_statistics_name())
    return carry_ds_stat_path
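None of the config helpers used in these examples (dir_data, operators_list, operand_digits_list, carry_dataset_statistics_name, log) are shown above. A hypothetical minimal config module consistent with how they are called might look like the following; every concrete value in it is an assumption made for illustration.

# config.py -- hypothetical stand-in; all values are assumptions.
import os

def dir_data():
    # Root directory for generated datasets, logs, and debug dumps.
    return os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data')

def operators_list():
    # Operator names follow the docstring in Example #4.
    return ['add', 'subtract', 'multiply', 'divide', 'modulo']

def operand_digits_list():
    # Operand bit widths to generate datasets for (illustrative values).
    return [4, 8]

def carry_dataset_statistics_name():
    # File name of the CSV written by write_carry_dataset_statistics().
    return 'carry_dataset_statistics.csv'

def log(message):
    # Simple logger used by the booking examples.
    print(message)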