def test_catch_infinite_loop(self):
    # DateOffset(minute=5) *replaces* the minute field (unlike minutes=5,
    # which adds), so repeatedly applying it never advances the date.
    # date_range must detect the stalled cursor and raise rather than
    # spin forever.
    offset = offsets.DateOffset(minute=5)
    start = datetime(2011, 11, 11)
    end = datetime(2011, 11, 12)
    expected = "Offset <DateOffset: minute=5> did not increment date"
    with pytest.raises(ValueError, match=expected):
        date_range(start, end, freq=offset)
def test_catch_infinite_loop(self):
    """Ensure date_range raises instead of looping forever on a no-op offset.

    DateOffset(minute=5) replaces the minute field (as opposed to
    ``minutes=5``, which adds), so it can never advance the running date.
    Uses the context-manager form of ``pytest.raises`` with a specific
    exception type and message instead of the legacy call form
    ``pytest.raises(Exception, fn, *args)``, which was too broad to prove
    the loop guard fired.
    """
    offset = offsets.DateOffset(minute=5)
    # blow up, don't loop forever
    msg = "Offset <DateOffset: minute=5> did not increment date"
    with pytest.raises(ValueError, match=msg):
        date_range(datetime(2011, 11, 11), datetime(2011, 11, 12), freq=offset)
offsets.BMonthEnd(), offsets.BMonthBegin(), offsets.CustomBusinessDay(), offsets.CustomBusinessDay(calendar=hcal), offsets.CustomBusinessMonthBegin(calendar=hcal), offsets.CustomBusinessMonthEnd(calendar=hcal), offsets.CustomBusinessMonthEnd(calendar=hcal), ] other_offsets = [ offsets.YearEnd(), offsets.YearBegin(), offsets.QuarterEnd(), offsets.QuarterBegin(), offsets.MonthEnd(), offsets.MonthBegin(), offsets.DateOffset(months=2, days=2), offsets.BusinessDay(), offsets.SemiMonthEnd(), offsets.SemiMonthBegin(), ] offset_objs = non_apply + other_offsets class OnOffset: params = offset_objs param_names = ["offset"] def setup(self, offset): self.dates = [ datetime(2016, m, d) for m in [10, 11, 12]
def __getitem__(self, index):
    """Return one training sample: CSI window + segmentation mask + pose maps.

    Each entry of self.data_list looks like a space-separated record:
    token[0] = video/file key (with a 6-char suffix stripped for csi_dat
    lookup), token[1] = frame number, tokens[2]+[3] = a date and a time
    string.  NOTE(review): this schema is inferred from the indexing below
    — confirm against the file that builds data_list.
    """
    # Timestamp of the current video frame, parsed from tokens 2 and 3.
    current_frame_time = pd.to_datetime(
        self.data_list[index].split(" ")[2] + " " +
        self.data_list[index].split(" ")[3])
    #print(current_frame_time)
    # Snap the timestamp down to a 50 ms (50000 us) boundary: keep the
    # seconds part of the string and rebuild the microsecond field.
    time_index = int(current_frame_time.microsecond / 50000)
    start_time = pd.to_datetime(
        str(current_frame_time)[:-7] + "." +
        str(time_index * 50000).zfill(6))
    #print(start_time)
    # Initial window end: one 50 ms slot after the aligned start.
    next_frame_time = start_time + offsets.DateOffset(microseconds=50000)
    #print(next_frame_time)
    # start_time = str(start_time.hour).zfill(2) + ":" + str(start_time.minute).zfill(2) + ":" + str(start_time.second).zfill(2) + "." + str(start_time.microsecond).zfill(6)
    # end_time = str(next_frame_time.hour).zfill(2) + ":" + str(next_frame_time.minute).zfill(2) + ":" + str(next_frame_time.second).zfill(2) + "." + str(next_frame_time.microsecond).zfill(6)
    # print("'start_time', {}, 'end_time', {}, 'len', {}".format(start_time, end_time, len(self.current_csi_dat[start_time:end_time])))
    #print(self.csi_dat[self.data_list[index].split(" ")[0][:-6]])
    #print(self.csi_dat)
    # Grow the time window in 50 ms steps until the datetime-indexed CSI
    # slice holds at least 5 packets.  NOTE(review): this loops forever if
    # the recording ends before 5 packets are found — no upper bound.
    while (len(
            np.array(self.csi_dat[self.data_list[index].split(" ")[0][:-6]]
                     [str(start_time):str(next_frame_time)])) < 5):
        next_frame_time = next_frame_time + offsets.DateOffset(
            microseconds=50000)
    # First 5 CSI packets inside the window for this video key.
    csi_temp = self.csi_dat[self.data_list[index].split(
        " ")[0][:-6]][str(start_time):str(next_frame_time)][:5]
    # print(csi_temp[0])
    # csi_temp = np.array(csi_temp)
    # print(np.concatenate(np.array(csi_temp), axis=2).shape)
    # csi_temp = np.absolute(np.concatenate(np.array(csi_temp)))
    # print(csi_temp.shape)
    # csi_temp = csi_temp.transpose(0 ,2, 1)
    # print(csi_temp.shape)
    #
    # sample_csi = np.absolute(csi_temp)
    # sample_csi = np.resize(csi_temp, (150, 3, 3))
    #print(np.concatenate(csi_temp, axis=2))
    # Stack the 5 packets along axis 2, then move that axis to the front
    # (channels-first layout for the network input).
    sample_csi = np.concatenate(csi_temp, axis=2).transpose(2, 0, 1)
    #print(sample_csi.shape)
    #print(self.root_dir)
    #print(self.data_list[index].split(" ")[:1][0])
    #print(type(self.data_list[index].split(" ")[:1][0]))
    # Segmentation mask (.mat, key 'masks') for this sample.
    # NOTE(review): the file is "<key>_<index + 2>.mat" — the +2 offset is
    # unexplained here; presumably it aligns frame numbering — confirm.
    sample_SM = sio.loadmat(
        join(
            self.root_dir, "mask_resize",
            self.data_list[index].split(" ")
            [:1][0] + "_" + str(index + 2)) + ".mat")['masks']
    sample_SM = self.pose_transform(sample_SM)  # convert to float64, torch.Size(1, 46, 82)
    sample_SM = sample_SM.double()
    #print(sample_SM.shape)
    # Keypoints come from an AlphaPose result file, indexed by sample index.
    json_dir = join(self.root_dir, 'res', 'alphapose_results.json')
    with open(json_dir) as f:
        # print(f)
        sample = json.load(f)
    JHM = sample[index]["keypoints"]
    JHM = np.array(JHM)
    # Joint heatmaps and part-affinity fields rendered at 82x46 from
    # 1280x720 keypoints; the heatmap's last channel is dropped
    # (presumably a background channel — confirm in get_heatmap).
    sample_JHMs = get_heatmap((1280, 720), JHM, (82, 46))[:, :, :-1]
    sample_PAFs = get_vectormap((1280, 720), JHM, (82, 46))
    '''
    x = sample_PAFs.transpose((2,0,1))
    print(x[0])
    cv.namedWindow('input_image', cv.WINDOW_AUTOSIZE)
    mask = 255*x[10]
    mask = mask.astype(np.uint8)
    cv.imshow('input_image', mask)
    cv.waitKey(0)
    cv.destroyAllWindows()
    '''
    sample_PAFs = self.pose_transform(sample_PAFs)
    sample_JHMs = self.pose_transform(sample_JHMs)
    sample_PAFs = sample_PAFs.double()
    sample_JHMs = sample_JHMs.double()
    #print(sample_JHMs.shape)
    #print(sample_PAFs.shape)
    # convert to float64: sample_JHMs torch.Size(17, 46, 82), sample_PAFs torch.Size(36, 46, 82)
    video_name = self.data_list[index].split(" ")[0]
    frame_number = self.data_list[index].split(" ")[1]
    sample = {
        'csi': sample_csi,
        'JHMs': sample_JHMs,
        'SM': sample_SM,
        'video': video_name,
        'PAFs': sample_PAFs,
        'frame': frame_number
    }
    # Optional user-supplied transform applied to the whole sample dict.
    if self.transforms:
        sample = self.transforms(sample)
    return sample
# Tail of a validation branch: the matching `if` is outside this chunk.
else:
    print('Values match')
    print('\n')
#---------------------------------------------------------------------------------------------------
# profiling
#---------------------------------------------------------------------------------------------------
# Benchmark three ways of computing "this year's birthday" from a date-of-
# birth column.  The %timeit lines are IPython/Jupyter magics — this section
# only runs in an interactive IPython session, not under plain Python.
# read_csv and datetime are presumably imported earlier in the notebook.
from pandas import concat
from pandas import offsets
import timeit
from pandas import to_datetime
df_big = read_csv(r'.\inputs\PD 2022 Wk 1 Input - Input.csv',
                  parse_dates=['Date of Birth'],
                  usecols=['id', 'pupil first name', 'pupil last name', 'Date of Birth'])
# Inflate the frame x1000 so the timings measure something non-trivial.
df_big = concat([df_big]*1000)
# using replace
# NOTE(review): datetime.replace(year=...) raises ValueError for Feb 29
# birthdays in a non-leap target year — confirm the data cannot hit this.
%timeit -n 1 -r 100 df_big['This Year\'s Birthday'] = df_big['Date of Birth'].apply(lambda x: x.replace(year=datetime.now().year))
# using offsets
# DateOffset(year=2022) — singular keyword — *replaces* the year field
# rather than adding, which is the intended "birthday this year" behavior.
%timeit -n 1 -r 100 df_big['This Year\'s Birthday'] = df_big['Date of Birth'] + offsets.DateOffset(year=2022)
# using string formatting
%timeit -n 1 -r 100 df_big['This Year\'s Birthday'] = to_datetime('2022-' + df_big['Date of Birth'].dt.strftime('%m-%d'))