def trade_date_collection(date):
    """Return the formatted trade dates of the year preceding *date*.

    Pulls SH index trend data from one year before *date* up to *date*
    and formats each trading day's timestamp.
    """
    one_year_ago = DateUtil.rough_datetime(date, years=-1)
    trend_items = StockTrend.stock_trend_info(
        Stock.SH_INDEX, one_year_ago, date).items()
    return [DateUtil.datetime_formatter(ts) for ts, _info in trend_items]
def create_proxy_day_off_assignation(day_off_assignations_data):
    """Build DumbDayOffAssignation objects from raw dicts and wrap them
    in proxies.

    The date/time string fields of each dict are parsed in place before
    the dict is expanded into the dumb object.
    """
    dumb_assignations = []
    for raw in day_off_assignations_data:
        # Parse the four string fields into date/time objects (mutates raw).
        raw['starting_date'] = DateUtil.str_to_date(raw['starting_date'])
        raw['ending_date'] = DateUtil.str_to_date(raw['ending_date'])
        raw['starting_time'] = DateUtil.str_to_time(raw['starting_time'])
        raw['ending_time'] = DateUtil.str_to_time(raw['ending_time'])
        dumb_assignations.append(DumbDayOffAssignation(**raw))
    return ProxyFactory.create_multiple_day_off_assignation_proxies(
        dumb_assignations
    )
def create_proxy_workshifts(workshifts_data):
    """Build DumbWorkShift objects (with their DumbDay children) from raw
    dicts and wrap them in workshift proxies.
    """
    dumb_workshifts = []
    for shift_raw in workshifts_data:
        day_dicts = shift_raw.pop('days', [])
        parsed_days = []
        for day_raw in day_dicts:
            # Only parse the fields that are present and non-empty.
            if day_raw.get('starting_time'):
                day_raw['starting_time'] = DateUtil.str_to_time(
                    day_raw['starting_time'])
            if day_raw.get('ending_time'):
                day_raw['ending_time'] = DateUtil.str_to_time(
                    day_raw['ending_time'])
            if day_raw.get('date', None):
                day_raw['date'] = DateUtil.str_to_date(day_raw['date'])
            parsed_days.append(DumbDay(**day_raw))
        shift = DumbWorkShift(**shift_raw)
        shift.days = parsed_days
        dumb_workshifts.append(shift)
    return ProxyFactory.create_multiple_workshift_proxies(dumb_workshifts)
def split_borders(
        range_obj: RangeDateTime
) -> List[Union[RangeDateTime, Range, None]]:
    """Split a datetime range into [first-day range, whole-days body,
    last-day range].

    A range contained in a single calendar day comes back as
    [range_obj, None, None].  The body (the full days strictly between the
    two border days) is None unless the range spans at least two midnights.
    """
    start_dt = range_obj.starting_datetime
    end_dt = range_obj.ending_datetime
    day_span = (end_dt.date() - start_dt.date()).days
    if day_span == 0:
        # Everything fits in one calendar day: nothing to split off.
        return [range_obj, None, None]
    # Left border: from the start up to the end of the first day (23:59).
    left = RangeDateTime(
        start_dt,
        datetime.combine(start_dt.date(), DateUtil.str_to_time('23:59')),
    )
    # Right border: from midnight of the last day up to the end.
    right = RangeDateTime(
        datetime.combine(end_dt.date(), DateUtil.str_to_time('00:00')),
        end_dt,
    )
    # Body: the whole days strictly between the border days, if any.
    body = None
    if day_span > 1:
        body = Range(start_dt.date() + timedelta(days=1),
                     end_dt.date() - timedelta(days=1))
    return [left, body, right]
def create_an_assignation(
        data, workshift_db=None, day_off_assignation_db=None
):
    """Build an assignation proxy from raw request data.

    Parses the assignation's date strings, optionally attaches the
    workshift proxy and the person's day-off assignations from the given
    DBs, attaches the person, and wraps everything in a proxy.
    """
    raw = data.get('assignation')
    raw['starting_date'] = DateUtil.str_to_date(raw.get('starting_date'))
    raw['ending_date'] = DateUtil.str_to_date(raw.get('ending_date'))
    assignation = DumbAssignation(**raw)
    if workshift_db:
        assignation.workshift_proxy = workshift_db.get_by_id(
            assignation.workshift_id
        )
    if day_off_assignation_db:
        assignation.day_off_assignations = (
            day_off_assignation_db.get_by_person_id(assignation.person_id)
        )
    assignation.person = DumbPerson(**data.get('person', {}))
    return ProxyFactory.create_assignation_proxy(assignation)
def get(self, request):
    """Render the lesson index page with one calendar entry per lesson."""
    entries = []
    for lesson in LessonModel.objects.all():
        # Title format: "<HH:MM>:<name> :<person_num>".
        title = "%s:%s :%s" % (
            DateUtil.datetime_to_hour_minute_str(lesson.class_time),
            lesson.name,
            lesson.person_num,
        )
        entries.append({
            "title": title,
            "start": DateUtil.datetime_to_date_str(lesson.class_time),
        })
    return render(request, 'app/index.html', {"data": entries})
def transfer_wechat_feature_2_tfRecord(start_date, end_date,
                                       raw_feature_file_path,
                                       tf_record_file_path):
    """Convert the per-day raw wechat feature files in [start_date,
    end_date] into tfrecord files, sharing one data_info.csv.
    """
    raw_name = os.path.basename(raw_feature_file_path)
    tf_name = os.path.basename(tf_record_file_path)
    # Per-date file templates; "%s" is filled with the date string below.
    raw_feature_file = os.path.join(raw_feature_file_path, raw_name + "_%s")
    tf_record_file = os.path.join(tf_record_file_path,
                                  tf_name + "_%s.tfrecord")
    data_info_csv_path = os.path.join(tf_record_file_path, "data_info.csv")
    if not os.path.isdir(tf_record_file_path):
        os.makedirs(tf_record_file_path)
    for date in DateUtil.get_every_date(start_date, end_date):
        print(date)
        tfRecorder.transfer_single_feature_file_2_tfRecord(
            raw_feature_file % date, tf_record_file % date,
            data_info_csv_path, column_names, label_name,
            need_features_cols, var_length_cols=var_length_cols,
            col_preprocess_func=col_preprocess_func)
def make_glass_fig(z, is_binary, title):
    """Build a GitHub-grass-style heatmap figure for one year of values.

    *z* holds one value per day of the year; *is_binary* switches between
    a two-color on/off scale and a three-stop gradient scale.
    """
    dates = DateUtil.make_dates_in_year()
    weekdays = [d.weekday() for d in dates]
    week_numbers = GrassChart.make_week_number_of_dates(dates)
    hover_text = [str(d) for d in dates]
    if is_binary:
        color_scale = [[False, "#EAEDF0"], [True, "#C7E48B"]]
    else:
        color_scale = [[0, "#EAEDF0"], [0.2, "#C7E48B"], [1.0, "#1D6922"]]
    heatmap = go.Heatmap(x=week_numbers,
                         y=weekdays,
                         z=z,
                         text=hover_text,
                         hoverinfo='text',
                         xgap=3,
                         ygap=3,
                         showscale=False,
                         colorscale=color_scale,
                         zmin=0,
                         zmax=1,
                         zauto=False)
    return go.Figure(data=[heatmap], layout=GrassChart.make_layout(title))
def merge_wechat_records_by_opp_id(start_date, end_date):
    """Collect wechat chat records per opportunity id over a date range.

    Reads the per-day merged wechat record files, groups the JSON chat
    entries by opp_id, then sorts each opp's chats by "create_time".

    Args:
        start_date: first date (inclusive) of the range.
        end_date: last date (inclusive) of the range.

    Returns:
        dict mapping opp_id -> list of chat dicts sorted by "create_time".
    """
    print("merge_wechat_records_by_opp_id..")
    date_ls = DateUtil.get_every_date(start_date, end_date)
    rs_dict = {}
    for date in date_ls:
        print(date)
        wechat_file = tmp_merged_wechat_record_data_file % date
        with codecs.open(wechat_file, 'r', 'utf-8') as fin:
            for line in fin:
                arr = line.strip().split("\t")
                opp_id = arr[0]
                chat_ls = []
                for chat_str in arr[1:]:
                    # BUG FIX: json.loads() no longer accepts an `encoding`
                    # kwarg (removed in Python 3.9); passing it raised a
                    # TypeError that the old bare `except:` silently ate,
                    # dropping every chat entry on modern Python.
                    try:
                        chat_dict = json.loads(chat_str)
                    except ValueError:
                        # Malformed chat record; skip it.
                        # print("wechat record dict can't parse by json:", chat_str)
                        continue
                    chat_ls.append(chat_dict)
                if opp_id not in rs_dict:
                    rs_dict[opp_id] = chat_ls
                else:
                    rs_dict[opp_id].extend(chat_ls)
    print("sort chat by create_time..")
    for opp_id, chat_ls in rs_dict.items():
        chat_ls.sort(key=lambda json_dict: json_dict["create_time"])
    return rs_dict
def stock_sma(date, days=200):
    """Simple moving average of the SH index close price.

    Averages the close prices of the most recent *days* trading days up
    to *date* (fewer if less data is available).

    Args:
        date: end date of the averaging window.
        days: number of trading days to average (default 200).

    Returns:
        float: the average close price (0.0 when no data is available,
        matching the old behavior of 0/200).
    """
    start_date = DateUtil.rough_datetime(date, years=-1)
    price_list = list(
        StockTrend.stock_trend_info(Stock.SH_INDEX, start_date,
                                    date).items())[:days]
    closes = [float(x[1]['close']) for x in price_list]
    if not closes:
        return 0.0
    # BUG FIX: the original divided by the hard-coded 200, which gave the
    # wrong average whenever `days` != 200 or fewer rows were returned.
    return sum(closes) / len(closes)
def test_join_date_and_time1(self):
    """join_date_and_time combines a date and a time into one datetime."""
    time_obj = datetime.strptime("10:10", '%H:%M').time()
    date_obj = datetime.strptime("2019-1-1", '%Y-%m-%d').date()
    expected = datetime.strptime("2019-1-1 10:10", '%Y-%m-%d %H:%M')
    assert DateUtil.join_date_and_time(date_obj, time_obj) == expected
def __init__(self, cash, stock_code, volume, time_span, end_index, strategy):
    """Store the back-test parameters and precompute the trade-date window."""
    self._cash = cash
    self._stock_code = stock_code
    self._volume = volume
    self._time_span = time_span
    self._end_index = end_index
    self._strategy_type = strategy
    # Slice the trade calendar: `time_span` dates starting at `end_index`,
    # reversed into chronological order.
    all_dates = TradeDate.trade_date_collection(DateUtil.current_date())
    window = all_dates[self._end_index:self._end_index + self._time_span]
    self._cal_date_list = window[::-1]
def get_vocabulary_list(vocab_file_dir, feature_name, start_date, end_date):
    """Union the per-day vocabulary files for *feature_name* over a date
    range.

    A "0" entry is always included as a fallback token.

    Returns:
        list of utf-8 encoded vocabulary entries (unordered).
    """
    file_prefix = os.path.join(vocab_file_dir, feature_name + "_vocabulary_")
    vocab = set()
    for day in DateUtil.get_every_date(start_date, end_date):
        with codecs.open(file_prefix + day, "r", "utf-8") as fin:
            vocab.update(line.strip().encode("utf-8") for line in fin)
    vocab.add("0".encode("utf-8"))
    return list(vocab)
def post(self, request):
    """Create a lesson from the POSTed form.

    Re-renders the add page with an error message when any field is
    missing; otherwise creates the lesson and redirects to the list.
    """
    name = request.POST.get("name")
    class_time = request.POST.get("class_time")
    teacher_name = request.POST.get("teacher_name")
    description = request.POST.get("description")
    if name and class_time and teacher_name and description:
        LessonModel.objects.create(
            name=name,
            class_time=DateUtil.str_to_datetime(class_time),
            teacher_name=teacher_name,
            description=description)
        return HttpResponseRedirect(reverse('lesson-list'))
    return render(request, 'app/lesson_add.html', {'error': 'wrong input'})
def gen_multi_day_bench_mark_data(start_date, end_date):
    """Build per-day benchmark data grouped by historical chat count.

    For each date in the range: bucket the raw wechat feature lines by the
    student's historical chat count, write each non-empty bucket to a
    benchmark text file, then convert that text file to a tfrecord file.
    """
    bench_mark_text_file_tmp = os.path.join(bench_mark_text_file_path,
                                            "total_chat_num_%s",
                                            "total_chat_num_%s_%s")
    # Column layout of the raw feature file (tab separated).
    column_names = [
        "label", "opp_id", "create_time", "hist_student_chat_num",
        "hist_teacher_chat_num", "hist_total_chat_num", "wechat_record"
    ]
    for date in DateUtil.get_every_date(start_date, end_date):
        print(date)
        # Group the day's lines by chat_num.
        source_file_path = os.path.join(raw_feature_path,
                                        "wechat_record_feature_%s" % date)
        chat_group_dict = {x: [] for x in chat_num_ls}
        with codecs.open(source_file_path, 'r', 'utf-8') as fin:
            for line in fin:
                arr = line.strip().split("\t")
                row = {key: value for key, value in zip(column_names, arr)}
                student_chat_num = int(row["hist_student_chat_num"])
                # Lines whose chat count is not a configured bucket are dropped.
                if student_chat_num in chat_group_dict:
                    chat_group_dict[student_chat_num].append(line.strip())
        for chat_num, line_ls in chat_group_dict.items():
            if not line_ls:
                continue
            bench_mark_text_file = bench_mark_text_file_tmp % (chat_num,
                                                               chat_num,
                                                               date)
            tmp_text_folder_path = os.path.dirname(bench_mark_text_file)
            tmp_tf_record_folder_path = tmp_text_folder_path.replace(
                "feature_file", "tf_record")
            if not os.path.isdir(tmp_text_folder_path):
                os.makedirs(tmp_text_folder_path)
            with codecs.open(bench_mark_text_file, "w", "utf-8") as fout:
                for bucket_line in line_ls:
                    fout.write(bucket_line + "\n")
            # Convert the benchmark text file into a tfrecord file.
            tfRecorder.transfer_wechat_feature_2_tfRecord_default(
                date, date, tmp_text_folder_path, tmp_tf_record_folder_path)
def make_date_value_dic(self) -> dict:
    """Scan every file under the base dir (sorted by file name) and map
    label -> {date -> value} for each configured label found in a line.
    """
    labels = self.config.get_labels()
    data_dic = {label: {} for label in labels}
    file_paths = FileUtil.get_recursive_file_paths(
        self.config.get_base_dir())
    file_paths.sort(key=lambda path: path.split("/")[-1])
    for file_path in file_paths:
        for line in FileUtil.read_lines(file_path):
            for label in labels:
                if label not in line:
                    continue
                value = HabitReporter.extract_value_float(line)
                if value is None:
                    continue
                # The file's date keys the value; later files overwrite.
                data_dic[label][DateUtil.extract_date(file_path)] = value
    return data_dic
def rsi(date, days=14):
    """Relative Strength Index over the last *days* close prices.

    Returns 50 (neutral) when there was no price movement in the window.
    """
    start_date = DateUtil.rough_datetime(date, months=-1)
    trend_list = list(
        StockTrend.stock_trend_info(Stock.CODE, start_date,
                                    date).items())[1:days + 1]
    closes = [float(entry[1]['close']) for entry in trend_list]
    rise_sum = 0
    down_sum = 0
    # Accumulate gains and losses over consecutive close pairs.
    for prev, curr in zip(closes, closes[1:]):
        change = curr - prev
        if change > 0:
            rise_sum += change
        else:
            down_sum -= change
    total = rise_sum + down_sum
    return 100 * rise_sum / total if total != 0 else 50
def make_habit_report_figs(self) -> [dcc.Graph]:
    """Build one dcc.Graph per configured label: a grass heatmap for
    binary habits, a line chart otherwise."""
    date_value_dic = self.make_date_value_dic()
    figs = []
    for label in self.config.get_labels():
        label_values = date_value_dic[label]
        if self.config.is_binary(label):
            # Overlay the recorded values onto a full year of dates.
            year_dic = DateUtil.make_dates_in_year_dic()
            for day, value in label_values.items():
                year_dic[str(day)] = value
            fig = GrassChart.make_glass_fig(z=list(year_dic.values()),
                                            is_binary=True,
                                            title=label)
        else:
            fig = LineChart.make_line_chart(list(label_values.keys()),
                                            list(label_values.values()),
                                            label)
        figs.append(dcc.Graph(id=label, figure=fig))
    return figs
def post(self, request):
    """Update an existing lesson from the POSTed form.

    The lesson id comes from the query string.  Re-renders the form with
    an error when any field is missing; otherwise saves and redirects.
    """
    lesson = LessonModel.objects.get(id=request.GET.get("id"))
    name = request.POST.get("name")
    class_time = request.POST.get("class_time")
    teacher_name = request.POST.get("teacher_name")
    description = request.POST.get("description")
    if name and class_time and teacher_name and description:
        lesson.name = name
        lesson.teacher_name = teacher_name
        lesson.class_time = DateUtil.str_to_datetime(class_time)
        lesson.description = description
        lesson.save()
        return HttpResponseRedirect(reverse('lesson-list'))
    return render(request, 'app/lesson_add.html', {'error': 'Wrong input '})
def get_MinMaxValue_dict(MinMaxValue_file_dir, start_date, end_date):
    """Merge the per-day MinMaxValue files over a date range.

    Each file line is "feature\\tmin\\tmax"; the result holds the global
    min/max per feature across all days.

    Returns:
        dict mapping feature_name -> [global_min, global_max].
    """
    MinMaxValue_dict = dict()
    file_prefix = os.path.join(MinMaxValue_file_dir, "MinMaxValue_file_")
    for day in DateUtil.get_every_date(start_date, end_date):
        with codecs.open(file_prefix + day, "r", "utf-8") as fin:
            for line in fin:
                arr = line.strip().split("\t")
                name = arr[0].strip()
                low = float(arr[1].strip())
                high = float(arr[2].strip())
                if name not in MinMaxValue_dict:
                    MinMaxValue_dict[name] = [low, high]
                else:
                    bounds = MinMaxValue_dict[name]
                    bounds[0] = min(bounds[0], low)
                    bounds[1] = max(bounds[1], high)
    return MinMaxValue_dict
def get_one_day_wechat_record_feature(date):
    """Generate the wechat-record feature file for one day.

    Joins each row of the day's merged raw data with that opportunity's
    wechat chat history (covering HISTORY_WECHAT_RECORD_DELTA_DAY days
    before *date*); rows without chat history get zero-filled stats.
    """
    wechat_record_start_date = DateUtil.get_relative_delta_time_str(
        date, -HISTORY_WECHAT_RECORD_DELTA_DAY)
    wechat_record_dict = merge_wechat_records_by_opp_id(
        wechat_record_start_date, date)
    in_path = tmp_merged_raw_data_file % date
    out_path = tmp_wechat_record_feature_file % date
    with codecs.open(in_path, 'r', 'utf-8') as fin, \
            codecs.open(out_path, "w", 'utf-8') as fout:
        for line in fin:
            arr = line.strip().split("\t")
            label = arr[0]
            time_observe_point = arr[1]
            # time_observe_point = datetime.strptime(arr[1],"%Y-%m-%d %H:%M:%S")
            opp_id = arr[6]
            if opp_id not in wechat_record_dict:
                # No chat history: three zero stats plus an empty record.
                fields = [label, opp_id, time_observe_point] + [0] * 3 + [""]
            else:
                stats = wechat_record_precess(wechat_record_dict[opp_id],
                                              time_observe_point)
                fields = [label, opp_id, time_observe_point] + stats
            fout.write("\t".join(map(str, fields)) + "\n")
def test_str_to_time4(self):
    """str_to_time parses a zero-padded HH:MM string."""
    expected = datetime.strptime("08:01", "%H:%M").time()
    assert DateUtil.str_to_time("08:01") == expected
def _filename_json_string(cls, name):
    """Return "<name>-<today's date>.json" for use as an export filename."""
    today_str = DateUtil.date_string(datetime.date.today())
    return '-'.join([name, today_str]) + '.json'
def test_str_to_datetime2(self):
    """str_to_datetime parses "YYYY-MM-DD HH:MM" strings."""
    raw = "2019-01-03 08:01"
    expected = datetime.strptime(raw, '%Y-%m-%d %H:%M')
    assert DateUtil.str_to_datetime(raw) == expected
def test_str_to_time1(self):
    """str_to_time parses an HH:MM string into a time object."""
    expected = datetime.strptime("10:10", "%H:%M").time()
    assert DateUtil.str_to_time("10:10") == expected
def test_str_to_date1(self):
    """str_to_date parses dates with non-padded month/day digits."""
    raw = "2019-10-1"
    expected = datetime.strptime(raw, "%Y-%m-%d").date()
    assert DateUtil.str_to_date(raw) == expected
def test_date_to_str2(self):
    """date_to_str renders a date as "YYYY-MM-DD"."""
    date_obj = datetime.strptime("2019-10-10", '%Y-%m-%d').date()
    assert DateUtil.date_to_str(date_obj) == "2019-10-10"
def test_time_to_str3(self):
    """time_to_str renders a time as "HH:MM"."""
    time_obj = datetime.strptime("10:10", '%H:%M').time()
    assert DateUtil.time_to_str(time_obj) == "10:10"
def test_time_to_str2(self):
    """time_to_str keeps zero-padded single-digit hours and minutes."""
    time_obj = datetime.strptime("08:01", '%H:%M').time()
    assert DateUtil.time_to_str(time_obj) == "08:01"
def test_str_to_datetime3(self):
    """str_to_datetime accepts non-padded hour/minute values."""
    raw = "2019-10-10 8:1"
    expected = datetime.strptime(raw, '%Y-%m-%d %H:%M')
    assert DateUtil.str_to_datetime(raw) == expected