def insert_or_get_worktimes():
    """Create a worktime entry (POST) or list entries in a date range (GET).

    POST expects form fields startDate, endDate, category and description;
    GET expects startDate/endDate query parameters.  Returns a JSON body and
    an appropriate HTTP status code.
    """
    db = get_db()

    if request.method == 'POST':
        # Reject malformed form submissions up front.
        if not valid_form_params(request):
            return jsonify(dict(msg='Incorrect params')), 400
        new_id = db.insert(str_to_datetime(request.form['startDate']),
                           str_to_datetime(request.form['endDate']),
                           request.form['category'],
                           request.form['description'])
        if new_id is None:
            return jsonify(dict(msg='Entry could not be inserted')), 500
        return jsonify(dict(id=new_id))

    # GET: both bounds of the range are required.
    start_date = request.args.get('startDate', None)
    end_date = request.args.get('endDate', None)
    if start_date is None or end_date is None:
        return jsonify(dict(msg='Incorrect params')), 400
    items = db.get_between(str_to_datetime(start_date), str_to_datetime(end_date))
    if items is None:
        return jsonify(dict(msg='Entries could not be retrieved')), 500
    return jsonify(items)
def get_span(start, end, delta):
    """Yield datetimes from `start` (inclusive) up to `end` (exclusive).

    Both bounds are strings parsed with DATE_FORMAT; successive values are
    produced by repeatedly adding `delta` (a timedelta-like step).
    """
    cursor = str_to_datetime(start, DATE_FORMAT)
    limit = str_to_datetime(end, DATE_FORMAT)
    while cursor < limit:
        yield cursor
        cursor = cursor + delta
def merge_k_sorted_files(file_list, merged_file_name):
    """K-way merge of text files whose lines sort by a leading datetime.

    Each input file is assumed to be sorted by the datetime parsed from its
    lines (utils.str_to_datetime with DATE_FORMAT).  The merged output is
    written to `merged_file_name`, the input files are deleted, and the
    merged file name is returned.  On IOError the failure is reported and
    None is returned (best-effort, matching the original contract).

    Fixes vs. the original: input readers are always closed (previously they
    leaked on any exception), empty input files no longer crash the initial
    heap build, a monotonic counter breaks heap ties (previously equal
    (datetime, line) pairs fell back to comparing file objects), and input
    files are removed by their own path instead of os.getcwd() + '/' + name,
    which broke for absolute paths.
    """
    file_readers = []
    try:
        with open(merged_file_name, 'w') as merged_file:
            for file_name in file_list:
                file_readers.append(open(file_name, 'r'))
            # Seed the heap with the first non-blank line of every file.
            # The tie counter guarantees tuples never compare readers.
            tie = 0
            heap = []
            for reader in file_readers:
                line_read = reader.readline()
                if line_read.strip() != '':
                    heap.append((utils.str_to_datetime(line_read, DATE_FORMAT),
                                 line_read, tie, reader))
                    tie += 1
            heapq.heapify(heap)
            # Repeatedly emit the smallest head line, then refill from the
            # file it came from until every file is exhausted.
            while heap:
                _, line, _, reader = heapq.heappop(heap)
                merged_file.write(line)
                line_read = reader.readline()
                if line_read.strip() != '':
                    heapq.heappush(heap, (utils.str_to_datetime(line_read, DATE_FORMAT),
                                          line_read, tie, reader))
                    tie += 1
        # Inputs are fully merged: remove them by their given path.
        for file_name in file_list:
            os.remove(file_name)
        return merged_file_name
    except IOError as e:
        print('Operation failed: %s' % e.strerror)
    finally:
        # Always release the input handles (the original leaked them on error).
        for reader in file_readers:
            reader.close()
def __init__(self, data):
    """Build a Schedule from `data`: keep/mint an _id, parse begin/end, validate."""
    super(Schedule, self).__init__(data)
    # Keep an _id assigned by the parent initializer; otherwise mint one.
    if not hasattr(self, '_id'):
        self._id = ObjectId()
    # Normalize both raw date strings before checking their ordering.
    for attr in ('begin', 'end'):
        setattr(self, attr, str_to_datetime(getattr(self, attr)))
    self.__validate_dates()
def compare(left, right):
    """cmp-style three-way comparison of two datetime strings: 1, -1 or 0."""
    left_dt = utils.str_to_datetime(left, DATE_FORMAT, cache=cache)
    right_dt = utils.str_to_datetime(right, DATE_FORMAT, cache=cache)
    # (a > b) - (a < b) is the standard replacement for cmp().
    return (left_dt > right_dt) - (left_dt < right_dt)
def number_of_overlap(entries_file, exits_file):
    """Replay entry/exit timestamp logs and record hourly car counts in the DB.

    `entries_file` and `exits_file` each contain one "%Y-%m-%d %H:%M:%S"
    timestamp per line, sorted ascending.  The two streams are merged in time
    order, a running car count is maintained (+1 on entry, -1 on exit), and a
    snapshot row (year, month, week-of-month, weekday, hour, count) is
    inserted whenever at least one hour has passed since the last snapshot.
    The data table is dropped and rebuilt from the JSON meta-schema first.
    """
    def quote(val):
        # SQL-style single quoting of a value.
        return '\'%s\'' % str(val)

    def record_car_num(time_stamp, car_cnt):
        time_tuple = time_stamp.timetuple()
        year = quote(time_tuple[0])
        month = quote(time_tuple[1])
        # Week-of-month from day-of-month; `//` keeps this an int on Python 3
        # (the original `/` produced a float there).
        week = quote(time_tuple[2] // 7)
        weekday = quote(time_tuple[6])
        hour = quote(time_tuple[3])
        my_table.insert(col_list, [year, month, week, weekday, hour, quote(car_cnt)])

    num_of_cars = 0
    with open(entries_file, 'r') as etry, open(exits_file, 'r') as ext:
        fmt = "%Y-%m-%d %H:%M:%S"
        entry_line = etry.readline().strip()
        exit_line = ext.readline().strip()
        my_db = DBTable()
        my_table = DBRecord(DATA_TABLE)
        col_list = []
        # TODO(review): hardcoded developer-machine path — should come from
        # configuration or a path relative to the package.
        with open(
                '/Users/James/Desktop/PP/Learning/BitTiger/ParkingPrediction/parkingPrediction/appserver/database/HistoryDBMetaSchema.json') as data_file:
            data = json.load(data_file)
        # Rebuild the table from scratch according to the meta-schema.
        if my_db.is_valid_table(DATA_TABLE):
            my_db.remove_table(DATA_TABLE)
        my_db.create_table(DATA_TABLE)
        for col_schema in data['metaSchema']['cols']:
            col_name = data['colsData'][col_schema]['name']
            col_type = data['colsData'][col_schema]['type']
            my_db.insert_col(DATA_TABLE, col_name, col_type)
            col_list.append(col_name.encode('utf-8'))
        # find num of cars
        last_time_stamp = datetime.datetime(1970, 1, 1)
        one_hour_time_diff = datetime.timedelta(hours=1)  # same value as timedelta(0,0,0,0,0,1), but readable
        # TODO(review): the loop stops when EITHER file is exhausted, so
        # trailing entries/exits in the longer file are never counted.
        while entry_line != '' and exit_line != '':
            entry_time = utils.str_to_datetime(entry_line, fmt)
            exit_time = utils.str_to_datetime(exit_line, fmt)
            if entry_time < exit_time:  # next event is an entry
                num_of_cars += 1
                if entry_time - last_time_stamp >= one_hour_time_diff:
                    record_car_num(entry_time, num_of_cars)
                    last_time_stamp = entry_time
                entry_line = etry.readline().strip()
            else:  # next event is an exit
                num_of_cars -= 1
                if exit_time - last_time_stamp >= one_hour_time_diff:
                    record_car_num(exit_time, num_of_cars)
                    last_time_stamp = exit_time
                exit_line = ext.readline().strip()
        my_table.commit_record()
        my_db.add_index(DATA_TABLE, 'query_index', col_list[:-1])
        my_db.close()
def update_worktime(id):
    """Update worktime entry `id` from form params; JSON response + status code."""
    db = get_db()
    # Malformed form data is rejected before touching the database.
    if not valid_form_params(request):
        return jsonify(dict(msg='Incorrect params')), 400
    result = db.update(id,
                       str_to_datetime(request.form['startDate']),
                       str_to_datetime(request.form['endDate']),
                       request.form['category'],
                       request.form['description'])
    # result semantics: >0 rows touched, 0 no such entry, None/other failure.
    if result is not None and result > 0:
        return jsonify(dict(id=id))
    if result == 0:
        return jsonify(dict(msg='Entry not found')), 404
    return jsonify(dict(msg='Entry could not be updated')), 500
def __get_free_seats_by_time(self, seats, times, request):
    """Collect TicketResponse objects for slots with free seats inside the
    request's [from_time, to_time] window (bounds inclusive)."""
    matches = []
    for free_count, slot_time in zip(seats, times):
        slot_dt = str_to_datetime(request.required_date, slot_time)
        if free_count <= 0:
            continue
        if not (request.from_time <= slot_dt <= request.to_time):
            continue
        matches.append(TicketResponse(slot_dt, free_count))
    return matches
def set_end(self, date):
    """Set the schedule end and re-validate the begin/end ordering.

    Args:
        date: the new end — either a datetime (stored as-is) or a string
            parsed via str_to_datetime.
    """
    # isinstance instead of `type(date) == str`: idiomatic, and also accepts
    # str subclasses.
    self.end = str_to_datetime(date) if isinstance(date, str) else date
    self.__validate_dates()
def set_begin(self, date):
    """Set the schedule begin and re-validate the begin/end ordering.

    Args:
        date: the new begin — either a datetime (stored as-is) or a string
            parsed via str_to_datetime.
    """
    # isinstance instead of `type(date) == str`: idiomatic, and also accepts
    # str subclasses.
    self.begin = str_to_datetime(date) if isinstance(date, str) else date
    self.__validate_dates()
def handle_two(self, file_path):
    """Parse a request log, pair each order's start and end timestamps, and
    report the orders whose processing latency exceeded the thresholds.

    Python 2 code (print statements, dict.iteritems, cmp-style sort).

    Side effects: fills self._order_dict (order id -> [start_dt, end_dt]),
    decrements per-user counters in self._user_dict for cancelled orders,
    prints slow orders and per-offset counts to stdout, and finally hands the
    worst spans to write_question_two().
    """
    fin = open(file_path)
    zero_time_str = ""
    # Pass 1: scan the log line by line, recording for each order id the
    # "Request" time and the later created/cancelled/rejected time.
    for line in fin :
        line_list = line.strip().split(' ')
        # The timestamp is the first two whitespace-separated fields.
        time_str = strip_str_head_tail (line_list[0] + ' '+ line_list[1] )
        zero_time_str = line_list[0]
        dt = str_to_datetime(time_str)
        #print dt
        len_list = len(line_list)
        if len(line_list) > 5 :
            if line_list[4] == "Request" :
                # New order: remember its start time.
                order_str = strip_str_head_tail(line_list[5])
                self._order_dict[order_str] = [dt]
            elif len_list > 9 and line_list[6] == "created" :
                request_str = strip_str_head_tail( line_list[9] )
                if request_str in self._order_dict :
                    self._order_dict[request_str].append(dt)
            elif len_list > 9 and line_list[6] == "cancelled":
                request_str = strip_str_head_tail( line_list[9] )
                if request_str in self._order_dict :
                    self._order_dict[request_str].append(dt)
                # if request_str in self._order_user :
                # NOTE(review): the membership guard above is commented out in
                # the original, so the lookups below raise KeyError for ids
                # missing from _order_user/_order_info — TODO confirm intended.
                user = self._order_user[request_str]
                price = self._order_info[request_str][1]
                self._user_dict[user][0] -= 1
                self._user_dict[user][1] -= price
            elif len_list > 7 and line_list[7] == "rejected":
                request_str = strip_str_head_tail( line_list[5] )
                if request_str in self._order_dict :
                    self._order_dict[request_str].append(dt)
    # Print every fully-paired order whose latency was at least one second.
    for k,v in self._order_dict.iteritems():
        if len(v) != 2 :
            continue
        t = (v[1] - v[0]).seconds
        if t >= 1:
            print k,(v[1] -v[0]).seconds
    self._bad_time = []
    # Midnight of the log's date — origin for the second-offset buckets below.
    # (zero_time_str[1:] drops a leading character; presumably a bracket from
    # the log format — TODO confirm.)
    zero_dt = str_to_datetime( zero_time_str[1:] + ' ' + '0:0:0')  # got the order start-of-day time
    bad_time_dict = {}
    bad_time_count_dict = {}
    # Pass 2: bucket slow orders (latency >= 1s) by start offset, and track
    # the latest end time seen for each start time.
    for order in self._order_dict :
        order_list = self._order_dict[order]
        if len(order_list) != 2 :
            continue
        start_time = order_list[0]
        end_time = order_list[1]
        dt = end_time - start_time
        #print dt.seconds
        if dt.seconds >= 1 :
            #self._bad_time.append(start_time)
            #start_time = str(start_time)
            # Offsets in seconds since midnight.
            start_pos = (start_time - zero_dt ).seconds
            end_pos = (end_time - zero_dt ).seconds
            """
            if start_time not in bad_time_count_dict :
                bad_time_count_dict[start_time] = 0
            """
            if start_pos not in bad_time_count_dict :
                bad_time_count_dict[start_pos] = 0
            #print start_time
            bad_time_count_dict[start_pos] += 1
            """
            if start_pos not in bad_time_dict :
                bad_time_dict [start_pos] = end_pos
            if end_pos > bad_time_dict[start_pos] :
                bad_time_dict [start_pos] = end_pos
            """
            if start_time not in bad_time_dict :
                bad_time_dict [start_time] = end_time
            if end_time > bad_time_dict[start_time] :
                bad_time_dict [start_time] = end_time
            #step = (start_time - zero_dt ).seconds
            #self._bad_time_dict[step] = self._bad_time_dict.get(step ,0) + 1
    #print self._bad_time
    #for each in bad_time_count_dict :
    # Dump slow-order counts ordered by start offset.
    l = sorted(bad_time_count_dict.items() ,key = lambda bad_time_count_dict:bad_time_count_dict[0])
    for each in l:
        print each[0] ,each[1]
    print "-------------------------"
    # Order (start_time, latest_end_time) pairs by end time (py2 cmp sort).
    l = sorted(bad_time_dict.items(), lambda x, y: cmp(x[1], y[1]))
    bad_time_list = []
    # Keep only spans longer than five minutes for the report.
    for each in l:
        if (each[1]-each[0]).seconds > 5* 60 :
            bad_time_list.append(each)
    write_question_two(zero_dt,bad_time_list)
def node(*args, **kwargs):
    """Flask-style node-management endpoint dispatching on ?operate=<action>.

    Supported actions: list_all, create, view_cfg, update_cfg, set_stat_cfg,
    v_stat, get_s_pref, download_ss.  `user_id` is taken from kwargs (injected
    by the routing/auth layer — TODO confirm).  Unknown actions get a 404.
    """
    action = request.args.get("operate")
    user_id = kwargs["user_id"]
    if action == "list_all":
        # All nodes owned by this user.
        return make_response({"nodes": list_user_node(user_id)}, 200)
    elif action == "create":
        # Claim an existing node token for this user.
        node_token: dict = request.json
        if "node_token" not in node_token:
            return make_response("format error", 403)
        node_token: str = node_token["node_token"]
        try:
            assign_node_owner(node_token, user_id)
            return make_response({"result": "success"}, 200)
        except IndexError:
            # NOTE(review): "exsist" is a typo in a client-facing string;
            # left as-is since clients may match on it.
            return make_response({"result": "not exsist"}, 200)
        except ValueError:
            return make_response({"result": "already taken"}, 200)
    elif action == "view_cfg":
        node_token: str = request.args.get("id")
        # Ownership check before exposing the config.
        if not user_has_node(user_id, node_token):
            return make_response({"msg": "deny"}, 403)
        return make_response(get_node_config(node_token), 200)
    elif action == "update_cfg":
        node_token: str = request.args.get("id")
        if not user_has_node(user_id, node_token):
            return make_response({"msg": "deny"}, 403)
        try:
            user_config = request.json
            node: FarmNode = get_node_instance(node_token)
            if node is None:
                raise IndexError(f"can not found node {node_token}")
            # check for connection
            #if not test_connect(node_token, user_config["configures"]):
            #    raise ConnectionError(f"can not link to target device with config {user_config['configures']}")
            update_node_config(node_token, user_config)
            #load_lastest_node_cfg(node_token)
        except Exception as e:
            # Any failure (missing node, bad payload, ...) is logged with its
            # traceback and reported as a 400.  ("invaild" typo left as-is.)
            error(format_exc())
            return make_response({"msg": "invaild config"}, 400)
        return "OK"
    elif action == "set_stat_cfg":
        '''
        this will check user's query and setup further query condition cache
        '''
        data: dict = request.json
        # Filter the requested node ids down to those this user owns.
        node_ids = check_node_owner(user_id, data["node_ids"])
        try:
            hids, pics = dev_name_2_hid(node_ids, data["dev_name"])
        except IndexError as e:
            # dev_name_2_hid signals an unknown device name via IndexError
            # whose message starts with "device name"; other IndexErrors
            # propagate.
            if str(e).startswith("device name"):
                return make_response({"msg": "invaild device name"}, 404)
            raise e
        rule = data["group"]
        user_cfg = {}
        target_hids = []
        # Build per-device config (hid list + icon) and the flat hid list.
        for dn, hidl in hids.items():
            user_cfg.update({dn: {"target_hids": hidl, "icon_link": pics[dn]}})
            target_hids += hidl
        set_user_query_prefer(user_id, target_hids, rule, user_cfg)
        return make_response("OK", 200)
    elif action == "v_stat":
        return make_response(execute_query(user_id), 200)
    elif action == "get_s_pref":
        return make_response(get_user_prefer(user_id), 200)
    elif action == "download_ss":
        # Export device logs in a timestamp range to a throwaway xlsx file.
        data: dict = request.json
        ts_start = str_to_datetime(data["ts_range"]["gt"])
        ts_end = str_to_datetime(data["ts_range"]["lt"])
        limit = DEFAULT_MAX_RECORD
        if "max_record_count" in data:
            limit = data["max_record_count"]
        # TODO: testing
        logs = get_all_dev_log(data["node_id"], data["hids"], ts_start, ts_end, limit)
        # NOTE(review): the temp file is never cleaned up after send_file.
        fname = f"/tmp/{str(uuid4())}.xlsx"
        export_devlog_to_file(logs, fname)
        return send_file(fname, attachment_filename="res.xlsx")
    return make_response({"msg": "no such action"}, 404)