def save_message(self, message_data):
    """Persist a chat message, splitting it into DB-column-sized chunks.

    The first chunk becomes the parent ChatMessage row; every remaining
    chunk is saved as a child row linked via parent_message_id. All rows
    are written inside one transaction so a partial message never lands.
    """
    chunk_size = _model_field_limits['Message__message__max_length']
    text = message_data['message']
    message_chunks = [text[i:i + chunk_size]
                      for i in range(0, len(text), chunk_size)]
    first_message_chunk = message_chunks.pop(0)
    # Parse once; every chunk shares the same timestamp.
    timestamp = datetime.fromisoformat(message_data['datetime'])
    with transaction.atomic():
        message = ChatMessage(chat_room_id=self.room_id,
                              user_id=self.user_id,
                              datetime=timestamp,
                              message=first_message_chunk)
        message.save()
        parent_message_id = message.pk
        for message_chunk in message_chunks:
            # BUG FIX: the original saved `first_message_chunk` here,
            # duplicating the first chunk and silently dropping the rest
            # of a long message.
            message = ChatMessage(chat_room_id=self.room_id,
                                  user_id=self.user_id,
                                  datetime=timestamp,
                                  message=message_chunk,
                                  parent_message_id=parent_message_id)
            message.save()
def create(self, validated_data):
    """Create a Contract for the current user renting the given room.

    The price is pro-rated from the rental length in days against the
    room type's 30-day price. Returns True on success.

    Raises:
        serializers.ValidationError: on any lookup/parse/save failure.
    """
    try:
        current_user = self._current_user()
        room = Room.objects.get(pk=validated_data['room'])
        payment_method = PaymentMethod.objects.get(
            pk=validated_data['payment_method'])
        model = Contract.objects.create(
            room=room,
            profile=current_user.user_profile,
            start_at=validated_data['start_at'],
            end_at=validated_data['end_at'],
            payment_method=payment_method,
        )
        start_at = datetime.fromisoformat(validated_data['start_at'])
        end_at = datetime.fromisoformat(validated_data['end_at'])
        # Pro-rate against the monthly (30-day) price of the room type.
        rental_days = (end_at - start_at).days
        model.price = (rental_days / 30) * room.typeroom.price
        model.created_by = current_user
        model.save()
        return True
    except Exception:
        # BUG FIX: the original *returned* a ValidationError instance,
        # which DRF treats as a successful result; it must be raised.
        # (The trailing `return ValidationError("Server error")` after the
        # try/except was unreachable and has been removed.)
        raise serializers.ValidationError("Error")
def isValidDate(datestr):
    """Return True if *datestr* parses as an ISO-8601 date/datetime string."""
    try:
        dt.fromisoformat(datestr)
    except (TypeError, ValueError):
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are no longer swallowed; TypeError covers non-string input.
        return False
    return True
def getSensorData(quake, sensor):
    """Fetch accelerometer records around a quake's UTC time and plot them.

    Populates the module-level ``eq`` and ``records_df`` globals, then
    delegates plotting to ``plot_seismograms``.
    """
    global eq, records_df
    eq = quake
    data_client = AwsDataClient('mx')
    devices = data_client.get_devices_as_of_date(eq['date_utc'])
    event_time = datetime.fromisoformat(quake['date_utc'])
    # Window: one minute before the event through three minutes after.
    start_date_utc = event_time - timedelta(minutes=1)
    end_date_utc = event_time + timedelta(minutes=3)
    # Get records for the specified dates.
    records_df = get_df_from_records(
        data_client.get_filtered_records(str(start_date_utc),
                                         str(end_date_utc)))
    # Epoch sample time -> naive UTC datetime column.
    records_df['sample_dt'] = records_df['sample_t'].apply(
        datetime.utcfromtimestamp)
    # Select required columns.
    records_df = records_df[['device_id', 'x', 'y', 'z', 'sample_dt']]
    print(records_df.head())
    plot_seismograms(sensor)
def get_term_data_list(cf_term_data_by_sub_account, s=None, large=None, middle=None):
    """Flatten 'user_asset_acts' entries into a pandas DataFrame.

    Each entry is flattened via ``traverse``. Category ids are resolved to
    names when category maps are available (passed directly, or fetched
    from session *s*), and formatted date/year/month columns are added.
    """
    user_asset_acts = []
    for e in cf_term_data_by_sub_account['user_asset_acts']:
        other = [k for k in e.keys() if k != 'user_asset_act']
        if other:
            print("other", other)
        user_asset_act_ref = {}
        traverse(user_asset_act_ref, '', e['user_asset_act'])
        user_asset_acts.append(user_asset_act_ref)
    if s:
        large, middle = get_categories_form_session(s)
    if large and middle:
        for act in user_asset_acts:
            act['large_category'] = large[act['large_category_id']]
            act['middle_category'] = middle[act['middle_category_id']]
    for act in user_asset_acts:
        # Parse once per row instead of three times (perf + clarity).
        recognized = datetime.fromisoformat(act['recognized_at'])
        act['date'] = recognized.strftime("%y/%m/%d")
        act['year'] = recognized.strftime("CY%y")
        act['month'] = recognized.strftime("%y'%m")
    return pd.DataFrame(user_asset_acts)
def get_timer_min(data):
    """Return the still-running timer under data[BARRE] that ends soonest.

    A timer's end is ``begin + duration``; already-elapsed timers are
    ignored. Returns None when no timer is still running.
    """
    # BUG FIX: timer_min was referenced before assignment, raising
    # UnboundLocalError on the first still-running timer.
    timer_min = None
    now = datetime.now()
    for timer in data[BARRE]:
        end = datetime.fromisoformat(timer["begin"]) + timedelta(**timer["duration"])
        if now < end:
            if timer_min is None or datetime.fromisoformat(
                    timer_min["begin"]) + timedelta(**timer_min["duration"]) > end:
                timer_min = timer
    return timer_min
async def covid(ctx, country=''):
    """Reply in the channel with covid statistics for *country* (or the world)."""
    await ctx.send("Getting covid stats...")
    data = services.covid_stats(country.capitalize())
    # Reformat the ISO timestamps into a human-readable form.
    for field in ('lastChecked', 'lastReported'):
        data[field] = datetime.fromisoformat(data[field]).strftime('%c')
    target = country.capitalize() if country != '' else 'The World'
    lines = [f"The Covid Stats for {target}"]
    lines.extend(f"{key}: {value}" for key, value in data.items())
    fatality = round((data['deaths'] / data['confirmed']) * 100, 2)
    recovery = round((data['recovered'] / data['confirmed']) * 100, 2)
    lines.append(f"Fatality Rate: {fatality}%")
    lines.append(f"Recovery Rate: {recovery}%")
    await ctx.send("\n".join(lines))
def verify_timestamp_format(dt_str):
    """Return True if *dt_str* is ISO-8601, optionally with a 'Z'/'z' suffix."""
    # Guard the empty string (the original indexed dt_str[-1] and raised
    # IndexError); also accept a trailing Zulu marker that
    # datetime.fromisoformat (pre-3.11) cannot parse itself.
    if dt_str and dt_str[-1] in ("Z", "z"):
        dt_str = dt_str[:-1]
    try:
        datetime.fromisoformat(dt_str)
    except (TypeError, ValueError):
        # Narrowed from a bare `except:`.
        return False
    return True
def show_date_tips_page():
    """Render the Date_Page report for a chosen date and shift.

    Reads 'show_date' and 'shift' from the posted form, looks up the
    matching Money shift row, loads its waiters into the waiter model,
    and renders the page with the shift's totals. On GET (or when no
    matching shift is found) the page is rendered with a status string
    in ``report_details`` instead.
    """
    # counter_w is passed to the template as `id`; it is never incremented
    # here — presumably the template iterates the waiter lists itself.
    counter_w = 0
    report_details = "empty"
    # INIT Waiter Model
    waiter_model = WaitersTable
    init_waiter_model(waiter_model)
    show_date = request.form.get('show_date')  # get date
    shift = request.form.get('shift')  # get Morning / Evening shift
    if shift is None or shift == '':
        # default shift
        shift = "morning"
    # While click "Search"
    if request.method == 'POST':
        report_details = "detail-not exist"
        if show_date == '':
            flash("You didn't insert a Date")
            return render_template('Date_Page.html', show_date=show_date,
                                   report_details=report_details)
        # Search the Date shift in DB
        money_shift = Money.query.filter_by(
            date=datetime.fromisoformat(show_date).date()).first()
        # Date exist:
        if money_shift is not None:
            # NOTE(review): this equality re-check mirrors the filter_by
            # condition above, so it should always hold — confirm.
            if money_shift.date == datetime.fromisoformat(show_date).date():
                if money_shift.selected_shift == shift:
                    # Load every waiter belonging to this shift into the
                    # shared waiter model before rendering.
                    waiters = WaitersTable.query.filter_by(
                        shift_id=money_shift.id).all()
                    for waiter in waiters:
                        insert_waiter(waiter_model, waiter)
                    report_details = "detail-exist"
                    return render_template(
                        'Date_Page.html',
                        show_date=show_date,
                        shift=shift,
                        report_details=report_details,
                        show_manager=money_shift.manager,
                        show_selected_shift=money_shift.selected_shift,
                        total_hours=money_shift.total_hours,
                        total_cash=money_shift.total_cash,
                        total_credit=money_shift.total_credit,
                        cash_per_hour=money_shift.cash_per_hour,
                        credit_per_hour=money_shift.credit_per_hour,
                        total_tip=money_shift.total_tip,
                        id=counter_w,
                        name=waiter_model.waiters_name,
                        start_time=waiter_model.start_time_waiter_list,
                        finish_time=waiter_model.finish_time_waiter_list,
                        total_waiter_time=waiter_model.total_waiter_time_list,
                        total_cash_waiter=waiter_model.cash_waiter_list,
                        total_credit_waiter=waiter_model.credit_waiter_list,
                        total_tips=waiter_model.all_tips_waiters_list)
        # No shift matched the requested date/shift combination.
        if report_details == "detail-not exist":
            flash(" Didn't found a shift")
    return render_template('Date_Page.html', show_date=show_date,
                           report_details=report_details)
def get_hourly_sum_db(gpio_pin: str, hr: int, day: str):
    """Return the pulse sum for a given local day and hour.

    :param gpio_pin: GPIO pin number as a string (numeric)
    :param hr: hour in the 24h system; 25 is a sentinel for the full day
    :param day: date formatted YYYY-MM-DD; day and hr are in the local tz
    :return: the summed pulse count (0 when no rows matched)
    """
    db = get_db()
    cur = db.cursor()
    # Hour 25 means "entire day": start at local midnight and span 24h.
    hrs = "00" if hr == 25 else str(hr).zfill(2)
    date_format = "%Y-%m-%d %H:%M:%S"
    # sqlite expects format YYYY-MM-DD HH:MM:SS
    dtstart = day + " " + hrs + ":00:00" + get_offset_utc_s()
    dtstart_utc = datetime.fromisoformat(dtstart).astimezone(timezone.utc)
    span = timedelta(hours=24 if hr == 25 else 1)
    dtstart_utc_s = dtstart_utc.strftime(date_format)
    dtend_utc_s = (dtstart_utc + span).strftime(date_format)
    # SECURITY FIX: use bound parameters instead of string-built SQL.
    # gpio_pin is bound as an int to keep the original numeric-literal
    # comparison semantics against the gpiopin column.
    cur.execute(
        'SELECT sum(pulses) FROM pulses WHERE gpiopin == ?'
        ' AND created BETWEEN ? AND ?;',
        (int(gpio_pin), dtstart_utc_s, dtend_utc_s))
    row = cur.fetchone()
    hr_sum = row[0]
    return 0 if hr_sum is None else hr_sum
def conversion(list_of_tuples):
    """Normalize raw event tuples and append the results to ``new_ls``.

    Each input tuple is (date, time, longitude, latitude, klass, ...);
    the output tuple carries shifted date/time, float coordinates, and a
    magnitude derived from klass via ``klass_conversion``.

    NOTE(review): ``new_ls`` is not defined in this function, so it must
    be a module-level list — confirm it is initialized before calling.
    """
    for event in list_of_tuples:
        # Convert to list because we need to operate on this data
        temp_list = list(event)
        # convert from GMT+1 to GMT+8
        # NOTE(review): adding 8 hours converts *UTC* to GMT+8; converting
        # from GMT+1 would need +7 — confirm which zone the source data uses.
        gmt1_time = datetime.fromisoformat(
            f'{temp_list[0]} {temp_list[1]}') + timedelta(hours=8)
        # split date and time
        gmt8_time = gmt1_time.strftime("%Y-%m-%d %H:%M:%S").split()
        # Update to converted date
        temp_list[0] = gmt8_time[0]
        # Update to converted time
        temp_list[1] = gmt8_time[1]
        # Convert longitude to float()
        temp_list[2] = float(temp_list[2])
        # Convert latitude to float()
        temp_list[3] = float(temp_list[3])
        # Convert klass to float()
        temp_list[4] = float(temp_list[4])
        # Append magnitudes to temp list
        temp_list.append(klass_conversion(temp_list[4]))
        # Remove klass value
        del temp_list[4]
        # Convert back to tuple()
        new_ls.append(tuple(temp_list))
def getTimestamp(i, peaks):
    """Map even-indexed peak positions to timestamps of dataset *i* and
    insert them, paired with their movement event ids, into Sequence.

    :param i: dataset id used to select rows from Data
    :param peaks: indices into the dataset's timestamp list; only
        even-positioned entries are used
    :return: the list of selected timestamp strings
    """
    move = Search(i)
    # SECURITY/IDIOM FIX: bind the dataset id instead of str.format-ing
    # it into the SQL text.
    c.execute("SELECT timestamp FROM Data WHERE DATASETID = ?", (i,))
    # Each fetched row is a 1-tuple; index it directly instead of
    # string-stripping the tuple's repr as the original did.
    time = [str(row[0]) for row in c.fetchall()]
    time1 = []
    print(peaks)
    for j in range(len(peaks)):
        if j % 2 == 0:
            print(peaks[j])
            time1.append(str(time[peaks[j]]))
    print(time1)
    print(move)
    for k in range(len(move)):
        timestamp = datetime.fromisoformat(time1[k])
        iden = int(move[k])
        c.execute("INSERT INTO Sequence(timestamp, EventID) VALUES (?, ?)",
                  (timestamp, iden))
        conn.commit()
    return time1
def get_update_state(self, entity_id):
    """Return the stored Telegram updates.State for *entity_id*, or None
    when no row matches."""
    # Local import: the module's import block is not visible here and
    # `timezone` may not be in scope at file level.
    from datetime import timezone
    updatestate_container = self.get_updatestate_container()
    # NOTE(review): assumes entity_id is numeric; quote it if ids are strings.
    query = 'SELECT * FROM c where c.id = {}'.format(entity_id)
    for item in updatestate_container.query_items(
            query, enable_cross_partition_query=True):
        # BUG FIX: the original called datetime.fromisoformat()( ... ,
        # tz=datetime.timezone.utc) — a no-arg call followed by a bogus
        # `tz` kwarg, both TypeErrors. Parse, then attach UTC.
        item_timestamp = datetime.fromisoformat(item['date']).replace(
            tzinfo=timezone.utc)
        return types.updates.State(item['pts'], item['qts'], item_timestamp,
                                   item['seq'], unread_count=0)
def stuff():
    """Record a submitted time slot for a (possibly new) person in the
    session and re-render the index page."""
    time = request.form.get("time")
    print(time)
    print('ojuu')
    time = datetime.fromisoformat(str(time))
    # Store naive datetimes only.
    time = time.replace(tzinfo=None)
    name = request.form.get("name")
    if session.get(name) is None:
        session[name] = Person(name=name)
        session['persons'].append(session[name])
    session[name].schedule.addtime(time)
    new_row = pandas.DataFrame({'times': pandas.Timestamp(time),
                                'name': name}, index=[0])
    if session.get('session_db') is None:
        session['session_db'] = new_row
    else:
        # BUG FIX: DataFrame.append returns a *new* frame (and was removed
        # in pandas 2.x); the original discarded the result, so rows were
        # never accumulated. Concatenate and store the result back.
        session['session_db'] = pandas.concat(
            [session['session_db'], new_row], ignore_index=True)
    print(session['session_db'])
    return render_template(
        "index.html",
        time=time,
        userface=session[name].schedule.times,
        persons=[person.name for person in session['persons']],
        timelist=slot_check(session['persons']))
def Get_Course(courseid):
    """Return (courseDetail, courseInfo, teacher) for the given course id.

    courseDetail is the fully-serialized course with FK ids resolved to
    objects and display-formatted times; courseInfo maps the description
    fields to their Chinese display keys; teacher is the Users row for
    the course teacher, or None if it could not be resolved.
    """
    # Fetch the course row.
    object = models.Course.objects.get(pk=courseid)

    # Serializers (pattern from
    # https://stackoverflow.com/questions/21925671/convert-django-model-object-to-dict-with-all-of-the-fields-intact)
    class CourseDetailSerializer(serializers.ModelSerializer):
        class Meta:
            model = models.Course
            fields = "__all__"

    class CourseInfoSerializer(serializers.ModelSerializer):
        class Meta:
            model = models.Course
            fields = [
                "Course_Info",
                "Course_Goal",
                "Course_Chapter",
                "Grade_Requirements",
                "Reference",
                "QA",
            ]

    courseDetail = CourseDetailSerializer(object).data
    # BUG FIX: `teacher` was only assigned inside the try block, so any
    # lookup failure made the final `return` raise NameError.
    teacher = None
    try:
        # The serializer only yields the FK ids; resolve them to objects.
        courseDetail["Course_Category"] = models.CourseCategory.objects.get(
            pk=courseDetail["Course_Category"])
        # Deduplicated: the original ran the identical Users query twice.
        teacher = models.Users.objects.get(pk=courseDetail["Course_Teacher"])
        courseDetail["Course_Teacher"] = teacher
    except Exception:
        # Missing FK rows are tolerated (narrowed from a bare except).
        pass
    courseInfo = CourseInfoSerializer(object).data
    courseInfo["课程说明"] = courseInfo.pop("Course_Info")
    courseInfo["课程目标"] = courseInfo.pop("Course_Goal")
    courseInfo["课程大纲"] = courseInfo.pop("Course_Chapter")
    courseInfo["成绩要求"] = courseInfo.pop("Grade_Requirements")
    courseInfo["参考资料"] = courseInfo.pop("Reference")
    courseInfo["常见问题"] = courseInfo.pop("QA")
    # Convert TZ-format strings (2021-04-26T22:55:09.695785+08:00) into
    # datetime objects, then into display strings.
    # https://stackoverflow.com/questions/13182075/how-to-convert-a-timezone-aware-string-to-datetime-in-python-without-dateutil
    courseDetail["Starting_Time"] = datetime.fromisoformat(
        courseDetail["Starting_Time"]).strftime("%Y年%m月%d日")
    courseDetail["Ending_Time"] = datetime.fromisoformat(
        courseDetail["Ending_Time"]).strftime("%Y年%m月%d日")
    return courseDetail, courseInfo, teacher
def add_batch():
    """Create a batch from the posted JSON payload and return HTTP 201."""
    payload = request.json
    eta = payload['eta']
    # A null eta stays None; otherwise parse the ISO string to a date.
    if eta is not None:
        eta = datetime.fromisoformat(eta).date()
    command = commands.CreateBatch(payload['ref'], payload['sku'],
                                   payload['qty'], eta)
    bus.handle(command)
    return 'OK', 201
def get_day_and_hour(date):
    """
    Parse an ISO-format date string; return its hour and ISO day of the week
    """
    parsed = datetime.fromisoformat(date)
    return parsed.hour, parsed.isoweekday()
def add_batch():
    """Parse the posted batch payload and delegate creation to the
    service layer."""
    session = get_session()
    repo = repository.SqlAlchemyRepository(session)
    eta = request.json['eta']
    # A null eta stays None; otherwise parse the ISO string to a date.
    if eta is not None:
        eta = datetime.fromisoformat(eta).date()
    services.add_batch(request.json['ref'], request.json['sku'],
                       request.json['qty'], eta, repo, session)
def _should_system_be_armed(event_start_str, event_end_str):
    """Return True when the current time falls inside the event window.

    Both inputs are parsed with datetime.fromisoformat and compared as
    seconds since the Unix epoch; they must therefore be timezone-aware
    ISO-8601 strings (naive ones would fail the aware-minus-aware
    subtraction below).

    :param event_start_str: ISO time marking the start of the window
    :param event_end_str: ISO time marking the end of the window
    :return: True if now is strictly inside (start, end), else False
    """
    epoch = datetime(1970, 1, 1, tzinfo=pytz.utc)
    start_time_in_sec = (datetime.fromisoformat(event_start_str) - epoch).total_seconds()
    end_time_in_sec = (datetime.fromisoformat(event_end_str) - epoch).total_seconds()
    # BUG FIX: the original did datetime.now() (naive local time) and then
    # replace(tzinfo=pytz.timezone('US/Eastern')), which attaches the
    # zone's LMT offset (off by ~4 minutes) and mislabels the wall time
    # whenever local tz != US/Eastern. Ask for an aware "now" directly.
    now = datetime.now(pytz.timezone('US/Eastern'))
    current_time_in_sec = (now - epoch).total_seconds()
    return start_time_in_sec < current_time_in_sec < end_time_in_sec
def convert_not_before(cls, not_before):
    """Parse *not_before* ('yyyy-MM-dd HH:mm:ss') into an aware UTC datetime.

    Raises ErrorHandler when the string cannot be parsed.
    """
    try:
        parsed = datetime.fromisoformat(not_before)
        # convert notBefore param into 'aware' datetime object
        return pytz.UTC.localize(parsed)
    except Exception as e:
        raise ErrorHandler(
            "Error parsing DateTime 'notBefore' -> " + not_before +
            " . Value must be of format "
            "DateTimeFormatter.ofPattern('yyyy-MM-dd HH:mm:ss') " + str(e))
def transportar(self):
    """Advance order states based on age: 'C' (confirmed) orders older
    than 4 days become 'T' (in transport); 'T' orders older than 5 days
    become 'E' (delivered).

    NOTE(review): the new ``self.__estado`` values are only set on the
    instance here — no UPDATE is issued in this method, so presumably
    ``obtenerOrden``/another method persists them; confirm.
    """
    from datetime import datetime
    # Open the database connection.
    database = sqlite3.connect(
        "linioexp_parcial_lab3.db")
    can = 1  # NOTE(review): unused in this method.
    try:
        cursor = database.cursor()  # obtain cursor object
        query = ''' SELECT idOrden,fechaRegistro, estado from orden where estado= 'C' '''
        cursor.execute(query)
        corden = cursor.fetchall()
        try:
            for ped in corden:
                self.__idOrden = ped[0]
                # Loads this order's fields (fechaRegistro, estado, ...)
                # onto the instance.
                self.obtenerOrden()
                # Drop fractional seconds so fromisoformat can parse it.
                date_string = self.__fechaRegistro.split(".")[0]
                if (datetime.now() - datetime.fromisoformat(date_string)
                        ).days > 4 and self.__estado == 'C':
                    self.__estado = 'T'
        except:
            # NOTE(review): silent catch-all — parse/attribute errors in
            # any order abort the whole 'C'->'T' pass without a trace.
            pass
        try:
            cursor = database.cursor()  # obtain cursor object
            query = ''' SELECT idOrden,fechaRegistro, estado from orden where estado= 'T' '''
            cursor.execute(query)
            corden = cursor.fetchall()
            for ped in corden:
                self.__idOrden = ped[0]
                self.obtenerOrden()
                date_string = self.__fechaRegistro.split(".")[0]
                if (datetime.now() - datetime.fromisoformat(date_string)
                        ).days > 5 and self.__estado == 'T':
                    self.__estado = 'E'
        except:
            # NOTE(review): same silent catch-all as above.
            pass
    except Exception as e:
        print("Error: {}".format(e))
    finally:
        database.close()  # close the database connection
def main(argv):
    """Command-line entry point: parse options and print strym dashboard
    statistics for a directory of drive logs.

    Options: -h (help), -v/--verbose, -d/--directory, -s/--start-date,
    -e/--end-date (ISO date strings).
    """
    import os, getopt
    directory = './'
    verbose = False
    try:
        # BUG FIX: the long-option list only contained "directory=", so
        # --start-date, --end-date and --verbose (all advertised in the
        # usage string) were rejected by getopt.
        opts, args = getopt.getopt(
            argv, "hvd:s:e:",
            ["directory=", "start-date=", "end-date=", "verbose"])
    except getopt.GetoptError:
        print(
            'dashboard.py <-v,--verbose> -d <directory> -s <start_date> -e <end_date>'
        )
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print('dashboard.py <-v,--verbose> -d <directory>')
            sys.exit()
        elif opt in ('-d', '--directory'):
            directory = arg
            print(f'directory={directory}')
        elif opt in ('-s', '--start-date'):
            # BUG FIX: `import datetime` binds the *module*, which has no
            # fromisoformat attribute; import the class instead.
            from datetime import datetime
            start = datetime.fromisoformat(arg)
            print(f'start_date={start}')
        elif opt in ('-e', '--end-date'):
            from datetime import datetime
            end = datetime.fromisoformat(arg)
            print(f'end_date={end}')
        elif opt in ('-v', '--verbose'):
            verbose = True
            print(f'verbose={verbose}')
    from strym import dashboard
    try:
        db = dashboard(directory=directory, verbose=verbose)
        print(db.statistics())
        print(
            f'Total driving distance (miles): {db.miles()} ({db.error_count} files not parsed)'
        )
        print(
            f'Total driving distance (km): {db.kilometers()} ({db.error_count} files not parsed)'
        )
    except Exception as ex:
        print(f'Exception when processing {directory} (msg={ex})')
def that_day_entries(chat_id: Union[str, int], iso_date: str):
    """Query every table entry for *chat_id* whose date falls on the
    calendar day containing *iso_date*."""
    day_start = _beginning(datetime.fromisoformat(iso_date))
    day_end = _end(day_start)
    condition = (Key('chat_id').eq(str(chat_id))
                 & Key('date').between(day_start.isoformat(),
                                       day_end.isoformat()))
    response = _table.query(ConsistentRead=True,
                            KeyConditionExpression=condition)
    return response['Items']
def parse_line(line):
    """Parse one whitespace-delimited record line into
    [datetime, value_a, value_b].

    Columns 1 and 2 hold the date (YYYYMMDD) and time (HHMM); the two
    float measurements sit at fixed offsets from the end of the line.

    Example line:
    23803 20190101 0005 20181231 1805 3 -89.43 34.82 12.4
    0.0 0 0 10.9 C 0 88 0 -99.000 -9999.0 1115 0 0.79 0
    """
    fields = line.split()
    ds = fields[1]  # Date string.
    ts = fields[2]  # Time string.
    # Build an ISO string like 2011-11-04T00:05 and parse it.
    stamp = datetime.fromisoformat(
        f"{ds[0:4]}-{ds[4:6]}-{ds[6:8]}T{ts[0:2]}:{ts[2:4]}")
    return [stamp, float(fields[-15]), float(fields[-13])]
def data_cleared_barre(data, barre):
    """Return the timers under data[barre] whose begin + duration has
    already elapsed."""
    expired = []
    for timer in data[barre]:
        ends_at = datetime.fromisoformat(timer["begin"]) + timedelta(
            **timer["duration"])
        if datetime.now() >= ends_at:
            expired.append(timer)
        else:
            pass  # NOTIFICATION?
    return expired
def post(self, request, *args, **kwargs):
    """Handle batch creation: read the payload stashed in the WSGI
    environ and delegate to the service layer. Returns HTTP 201."""
    repo = ORMRepository
    params = request.environ["kwargs"]
    eta = params['eta']
    # A null eta stays None; otherwise parse the ISO string to a date.
    if eta is not None:
        eta = datetime.fromisoformat(eta).date()
    services.add_batch(params['ref'], params['sku'], params['qty'], eta, repo)
    return HttpResponse(status=201)
def get_all_usb_device_records(self) -> Iterable[dict]:
    """
    read udev log from /var/log/udev-all.log and map each entry's udev
    properties onto the record fields below
    """
    field_extractors = {
        "serial": lambda entry: entry.get("ID_SERIAL_SHORT"),
        "manufacture": lambda entry: entry.get("ID_VENDOR_FROM_DATABASE"),
        "device_name": lambda entry: entry.get("ID_MODEL"),
        "last_plugin_time": lambda entry: datetime.fromisoformat(
            entry["time"]).strftime("%Y-%m-%d %H:%M:%S"),
    }
    return self._read_udev_log("/var/log/udev-all.log", field_extractors)
def onTweet(tweet):
    """Upsert a tweet's stats into the db.tweets collection, keyed by id."""
    # Keep only the date portion of "YYYY-MM-DD HH:MM:SS".
    tweet_date = datetime.fromisoformat(tweet.datetime.split(" ")[0])
    record = {
        'tweetid': tweet.id,
        'username': tweet.username,
        'hashtags': tweet.hashtags,
        'likes_count': tweet.likes_count,
        'replies_count': tweet.replies_count,
        'retweets_count': tweet.retweets_count,
        'date': tweet_date,
    }
    db.tweets.update_one({"tweetid": tweet.id}, {"$set": record}, True)
def test_for_zero_day_bug(monkeypatch):
    """On the 1st day of a month, before 12 UTC, a URL with month-day
    zero used to be constructed. Verify that before 12 UTC we look for
    the previous day's 12 UTC model run instead."""
    problem_date = datetime.fromisoformat('2020-09-01T00:13:58+00:00')
    url_generator = env_canada.get_global_model_run_download_urls(
        problem_date, 12)
    first_url = next(url_generator)
    expected_url = ('https://dd.weather.gc.ca/model_gem_global/15km/'
                    'grib2/lat_lon/12/000/CMC_glb_TMP_TGL_2_latlon.'
                    '15x.15_2020083112_P000.grib2')
    assert first_url == expected_url
def get_samples(before_datetime=None, page=1):
    """Get stored pipeline samples.

    Parameters
    ----------
    before_datetime : date time in ISO 8601 compatible format,
        YYYY-MM-DDTHH:MM:SS. For example '2002-12-25 00:00:00-06:39'.
        It uses python's standard function datetime.fromisoformat().
        If not provided, the function will start with the most recent
        available sample.
    page : positive integer
        Paginates samples in batches of 5. Defaults to page=1.

    Returns
    -------
    list
        A list of previously saved pipeline sample dicts (the original
        docstring said "dictionary", but a list is returned).

    Raises
    ------
    ValueError
        If page is not a positive integer.
    """
    # ROBUSTNESS FIX: `assert` is stripped under -O; validate explicitly.
    if not isinstance(page, int) or page < 1:
        raise ValueError(f'page must be a positive integer, got {page!r}')
    page_size = 5
    parsed_datetime = None
    if before_datetime:
        try:
            parsed_datetime = datetime.fromisoformat(before_datetime)
            log.debug('Fetching samples saved before %s', parsed_datetime)
        except ValueError as e:
            # Best-effort: fall back to the most recent samples.
            log.warning(
                'Unable to parse before_datetime parameter: %s. '
                ' Error: %s', before_datetime, str(e))
    # NOTE(review): parsed_datetime is logged but never used to filter
    # the files below — presumably an unfinished feature; confirm.
    page_start_position = (page - 1) * page_size
    page_end_position = page_start_position + page_size
    if not parsed_datetime:
        log.debug('Fetching most recent saved samples')
    log.debug(
        'Fetching samples page %d. Page size %d. '
        'Sample index range [%d:%d]. ',
        page, page_size, page_start_position, page_end_position)
    p = Path('./data/detections/front-door/faces/')
    log.debug('Samples path: %s', p.resolve())
    files = list(p.glob("*-json.txt"))
    log.debug('Fetched %d file names.', len(files))
    # Newest first, then slice out the requested page.
    files = sorted(files, key=os.path.getmtime, reverse=True)
    samples = []
    for json_file in files[page_start_position:page_end_position]:
        with open(json_file) as f:
            sample = json.load(f)
        sample['id'] = uuid.uuid4().hex
        sample['file'] = str(json_file)
        samples.append(sample)
    return samples