def api_scheduled_item_add(request, feeder_id, receiver_id):
    if request.method == 'POST' and request.user.is_authenticated:
        timezone = pytz.timezone(settings.TIME_ZONE)
        try:
            post_data = json.loads(request.body.decode('utf-8'))
        except ValueError as error:
            return JsonResponse({'status': 400, 'data': 'data not in json format'})
        if not {'start', 'end'}.issubset(post_data):
            return default400(request)
        receiver = Receiver.objects.get(id=receiver_id)
        if request.user.id != receiver.client.id:
            return JsonResponse({'status': 400, 'data': 'you are not the client for this receiver'})
        feeder = Feeder.objects.get(id=feeder_id)
        try:
            start = timezone.localize(datetime.strptime(post_data['start'], '%Y/%m/%d %H:%M:%S'))
            end = timezone.localize(datetime.strptime(post_data['end'], '%Y/%m/%d %H:%M:%S'))
        except ValueError as error:
            return JsonResponse({'status': 400, 'data': str(error)})
        scheduledItem = ScheduledItem(feeder=feeder, receiver=receiver, start=start, end=end)
        # save() is assumed to return False when the requested slot is unavailable.
        if scheduledItem.save() == False:
            return JsonResponse({'status': 400, 'data': 'slot unavailable'})
        return default201(request)
    return default404(request)
import pytz


def utc_timestamp(dt):
    """Convert a naive datetime object (interpreted as UTC) into a Unix timestamp."""
    timezone = pytz.timezone("utc")
    dt = timezone.localize(dt)
    return int(dt.timestamp())
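# Usage sketch for utc_timestamp() above (assumes the function and pytz are importable).
# The naive input is interpreted as UTC wall time, so the Unix epoch maps to 0.
from datetime import datetime

assert utc_timestamp(datetime(1970, 1, 1)) == 0
assert utc_timestamp(datetime(2021, 1, 1)) == 1609459200  # 2021-01-01 00:00:00 UTC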
def _to_python_datetime(
    value: Union[int, float],
    dtype: Type[DataType],
    tu: Optional[str] = "ns",
    tz: Optional[str] = None,
) -> Union[date, datetime]:
    if dtype == Date:
        # days to seconds
        # important to create from utc. Not doing this leads
        # to inconsistencies dependent on the timezone you are in.
        return datetime.utcfromtimestamp(value * 3600 * 24).date()
    elif dtype == Datetime:
        if tu == "ns":
            # nanoseconds to microseconds
            dt = EPOCH + timedelta(microseconds=value / 1000)
        elif tu == "us":
            dt = EPOCH + timedelta(microseconds=value)
        elif tu == "ms":
            # milliseconds to seconds
            dt = datetime.utcfromtimestamp(value / 1_000)
        else:
            raise ValueError(f"time unit: {tu} not expected")
        if tz is not None and len(tz) > 0:
            import pytz

            timezone = pytz.timezone(tz)
            return timezone.localize(dt)
        return dt
    else:
        raise NotImplementedError  # pragma: no cover
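# Standalone sketch of the Datetime branch above, assuming EPOCH = datetime(1970, 1, 1)
# (as in the surrounding module). pytz's localize() only labels the naive wall time with
# the requested zone; it does not shift the instant.
from datetime import datetime, timedelta
import pytz

EPOCH = datetime(1970, 1, 1)
value_ns = 3_600_000_000_000  # one hour after the epoch, in nanoseconds
dt = EPOCH + timedelta(microseconds=value_ns / 1000)
print(pytz.timezone("Asia/Tokyo").localize(dt))  # 1970-01-01 01:00:00+09:00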
import arrow
import pytz
from datetime import datetime


def Ru_to_UTC(Ru_timestamp):
    timestamp_Ru = datetime.strptime(Ru_timestamp, '%d.%m.%Y %H:%M:%S')
    timezone = pytz.timezone('Europe/Moscow')
    dt = arrow.get(timezone.localize(timestamp_Ru)).to('UTC')
    convertedStamp = str(dt.date()) + ' ' + str(dt.time())
    Ru_to_UTC = datetime.strptime(convertedStamp, '%Y-%m-%d %H:%M:%S')
    return Ru_to_UTC
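# Usage sketch for Ru_to_UTC() above (assumes the function plus its pytz/arrow/datetime
# imports are available). Europe/Moscow has been fixed at UTC+3 since late 2014, so noon
# Moscow time comes back as a naive 09:00 UTC datetime.
print(Ru_to_UTC('01.06.2021 12:00:00'))  # 2021-06-01 09:00:00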
def localtime_to_ust(datetime):
    date_time_naive = parse(datetime)
    timezone = pytz.timezone(schema['timezone'])
    local_dt = timezone.localize(date_time_naive, is_dst=None)
    return local_dt
def time_verify(self, request, pk):
    # Fetch the order being queried.
    order = Order.objects.get(id=pk)
    # Flag indicating whether 24 hours have passed since the order was confirmed.
    # True means the order can still be modified (less than 24 hours have passed).
    # False means the order can no longer be modified (24 hours have already passed).
    flag = True
    # Current system date and time (naive, no timezone attached yet).
    todays_date = datetime.now()
    # Build a tzfile timezone from the zone configured in the project settings.
    timezone = pytz.timezone(settings.TIME_ZONE)
    # Attach that timezone to the system date.
    todays_date_wo = timezone.localize(todays_date)
    # Exact date and time 24 hours before now.
    takeaway_hours = todays_date_wo - timedelta(hours=24)
    # Take the confirmation date of the order and convert it to the local timezone
    # provided in the settings file.
    order_date = order.created.replace(tzinfo=pytz.utc)
    order_date = order_date.astimezone(timezone)
    # Have 24 hours passed since the order was confirmed?
    if takeaway_hours > order_date:
        flag = False
    return Response(flag)
def prepare(self, data):
    if data.get('external_ticket_url') and self.request['session']['role'] != 'admin':
        raise JsonErrors.HTTPForbidden(message='external_ticket_url may only be set by admins')
    date = data.pop('date', None)
    timezone: TzInfo = data.pop('timezone', None)
    if timezone:
        data['timezone'] = str(timezone)
    if date:
        dt: datetime = timezone.localize(date['dt'].replace(tzinfo=None))
        duration: Optional[int] = date['dur']
        if duration:
            duration = timedelta(seconds=duration)
        else:
            dt = datetime(dt.year, dt.month, dt.day)
        data.update(
            start_ts=dt,
            duration=duration,
        )
    loc = data.pop('location', None)
    if loc:
        data.update(
            location_name=loc['name'],
            location_lat=loc['lat'],
            location_lng=loc['lng'],
        )
    return data
def get_date_time():
    d = datetime.now()
    timezone = pytz.timezone("America/New_York")
    d_local = timezone.localize(d)
    _time = datetime.strftime(d_local, "%I:%M %p")
    _date = datetime.strftime(d_local, "%m/%d/%Y")
    print("time: {} date: {}".format(_time, _date))
    return _time, _date
def forecast(self, bIntentMessage):
    """
    Complete answer:
        - condition
        - current temperature
        - max and min temperature
        - warning about rain or snow if needed
    """
    intentMessage = json.loads(bIntentMessage.decode())
    try:
        timezone = pytz.timezone("Europe/Berlin")
        current_date = timezone.localize(datetime.now()).date()
        target_date = date_parser.parse(
            intentMessage["slots"][0]["value"]["value"]
            # intentMessage.slots.forecast_date_time.value
        ).date()
        delta = (target_date - current_date).days
    except:
        delta = 0
    weather_forecast = self.get_weather_forecast(intentMessage)
    if delta > len(weather_forecast.daily):
        weather_forecast.rc = 3
    if weather_forecast.rc != 0:
        response = self.error_response(weather_forecast)
    else:
        weather_target_day = weather_forecast.daily[delta]
        if delta > 0:
            response = ("Wetter {1}: {0}. "
                        "Höchsttemperatur: {2} Grad. "
                        "Tiefsttemperatur: {3} Grad. ").format(
                weather_target_day.summary,
                weather_forecast.inLocation,
                str(round(weather_target_day.temperatureMax, 1)).replace('.', ','),
                str(round(weather_target_day.temperatureMin, 1)).replace('.', ','))
        else:
            response = ("Wetter heute{1}: {0}. "
                        "Aktuelle Temperatur ist {2} Grad. "
                        "Höchsttemperatur: {3} Grad. "
                        "Tiefsttemperatur: {4} Grad. ").format(
                weather_target_day.summary,
                weather_forecast.inLocation,
                str(round(weather_forecast.currently.temperature, 1)).replace('.', ','),
                str(round(weather_target_day.temperatureMax, 1)).replace('.', ','),
                str(round(weather_target_day.temperatureMin, 1)).replace('.', ','))
        response = self.add_warning_if_needed(response, weather_target_day)
    return response
def is_dst(self, dt=None, timezone="UTC"):
    # Default to the current wall-clock time.
    if dt is None:
        dt = datetime.now()
    timezone = pytz.timezone(timezone)
    try:
        timezone_aware_date = timezone.localize(dt, is_dst=None)
    except pytz.NonExistentTimeError:
        return False
    except pytz.AmbiguousTimeError:
        return True
    return timezone_aware_date.tzinfo._dst.seconds != 0
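# Standalone sketch of the pytz behaviour is_dst() above relies on: localize(..., is_dst=None)
# raises on the spring-forward gap and on the ambiguous autumn overlap.
from datetime import datetime
import pytz

berlin = pytz.timezone("Europe/Berlin")
try:
    berlin.localize(datetime(2021, 3, 28, 2, 30), is_dst=None)   # clocks jumped 02:00 -> 03:00
except pytz.NonExistentTimeError:
    print("non-existent local time")                             # is_dst() returns False here
try:
    berlin.localize(datetime(2021, 10, 31, 2, 30), is_dst=None)  # 02:30 occurred twice
except pytz.AmbiguousTimeError:
    print("ambiguous local time")                                # is_dst() returns True here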
def Zurich_to_UTC(self, Zurich_timestamp):
    timestamp_Ru = datetime.strptime(Zurich_timestamp, '%Y/%m/%d %H:%M:%S.%f')
    timezone = pytz.timezone('Europe/Zurich')
    dt = arrow.get(timezone.localize(timestamp_Ru)).to('UTC')
    if '.' in str(dt.time()):
        convertedStamp = str(dt.date()) + ' ' + str(dt.time())
    else:
        convertedStamp = str(dt.date()) + ' ' + str(dt.time()) + '.000'
    return convertedStamp
def make_aware(value, timezone=None, is_dst=None):
    """Make a naive datetime.datetime in a given time zone aware."""
    if timezone is None:
        timezone = get_current_timezone()
    if hasattr(timezone, "localize"):
        # This method is available for pytz time zones.
        return timezone.localize(value, is_dst=is_dst)
    else:
        # Check that we won't overwrite the timezone of an aware datetime.
        if is_aware(value):
            raise ValueError("make_aware expects a naive datetime, got %s" % value)
        # This may be wrong around DST changes!
        return value.replace(tzinfo=timezone)
def make_aware(value, timezone=None, is_dst=None):
    """Make a naive datetime.datetime in a given time zone aware."""
    if timezone is None:
        timezone = get_current_timezone()
    if hasattr(timezone, 'localize'):
        # This method is available for pytz time zones.
        return timezone.localize(value, is_dst=is_dst)
    else:
        # Check that we won't overwrite the timezone of an aware datetime.
        if is_aware(value):
            raise ValueError(
                "make_aware expects a naive datetime, got %s" % value)
        # This may be wrong around DST changes!
        return value.replace(tzinfo=timezone)
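# Illustration of why make_aware() special-cases pytz zones: localize() picks the correct
# UTC offset for the given wall time, while replace(tzinfo=...) on a pytz zone falls back
# to the zone's base entry (historical LMT for Europe/Paris), which is usually wrong.
from datetime import datetime
import pytz

naive = datetime(2021, 7, 1, 12, 0)
paris = pytz.timezone("Europe/Paris")
print(paris.localize(naive))        # 2021-07-01 12:00:00+02:00 (CEST)
print(naive.replace(tzinfo=paris))  # 2021-07-01 12:00:00+00:09 (LMT offset)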
async def prepare_edit_data(self, pk, data):
    timezone: TzInfo = data.get('timezone')
    if not timezone and 'date' in data:
        # timezone is needed when date is being updated
        tz = await self.conn.fetchval('SELECT timezone FROM events WHERE id=$1', pk)
        data['timezone'] = pytz.timezone(tz)
    data = self.prepare(data)
    if timezone and 'start_ts' not in data:
        # timezone has changed but not start_ts, need to update start_ts to account for timezone change
        dt = await self.conn.fetchval("SELECT start_ts AT TIME ZONE timezone FROM events WHERE id=$1", pk)
        data['start_ts'] = timezone.localize(dt)
    return data
import pytz
from datetime import date, datetime


def to_utc(naive_time, tz_name):
    date_time = None
    timezone = pytz.timezone(tz_name)
    # datetime is a subclass of date, so it has to be checked first.
    if isinstance(naive_time, datetime):
        date_time = naive_time
    elif isinstance(naive_time, date):
        # Treat a bare date as midnight in the given zone.
        date_time = datetime(naive_time.year, naive_time.month, naive_time.day)
    else:
        return
    if date_time.tzinfo is None:
        dt_local = timezone.localize(date_time)
    else:
        dt_local = date_time
    return dt_local.astimezone(pytz.utc)
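# Usage sketch for to_utc() above (assumes the function is in scope).
# A bare date is treated as local midnight in tz_name before conversion.
from datetime import date, datetime

print(to_utc(datetime(2021, 1, 15, 9, 0), "America/New_York"))  # 2021-01-15 14:00:00+00:00
print(to_utc(date(2021, 1, 15), "America/New_York"))            # 2021-01-15 05:00:00+00:00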
def receive(self, text_data):
    room_name = self.scope['url_route']['kwargs']['room_name']
    text_data_json = json.loads(text_data)
    message = text_data_json['message']
    sender_name = text_data_json['sender']
    room = Room.objects.get(room_name=room_name)
    sender_user = User.objects.get(username=sender_name)
    msg = Message(room=room, sender=sender_user, message=message, timestamp=datetime.utcnow())
    msg_date = msg.timestamp.strftime("%b. %d, %Y")
    msg_time = msg.timestamp.strftime("%I:%M ")
    message_p = msg.timestamp.strftime("%p")
    timezone = pytz.timezone("UTC")
    with_timezone = timezone.localize(msg.timestamp)
    msg.timestamp = with_timezone
    msg_time_millis = msg.convert
    message_period = '.'.join(
        [x.lower() for x in message_p if x == 'A' or x == 'M' or x == 'P'])
    msg_time += message_period + '.'
    message_time = {
        'date': msg_date,
        'time': msg_time,
        'hour': msg.timestamp.hour,
        'minutes': msg.timestamp.minute,
        'time_millis': msg_time_millis,
        'inbox_time': msg_date
    }
    today_date = datetime.now()
    if msg.timestamp.date() == today_date.date():
        message_time['inbox_time'] = msg_time
    msg.save()
    sender_id = str(sender_user.key)
    # Send message to room group
    async_to_sync(self.channel_layer.group_send)(
        self.room_group_name,
        {
            'type': 'chat_message',
            'message': message,
            'sender': sender_id,
            'message_time': message_time
        })
def make_aware(value, timezone=None, is_dst=NOT_PASSED):
    """Make a naive datetime.datetime in a given time zone aware."""
    if is_dst is NOT_PASSED:
        is_dst = None
    else:
        warnings.warn(
            'The is_dst argument to make_aware(), used by the Trunc() '
            'database functions and QuerySet.datetimes(), is deprecated as it '
            'has no effect with zoneinfo time zones.',
            RemovedInDjango50Warning,
        )
    if timezone is None:
        timezone = get_current_timezone()
    if _is_pytz_zone(timezone):
        # This method is available for pytz time zones.
        return timezone.localize(value, is_dst=is_dst)
    else:
        # Check that we won't overwrite the timezone of an aware datetime.
        if is_aware(value):
            raise ValueError("make_aware expects a naive datetime, got %s" % value)
        # This may be wrong around DST changes!
        return value.replace(tzinfo=timezone)
def getActivitiesFromMidnight(self):
    from redminelib import Redmine
    import datetime

    redmine = Redmine(url=self.url, key=self.apiKey)
    startDateTime = datetime.datetime.utcnow()
    baseTime = (startDateTime - datetime.timedelta(hours=10)).strftime('%Y-%m-%dT%H:%M:%SZ')
    print("%s %s" % (startDateTime, baseTime))
    issues = redmine.issue.filter(updated_on=">=%s" % baseTime)
    count = 0
    rtnArray = []
    for issue in issues:
        count = count + 1
        tmpArray = {}
        timezone = pytz.timezone("UTC")
        utcDateTime = timezone.localize(issue.updated_on)
        convertTime = utcDateTime.astimezone(pytz.timezone('Asia/Seoul')).strftime('%H:%M')
        tmpArray['time'] = convertTime
        tmpArray['issue.id'] = issue.id
        tmpArray['issue.project.name'] = issue.project.name
        tmpArray['issue.author'] = "%s" % issue.author
        tmpArray['issue.subject'] = issue.subject
        print("%d %s %s #%s %s %s" % (count, convertTime, issue.id, issue.project.name,
                                      issue.author, issue.author.id))
        rtnArray.append(tmpArray)
    from operator import itemgetter
    rtnArray = sorted(rtnArray, key=itemgetter('time'), reverse=False)
    return rtnArray
def getOutput(endPoint, code):
    d = datetime.now()
    timezone = pytz.timezone("Europe/London")
    d_aware = timezone.localize(d)
    details = getDetails(endPoint, code)
    output = {}
    output['header'] = '{}'.format(d_aware.strftime('%Y-%m-%d %H:%M'))
    output['subHeader'] = '{}'.format(details['country'])
    output['body'] = ''
    output['body'] = output['body'] + ' Total Today'
    output['body'] = output['body'] + '\nCases: {} {}'.format(
        '{:,}'.format(details['cases']).rjust(6),
        '{:,}'.format(details['todayCases']).rjust(6))
    output['body'] = output['body'] + '\nDeaths: {} {}'.format(
        '{:,}'.format(details['deaths']).rjust(6),
        '{:,}'.format(details['todayDeaths']).rjust(6))
    output['body'] = output['body'] + '\nPer million'
    output['body'] = output['body'] + '\nCases: {} Deaths: {}'.format(
        '{:,}'.format(details['casesPerOneMillion']).rjust(6),
        '{:,}'.format(details['deathsPerOneMillion']).rjust(6))
    output['body'] = output['body'] + '\nRecovered: {}'.format(
        '{:,}'.format(details['recovered']).rjust(6))
    output['bodyLines'] = 6
    return output
async def prepare_edit_data(self, pk, data):
    timezone: TzInfo = data.get('timezone')
    if not timezone and 'date' in data:
        # timezone is needed when date is being updated
        tz = await self.conn.fetchval('SELECT timezone FROM events WHERE id=$1', pk)
        data['timezone'] = pytz.timezone(tz)
    data = self.prepare(data)
    if timezone and 'start_ts' not in data:
        # timezone has changed but not start_ts, need to update start_ts to account for timezone change
        dt = await self.conn.fetchval(
            "SELECT start_ts AT TIME ZONE timezone FROM events WHERE id=$1", pk)
        data['start_ts'] = timezone.localize(dt)
    mode: EventMode = data.pop('mode', None)
    if mode is not None:
        data.update(
            allow_tickets=mode in (EventMode.tickets, EventMode.both),
            allow_donations=mode in (EventMode.donations, EventMode.both),
        )
    return data
# combining paragraphs of news article
article_lines = []
news_paragraphs = news_content.find('div', {'class': 'paragraph'}).find_all('p')
for line in news_paragraphs:
    article_lines.append(line.text)

# reformat time to utc
date_test = news_content.find('meta', {"itemprop": 'datePublished'}).get('content')
date_time_naive = parse(date_test)
timezone = pytz.timezone('Asia/Taipei')
tw_local_dt = timezone.localize(date_time_naive, is_dst=None)
tw_utc_str = tw_local_dt.astimezone(pytz.utc).strftime(DATE_FORMAT)

newsObject = {
    'title': news_content.find('article', {"class": "article"}).get('data-title'),
    'description': news_content.find('meta', {"name": "description"}).get('content'),
    'content': "".join(article_lines),
    'author': news_content.find('meta', {
def is_finished(self):
    now = datetime.now()
    timezone = pytz.timezone("America/New_York")
    now_aware = timezone.localize(now)
    return self.date_time < now_aware and not self.in_progress
def is_upcoming(self):
    now = datetime.now()
    timezone = pytz.timezone("America/New_York")
    now_aware = timezone.localize(now)
    return self.date_time > now_aware
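# Note on the pattern used by is_finished()/is_upcoming() above: localize(datetime.now())
# stamps the *server's* wall clock as America/New_York, whereas datetime.now(tz) converts
# the current instant into that zone. The two only agree when the server itself runs on
# Eastern time, which these methods implicitly assume.
from datetime import datetime
import pytz

eastern = pytz.timezone("America/New_York")
stamped = eastern.localize(datetime.now())  # assumes the local clock is already Eastern
converted = datetime.now(eastern)           # correct regardless of the server's zone
print(stamped, converted)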
async def run_remind(self, context, user, cmd):
    """
    Set or configure a reminder
    @param context:
    @param user:
    @param cmd:
    @return:
    """
    now = int(time.time())
    remind_time = None
    message = None
    channel = None
    repeat = None

    # Check the first format: in x send y to #z. E.g. in 15 send Hi there everyone to #channel. Or: in 25 send Hey there
    regex = {
        'in': '^in\s(\d+)\ssend\s(.*?)(\sto\s\<\#([0-9]+)\>)?$',
        'at': '^at\s(\d{4}|\d{2}\:\d{2})(\son\s(.*?))?\ssend\s(.*?)(\sto\s\<\#([0-9]+)\>)?$',
        'every': '^every\s(day|hour|week)\s(from|at)\s(\d{4}|\d{2}\:\d{2})\ssend\s(.*?)(\sto\s\<\#([0-9]+)\>)?$'
    }

    if re.search(regex['in'], cmd, re.IGNORECASE):
        matches = re.findall(regex['in'], cmd, re.IGNORECASE)

        # Make sure the time in mins is valid.
        if int(matches[0][0]) <= 0:
            return await context.send(lib.get_string('remind:err:time', user.get_guild()))

        remind_time = now + (60 * int(matches[0][0]))
        message = matches[0][1]

        if lib.is_number(matches[0][3]):
            channel = int(matches[0][3])
        else:
            channel = context.message.channel.id

    # Next format to check: at hh:mm send y to #z. E.g. at 17:00 send Hello there to #channel.
    elif re.search(regex['at'], cmd, re.IGNORECASE):
        matches = re.findall(regex['at'], cmd, re.IGNORECASE)

        requested_time = matches[0][0]
        requested_date = matches[0][2] if matches[0][2] != '' else None

        # If they passed the time through with a colon, remove that.
        if ':' in requested_time:
            requested_time = requested_time.replace(':', '')

        # Now convert the time to an int.
        requested_time = int(requested_time)

        timezone = pytz.timezone(user.get_setting('timezone'))
        timezone_date = datetime.now(timezone).strftime('%d-%m-%Y') if requested_date is None else requested_date
        timezone_time = int(datetime.now(timezone).strftime('%H%M'))

        # Build the datetime object for the current date (in user's timezone) and the requested time.
        try:
            reminder_time = datetime.strptime(timezone_date + ' ' + str(requested_time), '%d-%m-%Y %H%M')
        except ValueError:
            return await context.send(lib.get_string('remind:err:date', user.get_guild()))

        # If they manually specified a date and it is in the past, send an error.
        if requested_date is not None and int(timezone.localize(reminder_time).timestamp()) <= now:
            return await context.send(lib.get_string('remind:err:date', user.get_guild()))

        # If the time they requested has already passed (but they did not specify a date), alter the date ahead by 1 day.
        if requested_time <= timezone_time:
            reminder_time += timedelta(days=1)

        # Convert it to a UTC timestamp.
        remind_time = int(timezone.localize(reminder_time).timestamp())
        message = matches[0][3]

        if lib.is_number(matches[0][5]):
            channel = int(matches[0][5])
        else:
            channel = context.message.channel.id

    elif re.search(regex['every'], cmd, re.IGNORECASE):
        matches = re.findall(regex['every'], cmd, re.IGNORECASE)

        interval = matches[0][0]
        requested_time = matches[0][2]
        message = matches[0][3]

        if lib.is_number(matches[0][5]):
            channel = int(matches[0][5])
        else:
            channel = context.message.channel.id

        # If they passed the time through with a colon, remove that.
        if ':' in requested_time:
            requested_time = requested_time.replace(':', '')

        # Check interval is valid.
        if interval not in list(self._reminder_intervals.keys()):
            return await context.send(lib.get_string('remind:err:interval', user.get_guild()))

        # Now convert the time to an int.
        requested_time = int(requested_time)

        timezone = pytz.timezone(user.get_setting('timezone'))
        timezone_date = datetime.now(timezone).strftime('%d-%m-%Y')
        timezone_time = int(datetime.now(timezone).strftime('%H%M'))

        # Build the datetime object for the current date (in user's timezone) and the requested time.
        try:
            reminder_time = datetime.strptime(timezone_date + ' ' + str(requested_time), '%d-%m-%Y %H%M')
        except ValueError:
            return await context.send(lib.get_string('remind:err:date', user.get_guild()))

        # If the time they requested has already passed (but they did not specify a date), alter the date ahead by 1 day.
        if requested_time <= timezone_time:
            reminder_time += timedelta(days=1)

        # Convert it to a UTC timestamp.
        remind_time = int(timezone.localize(reminder_time).timestamp())

        # Now get the interval time to add each time the reminder is set.
        repeat = self._reminder_intervals[interval]

    else:
        return await context.send(user.get_mention() + ', ' + lib.get_string('remind:err:format', user.get_guild()))

    # Check the channel is valid.
    if not context.guild.get_channel(channel):
        return await context.send(lib.get_string('remind:err:channel', user.get_guild()))

    # Check that the message is not too long?
    if len(message) > 255:
        return await context.send(lib.get_string('remind:err:message', user.get_guild()).format(len(message)))

    # If we get this far, we have parsed the command into variables.
    params = {
        'user': user.get_id(),
        'guild': user.get_guild(),
        'time': remind_time,
        'channel': channel,
        'message': message,
        'intervaltime': repeat
    }

    reminder = Reminder.create(params)
    if reminder:
        return await context.send(user.get_mention() + ', ' + lib.get_string('remind:created', user.get_guild()).format(
            lib.secs_to_days(remind_time - now)))
def updateTimeline(request, user):
    tweetObjects = []
    tweets = []
    if 'count' not in request.POST:
        response = {'response': "Please enter a value!"}
        return response
    else:
        count = int(request.POST['count'])
    twitter = Twython(
        app_key=settings.APP_KEY,
        app_secret=settings.APP_SECRET,
        oauth_token=request.session['oauth_token'],
        oauth_token_secret=request.session['oauth_token_secret'])
    credentials = twitter.verify_credentials()
    SCREEN_NAME = credentials['screen_name']
    try:
        user_timeline = twitter.get_user_timeline(screen_name=SCREEN_NAME,
                                                  exclude_replies=True,
                                                  include_rts=True,
                                                  count=count)
    except TwythonError as e:
        raise Http404(e)
    for tweet in user_timeline:
        id = tweet['id_str']
        favouritecounter = tweet['favorite_count']
        retweetcounter = tweet['retweet_count']
        if Tweet.objects.filter(tweetID=id).exists():
            continue
        text = tweet['text']
        timezone = pytz.timezone('UTC')
        tweetDate = datetime.strptime(tweet["created_at"], '%a %b %d %H:%M:%S +0000 %Y')
        date = timezone.localize(tweetDate)
        userTweet = Tweet(userTweeted=user,
                          tweetID=id,
                          tweetedBy=SCREEN_NAME,
                          tweetText=text,
                          tweetDate=date,
                          tweetFavourites=favouritecounter,
                          tweetRetweets=retweetcounter)
        tweetObjects.append(userTweet)
        try:
            print(userTweet)
            userTweet.save()
        except IntegrityError as e:
            print("IntegrityError")
            print(e)
            continue
    for tweet in tweetObjects:
        text = tweet.getTweetText()
        date = tweet.getDateTweeted()
        userTweeted = tweet.getUserTweeted()
        retweetCount = tweet.getRetweetCount()
        favouriteCount = tweet.getFavouriteCount()
        tweetAttributes = {
            'tweetText': text,
            'tweetDate': date,
            'tweetedBy': userTweeted,
            'tweetFavourites': favouriteCount,
            'tweetRetweets': retweetCount
        }
        tweets.append(tweetAttributes)
    trainModel(user)
    return JsonResponse(tweets, safe=False)
def test_datetime_to_utc_timestamp(input, expected, timezone):
    if input:
        input = timezone.localize(input)
    assert datetime_to_utc_timestamp(input) == expected
def parse_exif_values(self, _path_file):
    # Disable exifread log
    logging.getLogger('exifread').setLevel(logging.CRITICAL)

    with open(_path_file, 'rb') as f:
        tags = exifread.process_file(f, details=False)

        try:
            if 'Image Make' in tags:
                try:
                    self.camera_make = tags['Image Make'].values
                except UnicodeDecodeError:
                    log.ODM_WARNING("EXIF Image Make might be corrupted")
                    self.camera_make = "unknown"
            if 'Image Model' in tags:
                try:
                    self.camera_model = tags['Image Model'].values
                except UnicodeDecodeError:
                    log.ODM_WARNING("EXIF Image Model might be corrupted")
                    self.camera_model = "unknown"
            if 'GPS GPSAltitude' in tags:
                self.altitude = self.float_value(tags['GPS GPSAltitude'])
                if 'GPS GPSAltitudeRef' in tags and self.int_value(tags['GPS GPSAltitudeRef']) > 0:
                    self.altitude *= -1
            if 'GPS GPSLatitude' in tags and 'GPS GPSLatitudeRef' in tags:
                self.latitude = self.dms_to_decimal(tags['GPS GPSLatitude'],
                                                    tags['GPS GPSLatitudeRef'])
            if 'GPS GPSLongitude' in tags and 'GPS GPSLongitudeRef' in tags:
                self.longitude = self.dms_to_decimal(tags['GPS GPSLongitude'],
                                                     tags['GPS GPSLongitudeRef'])
        except IndexError as e:
            log.ODM_WARNING("Cannot read basic EXIF tags for %s: %s" % (_path_file, str(e)))

        try:
            if 'Image Tag 0xC61A' in tags:
                self.black_level = self.list_values(tags['Image Tag 0xC61A'])
            elif 'BlackLevel' in tags:
                self.black_level = self.list_values(tags['BlackLevel'])

            if 'EXIF ExposureTime' in tags:
                self.exposure_time = self.float_value(tags['EXIF ExposureTime'])
            if 'EXIF FNumber' in tags:
                self.fnumber = self.float_value(tags['EXIF FNumber'])

            if 'EXIF ISOSpeed' in tags:
                self.iso_speed = self.int_value(tags['EXIF ISOSpeed'])
            elif 'EXIF PhotographicSensitivity' in tags:
                self.iso_speed = self.int_value(tags['EXIF PhotographicSensitivity'])
            elif 'EXIF ISOSpeedRatings' in tags:
                self.iso_speed = self.int_value(tags['EXIF ISOSpeedRatings'])

            if 'Image BitsPerSample' in tags:
                self.bits_per_sample = self.int_value(tags['Image BitsPerSample'])

            if 'EXIF DateTimeOriginal' in tags:
                str_time = tags['EXIF DateTimeOriginal'].values
                utc_time = datetime.strptime(str_time, "%Y:%m:%d %H:%M:%S")
                subsec = 0
                if 'EXIF SubSecTime' in tags:
                    subsec = self.int_value(tags['EXIF SubSecTime'])
                negative = 1.0
                if subsec < 0:
                    negative = -1.0
                    subsec *= -1.0
                subsec = float('0.{}'.format(int(subsec)))
                subsec *= negative
                ms = subsec * 1e3
                utc_time += timedelta(milliseconds=ms)
                timezone = pytz.timezone('UTC')
                epoch = timezone.localize(datetime.utcfromtimestamp(0))
                self.utc_time = (timezone.localize(utc_time) - epoch).total_seconds() * 1000.0
        except Exception as e:
            log.ODM_WARNING("Cannot read extended EXIF tags for %s: %s" % (_path_file, str(e)))

        # Extract XMP tags
        f.seek(0)
        xmp = self.get_xmp(f)

        for tags in xmp:
            try:
                band_name = self.get_xmp_tag(tags, ['Camera:BandName', '@Camera:BandName'])
                if band_name is not None:
                    self.band_name = band_name.replace(" ", "")

                self.set_attr_from_xmp_tag('band_index', tags, [
                    'DLS:SensorId',  # Micasense RedEdge
                    '@Camera:RigCameraIndex',  # Parrot Sequoia, Sentera 21244-00_3.2MP-GS-0001
                    'Camera:RigCameraIndex',  # MicaSense Altum
                ])

                self.set_attr_from_xmp_tag('radiometric_calibration', tags, [
                    'MicaSense:RadiometricCalibration',
                ])

                self.set_attr_from_xmp_tag('vignetting_center', tags, [
                    'Camera:VignettingCenter',
                    'Sentera:VignettingCenter',
                ])

                self.set_attr_from_xmp_tag('vignetting_polynomial', tags, [
                    'Camera:VignettingPolynomial',
                    'Sentera:VignettingPolynomial',
                ])

                self.set_attr_from_xmp_tag('horizontal_irradiance', tags,
                                           ['Camera:HorizontalIrradiance'], float)

                self.set_attr_from_xmp_tag('irradiance_scale_to_si', tags,
                                           ['Camera:IrradianceScaleToSIUnits'], float)

                self.set_attr_from_xmp_tag('sun_sensor', tags, [
                    'Camera:SunSensor',
                ], float)

                self.set_attr_from_xmp_tag('spectral_irradiance', tags, [
                    'Camera:SpectralIrradiance',
                    'Camera:Irradiance',
                ], float)

                # Phantom 4 RTK
                if '@drone-dji:RtkStdLon' in tags:
                    y = float(self.get_xmp_tag(tags, '@drone-dji:RtkStdLon'))
                    x = float(self.get_xmp_tag(tags, '@drone-dji:RtkStdLat'))
                    self.gps_xy_stddev = max(x, y)

                    if '@drone-dji:RtkStdHgt' in tags:
                        self.gps_z_stddev = float(self.get_xmp_tag(tags, '@drone-dji:RtkStdHgt'))
                else:
                    self.set_attr_from_xmp_tag('gps_xy_stddev', tags,
                                               ['@Camera:GPSXYAccuracy', 'GPSXYAccuracy'], float)
                    self.set_attr_from_xmp_tag('gps_z_stddev', tags,
                                               ['@Camera:GPSZAccuracy', 'GPSZAccuracy'], float)

                if 'DLS:Yaw' in tags:
                    self.set_attr_from_xmp_tag('dls_yaw', tags, ['DLS:Yaw'], float)
                    self.set_attr_from_xmp_tag('dls_pitch', tags, ['DLS:Pitch'], float)
                    self.set_attr_from_xmp_tag('dls_roll', tags, ['DLS:Roll'], float)
            except Exception as e:
                log.ODM_WARNING("Cannot read XMP tags for %s: %s" % (_path_file, str(e)))

            # self.set_attr_from_xmp_tag('center_wavelength', tags, [
            #     'Camera:CentralWavelength'
            # ], float)

            # self.set_attr_from_xmp_tag('bandwidth', tags, [
            #     'Camera:WavelengthFWHM'
            # ], float)

    self.width, self.height = get_image_size.get_image_size(_path_file)

    # Sanitize band name since we use it in folder paths
    self.band_name = re.sub('[^A-Za-z0-9]+', '', self.band_name)
def parse_exif_values(self, _path_file):
    # Disable exifread log
    logging.getLogger('exifread').setLevel(logging.CRITICAL)

    try:
        self.width, self.height = get_image_size.get_image_size(_path_file)
    except Exception as e:
        raise PhotoCorruptedException(str(e))

    tags = {}
    xtags = {}

    with open(_path_file, 'rb') as f:
        tags = exifread.process_file(f, details=True, extract_thumbnail=False)
        try:
            if 'Image Make' in tags:
                try:
                    self.camera_make = tags['Image Make'].values
                    self.camera_make = self.camera_make.strip()
                except UnicodeDecodeError:
                    log.ODM_WARNING("EXIF Image Make might be corrupted")
                    self.camera_make = "unknown"
            if 'Image Model' in tags:
                try:
                    self.camera_model = tags['Image Model'].values
                    self.camera_model = self.camera_model.strip()
                except UnicodeDecodeError:
                    log.ODM_WARNING("EXIF Image Model might be corrupted")
                    self.camera_model = "unknown"
            if 'GPS GPSAltitude' in tags:
                self.altitude = self.float_value(tags['GPS GPSAltitude'])
                if 'GPS GPSAltitudeRef' in tags and \
                        self.int_value(tags['GPS GPSAltitudeRef']) is not None and \
                        self.int_value(tags['GPS GPSAltitudeRef']) > 0:
                    self.altitude *= -1
            if 'GPS GPSLatitude' in tags and 'GPS GPSLatitudeRef' in tags:
                self.latitude = self.dms_to_decimal(tags['GPS GPSLatitude'],
                                                    tags['GPS GPSLatitudeRef'])
            elif 'GPS GPSLatitude' in tags:
                log.ODM_WARNING("GPS position for %s might be incorrect, GPSLatitudeRef tag is missing (assuming N)" % self.filename)
                self.latitude = self.dms_to_decimal(tags['GPS GPSLatitude'], GPSRefMock('N'))
            if 'GPS GPSLongitude' in tags and 'GPS GPSLongitudeRef' in tags:
                self.longitude = self.dms_to_decimal(tags['GPS GPSLongitude'],
                                                     tags['GPS GPSLongitudeRef'])
            elif 'GPS GPSLongitude' in tags:
                log.ODM_WARNING("GPS position for %s might be incorrect, GPSLongitudeRef tag is missing (assuming E)" % self.filename)
                self.longitude = self.dms_to_decimal(tags['GPS GPSLongitude'], GPSRefMock('E'))
            if 'Image Orientation' in tags:
                self.orientation = self.int_value(tags['Image Orientation'])
        except (IndexError, ValueError) as e:
            log.ODM_WARNING("Cannot read basic EXIF tags for %s: %s" % (self.filename, str(e)))

        try:
            if 'Image Tag 0xC61A' in tags:
                self.black_level = self.list_values(tags['Image Tag 0xC61A'])
            elif 'BlackLevel' in tags:
                self.black_level = self.list_values(tags['BlackLevel'])

            if 'EXIF ExposureTime' in tags:
                self.exposure_time = self.float_value(tags['EXIF ExposureTime'])
            if 'EXIF FNumber' in tags:
                self.fnumber = self.float_value(tags['EXIF FNumber'])

            if 'EXIF ISOSpeed' in tags:
                self.iso_speed = self.int_value(tags['EXIF ISOSpeed'])
            elif 'EXIF PhotographicSensitivity' in tags:
                self.iso_speed = self.int_value(tags['EXIF PhotographicSensitivity'])
            elif 'EXIF ISOSpeedRatings' in tags:
                self.iso_speed = self.int_value(tags['EXIF ISOSpeedRatings'])

            if 'Image BitsPerSample' in tags:
                self.bits_per_sample = self.int_value(tags['Image BitsPerSample'])

            if 'EXIF DateTimeOriginal' in tags:
                str_time = tags['EXIF DateTimeOriginal'].values
                utc_time = datetime.strptime(str_time, "%Y:%m:%d %H:%M:%S")
                subsec = 0
                if 'EXIF SubSecTime' in tags:
                    subsec = self.int_value(tags['EXIF SubSecTime'])
                negative = 1.0
                if subsec < 0:
                    negative = -1.0
                    subsec *= -1.0
                subsec = float('0.{}'.format(int(subsec)))
                subsec *= negative
                ms = subsec * 1e3
                utc_time += timedelta(milliseconds=ms)
                timezone = pytz.timezone('UTC')
                epoch = timezone.localize(datetime.utcfromtimestamp(0))
                self.utc_time = (timezone.localize(utc_time) - epoch).total_seconds() * 1000.0

            if 'MakerNote SpeedX' in tags and \
                    'MakerNote SpeedY' in tags and \
                    'MakerNote SpeedZ' in tags:
                self.speed_x = self.float_value(tags['MakerNote SpeedX'])
                self.speed_y = self.float_value(tags['MakerNote SpeedY'])
                self.speed_z = self.float_value(tags['MakerNote SpeedZ'])
        except Exception as e:
            log.ODM_WARNING("Cannot read extended EXIF tags for %s: %s" % (self.filename, str(e)))

        # Warn if GPS coordinates are suspiciously wrong
        if self.latitude is not None and self.latitude == 0 and \
                self.longitude is not None and self.longitude == 0:
            log.ODM_WARNING("%s has GPS position (0,0), possibly corrupted" % self.filename)

        # Extract XMP tags
        f.seek(0)
        xmp = self.get_xmp(f)

        for xtags in xmp:
            try:
                band_name = self.get_xmp_tag(xtags, ['Camera:BandName', '@Camera:BandName'])
                if band_name is not None:
                    self.band_name = band_name.replace(" ", "")

                self.set_attr_from_xmp_tag('band_index', xtags, [
                    'DLS:SensorId',  # Micasense RedEdge
                    '@Camera:RigCameraIndex',  # Parrot Sequoia, Sentera 21244-00_3.2MP-GS-0001
                    'Camera:RigCameraIndex',  # MicaSense Altum
                ])

                self.set_attr_from_xmp_tag('radiometric_calibration', xtags, [
                    'MicaSense:RadiometricCalibration',
                ])

                self.set_attr_from_xmp_tag('vignetting_center', xtags, [
                    'Camera:VignettingCenter',
                    'Sentera:VignettingCenter',
                ])

                self.set_attr_from_xmp_tag('vignetting_polynomial', xtags, [
                    'Camera:VignettingPolynomial',
                    'Sentera:VignettingPolynomial',
                ])

                self.set_attr_from_xmp_tag('horizontal_irradiance', xtags,
                                           ['Camera:HorizontalIrradiance'], float)

                self.set_attr_from_xmp_tag('irradiance_scale_to_si', xtags,
                                           ['Camera:IrradianceScaleToSIUnits'], float)

                self.set_attr_from_xmp_tag('sun_sensor', xtags, [
                    'Camera:SunSensor',
                ], float)

                self.set_attr_from_xmp_tag('spectral_irradiance', xtags, [
                    'Camera:SpectralIrradiance',
                    'Camera:Irradiance',
                ], float)

                self.set_attr_from_xmp_tag('capture_uuid', xtags, [
                    '@drone-dji:CaptureUUID',  # DJI
                    'MicaSense:CaptureId',  # MicaSense Altum
                    '@Camera:ImageUniqueID',  # sentera 6x
                ])

                # Camera make / model for some cameras is stored in the XMP
                if self.camera_make == '':
                    self.set_attr_from_xmp_tag('camera_make', xtags, ['@tiff:Make'])
                if self.camera_model == '':
                    self.set_attr_from_xmp_tag('camera_model', xtags, ['@tiff:Model'])

                # DJI GPS tags
                self.set_attr_from_xmp_tag('longitude', xtags, ['@drone-dji:Longitude'], float)
                self.set_attr_from_xmp_tag('latitude', xtags, ['@drone-dji:Latitude'], float)
                self.set_attr_from_xmp_tag('altitude', xtags, ['@drone-dji:AbsoluteAltitude'], float)

                # Phantom 4 RTK
                if '@drone-dji:RtkStdLon' in xtags:
                    y = float(self.get_xmp_tag(xtags, '@drone-dji:RtkStdLon'))
                    x = float(self.get_xmp_tag(xtags, '@drone-dji:RtkStdLat'))
                    self.gps_xy_stddev = max(x, y)

                    if '@drone-dji:RtkStdHgt' in xtags:
                        self.gps_z_stddev = float(self.get_xmp_tag(xtags, '@drone-dji:RtkStdHgt'))
                else:
                    self.set_attr_from_xmp_tag('gps_xy_stddev', xtags,
                                               ['@Camera:GPSXYAccuracy', 'GPSXYAccuracy'], float)
                    self.set_attr_from_xmp_tag('gps_z_stddev', xtags,
                                               ['@Camera:GPSZAccuracy', 'GPSZAccuracy'], float)

                # DJI Speed tags
                if '@drone-dji:FlightXSpeed' in xtags and \
                        '@drone-dji:FlightYSpeed' in xtags and \
                        '@drone-dji:FlightZSpeed' in xtags:
                    self.set_attr_from_xmp_tag('speed_x', xtags, ['@drone-dji:FlightXSpeed'], float)
                    self.set_attr_from_xmp_tag('speed_y', xtags, [
                        '@drone-dji:FlightYSpeed',
                    ], float)
                    self.set_attr_from_xmp_tag('speed_z', xtags, [
                        '@drone-dji:FlightZSpeed',
                    ], float)

                # Account for over-estimation
                if self.gps_xy_stddev is not None:
                    self.gps_xy_stddev *= 2.0
                if self.gps_z_stddev is not None:
                    self.gps_z_stddev *= 2.0

                if 'DLS:Yaw' in xtags:
                    self.set_attr_from_xmp_tag('dls_yaw', xtags, ['DLS:Yaw'], float)
                    self.set_attr_from_xmp_tag('dls_pitch', xtags, ['DLS:Pitch'], float)
                    self.set_attr_from_xmp_tag('dls_roll', xtags, ['DLS:Roll'], float)

                camera_projection = self.get_xmp_tag(xtags, ['@Camera:ModelType', 'Camera:ModelType'])
                if camera_projection is not None:
                    camera_projection = camera_projection.lower()
                    if camera_projection in projections:
                        self.camera_projection = camera_projection

                # OPK
                self.set_attr_from_xmp_tag('yaw', xtags,
                                           ['@drone-dji:FlightYawDegree', '@Camera:Yaw', 'Camera:Yaw'], float)
                self.set_attr_from_xmp_tag('pitch', xtags,
                                           ['@drone-dji:GimbalPitchDegree', '@Camera:Pitch', 'Camera:Pitch'], float)
                self.set_attr_from_xmp_tag('roll', xtags,
                                           ['@drone-dji:GimbalRollDegree', '@Camera:Roll', 'Camera:Roll'], float)

                # Normalize YPR conventions (assuming nadir camera)
                # Yaw: 0 --> top of image points north
                # Yaw: 90 --> top of image points east
                # Yaw: 270 --> top of image points west
                # Pitch: 0 --> nadir camera
                # Pitch: 90 --> camera is looking forward
                # Roll: 0 (assuming gimbal)
                if self.has_ypr():
                    if self.camera_make.lower() in ['dji', 'hasselblad']:
                        self.pitch = 90 + self.pitch
                    if self.camera_make.lower() == 'sensefly':
                        self.roll *= -1
            except Exception as e:
                log.ODM_WARNING("Cannot read XMP tags for %s: %s" % (self.filename, str(e)))

            # self.set_attr_from_xmp_tag('center_wavelength', xtags, [
            #     'Camera:CentralWavelength'
            # ], float)

            # self.set_attr_from_xmp_tag('bandwidth', xtags, [
            #     'Camera:WavelengthFWHM'
            # ], float)

    # Sanitize band name since we use it in folder paths
    self.band_name = re.sub('[^A-Za-z0-9]+', '', self.band_name)

    self.compute_focal(tags, xtags)
    self.compute_opk()
def get_te_match_json(match_url="/match-detail/?id=1680141", matchtype='single', tour='atp'):
    """ get tennisexplorer match as JSON """
    result_id = 5
    url = 'http://www.tennisexplorer.com' + str(match_url)
    id = int(url.split('=')[1])
    req = urllib.request.Request(url)  # http://live-tennis.eu/en/official-atp-ranking
    response = urllib.request.urlopen(req)
    html = response.read()
    soup = BeautifulSoup(html, "html.parser")
    soup.unicode

    event = {}
    event['tennisexplorer_id'] = id
    center = soup.find("div", attrs={"id": "center"})
    event['matchtype'] = matchtype
    event['tour'] = tour
    event['event_name'] = center.find("h1", attrs={"class": "bg"}).text
    event_details = center.find("div")
    event_date, event_time, tournament, round, surface = event_details.text.split(',')
    if event_date == 'Today':
        event_date = datetime.now().strftime("%d.%m.%Y")
    start = datetime.strptime(event_date + ' ' + event_time.strip(), '%d.%m.%Y %H:%M')
    start_ts = timezone.localize(start)
    event['datetime'] = {'startdatetime': start_ts, 'date': start_ts.date(), 'start_time': event_time.strip(),
                         'year': start_ts.year, 'month': start_ts.month, 'day': start_ts.day,
                         'weekday': start_ts.weekday(), 'calendar_week': start_ts.isocalendar()[1],
                         'hour': start_ts.hour, 'minutes': start_ts.minute}
    event['tournament'] = tournament.strip()
    event['round'] = round.strip()
    event['surface'] = surface.strip()

    players = center.findAll("th", attrs={"class": "plName"})
    players_tbody = center.find("tbody")

    # get player name
    playerA = players[0]
    playerB = players[1]
    if playerA.text > playerB.text:
        change_sort = True
    else:
        change_sort = False
    if change_sort:
        player1 = playerB
        player2 = playerA
    else:
        player1 = playerA
        player2 = playerB

    # get player details
    player_left_attributes = players_tbody.findAll('td', {"class": "tl"})
    player_righ_attributes = players_tbody.findAll('td', {"class": "tr"})
    if change_sort:
        player1_attr = player_left_attributes
        player2_attr = player_righ_attributes
    else:
        player1_attr = player_righ_attributes
        player2_attr = player_left_attributes

    dict_player1 = {'name': player1.text, 'te_link': player1.a.attrs['href'].strip(),
                    'ranking_pos': player1_attr[0].text, 'date_of_birth': player1_attr[1].text,
                    'height': player1_attr[2].text, 'weight': player1_attr[3].text,
                    'plays': player1_attr[4].text, 'turn_pro': player1_attr[5].text}
    dict_player2 = {'name': player2.text, 'te_link': player2.a.attrs['href'].strip(),
                    'ranking_pos': player2_attr[0].text, 'date_of_birth': player2_attr[1].text,
                    'height': player2_attr[2].text, 'weight': player2_attr[3].text,
                    'plays': player2_attr[4].text, 'turn_pro': player2_attr[5].text}
    event['player1'] = dict_player1
    event['player2'] = dict_player2

    # Result
    thead_result = center.find("thead")
    td_result_sets = thead_result.find("td").text.split("(")[0]
    if td_result_sets != '\xa0':
        try:
            td_result_games = thead_result.find("td").text.split("(")[1][:-1]
            sets = td_result_games.split(',')
            player_left_sets = td_result_sets[0:1]
            player_right_sets = td_result_sets[4:5]
            sets_player_left = {}
            sets_player_right = {}
            i = 0
            for set in sets:
                i = i + 1
                set_text = str(i)
                sets_player_left.update({set_text: int(set.split('-')[0].strip()[0:1])})
                sets_player_right.update({set_text: int(set.split('-')[1].strip()[0:1])})
            player1 = {'sets': player_left_sets, 'set_results': sets_player_left}
            player2 = {'sets': player_right_sets, 'set_results': sets_player_right}
            if change_sort:
                player1_result = {'sets': player_right_sets, 'set_results': sets_player_right}
                player2_result = {'sets': player_left_sets, 'set_results': sets_player_left}
                result = td_result_sets[::-1]
            else:
                player1_result = {'sets': player_left_sets, 'set_results': sets_player_left}
                player2_result = {'sets': player_right_sets, 'set_results': sets_player_right}
                result = td_result_sets
            event['result'] = {'result': result}
            event['result']['player1'] = player1_result
            event['result']['player2'] = player2_result
            event['status'] = 'complete'
        except:
            event['status'] = 'complete'
    else:
        event['status'] = 'planned'

    # Head to Head
    headtohead = center.findAll("h2", attrs={"class": "bg"})[0].text
    head_to_head_formated = headtohead[-5:]
    if head_to_head_formated != '-head':
        player_left_head_to_head = int(head_to_head_formated[:1])
        player_right_head_to_head = int(head_to_head_formated[-1:])
        if change_sort:
            head_to_head = {'head-to-head': head_to_head_formated[::-1],
                            'player1': player_right_head_to_head,
                            'player2': player_left_head_to_head}
        else:
            head_to_head = {'head-to-head': head_to_head_formated,
                            'player1': player_left_head_to_head,
                            'player2': player_right_head_to_head}
        event['head-to-head'] = head_to_head
    else:
        result_id = result_id - 1

    # ODDS
    # odds_tabs = soup.findAll("ul", attrs={"class": "tabs"})[2]
    # odds_table = soup.findAll('table')[5]
    odds_result = soup.findAll('table', attrs={"class": "result"})
    odds_ou = soup.findAll('table', attrs={"class": "odds-ou"})
    odds_ah = soup.findAll('table', attrs={"class": "odds-ah"})
    odds_cs = soup.findAll('table', attrs={"class": "odds-cs"})
    event['te_odds'] = {}

    # Result Odds
    if odds_result != []:
        odds_table = odds_result[result_id].findAll('tr')
        # if odds_table[0]['class'] == 'one' or odds_table[0]['class'] == 'two':
        odds = {}
        try:
            for tr in odds_table:
                if ((tr.a) and (tr['class'] == ['one'] or tr['class'] == ['two'])):
                    bookie = tr.a.text.replace('\xa0', '').lower()
                    odds_left = float(tr.findAll('td', {'class': 'k1'})[0].find(text=True))
                    odds_right = float(tr.findAll('td', {'class': 'k2'})[0].find(text=True))
                    if change_sort:
                        player1_odds = odds_right
                        player2_odds = odds_left
                    else:
                        player1_odds = odds_left
                        player2_odds = odds_right
                    odds.update({bookie: {'player1': player1_odds, 'player2': player2_odds}})
                    # odds[bookie] = {'player1' : player1_odds, 'player2' : player2_odds}
                    # odds[bookie].update({'bookie' : bookie'player1' : player1_odds, 'player2' : player2_odds})
                    # odds[bookie].update({'player1' : player1_odds, 'player2' : player2_odds})
                    # print(bookie, odds_left, odds_right)
                    # if 'Pinnacle' in tr.a.text :
                    #     tds_home = tr.findAll('td', {'class' : 'k1'})
                    #     tds_away = tr.findAll('td', {'class' : 'k2'})
                    #     odds_home = float(tds_home[0].find(text=True))
                    #     odds_away = float(tds_away[0].find(text=True))
            event['te_odds'] = {'result': odds}
        except:
            event['te_odds'] = None

    # Over,Under Odds
    if odds_ou != []:
        odds_table_ou = odds_ou[0].findAll('tr')
        odds = {}
        for tr in odds_table_ou:
            if tr.a:
                bookie = tr.a.text.replace('\xa0', '').lower()
                value = tr.find('td', attrs={'class': 'value'}).text
                over = tr.find('td', attrs={'class': 'k1'}).text[:4]
                under = tr.find('td', attrs={'class': 'k2'}).text[:4]
                # print(bookie, value, over, under)
                if value in odds:
                    odds[value].update({bookie: {'over': over, 'under': under}})
                else:
                    odds[value] = {bookie: {'over': over, 'under': under}}
        event['te_odds']['over_under'] = odds

    # Asia Handicap Odds
    if odds_ah != []:
        odds_table_ah = odds_ah[0].findAll('tr')
        odds = {}
        for tr in odds_table_ah:
            try:
                if tr['class'] == ['odds-type']:
                    ah_bet_type = tr.text.replace("\n", '')
                    continue
            except:
                continue
            if tr.a:
                bookie = tr.a.text.replace('\xa0', '').lower()
                value = tr.find('td', attrs={'class': 'value'}).text
                odds_left = tr.find('td', attrs={'class': 'k1'}).text[:4]
                odds_right = tr.find('td', attrs={'class': 'k2'}).text[:4]
                # print(bookie, ah_bet_type, odds_left, odds_right)
                if change_sort:
                    player1_odds = odds_right
                    player2_odds = odds_left
                else:
                    player1_odds = odds_left
                    player2_odds = odds_right
                if ah_bet_type in odds:
                    odds[ah_bet_type].update({bookie: {'player1': player1_odds, 'player2': player2_odds}})
                else:
                    odds[ah_bet_type] = {bookie: {'player1': player1_odds, 'player2': player2_odds}}
        event['te_odds']['ah'] = odds

    # correct Score Odds
    if odds_cs != []:
        odds_table_cs = odds_cs[0].findAll('tr')
        odds = {}
        for tr in odds_table_cs:
            if tr.a:
                bookie = tr.a.text.replace('\xa0', '').lower()
                value = tr.find('td', attrs={'class': 'value'}).text
                odds_cs = tr.find('td', attrs={'class': 'k1'}).text[:4]
                # print(bookie, value, odds)
                if value in odds:
                    odds[value].update({bookie: {'odds': odds_cs}})
                else:
                    try:
                        odds[value] = {bookie: {'odds': odds_cs}}
                    except:
                        continue
        event['te_odds']['correct_score'] = odds

    event['change_sort'] = change_sort
    return event
def get(self):
    try:
        csr = CSR.find_by_username(g.oidc_token_info['username'])
        is_designate = csr.finance_designate

        start_param = request.args.get("start_date")
        end_param = request.args.get("end_date")
        exam_type = request.args.get("exam_type")

        validate_params(start_param, end_param)

        try:
            start_date = datetime.strptime(request.args['start_date'], "%Y-%m-%d")
            end_date = datetime.strptime(request.args['end_date'], "%Y-%m-%d")
        except ValueError as err:
            print(err)
            return {"message": "Unable to return date time string"}, 422

        # Code for UTC time.
        csr_office = Office.query.filter(Office.office_id == csr.office_id).first()
        csr_timezone = Timezone.query.filter(Timezone.timezone_id == csr_office.timezone_id).first()
        csr_timename = csr_timezone.timezone_name

        timezone = pytz.timezone(csr_timename)
        start_local = timezone.localize(start_date)
        end_date += timedelta(days=1)
        end_local = timezone.localize(end_date)

        exams = Exam.query.join(Booking, Exam.booking_id == Booking.booking_id) \
            .filter(Booking.start_time >= start_local) \
            .filter(Booking.start_time < end_local) \
            .join(Room, Booking.room_id == Room.room_id, isouter=True) \
            .join(Office, Booking.office_id == Office.office_id) \
            .join(ExamType, Exam.exam_type_id == ExamType.exam_type_id)

        if exam_type == 'all_bookings':
            non_exams = Booking.query.join(Exam, Booking.booking_id == Exam.booking_id, isouter=True) \
                .filter(Booking.start_time >= start_local) \
                .filter(Booking.start_time < end_local) \
                .filter(Exam.booking_id.is_(None)) \
                .join(Room, Booking.room_id == Room.room_id, isouter=True) \
                .join(Office, Booking.office_id == Office.office_id)

        if not is_designate:
            exams = exams.filter(Booking.office_id == csr.office_id)
            if exam_type == 'all_bookings':
                non_exams = non_exams.filter(Booking.office_id == csr.office_id)

        if exam_type == 'ita':
            exams = exams.filter(ExamType.ita_ind == 1)
        elif exam_type == 'all_non_ita':
            exams = exams.filter(ExamType.ita_ind == 0)

        dest = io.StringIO()
        out = csv.writer(dest)
        out.writerow([
            'Office Name', 'Exam Type', 'Exam ID', 'Exam Name', 'Examinee Name',
            'Event ID', 'Room Name', 'Invigilator Name(s)', 'Shadow Invigilator Name',
            'SBC Invigilator', 'Start Time', 'End Time', 'Booking ID', 'Booking Name',
            'Number Of Students', 'Exam Received', 'Exam Written', 'Exam Returned',
            'Notes', 'Collect Fees'
        ])

        keys = [
            "office_name", "exam_type_name", "exam_id", "exam_name", "examinee_name",
            "event_id", "room_name", "invigilator_names", "shadow_invigilator_id",
            "sbc_staff_invigilated", "start_time", "end_time", "booking_id",
            "booking_name", "number_of_students", "exam_received_date",
            "exam_written_ind", "exam_returned_date", "notes", "fees"
        ]

        exam_keys = [
            "exam_id", "exam_name", "examinee_name", "event_id", "number_of_students",
            "notes",
        ]

        booking_keys = [
            "start_time", "end_time", "booking_id", "booking_name"
        ]

        non_exam_keys = [
            "exam_name", "notes",
        ]

        for exam in exams:
            row = []
            if exam.booking.shadow_invigilator_id:
                shadow_invigilator_id = exam.booking.shadow_invigilator_id
            else:
                shadow_invigilator_id = None
            try:
                for key in keys:
                    if key == "room_name":
                        write_room(row, exam)
                    elif key == "invigilator_names":
                        write_invigilator(row, exam)
                    elif key == "shadow_invigilator_id":
                        write_shadow_invigilator(row, shadow_invigilator_id)
                    elif key == "sbc_staff_invigilated":
                        write_sbc(row, exam)
                    elif key == "exam_received_date":
                        write_exam_received(row, exam)
                    elif key == "exam_written_ind":
                        write_exam_written(row, exam)
                    elif key == "exam_returned_date":
                        write_exam_returned(row, exam)
                    elif key == "office_name":
                        row.append(getattr(exam.office, key))
                    elif key == "exam_type_name":
                        row.append(getattr(exam.exam_type, key))
                    elif key in booking_keys:
                        value = getattr(exam.booking, key)
                        if isinstance(value, datetime):
                            row.append('="' + localize_time(value, timezone) + '"')
                        else:
                            row.append(value)
                    elif key in exam_keys:
                        row.append(getattr(exam, key))
                    elif key == "fees":
                        row.append("")
                out.writerow(row)
            except AttributeError as error:
                logging.error(error, exc_info=True)
                return {"message": "Issue writing row to CSV ", "key": key}, 500

        if exam_type == 'all_bookings':
            for non_exam in non_exams:
                row = []
                try:
                    for key in keys:
                        if key == "room_name":
                            write_booking_room(row, non_exam)
                        elif key == "invigilator_names":
                            row.append("")
                        elif key == "shadow_invigilator_id":
                            row.append("")
                        elif key == "sbc_staff_invigilated":
                            row.append("")
                        elif key == "exam_received_date":
                            row.append("")
                        elif key == "exam_written_ind":
                            row.append("")
                        elif key == "exam_returned_date":
                            row.append("")
                        elif key == "office_name":
                            row.append(getattr(non_exam.office, key))
                        elif key == "exam_type_name":
                            row.append("Non Exam Booking")
                        elif key in booking_keys:
                            value = getattr(non_exam, key)
                            if isinstance(value, datetime):
                                row.append('="' + localize_time(value, timezone) + '"')
                            else:
                                row.append(value)
                        elif key in non_exam_keys:
                            which_non_exam_key(non_exam, row, key)
                        elif key == "exam_id":
                            row.append("")
                        elif key == "exam_name":
                            row.append("")
                        elif key == "examinee_name":
                            row.append("")
                        elif key == "event_id":
                            row.append("")
                        elif key == "fees":
                            which_non_exam_key(non_exam, row, key)
                        elif key == "number_of_students":
                            row.append("")
                        elif key == "exam_received_ind":
                            row.append("")
                    out.writerow(row)
                except AttributeError as error:
                    logging.error(error, exc_info=True)
                    return {"message": "Issue writing row to CSV ", "key": key}, 500

        output = make_response(dest.getvalue())
        output.headers["Content-Disposition"] = "attachment; filename=export.csv"
        output.headers["Content-type"] = "text/csv"
        return output

    except exc.SQLAlchemyError as error:
        logging.error(error, exc_info=True)
        return {"message": "api is down"}, 500
def test_convert_to_timezone(input, expected, timezone):
    if expected:
        expected = timezone.localize(expected)
    assert convert_to_timezone(input, timezone) == expected
        # (snippet begins inside an enclosing loop/condition that is not shown)
        if (str(mydate) in data):
            data[str(mydate)] = data[str(mydate)] + 1
        else:
            data[str(mydate)] = 1
        i = i + offset
    else:
        offset = 0
        i = i + 1

start_date = datetime(year=2019, month=1, day=1)
end_date = datetime(year=2019, month=12, day=31)
delta = timedelta(days=1)
timezone = pytz.timezone('UTC')
f = open("output.csv", "w")

# looping through all the dates in 2019 and writing to csv file
while start_date <= end_date:
    if (str(start_date) in data):
        f.write(str(timezone.localize(start_date).isoformat()) + ", " + str(data[str(start_date)]))
        f.write("\n")
    else:
        f.write(str(timezone.localize(start_date).isoformat()) + ", 0")
        f.write("\n")
    start_date += delta

f.close()
def test_convert_to_datetime(input, expected, timezone):
    if expected:
        expected = timezone.localize(expected)
    assert convert_to_datetime(input, timezone) == expected