def _add_post_to_db(post_data):
    """Persist one Instagram API post payload as an InstagramPost row."""
    images = post_data['images']
    place = post_data['location']
    created = datetime.utcfromtimestamp(int(post_data['created_time']))
    # The tags field arrives as a list; strip the list syntax so it is
    # stored as a bare comma-separated string.
    tag_text = str(post_data['tags']).replace('\'', '').replace('[', '').replace(']', '')
    post = InstagramPost(
        id=post_data['id'],
        image_thumbnail_url=images['thumbnail']['url'],
        image_low_resolution_url=images['low_resolution']['url'],
        image_standard_resolution_url=images['standard_resolution']['url'],
        created_time=created.replace(tzinfo=pytz.utc),
        caption=post_data['caption']['text'],
        likes=int(post_data['likes']['count']),
        tags=tag_text,
        link=post_data['link'],
        location=Point(float(place['longitude']), float(place['latitude'])),
        location_name=place['name'],
    )
    post.save()
def check_next_launch(self):
    """Scan the next ten launches and run netstamp-change checks on each."""
    upcoming = self.repository.get_next_launches(next_count=10)
    for launch in upcoming:
        # A non-positive netstamp means no confirmed launch time yet.
        if launch.netstamp <= 0:
            continue
        now = datetime.utcnow()
        launch_time = datetime.utcfromtimestamp(int(launch.netstamp))
        if now <= launch_time:
            seconds_away = int((launch_time - now).total_seconds())
            logger.debug('%s in %s hours' % (launch.name, (seconds_away / 60) / 60))
            self.check_next_stamp_changed(seconds_away, launch)
def generate_uid(text_to_append='', salt=''):
    """Build a UUID5-based identifier seeded from the current time.

    The millisecond-since-epoch trick was taken from:
    http://stackoverflow.com/questions/6999726/how-can-i-convert-a-datetime-object-to-milliseconds-since-epoch-unix-time-in-p
    """
    epoch = datetime_safe.utcfromtimestamp(0)

    def millis(dt):
        # Milliseconds elapsed between epoch and dt.
        return (dt - epoch).total_seconds() * 1000.0

    seed = str(randint(0, 90000000) + int(millis(datetime_safe.now())))
    return str(uuid.uuid5(uuid.NAMESPACE_OID, seed + salt)) + text_to_append
def netstamp_changed(self, launch, notification, diff):
    """Handle a change to a launch's scheduled time (netstamp).

    Sends mobile/Twitter notifications when the previous netstamp was
    inside the 24-hour window, records the new netstamp, and resets the
    per-window "was notified" flags to match the new time-to-launch.

    :param launch: launch object whose netstamp moved
    :param notification: per-launch notification-state record
    :param diff: seconds from now until the new launch time
    """
    logger.info('Netstamp change detected for %s - now launching in %d seconds.' % (launch.name, diff))
    date = datetime.fromtimestamp(launch.netstamp).replace(tzinfo=pytz.UTC)
    message = 'SCHEDULE UPDATE: %s now launching in %s at %s.' % (
        launch.name, seconds_to_time(diff), date.strftime("%H:%M %Z (%d/%m)"))

    # last_net_stamp is a UTC epoch, so it must be compared against
    # utcnow(); the previous code subtracted local now(), which skewed
    # the 24-hour window by the machine's UTC offset.
    old_diff = datetime.utcfromtimestamp(int(notification.last_net_stamp)) - datetime.utcnow()

    if old_diff.total_seconds() < 86400:
        logger.info('Netstamp Changed and within window - sending mobile notification.')
        self.send_notification(launch, 'netstampChanged', notification)
        self.send_to_twitter(message, notification)

    notification.last_net_stamp = notification.launch.netstamp
    notification.last_net_stamp_timestamp = datetime.now()
    launch.save()

    # If launch is within 24 hours...
    if 86400 >= diff > 3600:
        logger.info('Launch is within 24 hours, resetting notifications.')
        notification.wasNotifiedTwentyFourHour = True
        notification.wasNotifiedOneHour = False
        notification.wasNotifiedTenMinutes = False
        notification.wasNotifiedTwentyFourHourTwitter = True
        notification.wasNotifiedOneHourTwitter = False
        notification.wasNotifiedTenMinutesTwitter = False
    elif 3600 >= diff > 600:
        logger.info('Launch is within one hour, resetting Ten minute notifications.')
        notification.wasNotifiedOneHour = True
        notification.wasNotifiedTwentyFourHour = True
        notification.wasNotifiedOneHourTwitter = True
        notification.wasNotifiedTwentyFourHourTwitter = True
    elif diff <= 600:
        logger.info('Launch is within ten minutes.')
        notification.wasNotifiedOneHour = True
        notification.wasNotifiedTwentyFourHour = True
        notification.wasNotifiedTenMinutes = True
        notification.wasNotifiedOneHourTwitter = True
        notification.wasNotifiedTwentyFourHourTwitter = True
        notification.wasNotifiedTenMinutesTwitter = True
    elif diff >= 86400:
        # More than a day away again: clear all flags so every window
        # notification fires afresh as the launch approaches.
        notification.wasNotifiedTwentyFourHour = False
        notification.wasNotifiedOneHour = False
        notification.wasNotifiedTenMinutes = False
        notification.wasNotifiedTwentyFourHourTwitter = False
        notification.wasNotifiedOneHourTwitter = False
        notification.wasNotifiedTenMinutesTwitter = False
    notification.save()
def get_datetime(timestamp_string): # Try to interpret as a unix timestamp timestamp = int_safe(timestamp_string) if timestamp is not None: try: dt = datetime.utcfromtimestamp(timestamp) dt.replace(tzinfo=pytz.utc) return dt except ValueError: print "Bad timestamp: %d" % timestamp return None # Try to interpret as an ISO date return parse_iso_datetime(timestamp_string)
def crondis(request):
    """Cron endpoint: delete Team rows registered more than ~1 month ago.

    Responds with a fake nginx 404 page unless the correct CRON_KEY is
    supplied via the 'kek' query parameter.
    """
    not_found_page = '<html>\n<head><title>404 Not Found</title></head>\n<body bgcolor="white">\n<center><h1>404 Not Found</h1></center>\n<hr><center>nginx/1.10.0 (Ubuntu)</center>\n</body>\n</html>'
    try:
        supplied_key = request.GET['kek']
    except Exception:
        return HttpResponse(not_found_page)
    if supplied_key != settings.CRON_KEY:
        return HttpResponse(not_found_page)
    # Current time as an epoch, pushed back one month's worth of seconds.
    tdel = datetime.utcfromtimestamp(
        calendar.timegm(datetime.now().timetuple()) - 2629743)  # 1 month
    mod = models.Team.objects.exclude(registered__gte=tdel).delete()
    # TODO: Player model
    return HttpResponse("Deleted " + str(mod[0]) + ' entries.')
def get_token():
    """
    Generate (or reuse a cached) token for the Service Account.

    This token is most likely going to be used to update information for
    the logged-in user (not to be confused with the service account) such
    as auto-mapping the user upon first login.
    """
    token = cache.get('sa-token')
    expiry = cache.get('sa-token-expiry')
    if token and expiry and expiry > datetime.utcnow():
        logger.debug('returning cached token (within expiry)')
        return token

    token_url = '{keycloak}/auth/realms/{realm}/protocol/openid-connect/token'.format(
        keycloak=settings.KEYCLOAK['SERVICE_ACCOUNT_KEYCLOAK_API_BASE'],
        realm=settings.KEYCLOAK['SERVICE_ACCOUNT_REALM']
    )
    client_credentials = (
        settings.KEYCLOAK['SERVICE_ACCOUNT_CLIENT_ID'],
        settings.KEYCLOAK['SERVICE_ACCOUNT_CLIENT_SECRET']
    )
    response = requests.post(
        token_url,
        auth=client_credentials,
        data={'grant_type': 'client_credentials'}
    )
    fresh_token = response.json()['access_token']
    # Decode without verification only to read the 'exp' claim.
    claims = jwt.decode(fresh_token, verify=False)
    expiry_time = datetime.utcfromtimestamp(claims['exp'])
    # Cache for slightly less than the token lifetime (slack_time margin).
    cache_seconds = int((expiry_time - datetime.utcnow()).total_seconds() - slack_time)
    cache.set('sa-token', fresh_token, cache_seconds)
    cache.set('sa-token-expiry', expiry_time, cache_seconds)
    logger.info('our token is good for {} seconds'.format(cache_seconds))
    return fresh_token
def send_notification(self, launch, notification_type, notification):
    """Build and send a push notification for *launch*.

    Composes a message for the given notification_type, sends it via FCM
    topics (android + flutter) and OneSignal segments, then fetches the
    OneSignal notification back to verify delivery and records the send
    time and recipient count on *notification*.

    Rate-limited: skipped if the last send was under 600s ago (non-DEBUG).
    """
    logger.info('Creating notification for %s' % launch.name)
    # Pick the message text for this notification type.
    if notification_type == 'netstampChanged':
        launch_time = datetime.utcfromtimestamp(int(launch.netstamp))
        contents = 'UPDATE: New launch attempt scheduled on %s at %s.' % (launch_time.strftime("%A, %B %d"),
                                                                          launch_time.strftime("%H:%M UTC"))
    elif notification_type == 'tenMinutes':
        contents = 'Launch attempt from %s in ten minutes.' % launch.location.name
    elif notification_type == 'twentyFourHour':
        contents = 'Launch attempt from %s in 24 hours.' % launch.location.name
    elif notification_type == 'oneHour':
        contents = 'Launch attempt from %s in one hour.' % launch.location.name
    else:
        # Fallback: generic message with the full launch date/time.
        launch_time = datetime.utcfromtimestamp(int(launch.netstamp))
        contents = 'Launch attempt from %s on %s at %s.' % (launch.location.name,
                                                            launch_time.strftime("%A, %B %d"),
                                                            launch_time.strftime("%H:%M UTC"))
    # Create a notification
    topics_and_segments = get_fcm_topics_and_onesignal_segments(launch, debug=self.DEBUG)
    include_segments = topics_and_segments['segments']
    exclude_segments = ['firebase']
    if self.DEBUG:
        # In debug mode, never notify production subscribers.
        exclude_segments.append('Production')
    if len(launch.vid_urls.all()) > 0:
        webcast = True
    else:
        webcast = False
    image = ''
    if launch.launcher.image_url:
        image = launch.launcher.image_url.url
    elif launch.launcher.legacy_image_url:
        image = launch.launcher.legacy_image_url
    kwargs = dict(
        content_available=True,
        excluded_segments=exclude_segments,
        included_segments=include_segments,
        isAndroid=True,
        data={"silent": True,
              "background": True,
              "launch_id": launch.id,
              "launch_name": launch.name,
              "launch_image": image,
              "launch_net": launch.net.strftime("%B %d, %Y %H:%M:%S %Z"),
              "launch_location": launch.location.name,
              "notification_type": notification_type,
              "webcast": webcast
              }
    )
    # url = 'https://spacelaunchnow.me/launch/%d/' % launch.id
    heading = 'Space Launch Now'
    time_since_last_notification = None
    if notification.last_notification_sent is not None:
        time_since_last_notification = datetime.now(pytz.utc) - notification.last_notification_sent
    # Throttle: no more than one notification per 600 seconds (unless DEBUG).
    if time_since_last_notification is not None and time_since_last_notification.total_seconds() < 600 and not self.DEBUG:
        logger.info('Cannot send notification - too soon since last notification!')
    else:
        logger.info('----------------------------------------------------------')
        logger.info('Sending notification - %s' % contents)
        logger.info('Notification Data - %s' % kwargs)
        push_service = FCMNotification(api_key=keys['FCM_KEY'])
        android_topics = topics_and_segments['topics']
        flutter_topics = get_fcm_topics_and_onesignal_segments(launch, debug=self.DEBUG, flutter=True,
                                                               notification_type=notification_type)['topics']
        logger.info("Flutter Topics: %s" % flutter_topics)
        logger.info(topics_and_segments)
        # Android clients get a data-only (silent) message ...
        android_result = push_service.notify_topic_subscribers(data_message=kwargs['data'],
                                                               condition=android_topics,
                                                               time_to_live=86400, )
        # ... while flutter clients also get a visible title/body.
        flutter_result = push_service.notify_topic_subscribers(data_message=kwargs['data'],
                                                               condition=flutter_topics,
                                                               time_to_live=86400,
                                                               message_title=launch.name,
                                                               message_body=contents)
        logger.debug(android_result)
        logger.debug(flutter_result)
        response = self.one_signal.create_notification(contents, heading, **kwargs)
        if response.status_code == 200:
            logger.info('Notification Sent - Status: %s Response: %s' % (response.status_code, response.json()))
            notification_data = response.json()
            notification_id = notification_data['id']
            assert notification_data['id'] and notification_data['recipients']
            notification.last_notification_recipient_count = notification_data['recipients']
            notification.last_notification_sent = datetime.now(pytz.utc)
            notification.save()
            # Get the notification
            response = self.one_signal.get_notification(notification_id)
            if response.status_code == 200:
                logger.info('Notification Status: %s Content: %s' % (response.status_code, response.json()))
            else:
                logger.error(response.text)
            # Sanity-check that OneSignal stored what we sent.
            notification_data = response.json()
            assert notification_data['id'] == notification_id
            assert notification_data['contents']['en'] == contents
        else:
            logger.error(response.text)
        logger.info('----------------------------------------------------------')
def humane_time(timestamp, tz_offset=0):
    """Render time (number of seconds from epoch) to a human-readable string."""
    adjusted = timestamp - tz_offset
    return format_date(datetime.utcfromtimestamp(adjusted))
# sql = 'select distinct l.id_app, l.session, r.time as record_time, r.latitude, r.longitude, s.user_full_name, r.value from models_log l inner join models_record r on l.id = r.log_id inner join models_sensor s on r.sensor_id = s.id where s.pid like "ff1001" and l.session = 1615807853045 order by record_time;' csv_file_path = 'test.csv' try: # cur.execute(sql) # rows = cur.fetchall() cur.execute(sessions_list) sessions_rows = cur.fetchall() finally: pass # Continue only if there are rows returned. if sessions_rows: for row in sessions_rows: print('session ID: ' + str(row[0]) + ' --> date: ' + datetime.utcfromtimestamp(row[2] / 1000).strftime( '%Y-%m-%d %H:%M:%S' '.' '%f')) try: session_id = raw_input('Enter id of session you wish export to csv: ') sql_speed = \ ' select ' \ ' distinct l.id_app, l.session, r.time as record_time,' \ ' r.latitude, r.longitude, s.user_full_name, r.value' \ ' from ' \ ' models_log l ' \ ' inner join ' \ ' models_record r on l.id = r.log_id' \ ' inner join models_sensor s on r.sensor_id = s.id ' \ ' where s.pid like "ff1001" and l.id = %s order by record_time;' % session_id sql_co2 = \