def import_from_twitter_hashtag(self):
    """Import photos from Twitter tagged with the configured hashtag.

    Reads SOCIAL.HASHTAG from config.json; skips with a warning when the
    hashtag is missing or the Twitter hashtag page is unreachable.
    """
    log('import_from_twitter_hashtag()')
    from hackerspace.models.meetingnotes import startChrome
    import time
    import requests
    from getConfig import get_config

    # check if hashtag is saved in settings
    HASHTAG = get_config('SOCIAL.HASHTAG')
    if not HASHTAG:
        show_message(
            'WARNING: Can\'t find SOCIAL.HASHTAG in your config.json. Will skip importing photos from Twitter with your hashtag for now.'
        )
        time.sleep(4)
        return

    show_message(
        '✅ Found SOCIAL.HASHTAG - Start importing photos from Twitter with your hashtag ...'
    )
    time.sleep(2)
    # Build the URL once. split('#')[-1] tolerates a hashtag saved without a
    # leading '#' (the original [1] raised IndexError in that case).
    url = 'https://twitter.com/hashtag/{}?f=image'.format(HASHTAG.split('#')[-1])
    if requests.get(url).status_code == 200:
        browser = startChrome(True, url)
    else:
        show_message(
            'WARNING: I can\'t access your SOCIAL.HASHTAG on Twitter. Is the hashtag correct? Will skip importing photos from Twitter with your hashtag for now.'
        )
        time.sleep(4)
        return
    save_twitter_photos(browser)
def str_relative_time(self):
    """Return a human-readable age for this photo.

    Based on self.int_UNIXtime_created: "N minute(s) ago" within the last
    hour, "N hour(s) ago" within 24h, "N day(s) ago" within 6 days,
    otherwise a "Mon DD, YYYY" date string (UTC).
    """
    log('photo.str_relative_time')
    import time
    from datetime import datetime
    timestamp = self.int_UNIXtime_created
    # Sample the clock once so the branch condition and the printed value
    # can't disagree when the call straddles a minute/hour/day boundary
    # (the original called time.time() separately in each branch).
    now = time.time()
    # in last 60 minutes
    if timestamp >= now - (60 * 60):
        minutes_in_past = int((now - timestamp) / 60)
        log('--> return STR')
        return str(minutes_in_past) + ' minute' + (
            's' if minutes_in_past > 1 else '') + ' ago'
    # in last 24 hours
    elif timestamp >= now - (60 * 60 * 24):
        hours_in_past = int((now - timestamp) / (60 * 60))
        log('--> return STR')
        return str(hours_in_past) + ' hour' + ('s' if hours_in_past > 1 else '') + ' ago'
    # else if in last 6 days, return number of days ago
    elif timestamp >= now - (60 * 60 * 24 * 6):
        days_in_past = int((now - timestamp) / (60 * 60 * 24))
        log('--> return STR')
        return str(days_in_past) + ' day' + ('s' if days_in_past > 1 else '') + ' ago'
    # else date string
    else:
        log('--> return STR')
        return datetime.utcfromtimestamp(timestamp).strftime('%b %d, %Y')
def import_from_twitter(self):
    """Import photos from the hackerspace's own Twitter page.

    Looks for a twitter.com entry in SOCIAL.SOCIAL_NETWORKS and scrapes
    its /media page; skips with a warning when no entry exists or the
    page is unreachable.
    """
    log('import_from_twitter()')
    from hackerspace.models.meetingnotes import startChrome
    import time
    import requests
    from getConfig import get_config

    # search the configured social networks for a Twitter entry
    for entry in get_config('SOCIAL.SOCIAL_NETWORKS'):
        if 'twitter.com/' not in entry['url']:
            continue
        show_message(
            '✅ Found Twitter in SOCIAL.SOCIAL_NETWORKS - Start importing photos from your Twitter page ...'
        )
        time.sleep(2)
        if requests.get(entry['url']).status_code != 200:
            show_message(
                'WARNING: I can\'t access your Twitter page. Is the URL correct? Will skip importing photos from Twitter for now.'
            )
            time.sleep(4)
            return
        browser = startChrome(True, entry['url'] + '/media')
        break
    else:
        # for/else: the loop finished without finding a Twitter URL
        show_message(
            'WARNING: Can\'t find Twitter in SOCIAL.SOCIAL_NETWORKS in your config.json. Will skip importing photos from Twitter for now.'
        )
        time.sleep(4)
        return
    save_twitter_photos(browser)
def JSON_RESPONSE_more_photos(request):
    """Return a JsonResponse with the next page (30) of rendered photos.

    GET params: 'from' (offset into the queryset) and 'type'
    ('latest' | 'oldest' | 'random'). 'random' querysets are rendered
    whole and always report more_results=True.
    """
    log('JSON_RESPONSE_more_photos(request)')
    from django.http import JsonResponse
    from django.template.loader import get_template
    from hackerspace.models import Photo

    start_from = int(request.GET.get('from', None))
    upt_to = int(start_from + 30)
    # read 'type' once instead of re-fetching it in every branch
    photo_type = request.GET.get('type', None)

    # get photos: latest, oldest or random
    if photo_type == 'oldest':
        queryset = Photo.objects.oldest()
    elif photo_type == 'random':
        queryset = Photo.objects.random()
    else:
        # 'latest', also used as fallback: an unknown 'type' previously
        # left 'queryset' unbound and crashed with a NameError
        queryset = Photo.objects.latest()

    log('--> return JsonResponse')
    return JsonResponse({
        'html': get_template('components/body/photos_list.html').render({
            'photos': queryset[start_from:upt_to] if photo_type != 'random' else queryset,
        }),
        'continue_from': upt_to,
        'more_results': photo_type == 'random' or queryset.count() > upt_to
    })
def import_from_wiki(self, page):
    """Import one meeting-notes page from the MediaWiki API.

    'page' is the wiki page title (e.g. "Meeting Notes 2020 01 07");
    the part after "Notes " becomes self.text_date (spaces -> dashes).
    Pages whose text_date already exists are skipped.
    """
    from getConfig import get_config
    import requests
    if not get_config('BASICS.WIKI.API_URL'):
        log('--> BASICS.WIKI.API_URL not found in config.json -> BASICS - Please add your WIKI_API_URL first.')
        return
    self.text_date = page.split('Notes ')[1].replace(' ', '-')
    # see if notes already exist, else, create
    if not MeetingNote.objects.filter(text_date=self.text_date).exists():
        from bs4 import BeautifulSoup
        response_json = requests.get(
            get_config('BASICS.WIKI.API_URL')+'?action=parse&page='+page+'&format=json').json()['parse']
        # strip the python-dict wrapper MediaWiki returns around the HTML
        # before parsing it
        soup = BeautifulSoup(str(response_json['text']).replace(
            "{\'*\': \'", "").replace("'}", "").replace("\\n", "").replace("\\\'", "\'"), 'html.parser')
        # remove all links (keep the link text, drop the href targets)
        for a in soup.findAll('a'):
            del a['href']
        self.text_notes = str(soup)
        self.updateCreatedBasedOnName()
        self.save()
        print('Imported from wiki - '+self.text_date)
    else:
        print('Skipped - Already exists. '+self.text_date)
def project_view(request, sub_page):
    """Render a single project page; redirect to /projects for unknown slugs."""
    log('project_view(request, {})'.format(sub_page))
    sub_page = 'project/'+sub_page
    # known slug -> render, otherwise back to the overview page
    if Project.objects.filter(str_slug=sub_page).exists():
        return get_page_response(request, 'project', sub_page)
    return HttpResponseRedirect('/projects')
def machine_view(request, sub_page):
    """Render a single machine page; redirect to /machines for unknown slugs."""
    log('machine_view(request, {})'.format(sub_page))
    sub_page = 'machine/'+sub_page
    # known slug -> render, otherwise back to the overview page
    if Machine.objects.filter(str_slug=sub_page).exists():
        return get_page_response(request, 'machine', sub_page)
    return HttpResponseRedirect('/machines')
def INT__timezoneToOffset(timezone_name):
    """Return the current UTC offset of 'timezone_name' in milliseconds."""
    log('INT__timezoneToOffset(timezone_name={})'.format(timezone_name))
    from datetime import datetime
    import pytz
    offset_seconds = datetime.now(pytz.timezone(timezone_name)).utcoffset().total_seconds()
    log('--> return INT')
    return int(offset_seconds * 1000)
def import_all_from_wiki(self):
    """Import every meeting-notes page in the wiki's Meeting_Notes category.

    Follows the MediaWiki 'cmcontinue' pagination cursor until all category
    members are collected, then imports each matching page via
    MeetingNote().import_from_wiki().
    """
    import requests
    from getConfig import get_config

    WIKI_API_URL = get_config('BASICS.WIKI.API_URL')
    if not WIKI_API_URL:
        log('--> BASICS.WIKI.API_URL not found in config.json -> BASICS - Please add your WIKI_API_URL first.')
        return

    def _fetch(cmcontinue=None):
        # One batch (500) of category members; 'cmcontinue' is the cursor.
        url = WIKI_API_URL + '?action=query&list=categorymembers' + \
            ('&cmcontinue='+cmcontinue if cmcontinue else '') + \
            '&cmtitle=Category:Meeting_Notes&cmlimit=500&format=json'
        return requests.get(url).json()

    def _titles(response_json):
        # Only keep real meeting-notes pages ("Meeting Notes 20xx ...").
        return [x['title'] for x in response_json['query']['categorymembers']
                if 'Meeting Notes 20' in x['title']]

    response_json = _fetch()
    all_wiki_pages = _titles(response_json)
    while 'continue' in response_json and 'cmcontinue' in response_json['continue']:
        response_json = _fetch(response_json['continue']['cmcontinue'])
        all_wiki_pages += _titles(response_json)

    for meeting in all_wiki_pages:
        MeetingNote().import_from_wiki(meeting)
    print('Imported all meeting notes from wiki')
def approve_event_view(request):
    """Approve a not-yet-approved event and all upcoming events with the same name.

    Requires a logged-in user (else 403) and a 'str_slug' GET parameter
    matching an unapproved event (else 404). Posts a Slack notice when the
    request was served on the live domain.
    """
    log('approve_event_view(request)')
    # read the slug once instead of re-fetching it in every condition
    str_slug = request.GET.get('str_slug', None)
    if not request.user.is_authenticated:
        log('--> Failed: User not logged in')
        response = JsonResponse({'success': False})
        response.status_code = 403
    elif not str_slug or not Event.objects.filter(boolean_approved=False, str_slug=str_slug).exists():
        log('--> Failed: Result not found')
        response = JsonResponse({'success': False})
        response.status_code = 404
    else:
        from hackerspace.APIs.slack import send_message
        DOMAIN = get_config('WEBSITE.DOMAIN')
        # approve event and all upcoming ones with the same name
        event = Event.objects.filter(boolean_approved=False, str_slug=str_slug).first()
        upcoming_events = Event.objects.filter(boolean_approved=False, str_name_en_US=event.str_name_en_US).all()
        log('--> Approve all upcoming events')
        for event in upcoming_events:
            event.publish()
        # notify via slack that event was approved and by who
        if 'HTTP_HOST' in request.META and request.META['HTTP_HOST'] == DOMAIN:
            send_message('✅'+str(request.user)+' approved the event "'+event.str_name_en_US+'":\nhttps://'+DOMAIN+'/'+event.str_slug)
        response = JsonResponse({'success': True})
        response.status_code = 200
    log('--> return response')
    return response
def load_more_view(request):
    """Paginated "load more" endpoint rendering the next chunk of results.

    GET params: 'what' selects the content type, 'from' the offset.
    Returns a 404 JSON error when parameters are missing or unknown
    (the original could leave 'response' unbound on the missing-parameter
    path and crash with a NameError).
    """
    log('load_more_view(request)')
    from hackerspace.Website.views_helper_functions import JSON_RESPONSE_more_results, JSON_RESPONSE_more_photos
    # read 'what' once instead of re-fetching it for every branch
    what = request.GET.get('what', None)
    if what and request.GET.get('from', None):
        if what == 'meeting_notes':
            response = JSON_RESPONSE_more_results(
                request, 'meetings/meetings_list.html', MeetingNote.objects.past())
        elif what == 'events':
            response = JSON_RESPONSE_more_results(
                request, 'results_list_entries.html', Event.objects.QUERYSET__upcoming())
        elif what == 'projects':
            response = JSON_RESPONSE_more_results(
                request, 'results_list_entries.html', Project.objects.latest())
        elif what == 'spaces':
            response = JSON_RESPONSE_more_results(
                request, 'results_list_entries.html', Space.objects.all())
        elif what == 'machines':
            response = JSON_RESPONSE_more_results(
                request, 'results_list_entries.html', Machine.objects.all())
        elif what == 'guildes':
            response = JSON_RESPONSE_more_results(
                request, 'results_list_entries.html', Guilde.objects.all())
        elif what == 'consensus':
            response = JSON_RESPONSE_more_results(
                request, 'consensus_items_entries.html', Consensus.objects.latest())
        elif what == 'photos':
            response = JSON_RESPONSE_more_photos(request)
        else:
            response = JsonResponse({'error': 'Request incomplete or wrong'})
            response.status_code = 404
    else:
        response = JsonResponse({'error': 'Request incomplete or wrong'})
        response.status_code = 404
    log('--> return response')
    return response
def QUERYSET__in_timeframe(self, from_UNIX_time, to_UNIX_time, str_space_name=None):
    """Return approved events overlapping [from_UNIX_time, to_UNIX_time].

    Optionally narrowed to one space by name. An event overlaps when it
    starts inside the window, ends inside the window, or fully spans it.
    """
    log('Event.objects.QUERYSET__in_timeframe(self, from_UNIX_time={}, to_UNIX_time={}, str_space_name={})'.format(from_UNIX_time,to_UNIX_time,str_space_name))
    from django.db.models import Q
    from hackerspace.models import Space

    queryset = self
    if str_space_name:
        space = Space.objects.QUERYSET__by_name(str_space_name)
        if space:
            queryset = queryset.filter(one_space=space)

    # starts inside the window
    starts_in = Q(int_UNIXtime_event_start__gte=from_UNIX_time) & Q(int_UNIXtime_event_start__lte=to_UNIX_time)
    # ends inside the window
    ends_in = Q(int_UNIXtime_event_end__gte=from_UNIX_time) & Q(int_UNIXtime_event_end__lte=to_UNIX_time)
    # starts before and ends after the window (spans it completely)
    spans = Q(int_UNIXtime_event_start__lte=from_UNIX_time) & Q(int_UNIXtime_event_end__gte=to_UNIX_time)

    output = queryset.filter(
        starts_in | ends_in | spans
    ).exclude(
        Q(int_UNIXtime_event_start__gte=to_UNIX_time)
        | Q(int_UNIXtime_event_end__lte=from_UNIX_time)
    ).exclude(
        boolean_approved=False
    )
    log('--> return QUERYSET ({} results)'.format(output.count()))
    return output
def delete_post(post_url):
    """Delete the Discourse topic behind 'post_url'.

    Returns True on success, False when the post can't be found or deleted,
    and None when no Discourse API key is configured.
    """
    log('delete_post()')
    if not BOOLEAN__key_exists('DISCOURSE.API_KEY'):
        log('--> Failed: DISCOURSE.API_KEY not set')
        return None
    # Resolve the topic ID: Discourse redirects the post URL, and the final
    # URL ends with the numeric topic ID.
    response = requests.get(post_url)
    if response.status_code != 200:
        log('--> Couldn\'t find post on Discourse. Skipped deleting.')
        return False
    topic_id = response.url.split('/')[-1]
    response = requests.delete(DISCOURSE_URL+'/t/'+topic_id+'.json',
                               headers={
                                   'content-type': 'application/json'
                               },
                               params={
                                   'api_key': STR__get_key('DISCOURSE.API_KEY'),
                                   'api_username': STR__get_key('DISCOURSE.API_USERNAME')
                               })
    if response.status_code == 200:
        log('--> Deleted')
        return True
    log('--> Not deleted')
    print(response.status_code)
    print(response.json())
    return False
def JSON_RESPONSE_more_results(request, template_path, queryset):
    """Render the next 10 entries of 'queryset' with the given template.

    GET params: 'from' (offset), optional 'origin' (narrows the queryset
    to a guilde/space page) and 'specific_selector' (passed to the
    template context).
    """
    log('JSON_RESPONSE_more_results(request, template_path, queryset)')
    from django.http import JsonResponse
    from django.template.loader import get_template

    # see if request comes from a guilde/space page, then show guilde/space
    # results, not all results; read 'origin' once
    origin = request.GET.get('origin', None)
    if origin:
        # origin[1:] drops the leading character before slug comparison
        if 'guilde/' in origin:
            queryset = queryset.filter(one_guilde__str_slug=origin[1:])
        elif 'space/' in origin:
            queryset = queryset.filter(one_space__str_slug=origin[1:])

    start_from = int(request.GET.get('from', None))
    upt_to = int(start_from + 10)
    log('--> return JsonResponse')
    return JsonResponse({
        'html': get_template('components/body/' + template_path).render({
            'all_results': queryset[start_from:upt_to],
            'specific_selector': request.GET.get('specific_selector', None)
        }),
        'continue_from': upt_to,
        # comparison already yields a bool; no 'True if ... else False' needed
        'more_results': queryset.count() > upt_to
    })
def guilde_view(request, sub_page):
    """Render a single guilde page; redirect to /guildes for unknown slugs."""
    log('guilde_view(request, {})'.format(sub_page))
    sub_page = 'guilde/'+sub_page
    # known slug -> render, otherwise back to the overview page
    if Guilde.objects.filter(str_slug=sub_page).exists():
        return get_page_response(request, 'guilde', sub_page)
    return HttpResponseRedirect('/guildes')
def get_categories(output='list'):
    """Fetch the Discourse category list.

    Returns a list of category slugs by default; the raw JSON when
    output != 'list' or the request failed.
    """
    log('get_categories()')
    # The original appended 'categories.json' directly, which only formed a
    # valid URL when DISCOURSE_URL ended with '/'; rstrip + '/' is correct
    # either way and matches the other call sites ('/t/', '/search/').
    response = requests.get(
        DISCOURSE_URL.rstrip('/')+'/categories.json',
        headers={'Accept': 'application/json'})
    if output == 'list' and response.status_code == 200:
        return [x['slug'] for x in response.json()['category_list']['categories']]
    return response.json()
def RESULT__updateTime(result):
    """Stamp 'result' with UNIX times and return it.

    Sets int_UNIXtime_created only when missing (first save) and always
    refreshes int_UNIXtime_updated.
    """
    log('RESULT__updateTime(result={})'.format(result))
    import time
    # single clock sample so created == updated on the first save
    # (the original called time.time() twice and could differ slightly)
    now = time.time()
    if not result.int_UNIXtime_created:
        result.int_UNIXtime_created = now
    result.int_UNIXtime_updated = now
    return result
def save(self, *args, **kwargs):
    """Save the event: sanitize text fields, generate a unique slug,
    geocode the location, then attach configured hosts.

    Wrapped in a broad try so a failing save never propagates; errors are
    only logged (pre-existing behavior, kept deliberately).
    """
    try:
        log('event.save()')
        import urllib.parse
        from hackerspace.models.events import RESULT__updateTime
        from hackerspace.models import Space, Person
        import bleach
        from getConfig import get_config
        import re
        log('--> clean from scripts')
        # bleach-sanitize every user-editable text field
        if self.str_name_en_US:
            self.str_name_en_US = bleach.clean(self.str_name_en_US)
        if self.text_description_en_US:
            # Meetup-imported descriptions are trusted and kept as-is
            if not self.url_meetup_event:
                self.text_description_en_US = bleach.clean(self.text_description_en_US)
        if self.text_description_he_IL:
            self.text_description_he_IL = bleach.clean(self.text_description_he_IL)
        if self.str_location:
            # NOTE(review): this replace() maps '<br>' to itself and looks
            # like a no-op - possibly an escaped form was lost; confirm intent
            self.str_location = bleach.clean(self.str_location).replace('<br>','<br>')
        if self.str_series_repeat_how_often:
            self.str_series_repeat_how_often = bleach.clean(self.str_series_repeat_how_often)
        if self.text_series_timing:
            self.text_series_timing = bleach.clean(self.text_series_timing)
        if self.str_crowd_size:
            self.str_crowd_size = bleach.clean(self.str_crowd_size)
        if self.str_welcomer:
            self.str_welcomer = bleach.clean(self.str_welcomer)
        if self.str_timezone:
            self.str_timezone = bleach.clean(self.str_timezone)
        self = RESULT__updateTime(self)
        if not self.str_slug:
            # slug = 'event/<date>-<name>' with all non-word chars stripped;
            # append a counter until the slug is unique
            self.str_slug = 'event/'+(str(self.datetime_start.date())+'-' if self.datetime_start else '')+re.sub(r'[\W_]+', '', self.str_name_en_US.lower())
            counter = 0
            while Event.objects.filter(str_slug=self.str_slug).exists():
                counter += 1
                self.str_slug = 'event/'+(str(self.datetime_start.date())+'-' if self.datetime_start else '')+re.sub(r'[\W_]+', '', self.str_name_en_US.lower())+str(counter)
        log('--> Save lat/lon if not exist yet')
        if not self.float_lat:
            self.str_location, self.float_lat, self.float_lon = get_lat_lon_and_location(self.str_location)
        super(Event, self).save(*args, **kwargs)
        log('--> Save hosts')
        if not self.many_hosts.exists():
            EVENTS_HOSTS_OVERWRITE = get_config('EVENTS.EVENTS_HOSTS_OVERWRITE')
            # search in predefined event hosts in YOURHACKERSPACE
            for event_name in EVENTS_HOSTS_OVERWRITE:
                if event_name in self.str_name_en_US:
                    for host_name in EVENTS_HOSTS_OVERWRITE[event_name]:
                        host = Person.objects.QUERYSET__by_name(host_name)
                        if host:
                            self.many_hosts.add(host)
    except Exception as e:
        # 'except Exception' instead of bare 'except:' so Ctrl-C and
        # SystemExit still propagate; include the exception in the log
        log('--> ERROR: couldn\'t save event - '+str(self)+' - '+str(e))
def LIST__offsetToTimezone(offset_ms):
    """Return the first timezone name whose current UTC offset equals offset_ms (milliseconds)."""
    log('LIST__offsetToTimezone(offset_ms={})'.format(offset_ms))
    from datetime import datetime
    import pytz
    now = datetime.now(pytz.utc)  # current time
    log('--> return LIST')
    matching_zones = [
        tz.zone
        for tz in map(pytz.timezone, pytz.all_timezones_set)
        if now.astimezone(tz).utcoffset().total_seconds() * 1000 == offset_ms
    ]
    return matching_zones[0]
def STR__extractTimezone(json_meetup_result):
    """Return the timezone name for a Meetup event result.

    Defaults to the configured PHYSICAL_SPACE.TIMEZONE_STRING; when the
    Meetup result carries a different utc_offset, a timezone matching that
    offset is derived instead.
    """
    log('STR__extractTimezone(json_meetup_result)')
    from getConfig import get_config
    TIMEZONE_STRING = get_config('PHYSICAL_SPACE.TIMEZONE_STRING')
    offset_differs = (
        'utc_offset' in json_meetup_result
        and json_meetup_result['utc_offset'] != INT__timezoneToOffset(TIMEZONE_STRING)
    )
    if offset_differs:
        return LIST__offsetToTimezone(json_meetup_result['utc_offset'])
    log('--> return STR')
    return TIMEZONE_STRING
def delete_discourse_event(self):
    """Delete the linked Discourse post; clear the link only on confirmed deletion.

    Returns self so calls can be chained.
    """
    log('event.delete_discourse_event()')
    from hackerspace.APIs.discourse import delete_post
    if self.url_discourse_event:
        # delete_post returns True/False/None; only forget the URL when the
        # post was really deleted ('is True' instead of the old '==True')
        if delete_post(self.url_discourse_event) is True:
            self.url_discourse_event = None
            super(Event, self).save()
    log('--> return event')
    return self
def delete(self):
    """Delete this event together with its Discourse post and uploaded photo."""
    log('event.delete()')
    self.delete_discourse_event()  # remove the linked Discourse post first
    self.delete_photo()            # then the uploaded photo
    super(Event, self).delete()
    log('--> Deleted')
def announce_via_marry(self):
    """Announce this event via the space's text-to-speech system ("Marry")."""
    log('event.announce_via_marry()')
    import time
    from hackerspace.hackerspace_specific.noisebridge_sf_ca_us.marry import speak
    # relative wording when the event starts within the next hour,
    # otherwise the plain start hour (e.g. "07 PM")
    if self.int_UNIXtime_event_start < time.time() + (60 * 60):
        start_time = self.str_relative_time
    else:
        start_time = self.datetime_start.strftime('%I %p')
    if start_time == 'Now':
        speak(str(self.str_name_en_US) + ' is happening now', None)
    else:
        speak(str(self.str_name_en_US) + ' starts at ' + start_time, None)
def INT__getWeekday(number):
    """Map a weekday index (0=Mon ... 6=Sun) to its short English name."""
    log('INT__getWeekday(number={})'.format(number))
    names = {
        0: 'Mon', 1: 'Tue', 2: 'Wed', 3: 'Thu',
        4: 'Fri', 5: 'Sat', 6: 'Sun',
    }
    return names[number]
def meeting_end_view(request):
    """End the currently running meeting and return its notes URL as JSON."""
    log('meeting_end_view(request)')
    current_meeting = MeetingNote.objects.current()
    if not current_meeting:
        # no meeting is running -> report it as a server-side alert
        response = JsonResponse({'alert': 'No current meeting found'})
        response.status_code = 500
        return response
    current_meeting.end()
    return JsonResponse({'meeting_url': '/meeting/'+current_meeting.text_date})
def discourse_search(query, limit=5):
    """Search Discourse topics for 'query' and return result dicts (icon/name/url).

    NOTE(review): 'limit' is currently unused - every returned topic is kept.
    """
    log('discourse_search()')
    response_json = requests.get(
        DISCOURSE_URL+'/search/query.json?term='+query).json()
    if 'topics' not in response_json:
        return []
    return [{
        'icon': 'discourse',
        'name': topic['title'],
        'url': DISCOURSE_URL+'/t/'+str(topic['id'])
    } for topic in response_json['topics']]
def delete_event_view(request):
    """Delete an event (and its upcoming series) identified by 'str_slug'.

    Returns 404 JSON when the slug is missing or unknown. Posts a Slack
    notice when the request was served on the live domain.
    """
    log('delete_event_view(request)')
    # read the slug once instead of re-fetching it in every condition
    str_slug = request.GET.get('str_slug', None)
    if not str_slug or not Event.objects.filter(str_slug=str_slug).exists():
        log('--> Failed: Result not found')
        response = JsonResponse({'success': False})
        response.status_code = 404
    else:
        from hackerspace.APIs.slack import send_message
        # delete event and all upcoming ones
        event = Event.objects.filter(str_slug=str_slug).first()
        log('--> Delete all upcoming events')
        event.delete_series()
        # notify via slack that event was deleted and by who
        if 'HTTP_HOST' in request.META and request.META['HTTP_HOST'] == get_config('WEBSITE.DOMAIN'):
            send_message('🚫'+str(request.user)+' deleted the event "'+event.str_name_en_US+'"')
        response = JsonResponse({'success': True})
        response.status_code = 200
    log('--> return response')
    return response
def remove_view(request):
    """Remove a keyword from a meeting's notes.

    GET params: 'keyword' (the keyword to drop) and 'origin' (a path whose
    second '/'-segment is the meeting's text_date).
    """
    log('remove_view(request)')
    # read the GET params once instead of re-fetching/re-splitting them
    keyword = request.GET.get('keyword', None)
    origin = request.GET.get('origin', None)
    text_date = origin.split('/')[1] if origin else None
    if keyword and origin and MeetingNote.objects.filter(text_date=text_date).exists():
        meeting = MeetingNote.objects.filter(text_date=text_date).first()
        meeting.remove_keyword(keyword)
        response = JsonResponse({'success': True})
    else:
        response = JsonResponse({'error': 'Request incomplete or wrong'})
        response.status_code = 404
    log('--> return response')
    return response
def create_post(str_headline, str_text, str_category):
    """Create a Discourse topic and return its URL, or None on failure.

    Emoji are stripped from the headline and HTML entities unescaped
    before posting. Requires DISCOURSE.API_KEY to be configured.
    """
    log('create_post()')
    from html import unescape
    import emoji
    if not BOOLEAN__key_exists('DISCOURSE.API_KEY'):
        log('--> Failed: DISCOURSE.API_KEY not set')
        return None
    # Normalize the base URL once: the original appended 'posts.json' without
    # a '/' and kept two duplicated branches for the trailing-slash case when
    # building the topic URL. rstrip('/')+'/' is correct either way.
    base_url = DISCOURSE_URL.rstrip('/')
    response = requests.post(base_url+'/posts.json', headers={
        'content-type': 'application/json'
    }, params={
        'api_key': STR__get_key('DISCOURSE.API_KEY'),
        'api_username': STR__get_key('DISCOURSE.API_USERNAME'),
        'title': emoji.get_emoji_regexp().sub(u'', unescape(str_headline)),
        'raw': str_text,
        'category': get_category_id(str_category)
        # TODO add event details
        # 'event': {'start': '2019-12-13T15:00:00+00:00', 'end': '2019-12-13T19:00:00+00:00'}
    })
    if response.status_code == 200:
        url = base_url+'/t/'+str(response.json()['topic_id'])
        log('--> Created Discourse post: '+url)
        return url
    print(response.status_code)
    print(response.json())
def get_page_response(request, page, sub_page=None):
    """Render the matching template for 'page' and return the HttpResponse.

    Special-cases the meeting presentation and event banner pages; every
    other page uses the generic 'page.html' template. The URL fragment
    (after '#') is forwarded to get_view_response as hash_name.
    """
    log('get_page_response(request,page={},sub_page={})'.format(page, sub_page))
    # removed the original's no-op 'page = page' self-assignment; also call
    # build_absolute_uri() once instead of twice
    absolute_uri = request.build_absolute_uri()
    hash_name = absolute_uri.split('#')[1] if '#' in absolute_uri else None
    if page == 'meeting_present':
        html = 'meeting_present.html'
    elif page == 'event_banner':
        html = 'event_banner_view.html'
    else:
        html = 'page.html'
    response = render(request, html, get_view_response(request, page, sub_page, hash_name))
    return response