def ReadSheet(self, error_):
    """Authorize against Google Sheets, read the input range and clear the output range.

    Reads columns from ``self.RANGE_data_in`` of spreadsheet
    ``self.SPREADSHEET_ID``, clears the previous output area on the sheet
    named 'Лист1', and appends the normalized (stripped, lower-cased,
    non-empty) cell values to ``self.DataInShets``.

    Args:
        error_: list collector; human-readable HTML error fragments are
            appended to it on failure.

    Returns:
        self.DataInShets (possibly unchanged when an error occurred).
    """
    # batchUpdate request that blanks the output area; sheetId is patched in
    # below once the real id of 'Лист1' is known.
    clear_cells = {
        "requests": [{
            "updateCells": {
                "range": {
                    "sheetId": 0,
                    "startColumnIndex": 0,
                    "endColumnIndex": 37,
                    "startRowIndex": self.ROW_data_out - 1,
                    "endRowIndex": self.ROW_data_out + self.Max_keys
                },
                "fields": "userEnteredValue"
            }
        }]
    }
    sheetId = ''
    SCOPES = 'https://www.googleapis.com/auth/spreadsheets'
    store = file.Storage(os.path.dirname(os.path.abspath(__file__)) + '/secrets/credentials.json')
    creds = store.get()
    # Google Sheets authorization via the stored token; run the OAuth flow
    # if the token is missing or expired.
    if not creds or creds.invalid:
        try:
            flow = client.flow_from_clientsecrets(
                os.path.dirname(os.path.abspath(__file__)) + '/secrets/google_client_secret.json',
                SCOPES)
        except oauth2client.clientsecrets.InvalidClientSecretsError:
            error_.append(u'<p><b>ERROR: </b>Файл google_client_secret.json не найден.</p>'
                          u'<p><b>ERROR: </b>Авторизация в Google Sheets не выполнена</p>')
            return self.DataInShets
        creds = tools.run_flow(flow, store)
    self.service = build('sheets', 'v4', http=creds.authorize(Http()))
    # Read the incoming data from the range RANGE_data_in.
    try:
        result = self.service.spreadsheets().values().get(
            spreadsheetId=self.SPREADSHEET_ID, majorDimension='COLUMNS',
            range=self.RANGE_data_in).execute()
    except googleapiclient.errors.HttpError:
        error_.append(u'<p><b>ERROR: </b>Таблица с идентификатором ' + self.SPREADSHEET_ID + ' не найдена.</p>')
        return self.DataInShets
    # Look up the id of the sheet titled 'Лист1'.
    sheet_metadata = self.service.spreadsheets().get(spreadsheetId=self.SPREADSHEET_ID).execute()
    for i in sheet_metadata.get('sheets', ''):
        sheetTitle = i.get("properties", {}).get("title", "")
        if sheetTitle == 'Лист1':
            sheetId = i.get("properties", {}).get('sheetId', "")
    if sheetId != '':
        # Patch the real sheetId into the clear_cells request.
        clear_cells['requests'][0]['updateCells']['range'].update({'sheetId': sheetId})
        # Clear the output range.
        request = self.service.spreadsheets().batchUpdate(spreadsheetId=self.SPREADSHEET_ID,
                                                          body=clear_cells)
        request.execute()
        # Collect the columns that carry values.
        values = result.get('values', [])
        if not values:
            error_.append(u'<p><b>ERROR: </b>Входящие данные в диапазоне ' + self.RANGE_data_in + ' не найдены</p>')
            return self.DataInShets
        else:
            # Split key phrases from regions: keep non-empty cells,
            # normalized to stripped lower-case.
            for col in values:
                collumn = []
                for el in col:
                    if el == '':
                        continue
                    else:
                        collumn.append(el.strip().lower())
                self.DataInShets.append(collumn)
            if len(self.DataInShets[0]) == 0 or len(self.DataInShets[1]) == 0:
                # Bug fix: the closing tag was emitted as '</ul' (missing '>'),
                # producing broken HTML in the error report.
                error_.append(u'<p><b>ERROR: </b>Вы не заполнили одну или несколько колонок с входными данными</p>'
                              u'<ul>'
                              u'<li>А - колонка с ключами</li>'
                              u'<li>В - колонка с регионами</li>'
                              u'</ul>')
                return self.DataInShets
            return self.DataInShets
    else:
        error_.append(u'<p><b>ERROR: </b>В таблице с идентификатором ' + self.SPREADSHEET_ID + ' не найден лист с названием "Лист1".</p>')
        return self.DataInShets
# Resolve project directories relative to this file.
# NOTE: `dir` shadows the builtin; kept because later (unseen) parts of the
# file may reference this module-level name.
dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
secret_dir = dir + '/modules/secret.json'

# Decide whether to execute this module from the runFlag file.
# Bug fix: use a context manager so the file handle is closed even if
# read()/int() raises (the original open/close pair leaked on error).
runFlag_dir = dir + '/cache/runFlag'
with open(runFlag_dir, 'r') as runFlag_file:
    runFlag = int(runFlag_file.read())
if not runFlag:
    quit()

credentials_dir = dir + '/modules/credentials.json'

# Setup the Calendar API: load cached credentials, running the OAuth
# browser flow when the token is missing or expired.
SCOPES = 'https://www.googleapis.com/auth/calendar.readonly'
store = file.Storage(credentials_dir)
creds = store.get()
if not creds or creds.invalid:
    flow = client.flow_from_clientsecrets(secret_dir, SCOPES)
    creds = tools.run_flow(flow, store)
service = build('calendar', 'v3', http=creds.authorize(Http()))

# Call the Calendar API for the next 60 events starting now.
now = datetime.datetime.utcnow().isoformat() + 'Z'  # 'Z' indicates UTC time
now_ref = time.time()
calendarid = '*****@*****.**'
events_result = service.events().list(calendarId=calendarid, timeMin=now,
                                      maxResults=60, singleEvents=True,
                                      orderBy='startTime').execute()
def getCalander(self):
    """Fetch the next four Google Calendar events and refresh the UI labels.

    Re-schedules itself via ``self.after`` to run again in one hour.
    Labels are only reconfigured when the corresponding first-event value
    changed since the last refresh.
    """
    SCOPES = 'https://www.googleapis.com/auth/calendar.readonly'
    store = oauth_file.Storage('tokenPersonalIan.json')
    creds = store.get()
    if not creds or creds.invalid:
        flow = client.flow_from_clientsecrets('credentialIan.json', SCOPES)
        creds = tools.run_flow(flow, store)
    service = build('calendar', 'v3', http=creds.authorize(Http()))

    # Call the Calendar API for the next 4 upcoming events.
    now = datetime.datetime.utcnow().isoformat() + 'Z'  # 'Z' indicates UTC time
    print('Getting Event(s)')
    events_result = service.events().list(calendarId='primary', timeMin=now,
                                          maxResults=4, singleEvents=True,
                                          orderBy='startTime').execute()
    events = events_result.get('items', [])

    # Bug fix: the original indexed events[0]..events[3] unconditionally and
    # raised IndexError when fewer than 4 events were returned (its
    # `if not events` branch only printed and fell through).
    if len(events) < 4:
        print('No upcoming events found.')
        self.after(3600000, self.getCalander)
        return

    # Extract the per-event fields once instead of four copy-pasted blocks.
    dates = [self.getDate(e) for e in events[:4]]
    locations = [self.geteventLocation(e) for e in events[:4]]
    names = [self.geteventName(e) for e in events[:4]]
    times = [self.getTime(e) for e in events[:4]]

    if self.dateItem != dates[0]:
        self.dateItem = dates[0]
        self.dateLable.config(text=dates[0] + "\n" + dates[1] + "\n" + dates[2] + "\n" + dates[3])
    if self.timeItem != times[0]:
        self.timeItem = times[0]
        self.timeLable.config(text=times[0] + "\n" + times[1] + "\n" + times[2] + "\n" + times[3])
    if self.calanderItem != names[0]:
        self.calanderItem = names[0]
        self.calanderLable.config(text=names[0] + "\n" + names[1] + "\n" + names[2] + "\n" + names[3])
    if self.locationName != locations[0][0]:
        # NOTE(review): only the first element of the first location is
        # compared/stored, while the full values are rendered — mirrors the
        # original behavior; confirm geteventLocation's return shape.
        self.locationName = locations[0][0]
        self.locationLable.config(text=locations[0] + "\n" + locations[1] + "\n" + locations[2] + "\n" + locations[3])
    self.after(3600000, self.getCalander)
from __future__ import print_function import httplib2 import os from apiclient.discovery import build from httplib2 import Http from oauth2client import file, client, tools # Setup the Gmail API SCOPES = 'https://www.googleapis.com/auth/gmail.readonly' store = file.Storage('credentials.json') creds = store.get() if not creds or creds.invalid: flow = client.flow_from_clientsecrets('client_secret.json', SCOPES) creds = tools.run_flow(flow, store) service = build('gmail', 'v1', http=creds.authorize(Http())) # Call the Gmail API results = service.users().labels().list(userId='me').execute() labels = results.get('labels', []) if not labels: print('No labels found.') else: print('Labels:') for label in labels: print(label['name'])
from apiclient import discovery
from httplib2 import Http
from oauth2client import client, file, tools

# Paths to the cached OAuth token and the downloaded client-secret file.
credentials_file_path = './credentials/credentials.json'
clientsecret_file_path = './credentials/client_secret.json'

# Full-access Drive scope for this application.
SCOPE = 'https://www.googleapis.com/auth/drive'

# Token store backed by the credentials file above.
store = file.Storage(credentials_file_path)
credentials = store.get()

# Run the interactive OAuth flow (opens a browser) only when there is no
# cached token or the cached one has expired/been revoked; run_flow also
# persists the fresh token back into `store`.
if not credentials or credentials.invalid:
    flow = client.flow_from_clientsecrets(clientsecret_file_path, SCOPE)
    credentials = tools.run_flow(flow, store)
import datetime from apiclient.discovery import build from apiclient.http import MediaFileUpload from httplib2 import Http from oauth2client import file, client, tools copyfile('credentials.json', '/tmp/credentials.json') copyfile('client_secret.json', '/tmp/client_secret.json') # Setup the Drive v3 API CSV_FILE = '/tmp/qa_list.csv' SCOPES = [ 'https://www.googleapis.com/auth/drive.file', 'https://www.googleapis.com/auth/spreadsheets.readonly' ] store = file.Storage('/tmp/credentials.json') creds = store.get() if not creds or creds.invalid: flow = client.flow_from_clientsecrets('/tmp/client_secret.json', SCOPES) creds = tools.run_flow(flow, store) httpObj = creds.authorize(Http()) def get_project_ids(spreadsheetId): discoveryUrl = ('https://sheets.googleapis.com/$discovery/rest?version=v4') projects_sheet = build('sheets', 'v4', http=httpObj, discoveryServiceUrl=discoveryUrl) rangeName = 'Sheet1!A:A' result = projects_sheet.spreadsheets().values().get(
def __init__(self, host, refresh_token, user_agent, source,
             host_override=None, extra_headers=None, save_cookies=False,
             auth_tries=None, account_type=None, debug_data=True, secure=True,
             rpc_tries=3):
    """Creates a new HttpRpcServerOauth2.

    Args:
      host: The host to send requests to.
      refresh_token: A string refresh token to use, or None to guide the user
        through the auth flow. (Replaces auth_function on parent class.)
      user_agent: The user-agent string to send to the server. Specify None to
        omit the user-agent header.
      source: Tuple, (client_id, client_secret, scope), for oauth credentials.
      host_override: The host header to send to the server (defaults to host).
      extra_headers: A dict of extra headers to append to every request. Values
        supplied here will override other default headers that are supplied.
      save_cookies: If the refresh token should be saved.
      auth_tries: The number of times to attempt auth_function before failing.
      account_type: Ignored.
      debug_data: Whether debugging output should include data contents.
      secure: If the requests sent using Send should be sent over HTTPS.
      rpc_tries: The number of rpc retries upon http server error (i.e.
        Response code >= 500 and < 600) before failing.
    """
    # Parent is given no auth_function (None) — OAuth2 replaces that mechanism.
    super(HttpRpcServerOauth2, self).__init__(
        host, None, user_agent, None,
        host_override=host_override,
        extra_headers=extra_headers,
        auth_tries=auth_tries,
        debug_data=debug_data,
        secure=secure,
        rpc_tries=rpc_tries)

    # Persist tokens to disk only when the caller opted in via save_cookies;
    # otherwise use a no-op storage so nothing is written.
    if save_cookies:
        self.storage = oauth2client_file.Storage(
            os.path.expanduser('~/.appcfg_oauth2_tokens'))
    else:
        self.storage = NoStorage()

    # `source` must be exactly (client_id, client_secret, scope).
    if not isinstance(source, tuple) or len(source) != 3:
        raise TypeError(
            'Source must be tuple (client_id, client_secret, scope).')

    self.client_id = source[0]
    self.client_secret = source[1]
    self.scope = source[2]
    self.refresh_token = refresh_token
    # With an explicit refresh token, build credentials directly (no access
    # token yet — it will be refreshed on demand); otherwise fall back to
    # whatever the storage has cached.
    if refresh_token:
        self.credentials = client.OAuth2Credentials(
            None,
            self.client_id,
            self.client_secret,
            refresh_token,
            None,
            ('https://%s/o/oauth2/token' %
             os.getenv('APPENGINE_AUTH_SERVER', 'accounts.google.com')),
            self.user_agent)
    else:
        self.credentials = self.storage.get()
def main():
    """Collect door-phone access logs and append them to a Google Spreadsheet.

    Re-queues any rows from a previous failed upload (FAIL_LOG), reads today's
    entries from TEMP_LOG, writes everything to the sheet named after the
    current year (creating that sheet if missing), and removes FAIL_LOG once
    the upload succeeds. Exits with status 11 on unrecoverable write failure.
    """
    # The file token.json stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the
    # first time.
    global service
    today = datetime.date.today()

    # Doing authentication
    store = file.Storage('token.json')
    creds = store.get()
    if not creds or creds.invalid:
        flow = client.flow_from_clientsecrets('credentials.json', SCOPES)
        creds = tools.run_flow(flow, store)
    service = build('sheets', 'v4', http=creds.authorize(Http()))
    # Todo: Send mail if authentication failed

    # Get logging data
    values = []

    # Read fail log if exist --> /var/log/doorphone_fail.log
    if os.path.isfile(FAIL_LOG):
        with open(FAIL_LOG, mode='r', encoding='utf-8') as csvfile:
            for row in csv.reader(csvfile):
                # Skipped if name of card is "Test"
                if row[1] == 'Test':
                    continue
                # Re-queue the previously failed row verbatim.
                value = list(row)
                if value:
                    values.append(value)

    # Read log file --> /var/log/doorphone.tmp
    with open(TEMP_LOG, mode='r', encoding='utf-8') as csvfile:
        for row in csv.reader(csvfile):
            # Skipped if name of card is "Test"
            if row[1] == 'Test':
                continue
            # Keep only rows stamped with today's "<Mon> <day>".
            # Bug fix: the original tested the exact same expression twice,
            # joined with `or`; the redundant duplicate was removed.
            today_tag = '{} {}'.format(STR_MONTH_MAP[today.month],
                                       str(today.day)).lower()
            if today_tag in row[0].lower():
                value = [_compose_date_format(row[0][:16])]
                value.extend(row[1:-1])
                if value:
                    values.append(value)

    # Separate data using year.
    # Check if sheet of year exist --> if not, create new sheet of year.
    try:
        # Use data to determine which year of sheet to write to.
        first_cell = '{}!A1:A1'.format(today.year)
        service.spreadsheets().values().get(spreadsheetId=SPREADSHEET_ID,
                                            range=first_cell).execute()
    except Exception:
        if not _add_new_sheet(SPREADSHEET_ID, str(today.year)):
            # Add new sheet failed: park the rows for the next run.
            _write_fail_log(values, FAIL_LOG)
            # Todo: Send mail if failed
            sys.exit(11)

    # Write data to Google Spreadsheet, appending below the last used row.
    last_row = _get_last_row(SPREADSHEET_ID,
                             '{}{}'.format(today.year, RANGE_FIRST_COL))
    write_range = '{}!A{}:E'.format(today.year, last_row + 1)
    try:
        if values:
            _write_values(SPREADSHEET_ID, write_range, values)
            print('Write value success')
        else:
            print('No value to write')
    except Exception as e:
        print('Exception: {}'.format(e))
        _write_fail_log(values, FAIL_LOG)
        # Todo: Send fail if failed
        sys.exit(11)
    else:
        # Upload succeeded, so the fail log has been flushed — drop it.
        if os.path.isfile(FAIL_LOG):
            os.remove(FAIL_LOG)
def makeEvent(id, root, inputNeeded=True, summary=None, month=None, day=None,
              year=None, startTime=None, endTime=None):
    """Create a Google Calendar event on calendar `id`.

    When inputNeeded is True, the event details are collected interactively
    via tkinter simpledialog prompts parented on `root`; otherwise the
    keyword arguments are used as supplied.

    Args:
        id: calendar id to insert the event into (shadows the builtin, kept
            for caller compatibility).
        root: tkinter parent window for the dialogs.
        inputNeeded: prompt the user for every field when True.
        summary, month, day, year, startTime, endTime: event fields used
            when inputNeeded is False.
    """
    # Setup the Calendar API
    SCOPES = 'https://www.googleapis.com/auth/calendar'
    store = file.Storage('credentials.json')
    creds = store.get()
    if not creds or creds.invalid:
        flow = client.flow_from_clientsecrets('client_secret.json', SCOPES)
        creds = tools.run_flow(flow, store)
    service = build('calendar', 'v3', http=creds.authorize(Http()), cache=MemoryCache())
    # Verifies the target calendar is visible to this account (raises otherwise).
    calendar_list_entry = service.calendarList().get(calendarId=id).execute()

    if inputNeeded:
        summary = simpledialog.askstring('name', 'name of event: ', parent=root)
        month = simpledialog.askstring('month', 'what month?', parent=root)
        day = simpledialog.askstring('day', 'what day?', parent=root)
        year = simpledialog.askstring('year', 'what year?', parent=root)
        startTime = simpledialog.askstring('start', 'when does this event start?', parent=root)
        endTime = simpledialog.askstring('end', 'when does this event end?', parent=root)
    # (The original's else branch was a chain of no-op self-assignments,
    # e.g. `summary = summary`; removed as dead code.)

    # Build RFC3339 timestamps; the UTC offset is hard-coded to -04:00.
    month = getMonth(month)
    startTime = timeToMilitary(startTime)
    first = year + '-' + month + '-' + day + 'T' + startTime + ':00-04:00'
    endTime = timeToMilitary(endTime)
    last = year + '-' + month + '-' + day + 'T' + endTime + ':00-04:00'

    event = {
        'summary': summary,
        'location': "",
        'description': "",
        'start': {
            'dateTime': first,
            'timeZone': "",
        },
        'end': {
            'dateTime': last,
            'timeZone': "",
        },
        'recurrence': [],
        'attendees': [],
        'reminders': {
            'useDefault': False,
            'overrides': [
                {'method': 'email', 'minutes': 24 * 60},
                {'method': 'popup', 'minutes': 10},
            ],
        },
    }
    event = service.events().insert(calendarId=id, body=event).execute()
    print('Event created: %s' % (event.get('htmlLink')))
def get_actual_picks():
    """Scrape recent pick e-mails from Gmail into a pandas DataFrame.

    Searches the inbox for messages from [email protected] newer than
    two days, parses the HTML results table of each message, and returns one
    DataFrame row per pick (empty DataFrame when nothing matched).
    """
    # Setup the Gmail API
    SCOPES = 'https://www.googleapis.com/auth/gmail.readonly'
    store = file.Storage('credentials.json')
    creds = store.get()
    if not creds or creds.invalid:
        flow = client.flow_from_clientsecrets(
            'client_secret_195389700512-1gscoh1moo86k7878o93pmi7j1l4uiv5.apps.googleusercontent.com.json',
            SCOPES)
        creds = tools.run_flow(flow, store)
    service = build('gmail', 'v1', http=creds.authorize(Http()))

    # Call the Gmail API
    results = service.users().labels().list(userId='me').execute()
    ttt_msgs = service.users().messages().list(
        userId='me', labelIds=['INBOX'],
        q="from:[email protected] newer_than:2d").execute()
    result = pd.DataFrame()
    if ttt_msgs['resultSizeEstimate'] != 0:
        for msg in ttt_msgs['messages']:
            msg_id = msg['id']
            message = service.users().messages().get(userId='me', id=msg_id,
                                                     format='raw').execute()
            msg_str = base64.urlsafe_b64decode(message['raw'].encode('ASCII'))
            html = msg_str
            soup = BeautifulSoup(html, "html.parser")
            table = soup.find("table", attrs={"class": "templateDataTable"})
            trs = table.findAll("tr")
            for tr in trs:
                tds = tr.findAll("td")
                if len(tds) > 0:
                    # Renamed from `dict` — the original shadowed the builtin.
                    pick = {
                        'matchdatetime': tds[0].text.strip(),
                        'match': tds[1].text.strip(),
                        'winner': tds[2].text.strip(),
                        'bet_value': tds[3].text.strip()[0:4],
                        'win_proba': float(tds[5].text.strip()[0:4]),
                        'at_odd': float(tds[4].text.strip()),
                        'event': tds[6].text.strip(),
                        'level': tds[7].text.strip(),
                    }
                    data = pd.DataFrame([pick])
                    result = result.append(data, ignore_index=True)
    return result
def main():
    """Log work start/stop on the user's primary Google Calendar.

    With argv[1] == 'start' (or no argument) a 'Start Work' marker event is
    inserted at the current instant; otherwise today's 'Start Work' event is
    stretched to now and renamed 'At work'.
    """
    # Wait briefly for network connectivity (up to 9 probes).
    for i in range(1, 10):
        if is_connected():
            break
    print(sys.argv)
    # Bug fix: check the argv length BEFORE indexing argv[1] — the original
    # evaluated sys.argv[1] first and raised IndexError when run without
    # arguments, even though the length guard was meant to cover that case.
    is_start = len(sys.argv) < 2 or sys.argv[1] == 'start'

    """Shows basic usage of the Google Calendar API.
    Prints the start and name of the next 10 events on the user's calendar.
    """
    store = file.Storage('token.json')
    creds = store.get()
    if not creds or creds.invalid:
        flow = client.flow_from_clientsecrets('credentials.json', SCOPES)
        creds = tools.run_flow(flow, store)
    service = build('calendar', 'v3', http=creds.authorize(Http()))

    # Call the Calendar API
    now = datetime.datetime.utcnow().isoformat() + 'Z'  # 'Z' indicates UTC time
    if is_start:
        # Insert a zero-length 'Start Work' marker at the current instant.
        event = {
            'summary': 'Start Work',
            'description': '',
            'start': {'dateTime': now},
            'end': {'dateTime': now}
        }
        print(event)
        event_result = service.events().insert(calendarId='primary',
                                               body=event).execute()
        print('Event created: %s' % (event_result.get('htmlLink')))
    else:
        print('Getting the upcoming 10 events')
        # Search today's window for the marker event.
        events_result = service.events().list(
            calendarId='primary',
            timeMin=datetime.datetime.today().replace(
                hour=0, minute=0, second=0, microsecond=0).isoformat() + 'Z',
            timeMax=datetime.datetime.today().replace(
                hour=23, minute=59, second=59, microsecond=999999).isoformat() + 'Z',
            maxResults=1, singleEvents=True, orderBy='startTime').execute()
        events = events_result.get('items', [])
        for event in events:
            if event['summary'] == 'Start Work':
                # Extend the marker to now and rename it to 'At work'.
                event = service.events().get(calendarId='primary',
                                             eventId=event['id']).execute()
                event['end'] = {'dateTime': now}
                event['summary'] = 'At work'
                service.events().update(calendarId='primary',
                                        eventId=event['id'],
                                        body=event).execute()
""" Shows basic usage of the Sheets API. Prints values from a Google Spreadsheet. """ from __future__ import print_function from apiclient.discovery import build from httplib2 import Http from oauth2client import file, client, tools # Setup the Sheets API SCOPES = 'https://www.googleapis.com/auth/drive' store = file.Storage('sheets_credentials.json') creds = store.get() if not creds or creds.invalid: flow = client.flow_from_clientsecrets('client_secret.json', SCOPES) creds = tools.run_flow(flow, store) service = build('sheets', 'v4', http=creds.authorize(Http())) SPREADSHEET_ID = '1bEFilMbsfdIvr0XT84KkE0WtUgCdZAWYhGJQdEAcp1E' RANGE_NAME = 'Sheet1!A2:C5' value_input_option = 'RAW' def write_values(range_name, values): #values = [ #[1,2,3], # Additional rows ... #[4,5,6] #] body = {'values': values} result = service.spreadsheets().values().update(
from __future__ import print_function # import sqlite3 import time import uuid from googleapiclient import errors from googleapiclient.discovery import build from httplib2 import Http from oauth2client import file, client, tools SCOPES = 'https://www.googleapis.com/auth/drive' store = file.Storage( '/home/odoo/odoo-dev/Projects/saif/google_api_integreation/storage.json') creds = store.get() if not creds or creds.invalid: flow = client.flow_from_clientsecrets( '/home/odoo/odoo-dev/Projects/saif/google_api_integreation/client_id.json', SCOPES) creds = tools.run_flow(flow, store) SHEETS = build('sheets', 'v4', http=creds.authorize(Http())) data = {'properties': {'title': 'Odoo Sheets [%s]' % time.ctime()}} res = SHEETS.spreadsheets().create(body=data).execute() SHEET_ID = res['spreadsheetId'] service = build('drive', 'v2', http=creds.authorize(Http())) new_permission = {'value': 'default', 'type': 'anyone', 'role': 'writer'} try: service.permissions().insert(fileId=SHEET_ID, body=new_permission).execute() except errors.HttpError, error:
# Reference: https://developers.google.com/calendar/quickstart/python # Documentation: https://developers.google.com/calendar/overview # Be sure to enable the Google Calendar API on your Google account by following the reference link above and # download the credentials.json file and place it in the same directory as this file. from __future__ import print_function from datetime import datetime from datetime import timedelta from googleapiclient.discovery import build from httplib2 import Http from oauth2client import file, client, tools # If modifying these scopes, delete the file token.json. SCOPES = "https://www.googleapis.com/auth/calendar" store = file.Storage("token.json") creds = store.get() if(not creds or creds.invalid): flow = client.flow_from_clientsecrets("credentials.json", SCOPES) creds = tools.run_flow(flow, store) service = build("calendar", "v3", http=creds.authorize(Http())) class Add_event: """ This class helps creating google calender events upon borrowing a book from the library and removes the event upon returning it. ... Methods
def updateCalender():
    """Suggest and book a calendar slot based on the posted text selection.

    NOTE(review): the source was whitespace-mangled; the indentation below is
    a reconstruction (all nested defs and the trailing setup/scheduling code
    are placed inside this function because of the final ``return "Done!"``).
    Confirm against the original file.
    """
    textToAnalyze_json = request.get_json()
    textToAnalyze = json.loads(json.dumps(textToAnalyze_json['selection']))
    print('selection')

    def read():
        """Print and return the user's next 10 upcoming events."""
        # Call the Calendar API
        now = datetime.datetime.utcnow().isoformat() + 'Z'  # 'Z' indicates UTC time
        print('Getting the upcoming 10 events')
        events_result = service.events().list(calendarId='primary', timeMin=now,
                                              maxResults=10, singleEvents=True,
                                              orderBy='startTime').execute()
        events = events_result.get('items', [])
        if not events:
            print('No upcoming events found.')
        for event in events:
            start = event['start'].get('dateTime', event['start'].get('date'))
            print(start, event['summary'])
        return events

    def order(t):
        """Rank the next 7 days by how many events of category `t` they hold."""
        # Keyword list used to classify an event as "work"; values are unused.
        work = dict({"assignment": "1", "project": "1", "work": "1", "homework": "1",
                     "lab": "1", "report": "1", "paper": "1", "math": "1",
                     "engineering": "1", "biology": "1", "physics": "1",
                     "boring": "1", "job": "1", "computer": "1", "science": "1",
                     "journal": "1", "lecture": "1", "tutorial": "1", "exam": "1",
                     "assessment": "1", "test": "1"})
        costs = np.zeros(7)  # either work or other
        now = datetime.datetime.utcnow()
        counter = 0
        current_day = now.replace(hour=0, minute=0, second=0, microsecond=0) + relativedelta(days=1)
        current_day_limit = current_day + relativedelta(days=1)
        while(counter < 7):
            # Count the matching events for each of the next 7 days.
            events_result = service.events().list(calendarId='primary',
                                                  timeMin=(current_day.isoformat() + 'Z'),
                                                  timeMax=(current_day_limit.isoformat() + 'Z'),
                                                  singleEvents=True,
                                                  orderBy='startTime').execute()
            events = events_result.get('items', [])
            for event in events:
                description = ''
                if(event.get('summary')):
                    description += event.get('summary') + ' : '
                if(event.get('description')):
                    description += event.get('description')
                is_work = False
                for word in description.split():
                    if word in work.keys():
                        is_work = True
                        break
                # NOTE(review): `t or'work'` evaluates as `(t or 'work')`,
                # which is truthy whenever t is a non-empty string — this was
                # almost certainly meant to be `t == 'work'` (mirroring the
                # `t != 'work'` on the other side). Left as-is; confirm intent.
                if((is_work and t or'work') or (not is_work and t != 'work')):
                    costs[counter] += 1
            current_day = current_day_limit
            current_day_limit = current_day_limit + relativedelta(days=1)
            counter += 1
        # Insertion-sort the day numbers (1-based offsets) by ascending cost.
        lists = [1]
        for i in range(1, 7):
            counter = 0
            while(costs[i] > costs[counter]):
                counter += 1
            lists.insert(counter, i + 1)
        return lists

    def analyze(order, duration):
        """Find up to 3 free start times (15-min grid) across the ranked days.

        `order` here is the day ranking from order() — the parameter shadows
        the sibling function of the same name.
        """
        options = ['', '', '']
        number_options = 0
        for day in order:
            # Scan 09:00 .. (23:00 - duration) of the candidate day.
            current_day = datetime.datetime.utcnow() + relativedelta(days=day)
            current_day = current_day.replace(hour=9, minute=0, second=0, microsecond=0)
            current_day_limit = current_day.replace(hour=23, minute=0, second=0,
                                                    microsecond=0) - relativedelta(minutes=duration)
            current_time = current_day
            events_result = service.events().list(calendarId='primary',
                                                  timeMin=current_day.isoformat() + 'Z',
                                                  timeMax=current_day_limit.isoformat() + 'Z',
                                                  singleEvents=True,
                                                  orderBy='startTime').execute()
            events = events_result.get('items', [])
            if not events:
                # Whole day is free: the day start itself is a valid slot.
                return current_time
            while(current_time <= current_day_limit):
                # Probe free/busy for the slot padded by 15 min on each side.
                start = current_time - relativedelta(minutes=15)
                end = current_time + relativedelta(minutes=duration + 15)
                body = {
                    "timeMin": start.isoformat() + '-04:00',
                    "timeMax": end.isoformat() + '-04:00',
                    "timeZone": 'America/New_York',
                    "items": [{"id": '*****@*****.**'}]
                }
                eventsResult = service.freebusy().query(body=body).execute()
                calendar_state = eventsResult[u'calendars']
                email_state = calendar_state[u'*****@*****.**']
                busy_state = email_state[u'busy']
                if(not busy_state):
                    # Free slot found: record it and jump to end of day so at
                    # most one suggestion is taken per day.
                    options[number_options] = current_time.strftime("%Y-%m-%d %H:%M")
                    number_options += 1
                    current_time = current_time.replace(hour=23)
                    if(number_options == 3):
                        return options
                current_time = current_time + relativedelta(minutes=15)
        return options

    def insert(name, duration, t):
        """Return the top-3 suggested slots for the task as a JSON string."""
        day_order = order(t)
        suggestions = analyze(day_order, duration)
        json_dump = json.dumps({"0": suggestions[0], "1": suggestions[1],
                                "2": suggestions[2]}, sort_keys=True)
        print(json_dump)
        return json_dump

    def schedule(name, duration, t, suggestion):
        """Insert an event named `name` at the chosen `suggestion` time."""
        # edit the parsing method below based on what the result of
        # suggestion is expected to be
        suggestion = datetime.datetime.strptime(suggestion, '%Y-%m-%d %H:%M')
        # ask front end to pick which time they want
        suggestion_end = suggestion + relativedelta(minutes=duration)
        event = {
            'summary': name,
            'description': t,
            'start': {
                'dateTime': suggestion.isoformat() + '-04:00',
                'timeZone': 'America/New_York',
            },
            'end': {
                'dateTime': suggestion_end.isoformat() + '-04:00',
                'timeZone': 'America/New_York',
            },
            'reminders': {
                'useDefault': False,
                'overrides': [
                    {'method': 'email', 'minutes': 24 * 60},
                    {'method': 'popup', 'minutes': 10},
                ],
            },
        }
        event = service.events().insert(calendarId='primary', body=event).execute()
        print ('Event created: %s' % (event.get('htmlLink')))

    # Setup the Calendar API (assigned here so the nested defs above resolve
    # `service` from this enclosing scope when they are called).
    SCOPES = 'https://www.googleapis.com/auth/calendar'
    store = file.Storage('credentials.json')
    creds = store.get()
    if not creds or creds.invalid:
        flow = client.flow_from_clientsecrets('client_secret.json', SCOPES)
        creds = tools.run_flow(flow, store)
    service = discovery.build('calendar', 'v3', http=creds.authorize(Http()))

    # Classify the selection as work/other and schedule a fixed demo slot.
    work = dict({"assignment": "1", "project": "1", "work": "1", "homework": "1",
                 "lab": "1", "report": "1", "paper": "1", "math": "1",
                 "engineering": "1", "biology": "1", "physics": "1",
                 "boring": "1", "job": "1", "computer": "1", "science": "1",
                 "journal": "1", "lecture": "1", "tutorial": "1", "exam": "1",
                 "assessment": "1", "test": "1"})
    isWork = False
    # NOTE(review): `sentence` is not defined anywhere in this visible scope —
    # presumably derived from textToAnalyze (e.g. textToAnalyze.split());
    # confirm against the original file.
    for word in sentence:
        if word in work:
            isWork = True
    if isWork == True:
        schedule('Andrew', 12, 'work', '2018-06-02 11:15')
    else:
        schedule('Andrew', 12, 'other', '2018-06-02 11:15')
    return "Done!"
def __init__(self, host, oauth2_parameters, user_agent, source,
             host_override=None, extra_headers=None, save_cookies=False,
             auth_tries=None, account_type=None, debug_data=True, secure=True,
             ignore_certs=False, rpc_tries=3):
    """Creates a new HttpRpcServerOAuth2.

    Args:
      host: The host to send requests to.
      oauth2_parameters: An object of type OAuth2Parameters (defined above)
        that specifies all parameters related to OAuth2 authentication. (This
        replaces the auth_function parameter in the parent class.)
      user_agent: The user-agent string to send to the server. Specify None to
        omit the user-agent header.
      source: Saved but ignored.
      host_override: The host header to send to the server (defaults to host).
      extra_headers: A dict of extra headers to append to every request. Values
        supplied here will override other default headers that are supplied.
      save_cookies: If the refresh token should be saved.
      auth_tries: The number of times to attempt auth_function before failing.
      account_type: Ignored.
      debug_data: Whether debugging output should include data contents.
      secure: If the requests sent using Send should be sent over HTTPS.
      ignore_certs: If the certificate mismatches should be ignored.
      rpc_tries: The number of rpc retries upon http server error (i.e.
        Response code >= 500 and < 600) before failing.
    """
    # Parent gets no auth_function (None) — OAuth2 replaces that mechanism.
    super(HttpRpcServerOAuth2, self).__init__(
        host, None, user_agent, source,
        host_override=host_override,
        extra_headers=extra_headers,
        auth_tries=auth_tries,
        debug_data=debug_data,
        secure=secure,
        ignore_certs=ignore_certs,
        rpc_tries=rpc_tries)

    if not isinstance(oauth2_parameters, self.OAuth2Parameters):
        raise TypeError('oauth2_parameters must be an OAuth2Parameters.')
    self.oauth2_parameters = oauth2_parameters

    # Persist tokens only when requested; default token path may be
    # overridden via oauth2_parameters.credential_file.
    if save_cookies:
        oauth2_credential_file = (oauth2_parameters.credential_file or
                                  '~/.appcfg_oauth2_tokens')
        self.storage = oauth2client_file.Storage(
            os.path.expanduser(oauth2_credential_file))
    else:
        self.storage = NoStorage()

    # Credential selection, in priority order: explicit credentials object,
    # then any token material (access/refresh token or token URI), then
    # whatever the storage has cached.
    if oauth2_parameters.credentials:
        self.credentials = oauth2_parameters.credentials
    elif any((oauth2_parameters.access_token, oauth2_parameters.refresh_token,
              oauth2_parameters.token_uri)):
        token_uri = (oauth2_parameters.token_uri or
                     ('https://%s/o/oauth2/token' %
                      os.getenv('APPENGINE_AUTH_SERVER', 'accounts.google.com')))
        self.credentials = client.OAuth2Credentials(
            oauth2_parameters.access_token,
            oauth2_parameters.client_id,
            oauth2_parameters.client_secret,
            oauth2_parameters.refresh_token,
            None,
            token_uri,
            self.user_agent)
    else:
        self.credentials = self.storage.get()
def is_authorized(storage_path):
    """Report whether `storage_path` holds usable OAuth2 credentials.

    Returns True only when a credential object exists in the per-account
    storage and has not been invalidated.
    """
    # Each account_id gets its own storage file.
    creds = file.Storage(storage_path).get()
    return creds is not None and not creds.invalid
def sent_to_API(title, start_time1, end_time1): # credentials = pickle.load(open("token.pkl", "rb")) # service = build("calendar", "v3", credentials=credentials) """"Get Credentials""" home_dir = os.path.expanduser('~') credential_dir = os.path.join(home_dir, '.credentials') if not os.path.exists(credential_dir): os.makedirs(credential_dir) credential_path = os.path.join(credential_dir, 'calendar-python-quickstart.json') store = file.Storage(credential_path) credentials = store.get() if not credentials or credentials.invalid is True: flow = get_flow(request) flow.params['state'] = xsrfutil.generate_token( config('SECRET_KEY'), request.user) request.session['flow'] = pickle.dumps(flow).decode('iso-8859-1') authorize_url = flow.step1_get_authorize_url() return HttpResponseRedirect(authorize_url) service = build("calendar", "v3", credentials=credentials) """"Get my calendar""" result = service.calendarList().list().execute() calendar_id = result['items'][0]['id'] """ Create a new event""" start_time1 = datetime.strptime( start_time1, "%Y-%m-%d %H:%M:%S") # convert to datetime end_time1 = datetime.strptime( end_time1, "%Y-%m-%d %H:%M:%S") # convert to datetime timezone = 'America/Sao_Paulo' event = { 'summary': title, 'location': 'ValeVerde', 'description': 'Teste', 'start': { 'dateTime': start_time1.strftime("%Y-%m-%dT%H:%M:%S"), 'timeZone': timezone, }, 'end': { 'dateTime': end_time1.strftime("%Y-%m-%dT%H:%M:%S"), 'timeZone': timezone, }, 'reminders': { 'useDefault': False, 'overrides': [ { 'method': 'email', 'minutes': 24 * 60 }, { 'method': 'popup', 'minutes': 10 }, ], }, } service.events().insert(calendarId=calendar_id, body=event).execute()
words.remove('gayz') pf.define_words(words) giveawaybot = '294882584201003009' logger = logging.getLogger('discord') logger.setLevel(logging.ERROR) handler = logging.FileHandler(filename='discord.log', encoding='utf-8', mode='w') handler.setFormatter(logging.Formatter('%(asctime)s:%(levelname)s:%(name)s: %(message)s')) logger.addHandler(handler) SCOPES = 'https://www.googleapis.com/auth/spreadsheets' TEMPLATE_ID = '1nrDrIZ-XmpHw7dF8EMc0LXMSYOhoXTqoX4WO-k8jAgk' SHEET_IDS = ['1HdtNLRyAMPCD2DYas7ChcS1luKcUTi4AvrdMIsbm1PQ', '1E6ew94iL4FaV2rWCIOdn_ibdFE3hJ_20lR-UmFDLwtk'] TEAM_COLS = 'C,F,I,L,O,R,U,X,AA,AD,AG,AJ'.split(',') POINT_COLS = 'E,H,K,N,Q,T,W,Z,AC,AF,AI,AL'.split(',') store = oauth_file.Storage('/root/badgebot/token.json') creds = store.get() if not creds or creds.invalid: flow = gclient.flow_from_clientsecrets('/root/badgebot/credentials.json', SCOPES) creds = tools.run_flow(flow, store) service = build('sheets', 'v4', http=creds.authorize(Http())) def getmention(message): return message.mentions[0] if len(message.mentions) > 0 else None def haspermission(user): if not isinstance(user, str): user = discorduser_to_id(user) user = id_to_discorduser(user, client.get_server('372042060913442818')
def main():
    """Aggregate the 'Total' rows of every spreadsheet's Summary worksheet in a
    Drive folder into one newly created summary sheet.

    NOTE(review) — defects to confirm with the author:
      * `bool = True` shadows the builtin `bool`.
      * `creds = client.OAuth2Credentials` assigns the class object and is
        immediately overwritten — dead code.
      * The bare `except: continue` hides all gspread/API errors.
      * If no spreadsheet is processed successfully, `sp_name` (and `cell_val`)
        are never bound and the lines after the loop raise NameError.
    """
    scope = ['https://spreadsheets.google.com/feeds',
             'https://www.googleapis.com/auth/drive']
    # NOTE(review): '\P' and '\s' in this Windows path are not valid escapes;
    # a raw string (r'E:\Project\storage.json') would be safer.
    store = file.Storage('E:\Project\storage.json')
    creds = store.get()
    if not creds or creds.invalid:
        flow = client.flow_from_clientsecrets('C:/creds.json', scope)
        creds = client.OAuth2Credentials
        creds = tools.run_flow(flow, store)
    SERVICE = build('drive', 'v3', http=creds.authorize(Http()))
    releases_dir_id = '0B-31_xdQ4lnzY1B5eWRhVElOdHc'
    # List sub-folders of the releases directory (not trashed).
    payload = SERVICE.files().list(
        q=" mimeType = 'application/vnd.google-apps.folder' and \
        trashed = false and \
        %r in parents " % releases_dir_id,  # can add sharedWithMe = true
        fields='files(id, name)').execute()
    directories = payload.get('files', [])
    test = map(lambda x: x['name'], directories)
    print(test)
    client_gspread = gspread.authorize(creds)
    drive_sub_dir_name = "GA-2.0.0"  # input("enter the sub directory name")
    directory_id = get_directory_id_by_name(directories, drive_sub_dir_name)
    spreadsheet_list = get_spreadsheets_by_directory_id(directory_id, SERVICE)
    print(spreadsheet_list)
    sheet_arr = list()
    summary_sheet = client_gspread.create('sheet_new').sheet1
    print("New sheet is Created")
    print(type(spreadsheet_list))
    summary_sheet.clear()
    bool = True  # tracks "first successfully opened sheet" (shadows builtin)
    # open all the sheets in the folder
    for i in spreadsheet_list:
        try:
            spreadsheet_name = i['name']
            sh = client_gspread.open(spreadsheet_name)
            sum_sheet = sh.worksheet('Summary')
            cell_val = sum_sheet.find("Total")
            if (bool == True):
                # Remember the first Summary worksheet; its header row is
                # copied into the new sheet after the loop.
                sp_name = sum_sheet
                bool = False
        except:
            continue
        print(cell_val)
        print(type(cell_val))
        print("Found something at R%sC%s" % (cell_val.row, cell_val.col))
        # Copy each sheet's 'Total' row into the aggregate sheet, then label
        # it with the source spreadsheet's name.
        summary_sheet.insert_row(sum_sheet.row_values(cell_val.row))
        #print("values of sheet", sum_sheet.row_values(cell_val.row))
        #print(cell_val)
        #print(spreadsheet_name)
        cell_val_2 = summary_sheet.find("Total")
        summary_sheet.update_cell(cell_val_2.row, cell_val_2.col, spreadsheet_name)
    # To clear contents of the newly created sheet
    summary_sheet.insert_row(list())
    summary_sheet.insert_row(sp_name.row_values(1))
    summary_sheet.update_cell(1, 1, " ")
def gmail_read(username):
    """Export the user's INBOX messages to csvfile/<username>.csv.

    Authenticates against Gmail with a per-user token stored in
    jsonfile/<username>.json, fetches each inbox message, extracts
    Subject/Date/From/snippet and (best-effort) the decoded body, and writes
    everything to a CSV. Returns the CSV file name.

    NOTE(review): despite the comments, only the INBOX label is queried — the
    list is not restricted to *unread* messages, and nothing marks them read.
    `mssg_list = unread_msgs['messages']` raises KeyError on an empty inbox.
    """
    # Creating a storage.JSON file with authentication details
    SCOPES = 'https://www.googleapis.com/auth/gmail.modify'  # we are using modify and not readonly, as we will be marking the messages Read
    module_dir = os.path.dirname(__file__)
    store = file.Storage(os.path.join(module_dir, f'jsonfile/{username}.json'))
    # Empty argv so oauth2client's argparser does not consume real CLI args.
    flags = tools.argparser.parse_args(args=[])
    creds = store.get()
    if not creds or creds.invalid:
        flow = client.flow_from_clientsecrets(
            os.path.join(module_dir, 'client_secret.json'), SCOPES)
        creds = tools.run_flow(flow, store, flags)
    GMAIL = discovery.build('gmail', 'v1', http=creds.authorize(Http()))
    user_id = 'me'
    label_id_one = 'INBOX'
    # Getting all the unread messages from Inbox
    # labelIds can be changed accordingly
    unread_msgs = GMAIL.users().messages().list(userId='me',
                                                labelIds=[label_id_one]).execute()
    # We get a dictonary. Now reading values for the key 'messages'
    mssg_list = unread_msgs['messages']
    #print ("Total unread messages in inbox: ", str(len(mssg_list)))
    final_list = []
    for mssg in mssg_list:
        temp_dict = {}
        m_id = mssg['id']  # get id of individual message
        temp_dict['m_id'] = m_id
        message = GMAIL.users().messages().get(
            userId=user_id, id=m_id).execute()  # fetch the message using API
        #print(message)
        payld = message['payload']  # get payload of the message
        headr = payld['headers']  # get header of the payload
        for one in headr:  # getting the Subject
            if one['name'] == 'Subject':
                msg_subject = one['value']
                temp_dict['Subject'] = msg_subject
            else:
                pass
        for two in headr:  # getting the date
            if two['name'] == 'Date':
                msg_date = two['value']
                date_parse = (parser.parse(msg_date))
                m_date = (date_parse.date())
                temp_dict['Date'] = str(m_date)
            else:
                pass
        for three in headr:  # getting the Sender
            if three['name'] == 'From':
                msg_from = three['value']
                temp_dict['Sender'] = msg_from
            else:
                pass
        temp_dict['Snippet'] = message['snippet']  # fetching message snippet
        try:
            # Fetching message body; messages without 'parts' (or with a
            # non-decodable first part) are silently skipped.
            mssg_parts = payld['parts']  # fetching the message parts
            part_one = mssg_parts[0]  # fetching first element of the part
            part_body = part_one['body']  # fetching body of the message
            part_data = part_body['data']  # fetching data from the body
            # Gmail returns URL-safe base64; map -/_ back to +// before decode.
            clean_one = part_data.replace("-", "+")  # decoding from Base64 to UTF-8
            clean_one = clean_one.replace("_", "/")  # decoding from Base64 to UTF-8
            clean_two = base64.b64decode(bytes(
                clean_one, 'UTF-8'))  # decoding from Base64 to UTF-8
            soup = BeautifulSoup(clean_two, "lxml")
            mssg_body = soup.body()
            # mssg_body is a readible form of message body
            # depending on the end user's requirements, it can be further cleaned
            # using regex, beautiful soup, or any other method
            temp_dict['Message_body'] = mssg_body
        except:
            pass
        #print (temp_dict)
        final_list.append(
            temp_dict)  # This will create a dictonary item in the final list
    #print ("Total messaged retrived: ", str(len(final_list)))
    '''

    The final_list will have dictionary in the following format:

    {		'Sender': '"email.com" <*****@*****.**>',
                'Subject': 'Lorem ipsum dolor sit ametLorem ipsum dolor sit amet',
                'Date': 'yyyy-mm-dd',
                'Snippet': 'Lorem ipsum dolor sit amet'
                'Message_body': 'Lorem ipsum dolor sit amet'}


    The dictionary can be exported as a .csv or into a databse
    '''
    #exporting the values as .csv
    with open(os.path.join(module_dir, f'csvfile/{username}.csv'), 'w',
              encoding='utf-8', newline='') as csvfile:
        fieldnames = [
            'm_id', 'Sender', 'Subject', 'Date', 'Snippet', 'Message_body'
        ]
        writer = csv.DictWriter(csvfile, fieldnames=fieldnames, delimiter=',')
        writer.writeheader()
        for val in final_list:
            writer.writerow(val)
    return f'{username}.csv'
def main(): """Shows basic usage of the Drive v3 API. Prints the names and ids of the first 10 files the user has access to. """ store = file.Storage('token.json') creds = store.get() if not creds or creds.invalid: flow = client.flow_from_clientsecrets('credentials.json', SCOPES) creds = tools.run_flow(flow, store) service = build('drive', 'v3', http=creds.authorize(Http())) # Call the Drive v3 API # Will only work if the directory exists... results = service.files().list( pageSize=1, fields="nextPageToken, files(id, name)", q="name = 'mlp-samples-test' and mimeType = 'application/vnd.google-apps.folder'").execute() items = results.get('files', []) mlp_files_dir_id = None if not items: print('No MLP directory found.') else: print('Results from directory search (should be 1!):') for item in items: print('{0} ({1})'.format(item['name'], item['id'])) mlp_files_dir_id = item['id'] if mlp_files_dir_id is None: print('No mlp test dir found, exiting!') exit() results = service.files().list(pageSize=15, fields="nextPageToken, files(id, name)", q="'%s' in parents" % mlp_files_dir_id).execute() mlp_files_res = results.get('files', []) mlp_file_ids = None print ("Should be an array?") print (mlp_files_res) if not mlp_files_res: print('No MLP files found, exiting...') exit() else: print('MLP found:') for item in mlp_files_res: print('{0} ({1})'.format(item['name'], item['id'])) # Download something... if (mlp_files_res): for mlp_file in mlp_files_res: file_id = mlp_file['id'] request = service.files().get_media(fileId=file_id) fh = io.BytesIO() downloader = MediaIoBaseDownload(fh, request) done = False start = True print("Downloading %s..." % mlp_file['name']) while done is False and start is True: status, done = downloader.next_chunk() print(status.progress()*100.0) print ("Done") with open('./mlp_samples/' + mlp_file['name'],'wb') as out: ## Open temporary file as bytes out.write(fh.getvalue()) print (mlp_file['name'] + " written to disk!") else: print ("MLP dir not found!")
def init(argv, name, version, doc, filename, scope=None, parents=[],
         discovery_filename=None):
    """A common initialization routine for samples.

    Many of the sample applications do the same initialization, which has now
    been consolidated into this function. This function uses common idioms found
    in almost all the samples, i.e. for an API with name 'apiname', the
    credentials are stored in a file named apiname.dat, and the
    client_secrets.json file is stored in the same directory as the application
    main file.

    Args:
      argv: list of string, the command-line parameters of the application.
      name: string, name of the API.
      version: string, version of the API.
      doc: string, description of the application. Usually set to __doc__.
      filename: string, filename of the application. Usually set to __file__.
      scope: string, The OAuth scope used. Defaults to
        'https://www.googleapis.com/auth/<name>'.
      parents: list of argparse.ArgumentParser, additional command-line flags.
        (Note: mutable default argument — shared across calls; it is only read
        here, never mutated.)
      discovery_filename: string, name of local discovery file (JSON). Use when
        discovery doc not available via URL.

    Returns:
      A tuple of (service, flags), where service is the service object and
      flags is the parsed command-line flags.
    """
    if scope is None:
        scope = 'https://www.googleapis.com/auth/' + name

    # Parser command-line arguments.
    parent_parsers = [tools.argparser]
    parent_parsers.extend(parents)
    parser = argparse.ArgumentParser(
        description=doc,
        formatter_class=argparse.RawDescriptionHelpFormatter,
        parents=parent_parsers)
    flags = parser.parse_args(argv[1:])

    # Name of a file containing the OAuth 2.0 information for this
    # application, including client_id and client_secret, which are found
    # on the API Access tab on the Google APIs
    # Console <http://code.google.com/apis/console>.
    client_secrets = os.path.join(os.path.dirname(filename),
                                  'client_secrets.json')

    # Set up a Flow object to be used if we need to authenticate.
    flow = client.flow_from_clientsecrets(
        client_secrets,
        scope=scope,
        message=tools.message_if_missing(client_secrets))

    # Prepare credentials, and authorize HTTP object with them.
    # If the credentials don't exist or are invalid run through the native client
    # flow. The Storage object will ensure that if successful the good
    # credentials will get written back to a file.
    storage = file.Storage(name + '.dat')
    credentials = storage.get()
    if credentials is None or credentials.invalid:
        credentials = tools.run_flow(flow, storage, flags)
    http = credentials.authorize(http=httplib2.Http())

    if discovery_filename is None:
        # Construct a service object via the discovery service.
        service = discovery.build(name, version, http=http)
    else:
        # Construct a service object using a local discovery document file.
        with open(discovery_filename) as discovery_file:
            service = discovery.build_from_document(
                discovery_file.read(),
                base='https://www.googleapis.com/',
                http=http)
    return (service, flags)
#!/usr/bin/env python from __future__ import print_function import os from apiclient.discovery import build from httplib2 import Http from oauth2client import file, client, tools import requests try: import argparse flags = argparse.ArgumentParser(parents=[tools.argparser]).parse_args() except ImportError: flags = None SCOPES = 'https://www.googleapis.com/auth/drive.file' store = file.Storage('storage.json') creds = store.get() if not creds or creds.invalid: flow = client.flow_from_clientsecrets('client_secrets.json', SCOPES) creds = tools.run_flow(flow, store, flags) \ if flags else tools.run(flow, store) DRIVE = build('drive', 'v2', http=creds.authorize(Http())) print (DRIVE.children().list( folderId='0BwZkkLKYLl7WQ2FFeXFZcHpReUE').execute()['items'])
from __future__ import print_function from apiclient.discovery import build from httplib2 import Http from oauth2client import file, client, tools import datetime from firebase import firebase #Firebase connection firebase = firebase.FirebaseApplication('https://spalsa-h.firebaseio.com/', None) # Setup the Calendar API SCOPES = 'https://www.googleapis.com/auth/calendar' store = file.Storage('token.json') creds = store.get() if not creds or creds.invalid: flow = client.flow_from_clientsecrets('credentials.json', SCOPES) creds = tools.run_flow(flow, store) service = build('calendar', 'v3', http=creds.authorize(Http())) # Call the Calendar API now = datetime.datetime.utcnow().isoformat() + 'Z' events_result = service.events().list(calendarId='primary', timeMin=now, singleEvents=True, orderBy='startTime').execute() events = events_result.get('items', []) for event in events: start = int( event['start'].values()[0].split("T")[1].split("00-")[0].replace( ":", ""))
def list_of_files(): results = service.files().list( pageSize=10, fields="nextPageToken, files(id, name)").execute() items = results.get('files', []) if not items: print('No files found.') else: print('Files:') for item in items: #print(u'{0} ({1})'.format(item['name'], item['id'])) name = item['name'] id = item['id'] print(name) print(id) if __name__ == '__main__': t = os.path.realpath('token.json') store = file.Storage(t) creds = store.get() if not creds or creds.invalid: k = os.path.realpath('credentials.json') flow = client.flow_from_clientsecrets(k, SCOPES) creds = tools.run_flow(flow, store) service = build('drive', 'v3', http=creds.authorize(Http())) list_of_files()
def get_googlecalendar_events():
    """Fetch up to 75 upcoming calendar events and return those falling within
    the next ~7 days as a list of parsed dicts.

    NOTE(review): the cached token.json is deleted up front, which forces a
    fresh store on every call. The week-window arithmetic below assumes a
    31-day month (the inline comment says 30) and the function implicitly
    returns None when the 7-day cutoff is never reached — callers should
    expect either a list or None.
    """
    os.system('rm token.json')
    time.sleep(0.3)
    store = file.Storage('token.json')
    creds = store.get()
    if not creds or creds.invalid:
        flow = client.flow_from_clientsecrets('credentials.json', SCOPES)
        creds = tools.run_flow(flow, store)
    service = build('calendar', 'v3', http=creds.authorize(Http()))

    # Call the Calendar API
    now = datetime.datetime.utcnow().isoformat() + 'Z'  # 'Z' indicates UTC time
    # print('Getting the upcoming 10 events')
    events_result = service.events().list(calendarId='primary', timeMin=now,
                                          maxResults=75, singleEvents=True,
                                          orderBy='startTime').execute()
    events = events_result.get('items', [])
    # print (events)
    first_time = 1
    if not events:
        print('No upcoming events found.')
    event_by_day = []
    for event in events:  #go through 75 events
        #Events are given one after the other starting from the current time we made the Request
        #getting and parsing info
        # start/end are ISO strings: chars 0-10 = date, 11-16 = HH:MM.
        start = event['start'].get('dateTime', event['start'].get('date'))
        start_date = start[0:10]
        start_time = start[11:16]
        end = event['end'].get('dateTime', event['end'].get('date'))
        end_date = end[0:10]
        end_time = end[11:16]
        event_name = event['summary']
        # event_start_day = datetime.datetime.strptime(start_date, '%Y-%m-%d').strftime('%a')
        #Not all events have locations
        try:
            event_location = event['location']
        except:
            event_location = ''
        if (first_time == 1):
            # First event establishes the 7-day window [first_day, last_day].
            first_day = int(start_date[8:10])
            last_day = first_day + 6
            if (last_day > 31):  #assuming all months are 30 days long.
                #This is not an issue, since if we have crossed another month,
                #we just add more events of repeated days
                last_day = last_day - 31
            first_time = 0
        else:
            if (last_day < int(start_date[8:10])):
                # Event falls past the window — stop and return what we have.
                return event_by_day
        #creating event dictionary
        parsed_event = {
            "name" : event_name,
            "day" : (start_date),
            "start_hour" : (start_time.split(':')[0]),
            "start_min" : (start_time.split(':')[1]),
            "end_hour" : (end_time.split(':')[0]),
            "end_min" : (end_time.split(':')[1]),
            "location" : event_location
        }
        event_by_day.append(parsed_event)  #adding event to specific day
def apicall():
    """API Call

    Flask endpoint: takes a JSON payload with a 'text' field, runs per-sentence
    emotion classification plus a whole-text sentiment network, infers a mental
    state (procrastinate / relax / depression / neutral), asks Google Calendar
    for free-slot suggestions for any detected "action" sentence, and returns
    everything as a JSON response.

    NOTE(review) — indentation of this block was reconstructed from a collapsed
    source line; nesting of the helper defs and a few statements is a best-effort
    reading. Suspected defects flagged inline below.
    """
    try:
        # test_json = request.get_json()
        # test = pd.read_json(test_json, orient='records')
        # #To resolve the issue of TypeError: Cannot compare types 'ndarray(dtype=int64)' and 'str'
        # test['Dependents'] = [str(x) for x in list(test['Dependents'])]
        # #Getting the Loan_IDs separated out
        # loan_ids = test['Loan_ID']
        textToAnalyze_json = request.get_json()
        textToAnalyzeWhole = json.loads(json.dumps(textToAnalyze_json['text']))
        textToAnalyze = json.loads(json.dumps(textToAnalyze_json['text']))
        #Split text into array of sentences
        #textToAnalyze = textToAnalyze.split('.')
        # regular expressions are easiest (and fastest)
        sentenceEnders = re.compile('[.!?]')
        textToAnalyze = sentenceEnders.split(textToAnalyze)
        #print(textToAnalyze)
    except Exception as e:
        raise e

    clf = 'emotion_classifier.pk'  # NOTE(review): unused variable

    if textToAnalyze == "":
        return(bad_request())
    else:
        #Load the count vectorizer
        #print("Loading the count vectorizer...")
        count_vect = None
        with open('models/countVectorizer.pk', 'rb') as f:
            count_vect = pickle.load(f)
        #Load the saved model
        #print("Loading the model...")
        loaded_model = None
        with open('models/emotion_classifier.pk', 'rb') as f2:
            loaded_model = pickle.load(f2)
        #print("The model has been loaded...doing predictions now...")
        predictions = []
        sequence = []
        #better formatting.
        print("Text", textToAnalyze)
        # One emotion prediction per non-blank sentence.
        for sentence in textToAnalyze:
            if sentence != " ":
                predictionIs = loaded_model.predict((count_vect.transform([sentence])))
                predictions.append(predictionIs)
                sequence.append(predictionIs[0])

        ############################################################################
        #JERRY'S CODE
        ############################################################################
        def read():
            # Print and return the next 10 upcoming calendar events.
            # Call the Calendar API
            now = datetime.datetime.utcnow().isoformat() + 'Z'  # 'Z' indicates UTC time
            print('Getting the upcoming 10 events')
            events_result = service.events().list(calendarId='primary', timeMin=now,
                                                  maxResults=10, singleEvents=True,
                                                  orderBy='startTime').execute()
            events = events_result.get('items', [])
            if not events:
                print('No upcoming events found.')
            for event in events:
                start = event['start'].get('dateTime', event['start'].get('date'))
                print(start, event['summary'])
            return events

        def order(t):
            # Rank the next 7 days (1..7, starting tomorrow) by how many
            # matching events they contain; returns day offsets, least busy first.
            work = dict({"assignment":"1", "project":"1", "work":"1", "homework":"1",
                         "lab":"1", "report":"1", "paper":"1", "math":"1",
                         "engineering":"1", "biology":"1", "physics":"1",
                         "boring":"1", "job":"1", "computer":"1", "science":"1",
                         "journal":"1", "lecture":"1", "tutorial":"1", "exam":"1",
                         "assessment":"1", "test":"1"})
            costs = np.zeros(7)  #either work or other
            now = datetime.datetime.utcnow()
            counter = 0
            current_day = now.replace(hour=0, minute=0, second=0,microsecond=0) + relativedelta(days=1)
            current_day_limit = current_day + relativedelta(days=1)
            while(counter < 7):
                events_result = service.events().list(calendarId='primary',
                                                      timeMin=(current_day.isoformat()+'Z'),
                                                      timeMax=(current_day_limit.isoformat()+'Z'),
                                                      singleEvents=True,
                                                      orderBy='startTime').execute()
                events = events_result.get('items', [])
                for event in events:
                    description = ''
                    if(event.get('summary')):
                        description += event.get('summary') + ' : '
                    if(event.get('description')):
                        description += event.get('description')
                    is_work = False
                    for word in description.split():
                        if word in work.keys():
                            is_work = True
                            break
                    # NOTE(review): `t or'work'` is always truthy for non-empty t,
                    # so the first clause fires for every work event regardless of
                    # t; this was probably meant to be `t == 'work'` — confirm.
                    if((is_work and t or'work') or (not is_work and t!='work')):
                        costs[counter] += 1
                current_day = current_day_limit
                current_day_limit = current_day_limit + relativedelta(days=1)
                counter+=1
            # Insertion sort of day indices by ascending cost.
            lists = [1]
            for i in range(1, 7):
                counter = 0
                while(costs[i] > costs[counter]):
                    counter+=1
                lists.insert(counter, i+1)
            #for i in range(0, 7):
                #print(lists[i])
            return lists

        def analyze(order, duration):
            # Walk the ranked days and probe 15-minute steps between 09:00 and
            # 23:00 with the freebusy API; collect up to 3 free start times.
            options = ['', '', '']
            number_options = 0
            for day in order:
                current_day = datetime.datetime.utcnow() + relativedelta(days=day)
                current_day = current_day.replace(hour=9, minute=0, second=0,microsecond=0)
                #current_day_limit = current_day + relativedelta(days=1)
                current_day_limit = current_day.replace(hour=23, minute=0, second=0, microsecond=0) - relativedelta(minutes=duration)
                current_time = current_day
                events_result = service.events().list(calendarId='primary',
                                                      timeMin=current_day.isoformat() + 'Z',
                                                      timeMax=current_day_limit.isoformat() + 'Z',
                                                      singleEvents=True,
                                                      orderBy='startTime').execute()
                events = events_result.get('items', [])
                if not events:
                    #print('No upcoming events found.')
                    # Whole day is free — return a datetime (callers also
                    # receive a list of strings in the busy case; mixed types).
                    return current_time
                while(current_time <= current_day_limit):
                    #start = event['start'].get('dateTime')
                    #start = datetime.datetime.strptime(start, '%Y-%m-%dT%H:%M:%S-04:00')
                    #end = event['end'].get('dateTime')
                    #end = datetime.datetime.strptime(end, '%Y-%m-%dT%H:%M:%S-04:00')
                    #print(datetime.datetime.now())
                    #print(datetime.datetime.now().replace(hour=23, microsecond=0).isoformat())
                    #test = relativedelta(minutes=15)
                    #start1 = end + relativedelta(minutes=15)
                    #end1 = start1 + relativedelta(minutes=duration+15)
                    #end2 = start - relativedelta(minutes=15)
                    #start2 = end2 - relativedelta(minutes=duration+15)
                    # Candidate slot padded by 15 minutes on both sides.
                    start = current_time - relativedelta(minutes=15)
                    end = current_time + relativedelta(minutes=duration+15)
                    #start = current_time
                    #end = current_time + relativedelta(minutes=duration)
                    body = {
                        "timeMin": start.isoformat()+'-04:00',
                        "timeMax": end.isoformat()+'-04:00',
                        "timeZone": 'America/New_York',
                        "items": [{"id": '*****@*****.**'}]
                    }
                    eventsResult = service.freebusy().query(body=body).execute()
                    #print('The event result is: ')
                    #print(start)
                    #print(end)
                    #print(eventsResult)
                    #print(eventsResult[u'calendars'])
                    calendar_state = eventsResult[u'calendars']
                    #print(test)
                    email_state = calendar_state[u'*****@*****.**']
                    #print(test)
                    busy_state = email_state[u'busy']
                    #print(test)
                    #print('end')
                    if(not busy_state):
                        options[number_options] = current_time.strftime("%Y-%m-%d %H:%M")
                        number_options+=1
                        # Jump past the end of this day so at most one slot
                        # per day is taken.
                        current_time = current_time.replace(hour=23)
                        if(number_options==3):
                            return options
                    #cal_dict = eventsResult[u'calendars']
                    #for cal_name in cal_dict:
                    #    print(cal_name, cal_dict[cal_name])
                    current_time = current_time + relativedelta(minutes=15)
            return options

        def insert(name, duration, t):
            # Produce {"0": [...], "1": [...], "2": [...]} of suggested times.
            day_order = order(t)
            suggestions = analyze(day_order, duration)
            #for s in range(0, len(suggestions)):
            #    print(suggestions[s])
            #json_dump = json.dumps(suggestions, default=json_serial)
            #json_dump=json.dumps(suggestions, indent=4, sort_keys=True, default=str)
            #json_dump=json.dumps({"0": suggestions[0], "1": suggestions[1], "2": suggestions[2]}, sort_keys=True)
            suggestions ={"0": [suggestions[0]], "1": [suggestions[1]], "2": [suggestions[2]]}
            # #print(json_dump)
            # print(json_dump)
            #return json_dump
            return suggestions

        def schedule(name, duration, t, suggestion):
            # Confirm via stdin, then insert the event on the primary calendar.
            #edit the parsing method below based on what the result of suggestion is expected to be
            suggestion = datetime.datetime.strptime(suggestion, '%Y-%m-%dT%H:%M:%S-04:00')
            #ask front end to pick which time they want
            answer = input('Would you like to have an event put on your calendar called ' + name + ' on ' + suggestion.strftime("%Y-%m-%d at %H:%M") + ' oclock for ' + str(duration) + ' minutes? ')
            if(answer != 'no'):
                suggestion_end = suggestion + relativedelta(minutes=duration)
                event = {
                    'summary': name,
                    'description': t,
                    'start': {
                        'dateTime': suggestion.isoformat()+ '-04:00',
                        'timeZone': 'America/New_York',
                    },
                    'end': {
                        'dateTime': suggestion_end.isoformat()+'-04:00',
                        'timeZone': 'America/New_York',
                    },
                    'reminders': {
                        'useDefault': False,
                        'overrides': [
                            {'method': 'email', 'minutes': 24 * 60},
                            {'method': 'popup', 'minutes': 10},
                        ],
                    },
                }
                event = service.events().insert(calendarId='primary', body=event).execute()
                print ('Event created: %s' % (event.get('htmlLink')))

        def neural_network_model(data):
            # 3-hidden-layer feed-forward network (ReLU activations).
            layer_1 = tf.add(tf.matmul(data, hidden_1_layer['weights']), hidden_1_layer['biases'])
            # now goes through an activation function - sigmoid function
            layer_1 = tf.nn.relu(layer_1)
            # input for layer 2 = result of activ_func for layer 1
            layer_2 = tf.add(tf.matmul(layer_1, hidden_2_layer['weights']), hidden_2_layer['biases'])
            layer_2 = tf.nn.relu(layer_2)
            layer_3 = tf.add(tf.matmul(layer_2, hidden_3_layer['weights']), hidden_3_layer['biases'])
            layer_3 = tf.nn.relu(layer_3)
            output = tf.matmul(layer_3, output_layer['weights']) + output_layer['biases']
            return output

        def use_neural_network(input_data):
            # Bag-of-words sentiment: 0 = positive, 1 = negative, 0.5 = unsure.
            prediction = neural_network_model(x)
            with open('models/lexicon.pickle','rb') as f:
                lexicon = pickle.load(f)
            with tf.Session() as sess:
                sess.run(tf.global_variables_initializer())
                saver.restore(sess,"models/model.ckpt")
                # import the inspect_checkpoint library
                from tensorflow.python.tools import inspect_checkpoint as chkp
                # print all tensors in checkpoint file
                #chkp.print_tensors_in_checkpoint_file("./model.ckpt", tensor_name='', all_tensors=True)
                #saver.restore(sess,tf.train.latest_checkpoint('./'))
                current_words = word_tokenize(input_data.lower())
                current_words = [lemmatizer.lemmatize(i) for i in current_words]
                features = np.zeros(len(lexicon))
                for word in current_words:
                    if word.lower() in lexicon:
                        index_value = lexicon.index(word.lower())
                        # OR DO +=1, test both
                        features[index_value] += 1
                features = np.array(list(features))
                # pos: [1,0] , argmax: 0
                # neg: [0,1] , argmax: 1
                test = prediction.eval(feed_dict={x:[features]})
                print(test)
                test = test[0]
                difference = abs(test[0] - test[1])
                # Only commit to a class when the logit gap is large.
                if(difference >= 50):
                    result = (sess.run(tf.argmax(prediction.eval(feed_dict={x:[features]}),1)))
                    if result[0] == 0:
                        #print('Positive:',input_data)
                        return 0
                    elif result[0] == 1:
                        #print('Negative:',input_data)
                        return 1
                return 0.5

        # Network hyper-parameters and weights (restored from checkpoint).
        lemmatizer = WordNetLemmatizer()
        n_nodes_hl1 = 500
        n_nodes_hl2 = 500
        n_nodes_hl3 = 500
        n_classes = 2
        hm_data = 2000000
        batch_size = 32
        hm_epochs = 10
        x = tf.placeholder('float')
        y = tf.placeholder('float')
        current_epoch = tf.Variable(1)
        hidden_1_layer = {'f_fum':n_nodes_hl1,
                          'weights':tf.Variable(tf.random_normal([205, n_nodes_hl1])),
                          'biases':tf.Variable(tf.random_normal([n_nodes_hl1]))}
        hidden_2_layer = {'f_fum':n_nodes_hl2,
                          'weights':tf.Variable(tf.random_normal([n_nodes_hl1, n_nodes_hl2])),
                          'biases':tf.Variable(tf.random_normal([n_nodes_hl2]))}
        hidden_3_layer = {'weights': tf.Variable(tf.truncated_normal([n_nodes_hl2, n_nodes_hl3], stddev=0.1)),
                          'biases': tf.Variable(tf.constant(0.1, shape=[n_nodes_hl3]))}
        output_layer = {'weights': tf.Variable(tf.truncated_normal([n_nodes_hl3, n_classes], stddev=0.1)),
                        'biases': tf.Variable(tf.constant(0.1, shape=[n_classes])), }
        saver = tf.train.import_meta_graph('models/model.ckpt.meta')
        sentiment = use_neural_network(textToAnalyzeWhole)

        ##########################################################3
        '''
        Find key action words:
        '''
        # keyword -> category: Relax / Procrast / Depressed / Action1 / Action2
        powerWords = {'frustrated': 'Relax', 'frustrating': 'Relax', 'chill': 'Relax',
                      'me': 'Depressed', 'much': 'Relax', 'my': 'Relax', 'not': 'Relax',
                      'overwhelmed': 'Relax', 'vacation': 'Relax', 'crazy': 'Relax',
                      'stress': 'Relax', 'stressed': 'Relax', 'too': 'Relax',
                      'sleep': 'Depressed', 'burnt': 'Relax', 'food': 'Relax',
                      'control': 'Relax', 'work': 'Action2', 'relax': 'Relax',
                      'relaxation': 'Relax', 'hesistate': 'Procrast', 'lazy': 'Procrast',
                      'prolong': 'Procrast', 'slow': 'Procrast', 'apathetic': 'Procrast',
                      'bored': 'Procrast', 'boring': 'Procrast', 'tedium': 'Procrast',
                      'anime': 'Procrast', 'netflix': 'Procrast', 'movies': 'Procrast',
                      'waste': 'Procrast', 'ice cream': 'Procrast', 'snack': 'Procrast',
                      'binge': 'Procrast', 'tv': 'Procrast', 'game': 'Procrast',
                      'video': 'Procrast', 'facebook': 'Procrast', 'twitter': 'Procrast',
                      'instagram': 'Procrast', 'twitch': 'Procrast', 'league': 'Procrast',
                      'guilt': 'Procrast', 'shame': 'Procrast', 'procrastinate': 'Procrast',
                      'procrastination': 'Procrast', 'procrastinated': 'Procrast',
                      'wasted': 'Procrast', 'time': 'Procrast', 'hesitated': 'Procrast',
                      'prolonged': 'Procrast', 'procrastinating': 'Procrast',
                      'wasting': 'Procrast', 'dark': 'Depressed', 'destroy': 'Depressed',
                      'die': 'Depressed', 'hate': 'Depressed', 'kill': 'Depressed',
                      'life': 'Depressed', 'murder': 'Depressed', 'myself': 'Depressed',
                      'revenge': 'Depressed', 'someone': 'Depressed',
                      'understand': 'Depressed', 'cry': 'Depressed', 'worst': 'Depressed',
                      'enemy': 'Depressed', 'hurts': 'Depressed', 'broken': 'Depressed',
                      'erase': 'Depressed', 'evil': 'Depressed', 'pain': 'Depressed',
                      'emotion': 'Depressed', 'world': 'Depressed', 'numb': 'Depressed',
                      'emotions': 'Depressed', 'supposed': 'Depressed',
                      'strength': 'Depressed', 'alone': 'Depressed',
                      'depression': 'Depressed', 'depressed': 'Depressed',
                      'suicide': 'Depressed', 'tears': 'Depressed', 'tear': 'Depressed',
                      'hole': 'Depressed', 'chasm': 'Depressed', 'burden': 'Depressed',
                      'sad': 'Depressed', 'died': 'Depressed', 'cried': 'Depressed',
                      'asleep': 'Depressed', 'haze': 'Depressed', 'energy': 'Depressed',
                      'dreading': 'Depressed', 'do': 'Action2', 'find': 'Action1',
                      'go': 'Action2', 'need': 'Action1', 'have': 'Action1',
                      'play': 'Action1', 'want': 'Action1', 'must': 'Action1',
                      'require': 'Action1', 'required': 'Action1', 'wish': 'Action1',
                      'crave': 'Action1', 'miss': 'Action1', 'accomplish': 'Action2',
                      'finish': 'Action2', 'ace': 'Action2', 'pass': '******',
                      'earn': 'Action2', 'build': 'Action2', 'achieve': 'Action2',
                      'win': 'Action2', 'create': 'Action2', 'implement': 'Action2',
                      'perform': 'Action2', 'soon': 'Action2', 'possible': 'Action2',
                      'buy': 'Action2', 'complete': 'Action2', 'start': 'Action2',
                      'exercise': 'Action2', 'visit': 'Action2', 'more': 'Action2',
                      'use': 'Action2', 'make': 'Action2', 'try': 'Action2',
                      'study': 'Action2', 'accomplishing': 'Action2',
                      'finishing': 'Action2', 'aceing': 'Action2', 'passing': 'Action2',
                      'earning': 'Action2', 'building': 'Action2',
                      'achieveing': 'Action2', 'wining': 'Action2',
                      'createing': 'Action2', 'implementing': 'Action2',
                      'doing': 'Action2', 'performing': 'Action2', 'sooning': 'Action2',
                      'possibleing': 'Action2', 'buying': 'Action2',
                      'completeing': 'Action2', 'starting': 'Action2',
                      'working': 'Action2', 'exerciseing': 'Action2',
                      'visiting': 'Action2', 'moreing': 'Action2', 'useing': 'Action2',
                      'makeing': 'Action2', 'trying': 'Action2', 'studying': 'Action2',
                      'going': 'Action2', 'learn': 'Action2'}
        workWords = dict({"assignment":"1", "project":"1", "work":"1", "homework":"1",
                          "lab":"1", "report":"1", "paper":"1", "math":"1",
                          "engineering":"1", "biology":"1", "physics":"1", "boring":"1",
                          "job":"1", "computer":"1", "science":"1", "journal":"1",
                          "lecture":"1", "tutorial":"1", "exam":"1", "assessment":"1",
                          "test":"1"})
        procrastinateCount = 0
        depressionCount = 0
        needRelaxCount = 0
        actionToggled = False
        # NOTE(review): initialised as `actionSentence` (singular) but read
        # below as `actionSentences` — NameError if no action sentence is ever
        # found. Confirm the intended name.
        actionSentence = []
        sentenceAdded = False
        isWork = False
        workState = False
        i = 0
        for sentence in textToAnalyze:
            sentenceAdded = False
            if(sentence != " "):
                words = re.sub("[^\w]", " ", sentence).split()
                #See if words are in the hashmap
                for word in words:
                    if word in workWords:
                        isWork = True
                    if word in powerWords:
                        # Only count negative categories when the sentence's
                        # predicted emotion is not "joy".
                        if powerWords[word] == 'Depressed' and predictions[i][0] != "joy":
                            depressionCount += 1
                        if powerWords[word] == 'Relax' and predictions[i][0] != "joy":
                            needRelaxCount += 1
                        if powerWords[word] == 'Procrast' and predictions[i][0] != "joy":
                            procrastinateCount += 1
                        if powerWords[word] == 'Action1' or powerWords[word] == 'Action2':
                            # The second action word encountered marks the
                            # sentence as the "action sentence".
                            if actionToggled == True:
                                if sentenceAdded is False:
                                    actionSentences = sentence
                                    sentenceAdded = True
                                    workState = isWork
                            else:
                                actionToggled = True
                # Advance the per-sentence prediction index in lockstep.
                if i < len(predictions):
                    i += 1
        print(workState)
        #Evaluate
        pain = max(depressionCount, needRelaxCount, procrastinateCount)
        if pain > int(len(textToAnalyze)*0.4):
            if procrastinateCount >= pain:
                state = "procrastinate"
            if needRelaxCount >= pain:
                state = "relax"
            if depressionCount >= pain:
                state = "depression"
        else:
            state = "neutral"
        #Apart from primary algo, can miss words.
        counter = 0
        for label in sequence:
            if label == "sadness":
                counter += 1
        if counter >= int(len(textToAnalyze)*0.6) and counter >= 3:
            state = "depression"
        print(procrastinateCount)
        print(needRelaxCount)
        print(depressionCount)
        print(state)
        print(actionSentences)
        # Calendar client used by the nested helpers above (closure binding).
        SCOPES = 'https://www.googleapis.com/auth/calendar'
        store = file.Storage('credentials.json')
        creds = store.get()
        if not creds or creds.invalid:
            flow = client.flow_from_clientsecrets('client_secret.json', SCOPES)
            creds = tools.run_flow(flow, store)
        service = discovery.build('calendar', 'v3', http=creds.authorize(Http()))
        #j = 0
        if actionSentences != []:
            suggestions = insert(actionSentences, 60, 'work')
            # for sentence in actionSentences:
            #     if str(j) in suggestions:
            #         suggestions[str(j)].append(sentence)
            #         j += 1
        else:
            suggestions = {}
        # prediction_series = list(pd.Series(predictions))
        # final_predictions = pd.DataFrame(list(zip(loan_ids, prediction_series)))
        """We can be as creative in sending the responses. But we need to send the response codes as well. """
        #print(predictions)
        #Create dictionary with everything that I need to return.
        responses = jsonify(predictions=sequence, sentence = actionSentences[0],
                            mindState=state, calSuggestions=suggestions,
                            sentimentState = sentiment)
        responses.status_code = 200
        return (responses)
# Normalized capacity units per EC2 instance size, following AWS's
# instance-size normalization table (small == 1 unit).
_NORMALIZATION_FACTOR = {
    'nano': 0.25,
    'micro': 0.5,
    'small': 1,
    'medium': 2,
    'large': 4,
    'xlarge': 8,
    '2xlarge': 16,
    '8xlarge': 32,
    '9xlarge': 64,
    '10xlarge': 72,
    '12xlarge': 96,
    '16xlarge': 128,
    '18xlarge': 144,
    '24xlarge': 192,
    '32xlarge': 256
}


def _build_filters(arg):
    """Translate parsed CLI options into describe-reserved-instances filters.

    When calendar events are requested, only active reservations matter and
    the state/type options are ignored (same precedence as the old code).
    """
    filters = []
    if arg.create_google_calendar_events:
        filters.append({'Name': 'state', 'Values': ['active']})
        return filters
    if arg.state:
        filters.append({'Name': 'state', 'Values': [arg.state]})
    if arg.type:
        filters.append({
            'Name': 'instance-type',
            'Values': ["*" + arg.type + "*"]
        })
    return filters


def _aggregate_reservations(instances):
    """Sum active reservation counts, split by scope.

    Returns a (region, zone) pair where
      region[instance_type][size]       -> count  (scope 'Region')
      zone[instance_type][az][size]     -> count  (scope 'Availability Zone')
    """
    region = {}
    zone = {}
    for instance in instances:
        if instance['state'] != 'active':
            continue
        instance_type, instance_size = instance['type'].split('.')
        if instance['scope'] == 'Region':
            sizes = region.setdefault(instance_type, {})
            sizes[instance_size] = sizes.get(instance_size, 0) + instance['count']
        elif instance['scope'] == 'Availability Zone':
            # setdefault at both levels fixes the KeyError the previous code
            # raised when an already-seen instance type showed up in a
            # second availability zone.
            az_sizes = zone.setdefault(instance_type, {}).setdefault(
                instance['zone'], {})
            az_sizes[instance_size] = (
                az_sizes.get(instance_size, 0) + instance['count'])
    return region, zone


def _print_regional(region):
    """Print the normalized regional summary; return the normalized total."""
    total = 0
    print("")
    print("Summary")
    print("")
    print(" Active Standard Regional Reserved Instances (by type and size)")
    for instance_type in region:
        print(" Instance Type: %s" % instance_type)
        for size in region[instance_type]:
            # Normalized reserved region size
            normalized = _NORMALIZATION_FACTOR[size] * region[instance_type][size]
            total = total + normalized
            print(" %s x %s (%s) = %s" % (
                region[instance_type][size], size,
                _NORMALIZATION_FACTOR[size], normalized))
        print("")
    print(" Total Regional (normalized): %s" % total)
    print("")
    print("")
    return total


def _print_zonal(zone):
    """Print the normalized zonal summary; return the normalized total."""
    total = 0
    print(" Active Standard Zonal Reserved Instances (by type, availability zone and size)")
    for instance_type in zone:
        print(" Instance Type: %s" % instance_type)
        for availability_zone in zone[instance_type]:
            print(" Availability zone: %s" % availability_zone)
            for size in zone[instance_type][availability_zone]:
                normalized = (_NORMALIZATION_FACTOR[size] *
                              zone[instance_type][availability_zone][size])
                total = total + normalized
                print(" %s x %s (%s) = %s" % (
                    zone[instance_type][availability_zone][size], size,
                    _NORMALIZATION_FACTOR[size], normalized))
    print("")
    print(" Total Zonal (normalized): %s" % total)
    print("")
    return total


def _create_calendar_events(events, event_ids):
    """Authorize against the Google Calendar API and create the events.

    Uses the module-level oauth2client/googleapiclient imports (file, client,
    tools, build, Http) and the module's create_events() helper.
    """
    SCOPES = 'https://www.googleapis.com/auth/calendar'
    store = file.Storage('credentials.json')
    creds = store.get()
    if not creds or creds.invalid:
        flow = client.flow_from_clientsecrets('client_secret.json', SCOPES)
        # Parse an empty argv so oauth2client's flow runner does not try to
        # re-parse (and choke on) this script's own command-line options.
        flags = tools.argparser.parse_args(args=[])
        creds = tools.run_flow(flow, store, flags)
    service = build('calendar', 'v3', http=creds.authorize(Http()))
    create_events(service, events, event_ids)


def main():
    """CLI entry point: summarize reserved EC2 instances and, on request,
    create Google Calendar events for their expiration dates.

    Relies on module-level names defined elsewhere in this file:
    list_reserved_instances(), create_events(), and the Google API imports.
    """
    parser = argparse.ArgumentParser(description='Show reserved EC2 instances')
    parser.add_argument(
        '-s', '--state',
        action='store',
        choices=['payment-pending', 'active', 'payment-failed', 'retired'],
        help="Filter result by reservation state.")
    parser.add_argument(
        '--create-google-calendar-events',
        action='store_true',
        default=False,
        help="Create events in your Google Calendar, using the "
             "expiration dates of your active reservations")
    parser.add_argument('-t', '--type', help="Filter result by instance type.")
    arg = parser.parse_args()

    events, event_ids, instances = list_reserved_instances(_build_filters(arg))

    region, zone = _aggregate_reservations(instances)
    _print_regional(region)
    _print_zonal(zone)

    if arg.create_google_calendar_events:
        _create_calendar_events(events, event_ids)
from __future__ import print_function from apiclient.discovery import build from httplib2 import Http from oauth2client import file, client, tools import json try: import argparse flags = argparse.ArgumentParser(parents=[tools.argparser]).parse_args() except ImportError: flags = None SCOPES = "https://www.googleapis.com/auth/calendar" store = file.Storage('G_files/storage.json') creds = store.get() if not creds or creds.invalid: flow = client.flow_from_clientsecrets('G_files/client_secrets.json', SCOPES) creds = tools.run_flow(flow, store, flags) if flags else tools.run( flow, store) CAL = build('calendar', 'v3', http=creds.authorize(Http())) GMT_OFF = '+02:00' CALENDAR_ID = '*****@*****.**' def coloring(): with open('Data/log.json', 'r') as f: