Example #1
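These listings are reproduced without their import headers. A typical preamble they assume, inferred from the calls they make (the exact module set varies per example and is an assumption, not part of the original source), would be:

import os
import pickle
from datetime import datetime, timedelta

from google.auth.transport.requests import Request
from google_auth_oauthlib.flow import InstalledAppFlow
from googleapiclient.discovery import build

# OAuth scopes differ per example (Calendar, Drive, Docs, Sheets).
SCOPES = ['https://www.googleapis.com/auth/calendar']
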
def addEvents():
    """Shows basic usage of the Google Calendar API.
    Prints the start and name of the next 10 events on the user's calendar.
    """
    creds = None
    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)

    service = build('calendar', 'v3', credentials=creds)

    file = open("data.txt", "r")

    className = ""

    class Event:
        def __init__(self, className, assignmentName, type, dueDate, numHours,
                     daysBeforeDue, priority):
            self.className = className
            self.assignmentName = assignmentName
            self.type = type
            self.dueDate = dateutil.parser.parse(dueDate)
            self.numHours = numHours
            self.daysBeforeDue = 0  # constructor argument ignored; recomputed below
            self.priority = 0  # constructor argument ignored; recomputed below

    mandatoryEvents = []

    for line in file:
        individual = line.split("|")
        mandatoryEvents.append(
            Event(individual[0], individual[1], individual[2], individual[3],
                  individual[4], 0, 0))

    for event in mandatoryEvents:

        event.daysBeforeDue = (datetime.date(event.dueDate) - datetime.date(
            datetime.now())).total_seconds() / 86400

        if 0 < event.daysBeforeDue < 7:
            # Guard against dividing by zero when the assignment is due within ~2 days.
            event.priority = int(event.numHours) / max(int(event.daysBeforeDue / 2), 1)

    busyTimes = filledCalendar('2020-01-20T09:00:00-05:00',
                               '2020-01-24T21:00:00-05:00')

    ## Day ONE
    task1 = {
        'summary': 'Work on: CS2214B: Assignment 2 - Python',
        'start': {
            'dateTime': '2020-01-20T9:00:00-05:00'
        },
        'end': {
            'dateTime': '2020-01-20T11:30:00-05:00'
        }
    }

    task2 = {
        'summary': 'Work on: CS2212B: Assignment 3',
        'start': {
            'dateTime': '2020-01-20T16:00:00-05:00'
        },
        'end': {
            'dateTime': '2020-01-20T16:30:00-05:00'
        }
    }

    task3 = {
        'summary': 'Work on: WRTG2111G: Illiad Essay',
        'start': {
            'dateTime': '2020-01-20T13:30:00-05:00'
        },
        'end': {
            'dateTime': '2020-01-20T14:30:00-05:00'
        }
    }

    task4 = {
        'summary': 'Work on: 2214B: Assignment 2 - Python',
        'start': {
            'dateTime': '2020-01-20T18:30:00-05:00'
        },
        'end': {
            'dateTime': '2020-01-20T21:00:00-05:00'
        }
    }

    # Day TWO

    task5 = {
        'summary': 'Work on: 2214B: Assignment 2 - Python',
        'start': {
            'dateTime': '2020-01-20T13:30:00-05:00'
        },
        'end': {
            'dateTime': '2020-01-20T14:30:00-05:00'
        }
    }

    task6 = {
        'summary': 'Work on: 2214B: Assignment 2 - Python',
        'start': {
            'dateTime': '2020-01-20T13:30:00-05:00'
        },
        'end': {
            'dateTime': '2020-01-20T14:30:00-05:00'
        }
    }

    task7 = {
        'summary': 'Work on: 2214B: Assignment 2 - Python',
        'start': {
            'dateTime': '2020-01-20T13:30:00-05:00'
        },
        'end': {
            'dateTime': '2020-01-20T14:30:00-05:00'
        }
    }

    task8 = {
        'summary': 'Work on: 2214B: Assignment 2 - Python',
        'start': {
            'dateTime': '2020-01-20T13:30:00-05:00'
        },
        'end': {
            'dateTime': '2020-01-20T14:30:00-05:00'
        }
    }

    task9 = {
        'summary': 'Work on: 2214B: Assignment 2 - Python',
        'start': {
            'dateTime': '2020-01-20T13:30:00-05:00'
        },
        'end': {
            'dateTime': '2020-01-20T14:30:00-05:00'
        }
    }

    task10 = {
        'summary': 'Work on: 2214B: Assignment 2 - Python',
        'start': {
            'dateTime': '2020-01-20T13:30:00-05:00'
        },
        'end': {
            'dateTime': '2020-01-20T14:30:00-05:00'
        }
    }

    event = service.events().insert(calendarId='primary', body=task1).execute()
    event = service.events().insert(calendarId='primary', body=task2).execute()
    event = service.events().insert(calendarId='primary', body=task3).execute()
    event = service.events().insert(calendarId='primary', body=task4).execute()
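
Only task1 through task4 are inserted above, while task5 through task10 are defined but never used. A minimal sketch of driving the inserts from one sequence instead of repeating the call (assuming the same service object and task dictionaries):

    # Sketch: insert every prepared task in one loop instead of repeated calls.
    for task in (task1, task2, task3, task4, task5,
                 task6, task7, task8, task9, task10):
        created = service.events().insert(calendarId='primary', body=task).execute()
        print('Created event:', created.get('htmlLink'))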
Example #2
def main():
    """Shows basic usage of the Google Calendar API.
    Prints the start and name of the next 10 events on the user's calendar.
    """
    creds = None
    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)

    service = build('calendar', 'v3', credentials=creds)

    # Call the Calendar API
    now = datetime.utcnow().isoformat() + 'Z'  # 'Z' indicates UTC time
    print('Getting the upcoming 10 events')
    events_result = service.events().list(calendarId='primary',
                                          timeMin=now,
                                          maxResults=10,
                                          singleEvents=True,
                                          orderBy='startTime').execute()
    events = events_result.get('items', [])

    d = datetime.now().date()
    tomorrow = datetime(d.year, d.month, d.day, 10) + timedelta(days=1)
    start = tomorrow.isoformat()
    end = (tomorrow + timedelta(hours=1)).isoformat()

    event_result = service.events().insert(
        calendarId='primary',
        body={
            "summary": 'Automating calendar',
            "description":
            'This is a tutorial example of automating google calendar with python',
            "start": {
                "dateTime": start,
                "timeZone": 'Asia/Kolkata'
            },
            "end": {
                "dateTime": end,
                "timeZone": 'Asia/Kolkata'
            },
        }).execute()

    print("created event")
    print("id: ", event_result['id'])
    print("summary: ", event_result['summary'])
    print("starts at: ", event_result['start']['dateTime'])
    print("ends at: ", event_result['end']['dateTime'])
    if not events:
        print('No upcoming events found.')
    for event in events:
        start = event['start'].get('dateTime', event['start'].get('date'))
        print(start, event['summary'])
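
Note that tomorrow.isoformat() above produces a naive timestamp, which the API interprets in the timeZone supplied next to it. A sketch of attaching the UTC offset explicitly instead (assumes Python 3.9+ for zoneinfo):

    # Sketch: timezone-aware start/end timestamps with the offset in the string.
    from datetime import datetime, timedelta
    from zoneinfo import ZoneInfo

    kolkata = ZoneInfo('Asia/Kolkata')
    start_dt = datetime.now(kolkata).replace(hour=10, minute=0, second=0,
                                             microsecond=0) + timedelta(days=1)
    start = start_dt.isoformat()                       # e.g. ...T10:00:00+05:30
    end = (start_dt + timedelta(hours=1)).isoformat()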
Example #3
def main():
    """Shows basic usage of the Docs API.
    Prints the title of a sample document.
    """
    creds = None
    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)

    drive_service = build('drive', 'v3', credentials=creds)
    doc_service = build('docs', 'v1', credentials=creds)
    spreadsheet_service = build('sheets', 'v4', credentials=creds)

    # create external folder
    file_metadata = {
        'name': 'ParenText Moa',
        'mimeType': 'application/vnd.google-apps.folder'
    }
    folder_file = drive_service.files().create(body=file_metadata,
                                               fields='id').execute()
    parent_id = folder_file.get('id')
    parentext_folder_id = parent_id

    # get names of the JSON files for docs

    doc_file_names = [
        f for f in listdir('./JSON_files') if isfile(join('./JSON_files', f))
    ]
    doc_flows_names = [
        line.strip('\n').replace('PLH - ', '') for line in doc_file_names
    ]
    doc_flows_names = [line[:-5]
                       for line in doc_flows_names]  # remove .json from string

    # get names of the csv files for sheets

    sheet_file_names = [
        f for f in listdir('./csv_files') if isfile(join('./csv_files', f))
    ]
    sheet_flows_names = [line.strip('\n') for line in sheet_file_names]
    sheet_flows_names = [line[:-4] for line in sheet_flows_names
                         ]  # remove .csv from string

    # combine the 2 lists
    flows_names = doc_flows_names + sheet_flows_names
    # create list for building folder structure from the names of the files
    folders_names_in_strings = []
    max_depth = 3

    for name in flows_names:
        name_list = name.split(" - ")
        name_list.pop()

        if len(name_list) > max_depth:
            name_list.pop()

        folders_names_in_strings.append(' - '.join(name_list))

    folders_names_in_strings = list(set(folders_names_in_strings))
    folders_in_lists = list(
        fol.split(" - ") for fol in folders_names_in_strings)

    # variable for saving the IDs of the created folders and files
    folders_IDs = {}
    files_IDs = {}
    files_urls = {}

    parent_folders_path = ""

    # function to create the folder structure
    def create_layer_of_folders(folders_in_lists, parent_folders_path,
                                parent_id):
        curr_layer_folders_to_create = list(
            set([folder[0] for folder in folders_in_lists]))

        for folder in curr_layer_folders_to_create:

            file_metadata = {
                'name': folder,
                'mimeType': 'application/vnd.google-apps.folder',
                'parents': [parent_id]
            }
            folder_file = drive_service.files().create(body=file_metadata,
                                                       fields='id').execute()
            folders_IDs[parent_folders_path + folder +
                        ' - '] = folder_file.get('id')

            new_folders_in_lists = list(
                filter(lambda fol: (len(fol) > 0 and fol[0] == folder),
                       folders_in_lists))

            for fol in new_folders_in_lists:
                fol.pop(0)

            new_folders_in_lists = list(
                filter(lambda fol: len(fol) > 0, new_folders_in_lists))

            if len(new_folders_in_lists) != 0:
                new_parents_folder_path = parent_folders_path + folder + ' - '
                create_layer_of_folders(new_folders_in_lists,
                                        new_parents_folder_path,
                                        folder_file.get('id'))

    # create the folder structure
    create_layer_of_folders(folders_in_lists, parent_folders_path, parent_id)

    # save the IDS of the created folders
    folders_IDs['ParenText'] = parentext_folder_id

    with open('./folders_IDs.json', 'w') as outfile:
        json.dump(folders_IDs, outfile, indent=4)

    ##################################################################################

    # functions to create spreadsheets

    def export_csv_file(file_path: str, name: str):
        if not os.path.exists(file_path):
            print("file path does not exists")
            return
        try:
            file_metadata = {
                'name': name,
                'mimeType': 'application/vnd.google-apps.spreadsheet',
                'properties': {
                    'title': name  # use the function's own parameter
                }
            }

            media = MediaFileUpload(filename=file_path, mimetype='text/csv')
            response = drive_service.files().create(
                media_body=media, body=file_metadata).execute()

            return response
        except Exception as e:
            print(e)
            return

    # create spreadsheets #############################################

    for fl in range(len(sheet_flows_names)):
        time.sleep(6)
        # define title
        curr_flow = sheet_flows_names[fl]
        curr_flow_split = curr_flow.split(" - ")
        title = curr_flow_split[-1]
        curr_flow_split.pop()
        curr_flow_path = ' - '.join(curr_flow_split)

        csv_file_path = './csv_files/' + sheet_file_names[fl]

        spreadsheet_file = export_csv_file(csv_file_path, title)
        print('Created spreadsheet with title: ' + title)

        DOCUMENT_ID = spreadsheet_file.get('id')

        files_IDs[curr_flow] = DOCUMENT_ID
        files_urls[
            curr_flow] = "https://docs.google.com/spreadsheets/d/" + DOCUMENT_ID + "/edit#gid=0"

        #formatting of the file (column width, background color, text wrapping)
        ranges = []
        # True if grid data should be returned.
        # This parameter is ignored if a field mask was set in the request.
        include_grid_data = False

        request = spreadsheet_service.spreadsheets().get(
            spreadsheetId=DOCUMENT_ID, includeGridData=include_grid_data)
        response = request.execute()

        sheetId = response.get("sheets")[0].get("properties").get("sheetId")

        n_rows = response.get("sheets")[0].get("properties").get(
            "gridProperties").get("rowCount")

        formatting_requests = []

        formatting_requests.append({
            "repeatCell": {
                "range": {
                    "sheetId": sheetId,
                    "startRowIndex": 1,
                    "endRowIndex": n_rows - 1,
                    "startColumnIndex": 4,
                    "endColumnIndex": 5
                },
                "cell": {
                    'userEnteredFormat': {
                        "backgroundColor": {
                            "red": 0.39,
                            "green": 0.65,
                            "blue": 0.39,
                            "alpha": 1
                        }
                    }
                },
                "fields": 'userEnteredFormat.backgroundColor'
            }
        })

        formatting_requests.append({
            "repeatCell": {
                "range": {
                    "sheetId": sheetId,
                    "startRowIndex": 0,
                    "startColumnIndex": 0
                },
                "cell": {
                    'userEnteredFormat': {
                        "wrapStrategy": "WRAP"
                    }
                },
                "fields": 'userEnteredFormat.wrapStrategy'
            }
        })

        formatting_requests.append(
            {
                "updateDimensionProperties": {
                    "range": {
                        "sheetId": sheetId,
                        "dimension": "COLUMNS",
                        "startIndex": 4,
                        "endIndex": 5
                    },
                    "properties": {
                        "pixelSize": 300
                    },
                    "fields": "pixelSize"
                }
            }, )

        formatting_requests.append(
            {
                "updateDimensionProperties": {
                    "range": {
                        "sheetId": sheetId,
                        "dimension": "COLUMNS",
                        "startIndex": 0,
                        "endIndex": 2
                    },
                    "properties": {
                        "pixelSize": 150
                    },
                    "fields": "pixelSize"
                }
            }, )

        formatting_requests.append(
            {
                "updateDimensionProperties": {
                    "range": {
                        "sheetId": sheetId,
                        "dimension": "COLUMNS",
                        "startIndex": 5,
                        "endIndex": 16
                    },
                    "properties": {
                        "pixelSize": 200
                    },
                    "fields": "pixelSize"
                }
            }, )

        spreadsheet_service.spreadsheets().batchUpdate(
            spreadsheetId=DOCUMENT_ID, body={
                'requests': formatting_requests
            }).execute()
        print('Sent requests to document: {0}'.format(
            len(formatting_requests)))

        # move document to correct folder
        folder_id = folders_IDs[curr_flow_path + ' - ']
        # Retrieve the existing parents to remove
        file = drive_service.files().get(fileId=DOCUMENT_ID,
                                         fields='parents').execute()
        previous_parents = ",".join(file.get('parents'))
        # Move the file to the new folder
        file = drive_service.files().update(fileId=DOCUMENT_ID,
                                            addParents=folder_id,
                                            removeParents=previous_parents,
                                            fields='id, parents').execute()

    ##################################################################################

    # functions to create google docs

    def insert_text(text, style, first=False):
        requests = [{
            'insertText': {
                'location': {
                    'index': 1,
                },
                'text': text if first else "\n" + text
            }
        }]
        if style:
            requests.append({
                'updateParagraphStyle': {
                    'range': {
                        'startIndex': 1 if first else 2,
                        'endIndex': len(text)
                    },
                    'paragraphStyle': {
                        'namedStyleType': style,
                    },
                    'fields': 'namedStyleType'
                }
            })
        return requests

    def make_requests(key, value, level, requests):
        requests.append(insert_text(text=key, style='HEADING_' + str(level)))
        if isinstance(value, str):
            req = insert_text(text=value, style='')
            requests.append(req)
        elif isinstance(value, dict):
            for i in value:
                make_requests(i, value[i], level=level + 1, requests=requests)
        elif isinstance(value, list):
            for item in value:
                if isinstance(item, dict):
                    for i in item:
                        make_requests(i,
                                      item[i],
                                      level=level + 1,
                                      requests=requests)
                elif isinstance(item, str):
                    req = insert_text(text=item, style='')
                    requests.append(req)

        return requests

    # create google docs #############################################

    for fl in range(len(doc_flows_names)):
        time.sleep(6)
        # initialise the doc
        curr_flow = doc_flows_names[fl]
        curr_flow_split = curr_flow.split(" - ")
        title = curr_flow_split[-1]
        curr_flow_split.pop()
        curr_flow_path = ' - '.join(curr_flow_split)

        body = {
            "title": title,
        }

        doc = doc_service.documents().create(body=body).execute()
        print('Created document with title: {0}'.format(doc.get('title')))
        DOCUMENT_ID = doc.get('documentId')

        files_IDs[curr_flow] = DOCUMENT_ID
        files_urls[
            curr_flow] = "https://docs.google.com/document/d/" + DOCUMENT_ID + "/edit"

        # load json file
        with open('./JSON_files/' + doc_file_names[fl],
                  encoding="utf8") as json_file:
            data = json.load(json_file)

        requests = []

        for i in data:
            make_requests(i, data[i], level=1, requests=requests)

        requests.reverse()

        result = doc_service.documents().batchUpdate(documentId=DOCUMENT_ID,
                                                     body={
                                                         'requests': requests
                                                     }).execute()
        print('Sent requests to document: {0}'.format(len(requests)))

        # move document to correct folder
        folder_id = folders_IDs[curr_flow_path + ' - ']
        # Retrieve the existing parents to remove
        file = drive_service.files().get(fileId=DOCUMENT_ID,
                                         fields='parents').execute()
        previous_parents = ",".join(file.get('parents'))
        # Move the file to the new folder
        file = drive_service.files().update(fileId=DOCUMENT_ID,
                                            addParents=folder_id,
                                            removeParents=previous_parents,
                                            fields='id, parents').execute()

    #create files with files IDS and urls
    with open('./files_IDs.json', 'w') as outfile:
        json.dump(files_IDs, outfile)

    with open('./files_urls.json', 'w') as outfile:
        json.dump(files_urls, outfile)
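
The "move document to correct folder" step appears twice above, once for spreadsheets and once for docs. A small helper capturing that pattern (a sketch, assuming the same drive_service) could look like:

    # Sketch: factor out the Drive "move file into folder" step used above.
    def move_to_folder(file_id, folder_id):
        current = drive_service.files().get(fileId=file_id,
                                            fields='parents').execute()
        previous_parents = ",".join(current.get('parents', []))
        drive_service.files().update(fileId=file_id,
                                     addParents=folder_id,
                                     removeParents=previous_parents,
                                     fields='id, parents').execute()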
Example #4
def main():
    """Shows basic usage of the Google Calendar API.
    Prints the start and name of the next 10 events on the user's calendar.
    """
    creds = None
    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    """if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)"""
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials (1).json', SCOPES)
            creds = flow.run_local_server("127.0.0.1")
        # Save the credentials for the next run
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)

    service = build('calendar', 'v3', credentials=creds)

    # Call the Calendar API
    """now = datetime.datetime.utcnow().isoformat() + 'Z' # 'Z' indicates UTC time
    print('Getting the upcoming 10 events')
    events_result = service.events().list(calendarId='primary', timeMin=now,
                                        maxResults=10, singleEvents=True,
                                        orderBy='startTime').execute()
    events = events_result.get('items', [])

    if not events:
        print('No upcoming events found.')
    for event in events:
        start = event['start'].get('dateTime', event['start'].get('date'))
        print(start, event['summary'])"""

    #Extract data from textprocessing
    data = main_func(
        inpramesh
    )  # Later, instead of this local input, the data will be read from the web
    print(data)
    current_date, time = ret_date_time(str(datetime.today()))
    dates_till_six_days = find_date_from_a_week(current_date)
    weekdays = generate_weekday_lst(datetime.today().weekday())
    dictionary_of_weekdays = {'M': 0, 'Tu': 1, 'W': 2, 'Th': 3, 'F': 4}
    dictionary_of_dates = {}
    for i in range(len(weekdays)):
        dictionary_of_dates[weekdays[i]] = dates_till_six_days[i]

    #Adding an Event to Calendar
    for d in data:
        if 'TBA' not in d and 'TBA' not in d[1]:
            for days in d[1]:
                start_time = ""
                end_time = ""
                if d[2][len(d[2]) - 1] == 'P' and d[2][:2] != "12":
                    #print(d[2][:2])
                    get_colon_index = 0
                    while d[2][get_colon_index] != ":":
                        get_colon_index += 1
                    start_time = str(int(d[2][:get_colon_index]) +
                                     12) + d[2][get_colon_index:len(d[2]) -
                                                1] + ":00"
                else:
                    start_time = d[2][:len(d[2]) - 1] + ":00"

                if d[3][len(d[3]) - 1] == 'P' and d[3][:2] != "12":
                    get_colon_index = 0
                    while d[3][get_colon_index] != ":":
                        get_colon_index += 1
                    end_time = str(int(d[3][:get_colon_index]) +
                                   12) + d[3][get_colon_index:len(d[3]) -
                                              1] + ":00"
                else:
                    end_time = d[3][:len(d[3]) - 1] + ":00"

                event = {
                    'summary':
                    d[0],
                    'location':
                    None,
                    'description':
                    None,
                    'start': {
                        'dateTime':
                        dictionary_of_dates[dictionary_of_weekdays[days]] +
                        "T" + start_time,
                        'timeZone':
                        'America/Los_Angeles',
                    },
                    'end': {
                        'dateTime':
                        dictionary_of_dates[dictionary_of_weekdays[days]] +
                        "T" + end_time,
                        'timeZone':
                        'America/Los_Angeles',
                    },
                    'recurrence': [
                        'RRULE:FREQ=WEEKLY;UNTIL=20191220T235959Z'  #COUNT=1'
                    ],
                    'attendees': [
                        {
                            'email': '*****@*****.**'
                        },
                        {
                            'email': '*****@*****.**'
                        },
                    ],
                    'reminders': {
                        'useDefault':
                        False,
                        'overrides': [
                            {
                                'method': 'email',
                                'minutes': 24 * 60
                            },
                            {
                                'method': 'popup',
                                'minutes': 10
                            },
                        ],
                    },
                }

                event = service.events().insert(calendarId='primary',
                                                body=event).execute()
                print('Event created: %s' % event.get('htmlLink'))
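
The colon-scanning above converts strings such as '2:30P' to 24-hour form by hand. A shorter sketch using strptime, assuming the same 'H:MM' plus trailing 'A'/'P' input format:

# Sketch: convert 'H:MMA' / 'H:MMP' strings to 'HH:MM:SS' with strptime.
from datetime import datetime

def to_24h(t):
    # '2:30P' -> '14:30:00', '12:05P' -> '12:05:00'
    return datetime.strptime(t + 'M', '%I:%M%p').strftime('%H:%M:%S')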
Example #5
import os
import pickle

from google.auth.transport.requests import Request
from google_auth_oauthlib.flow import InstalledAppFlow

SCOPES = [
    'https://www.googleapis.com/auth/spreadsheets',
    'https://www.googleapis.com/auth/calendar.readonly'
]
creds = None

# The file token.pickle stores the user's access and refresh tokens, and is
# created automatically when the authorization flow completes for the first
# time.
if os.path.exists('token.pickle'):
    with open('token.pickle', 'rb') as token:
        creds = pickle.load(token)

# If there are no (valid) credentials available, let the user log in.
if not creds or not creds.valid:
    if creds and creds.expired and creds.refresh_token:
        creds.refresh(Request())
    else:
        flow = InstalledAppFlow.from_client_secrets_file(
            'credentials.json', SCOPES)
        creds = flow.run_local_server()
    # Save the credentials for the next run
    with open('token.pickle', 'wb') as token:
        pickle.dump(creds, token)
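
With creds populated, this boilerplate would normally be followed by building clients for the two scopes it requests, for example:

# Sketch: build the service clients for the scopes requested above.
from googleapiclient.discovery import build

sheets_service = build('sheets', 'v4', credentials=creds)
calendar_service = build('calendar', 'v3', credentials=creds)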
Example #6
def make_iap_request(url, client_id, method='GET', **kwargs):
    """Makes a request to an application protected by Identity-Aware Proxy.
    Info: https://github.com/requests/requests/blob/master/requests/api.py"""

    # Set the default timeout, if missing
    if 'timeout' not in kwargs:
        kwargs['timeout'] = 90

    # Figure out what environment we're running in and get some preliminary
    # information about the service account.
    bootstrap_credentials, _ = google.auth.default(scopes=[IAM_SCOPE])
    if isinstance(bootstrap_credentials,
                  google.oauth2.credentials.Credentials):
        raise Exception('make_iap_request is only supported for service '
                        'accounts.')
    elif isinstance(bootstrap_credentials, google.auth.app_engine.Credentials):
        requests_toolbelt.adapters.appengine.monkeypatch()

    # For service account's using the Compute Engine metadata service,
    # service_account_email isn't available until refresh is called.
    bootstrap_credentials.refresh(Request())

    signer_email = bootstrap_credentials.service_account_email
    if isinstance(bootstrap_credentials,
                  google.auth.compute_engine.credentials.Credentials):
        signer = google.auth.iam.Signer(Request(), bootstrap_credentials,
                                        signer_email)
    else:
        # A Signer object can sign a JWT using the service account's key.
        signer = bootstrap_credentials.signer

    # Construct OAuth 2.0 service account credentials using the signer
    # and email acquired from the bootstrap credentials.
    credentials = google.oauth2.service_account.Credentials(
        signer,
        signer_email,
        token_uri=OAUTH_TOKEN_URI,
        additional_claims={'target_audience': client_id})

    # Obtain an OpenID Connect token, which is a JWT signed by Google.
    google_open_id_connect_token = get_google_open_id_connect_token(
        credentials)

    # Fetch the Identity-Aware Proxy-protected URL, including a Bearer token.
    resp = requests.request(
        method,
        url,
        headers={
            'Authorization': 'Bearer {}'.format(google_open_id_connect_token)
        },
        **kwargs)
    if resp.status_code == 403:
        raise Exception(
            'Service account {} does not have permission to '
            'access the IAP-protected application.'.format(signer_email))
    elif resp.status_code != 200:
        raise Exception(
            'Bad response from application: {!r} / {!r} / {!r}'.format(
                resp.status_code, resp.headers, resp.text))
    else:
        return resp.text
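
A usage sketch for make_iap_request; both the URL and the OAuth client ID below are hypothetical placeholders, not values from the original:

# Sketch: calling an IAP-protected endpoint (placeholder URL and client ID).
IAP_URL = 'https://example-app.appspot.com/api/status'
IAP_CLIENT_ID = '1234567890-abcdefg.apps.googleusercontent.com'

body = make_iap_request(IAP_URL, IAP_CLIENT_ID, method='GET')
print(body)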
Example #7
def main():
    p = Adafruit_Thermal()
    p.setDefault()
    p.setSize('L')
    p.println('Calendar')

    creds = None
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)

        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)

    service = build('calendar', 'v3', credentials=creds)

    now = datetime.utcnow().isoformat() + 'Z'
    timeMax = (datetime.utcnow() +
               timedelta(days=config.calendar_days)).isoformat() + 'Z'
    event_results = service.events().list(calendarId='primary',
                                          timeMin=now,
                                          timeMax=timeMax,
                                          singleEvents=True,
                                          orderBy='startTime').execute()
    events = event_results.get('items', [])

    p.setSize('M')
    if not events:
        p.println("Nothing to do... chill out!")

    for event in events:
        # print(event)
        start_d_in = event['start'].get('date')
        start_dt_in = event['start'].get('dateTime')

        start_t_out = None
        if start_dt_in is not None:
            start_dt = dateutil.parser.parse(start_dt_in)
            start_d_out = start_dt.strftime(config.date_format)
            start_t_out = start_dt.strftime(config.time_format)
        else:
            start_d_out = dateutil.parser.parse(start_d_in).strftime(
                config.date_format)

        p.boldOn()
        p.underlineOn()
        p.justify('L')
        if start_t_out is not None:
            p.print(start_t_out)
            p.print(' ')

        p.println(start_d_out)
        p.boldOff()
        p.underlineOff()

        p.justify('R')
        p.println(event['summary'])

    p.setDefault()
    p.sleep()
Example #8
def index():
    message = ''
    if request.method == 'POST':
        if request.form.get('sheet') == 'Process Google Sheet':

            """Shows basic usage of the Sheets API.
            Prints values from a sample spreadsheet.
            """
            creds = None
            # The file token.pickle stores the user's access and refresh tokens, and is
            # created automatically when the authorization flow completes for the first
            # time.
            if os.path.exists('sheet.pickle'):
                with open('sheet.pickle', 'rb') as token:
                    creds = pickle.load(token)
            # If there are no (valid) credentials available, let the user log in.
            if not creds or not creds.valid:
                if creds and creds.expired and creds.refresh_token:
                    creds.refresh(Request())
                else:
                    flow = InstalledAppFlow.from_client_secrets_file(
                        'credentials.json', SPREADSHEET_SCOPES)
                    creds = flow.run_local_server(port=0)
                # Save the credentials for the next run
                with open('sheet.pickle', 'wb') as token:
                    pickle.dump(creds, token)

            service = build('sheets', 'v4', credentials=creds)

            # Call the Sheets API
            sheet = service.spreadsheets()
            result = sheet.values().get(spreadsheetId=SPREADSHEET_ID,
                                        range=RAW_RANGE).execute()

            values = result.get('values', [])
            sheets, docs, columns = [], [], []

            if values:
                for row in range(len(values)):

                    rows = ''
                    for i in range(len(values[row])):
                        # If the customer does not enter anything in the first field, write "Customer" in its place.
                        if i == 0:
                            if not values[row][0]:
                                rows += 'Customer'
                            else:
                                rows += '%s' % values[row][i].strip()

                        else:
                            rows += '\n%s' % values[row][i].strip()

                    sheets.append([rows])

                    # TODO: convert the list to a numpy array and then reshape:

                    columns.append(rows)

                    if (row+1) % 3 == 0:
                        docs.append(columns)
                        columns = []

                if columns:
                    docs.append(columns)

            resource = {
                "majorDimension": "ROWS",
                "values": sheets
            }

            # Reset Sheet
            service.spreadsheets().values().clear(spreadsheetId=SPREADSHEET_ID,
                                                  range=RESULT_RANGE).execute()

            service.spreadsheets().values().update(
                spreadsheetId=SPREADSHEET_ID,
                range=RESULT_RANGE,
                body=resource,
                valueInputOption="USER_ENTERED"
            ).execute()

        # elif request.form.get('docs') == 'Process Google Docs':

            """Shows basic usage of the Docs API.
            Prints the title of a sample document.
            """
            creds = None
            # The file token.pickle stores the user's access and refresh tokens, and is
            # created automatically when the authorization flow completes for the first
            # time.
            if os.path.exists('docs.pickle'):
                with open('docs.pickle', 'rb') as token:
                    creds = pickle.load(token)
            # If there are no (valid) credentials available, let the user log in.
            if not creds or not creds.valid:
                if creds and creds.expired and creds.refresh_token:
                    creds.refresh(Request())
                else:
                    flow = InstalledAppFlow.from_client_secrets_file(
                        'credentials.json', DOCUMENT_SCOPES)
                    creds = flow.run_local_server(port=0)
                # Save the credentials for the next run
                with open('docs.pickle', 'wb') as token:
                    pickle.dump(creds, token)

            service = build('docs', 'v1', credentials=creds)

            resource = {
                "oauth2": creds,
                "documentId": DOCUMENT_ID
            }
            # You can see the retrieved values like this.
            document = gdoctableapp.GetTables(resource)

            # Reset Table
            if document['tables']:
                resource = {
                    "oauth2": creds,
                    "documentId": DOCUMENT_ID,
                    "tableIndex": 0
                }
                gdoctableapp.DeleteTable(resource)

            resource = {
                "oauth2": creds,
                "documentId": DOCUMENT_ID,
                "rows": len(docs),
                "columns": 3,
                "createIndex": 1,
                "values": docs
            }
            gdoctableapp.CreateTable(resource)
            message = 'Google Sheet and Docs Processed. Please see the links below'
        else:
            # pass # unknown
            return render_template("index.html")

    return render_template("index.html", message=message)
Example #9
def main():
    downloaded = 0
    totalsize = 0
    deletedfiles = 0
    creds = None
    # CREDENTIALS AND LOGIN
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # REQUIRE LOGIN IF CREDENTIAL EXPIRES
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)

    service = build('drive', 'v3', credentials=creds)

    # DOWNLOAD
    def download_file(service_download, file_id, local_fd):
        request = service_download.files().get_media(fileId=file_id)
        media_request = http.MediaIoBaseDownload(local_fd, request)
        while True:
            try:
                download_progress, done = media_request.next_chunk()
            except errors.HttpError as error:
                print('An error occurred: %s' % error)
                return
            if download_progress:
                print('Download Progress: %d%%' % int(download_progress.progress() * 100))
            if done:
                print('Download Complete')
                return

    # DELETE
    def delete_file(service_del, file_id):
        try:
            service.files().delete(fileId=file_id).execute()
        except errors.HttpError as error:
            print('An error occurred: %s' % error)

    # COUNT ALL FILES
    results_count = service.files().list(q=f"mimeType!='application/vnd.google-apps.folder' ",
                                   pageSize=ammount, fields="nextPageToken, files(id, name, size)").execute()
    counts = results_count.get('files', [])
    alltotalsize = 0
    for count in counts:
        countsize = int(count['size'])
        alltotalsize += countsize
    total = len(counts)

    print(f'{total} files found, {round(float(alltotalsize) / 1048576, 2)}MB')




    # LIST ALL FOLDERS
    folder_results = service.files().list(q="mimeType='application/vnd.google-apps.folder'",
        pageSize=ammount, fields="nextPageToken, files(id, name)").execute()
    folders = folder_results.get('files', [])

    if not folders:
        print('No folder found.')
    else:
        print('Folders:')
        for folder in folders:
            print(f"{folder['name']}")

            # LIST ALL FILES IN FOLDER
            results = service.files().list(q=f"mimeType!='application/vnd.google-apps.folder' and parents in '{folder['id']}' ", pageSize=ammount, fields="nextPageToken, files(id, name, size)").execute()
            items = results.get('files', [])
            if not items:
                print('------ No file found')
            else:
                path = f"{sync_path}\\{folder['name']}"
                if os.path.exists(path):
                    print('')
                else:
                    # print(f'Folder {path} doesnt exists, creating...\n')
                    os.mkdir(path)
                # print(path)

                #print('Files:\n')
                for item in items:
                    print(f"------ ID: {item['id']} | Filename: {item['name']}")
                    file = f"{path}\\{item['name']}"
                    pathfile = file

                    # DOWNLOAD ALL FILES
                    if os.path.exists(pathfile):
                        localsize = os.path.getsize(pathfile)
                        remotesize = int(item['size'])

                        # IGNORE EXISTING FILES
                        print(f'Local file size: {localsize} bytes / Remote file size: {remotesize} bytes')
                        if localsize == remotesize:
                            print(
                                f"File {item['name']} already exists with same size, ignoring and deleting remote file...\n")
                            delete_file(service, item['id'])
                            deletedfiles += 1
                        else:
                            # DOWNLOAD INCOMPLETE FILES
                            if localsize == 0:
                                print(f"File {item['name']} already exists with different size, downloading...\n")
                                filedownload = open(pathfile, 'wb')
                                # print(f"Downloading {item['name']}...")
                                try:
                                    download_file(service, item['id'], filedownload)
                                    downloaded += 1
                                    print(f"Deleting {item['name']}...\n")
                                    delete_file(service, item['id'])
                                except:
                                    print('Error while downloading')  # was 'Erro ao baixar'
                    else:
                        filedownload = open(pathfile, 'wb')
                        # print(f"Downloading {item['name']}...")
                        try:
                            download_file(service, item['id'], filedownload)
                            print(f"Deleting {item['name']}...")
                            delete_file(service, item['id'])
                        except:
                            print('Error')

                        remotesize = int(item['size'])
                        downloaded += 1
                        totalsize += remotesize
                        print(f'{downloaded}/{total}')
                        percent = totalsize / alltotalsize * 100
                        print(f'Total: {round(float(totalsize) / 1048576, 2)}MB of {round(float(alltotalsize) / 1048576, 2)}MB downloaded ({round(float(percent), 2)}%)\n\n')

    totalsizeinmb = round(float(totalsize) / 1048576, 2)
    print(f'\nTotal files downloaded: {downloaded} ({totalsizeinmb}MB)')
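
The counting and listing queries above fetch at most a single page of results (pageSize=ammount with no pageToken loop). A sketch of exhaustive paging with nextPageToken:

# Sketch: page through every non-folder file instead of reading one page.
def list_all_files(service):
    files, page_token = [], None
    while True:
        response = service.files().list(
            q="mimeType!='application/vnd.google-apps.folder'",
            pageSize=1000,
            fields="nextPageToken, files(id, name, size)",
            pageToken=page_token).execute()
        files.extend(response.get('files', []))
        page_token = response.get('nextPageToken')
        if page_token is None:
            return files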
Example #10
 def __init__(self,
              SCOPES=None,
              token='drive',
              SCOPES_type='default',
              headless=True,
              apiver='v3',
              json_file=None):
     tk = os.path.join(credentials_dir, token)
     if SCOPES_type != 'default':
         SCOPES = get_scopes(SCOPES_type)
     if SCOPES is not None:
         # A token cached for different scopes would be stale, so discard it
         # (tk must be defined before this check).
         if os.path.exists(tk):
             try:
                 os.remove(tk)
             except OSError:
                 pass
     else:
         SCOPES = ['https://www.googleapis.com/auth/drive']
     creds = None
     json_auth = False
     alt_json = os.path.join(credentials_dir, (token + ".json"))
     if os.path.exists(alt_json):
         credentials_json = alt_json
     else:
         credentials_json = os.path.join(credentials_dir,
                                         'credentials.json')
     if json_file != None:
         tk = json_file
     if os.path.exists(tk):
         try:
             with open(tk) as jfile:
                 test = json.load(jfile)
             json_auth = True
             apiver = 'v3'
         except:
             with open(tk, 'rb') as tok:
                 creds = pickle.load(tok)
     # If there are no (valid) credentials available, let the user log in.
     if json_auth == False:
         if not creds or not creds.valid:
             if creds and creds.expired and creds.refresh_token:
                 creds.refresh(Request())
             else:
                 flow = InstalledAppFlow.from_client_secrets_file(
                     credentials_json, SCOPES)
                 if headless == False:
                     creds = flow.run_local_server(port=0)
                 else:
                     creds = flow.run_console()
             # Save the credentials for the next run
             with open(tk, 'wb') as tok:
                 pickle.dump(creds, tok)
         self.drive_service = build('drive', apiver, credentials=creds)
         self.access_token = creds.token
     else:
         if os.path.exists(token):
             creds = ServiceAccountCredentials.from_json_keyfile_name(
                 token, scopes=SCOPES)
         elif os.path.exists(tk):
             creds = ServiceAccountCredentials.from_json_keyfile_name(
                 tk, scopes=SCOPES)
         self.drive_service = build('drive', apiver, credentials=creds)
         self.access_token = None
Example #11
def creat_event_call(new_order):
    SCOPES = ['https://www.googleapis.com/auth/calendar']
    creds = None
    cal_id = None
    ciklorama = ''  # cal_id_1
    objects = ''  # cal_id_2
    if str(new_order['service']) == 'Аренда Циклорамы':  # "Cyclorama rental"
        cal_id = ciklorama
    elif str(new_order['service']) == 'Аренда Темного зала':  # "Dark hall rental"
        cal_id = objects
    else:
        cal_id = ''  # cal_id_3

    if os.path.exists('token.json'):
        creds = Credentials.from_authorized_user_file('token.json', SCOPES)
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'client_secret.json', SCOPES)
            creds = flow.run_local_server(port=0)
        with open('token.json', 'w') as token:
            token.write(creds.to_json())
    service = build('calendar', 'v3', credentials=creds)
    if new_order['who_admin']:
        admin_name = str(new_order['who_admin'])
        admin_object = Administrators.objects.get(name=admin_name)
        admin_mail = admin_object.email
        attendees = {'email': admin_mail}
    else:
        admin_name = ''
        admin_mail = ''
        attendees = None
    start_order = str(
        new_order['date_ordering'].strftime("%Y-%m-%dT%H:%M:%S+03:00"))
    raw_finish_order = new_order['date_ordering'] + timedelta(
        hours=new_order['order'])
    finish_order = str(raw_finish_order.strftime("%Y-%m-%dT%H:%M:%S+03:00"))

    event_info = {
        'start': {
            'dateTime': start_order,
        },
        'end': {
            'dateTime': finish_order,
        },
        'attendees': [
            attendees,
        ],
        'title':
        'Аренда',
        "summary":
        "Аренда  " + str(new_order['tel']),  # Title of the event.
        'description':
        "Клиент: " + str(new_order['tel']) + ' грн\nПредоплата: ' +
        str(new_order['prepay']) + '\nАдминистратор: ' + admin_name,
    }
    event_add = service.events().insert(calendarId=cal_id,
                                        body=event_info).execute()
    htmlLink = 'https://www.googleapis.com/calendar/v3/calendars/' + cal_id + '/events'
    print('Event created: %s' % (event_add.get('htmlLink')))
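
When who_admin is empty, attendees is None and the body above ends up containing 'attendees': [None], which the Calendar API is likely to reject. A sketch of adding that field only when an administrator e-mail exists:

    # Sketch: include attendees only when there is an administrator e-mail.
    event_info = {
        'summary': 'Аренда  ' + str(new_order['tel']),
        'start': {'dateTime': start_order},
        'end': {'dateTime': finish_order},
    }
    if admin_mail:
        event_info['attendees'] = [{'email': admin_mail}]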
Example #12
def main():
    creds = None
    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)

    service = build('calendar', 'v3', credentials=creds)

    readschedule()
    print(readschedule())

    yearmon = readschedule()[0]
    a = yearmon.split(".")
    year = int(a[0])
    mon = int(a[1])

    if mon == 1 or mon == 3 or mon == 5 or mon == 7 or mon == 8 or mon == 10 or mon == 12:
        num_days = 31
    elif mon == 2:
        num_days = 28
    else:
        num_days = 30

    for i in readschedule():
        s = i.split(' ')
        if (len(s) == 1): continue

        d_s = int(s[0])
        d_e = int(s[0])
        m_s = mon
        m_e = mon
        y_s = year
        y_e = year

        if (mon == 12 and d_e == 31):
            y_e = year + 1

        if (num_days == d_e):
            d_e = 1
            if mon == 12: m_e = 1
            else: m_e = m_e + 1

        event = {
            'summary': '{}'.format(s[1]),
            'location': 'unchi',
            'description': '{}'.format(s[1]),
            'start': {
                'date': '{}-{:02d}-{:02d}'.format(y_s, m_s, d_s),
                'timeZone': 'Japan',
            },
            'end': {
                'date': '{}-{:02d}-{:02d}'.format(y_e, m_e, d_e),
                'timeZone': 'Japan',
            },
        }
        event = service.events().insert(
            calendarId='*****@*****.**', body=event).execute()
        print(event['id'])
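
The month-length table above does not account for leap years, and the end date only rolls over at the end of a month. A sketch of the same start/end computation with date arithmetic (year, mon and d_s as in the loop above):

# Sketch: all-day start/end dates via date arithmetic, which also handles
# leap years and month/year rollover ('end' is exclusive for all-day events).
from datetime import date, timedelta

start_date = date(year, mon, d_s)
end_date = start_date + timedelta(days=1)
event_body = {
    'summary': s[1],
    'start': {'date': start_date.isoformat(), 'timeZone': 'Japan'},
    'end': {'date': end_date.isoformat(), 'timeZone': 'Japan'},
}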
Example #13
def main():
    """Shows basic usage of the Google Calendar API.
    Prints the start and name of the next 10 events on the user's calendar.
    """
    creds = None
    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'D:\\Users\\s84908\\OneDrive - The Siam Commercial Bank PCL\\Working\\google_calendar_api\\credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)

    # service = build('calendar', 'v3', credentials=creds)
    # Call the Calendar API
    # now = datetime.datetime.utcnow().isoformat() + 'Z' # 'Z' indicates UTC time
    # print('Getting the upcoming 10 events')
    # events_result = service.events().list(calendarId='primary', timeMin=now,
    #                                     maxResults=10, singleEvents=True,
    #                                     orderBy='startTime').execute()
    # events = events_result.get('items', [])

    # if not events:
    #     print('No upcoming events found.')
    # for event in events:
    #     start = event['start'].get('dateTime', event['start'].get('date'))
    #     print(start, event['summary'])

    service = build('calendar', 'v3', credentials=creds)
    event = {
        'summary': 'Deploy UAT 2',
        'start': {
            'date': '2019-12-18',
            'timeZone': 'Asia/Bangkok',
        },
        'end': {
            'date': '2019-12-19',
            'timeZone': 'Asia/Bangkok',
        },
        'reminders': {
            'useDefault': False,
            'overrides': [
                {'method': 'popup', 'minutes': 840},
            ],
        },
    }
    #print (event)
     
    #print ('Event created: %s' % (event.get('htmlLink')))
    from googleapiclient.errors import HttpError
    # help(HttpError)  # debugging leftover
    try:
        event = service.events().insert(calendarId='primary', body=event).execute()
    except HttpError as err:
        print(err.content)
    else:
        print('Event created: %s' % (event.get('htmlLink')))
Example #14
def main():
    """Shows basic usage of the Drive v3 API.
    Prints the names and ids of the first 10 files the user has access to.
    """
    creds = None

    dir_path = os.path.dirname(os.path.realpath(__file__)) + "/"

    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists(dir_path + 'token.pickle'):
        with open(dir_path + 'token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                dir_path + 'credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open(dir_path + 'token.pickle', 'wb') as token:
            pickle.dump(creds, token)

    service = build('drive', 'v3', credentials=creds)

    # Create the directory if it doesn't exist
    file_dir = "%s/Documents/Takeout" % os.environ['HOME']

    if not os.path.exists(file_dir):
        os.makedirs(file_dir)

    # Call the Drive v3 API
    page_token = None
    while True:
        response = service.files().list(
            q="name contains 'takeout' and trashed = false and mimeType contains 'zip'",
            spaces='drive',
            fields='nextPageToken, files(id, name)',
            pageToken=page_token).execute()
        for file in response.get('files', []):
            # Process change
            file_id = file.get('id')
            file_name = file.get('name')

            print('Found file: %s (%s)' % (file_name, file_id))

            file_path = "%s/%s" % (file_dir, file_name)

            if not os.path.exists(file_path):
                file_count = len([name for name in os.listdir(
                    file_dir) if os.path.isfile(os.path.join(file_dir, name))])
                if file_count > 0:
                    os.system(
                        """osascript -e 'display notification "You need to take care of this" with title "No room for new Google Drive backup"'""")
                    break

                request = service.files().get_media(fileId=file_id)

                fh = io.FileIO(file_path, 'wb')
                downloader = MediaIoBaseDownload(fh, request)
                done = False
                while done is False:
                    status, done = downloader.next_chunk()
                    print("Download and wrote %d%%." %
                          int(status.progress() * 100))

                    os.system(
                        """osascript -e 'display notification "You are good to go" with title "Data pulled from Google Drive"'""")

            else:
                print('File already exists: %s (%s)' % (file_name, file_id))

            service.files().update(
                fileId=file_id, body={
                    'trashed': True}).execute()

            print('Trashed file: %s (%s)' % (file_name, file_id))

        page_token = response.get('nextPageToken', None)
        if page_token is None:
            break
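A minimal, hedged sketch of the same download step writing into memory instead of straight to disk, using the MediaIoBaseDownload helper already used above; service and file_id are assumed to come from the listing loop above.

import io
from googleapiclient.http import MediaIoBaseDownload

# Hedged sketch: stream the file into a BytesIO buffer rather than a FileIO on
# disk; the chunked next_chunk() loop is identical to the one above.
request = service.files().get_media(fileId=file_id)
buffer = io.BytesIO()
downloader = MediaIoBaseDownload(buffer, request)
done = False
while not done:
    status, done = downloader.next_chunk()
    print("Downloaded %d%%." % int(status.progress() * 100))
archive_bytes = buffer.getvalue()  # raw bytes of the downloaded archive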
Example No. 15
0
def main():
    """Shows basic usage of the Drive v3 API.
    Prints the names and ids of the first 10 files the user has access to.
    """
    creds = None
    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server()
        # Save the credentials for the next run
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)

    service = build('drive', 'v3', credentials=creds)

    # # Call the Drive v3 API
    # results = service.files().list(
    #     pageSize=1000, fields="nextPageToken, files(id, name)").execute()
    # items = results.get('files', [])
    # all_files = []
    # if not items:
    #     print('No files found.')
    # else:
    #     print('Files:')
    #     for item in items:
    #         print(u'{0} ({1})'.format(item['name'], item['id']))
    #         all_files.append((item['name'], item['id']))

    # return all_files
    page_token = None
    all_files = []
    while True:
        response = service.files().list(
            pageSize=1000,
            q="'184b0EFkuuI1nbeXQZACxQEcJlwzzPyvh' in parents",
            pageToken=page_token,
            fields="nextPageToken, files(id, name)").execute()
        items = response.get('files', [])
        if not items:
            print('No files found')
        else:
            for item in items:
                # print('Found file: %s (%s)' % (item['name'], item['id']))
                all_files.append((item['name'], item['id']))
            all_files = list(set(all_files))
            print(len(all_files))
        # Advance the page token outside the else branch, otherwise an empty
        # page would re-request the same page forever.
        page_token = response.get('nextPageToken', None)
        print('next page token', page_token)
        if page_token is None:
            break
    return all_files
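The two Drive listings above use different q filters ('name contains …' and '… in parents'); the query language composes with and, so a hedged sketch restricting the folder listing to non-trashed zip-style names looks like this (the folder ID is the same example value used above, service the same Drive client):

query = ("'184b0EFkuuI1nbeXQZACxQEcJlwzzPyvh' in parents "
         "and name contains 'takeout' and trashed = false")
response = service.files().list(q=query, spaces='drive',
                                fields='nextPageToken, files(id, name)').execute()
for f in response.get('files', []):
    print(f['name'], f['id'])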
Example No. 16
0
def reminder(date, slot):
    creds = None
    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)

    service = build('calendar', 'v3', credentials=creds)

    event = {
        'summary': 'appointment with doctor',
        'location': '',
        'description': 'visit doctor and get yourself checked',
        'start': {
            'dateTime': '2020-05-11T09:00:30-00:30',
            'timeZone': 'America/Los_Angeles',
        },
        'end': {
            'dateTime': '2020-05-11T10:00:30-00:30',
            'timeZone': 'America/Los_Angeles',
        },
        'recurrence': [],
        'attendees': [{
            'email': '*****@*****.**'
        }],
        'reminders': {
            'useDefault':
            False,
            'overrides': [
                {
                    'method': 'email',
                    'minutes': 24 * 60
                },
                {
                    'method': 'popup',
                    'minutes': 10
                },
            ],
        },
    }

    try:
        event = service.events().insert(calendarId='primary',
                                        body=event).execute()
        print('Event created: %s' % (event.get('htmlLink')))
        return "event created"
    except Exception:
        return "something went wrong"
Example No. 17
0
def default_google_access_token():
    # get token for google-based auth use, assumes application default credentials work for specified environment
    credentials, _ = google.auth.default(scopes=['openid', 'email', 'profile'])
    credentials.refresh(Request())

    return credentials.token
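A hedged usage sketch for default_google_access_token() above, passing the token as a Bearer credential the way the IAP examples later in this listing do; the URL is a placeholder, not a real endpoint.

import requests

token = default_google_access_token()
resp = requests.get('https://example.googleapis.com/v1/some/resource',
                    headers={'Authorization': 'Bearer {}'.format(token)})
print(resp.status_code)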
Example No. 18
0
 def refresh_token(self):
     if self.credentials.expired:
         self.credentials.refresh(Request())
         _l.debug('token refreshed')
Example No. 19
0
def create_calendar(data):
    # AUTHENTICATION
    SCOPES = ['https://www.googleapis.com/auth/calendar']

    creds = None
    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.

    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)

    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server()
        # Save the credentials for the next run
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)

    service = build('calendar', 'v3', credentials=creds)

    # CREATE CALENDAR
    calendar = {
        'summary': settings['GOOGLE-CalendarName'],
        'timeZone': 'Asia/Manila'
    }

    created_calendar = service.calendars().insert(body=calendar).execute()
    print("CREATED CALENDAR:", created_calendar['id'])
    for i in data:
        print(i)
        start_time = (datetime.datetime.strptime(
            DAYS_OF_THE_WEEK[i['day']][1], '%Y-%m-%dT%H:%M:%S%z').replace(
                hour=int(i['start-time'][0]),
                minute=int(i['start-time'][1]),
                second=int(i['start-time'][2]))).isoformat()
        end_time = (datetime.datetime.strptime(
            DAYS_OF_THE_WEEK[i['day']][1], '%Y-%m-%dT%H:%M:%S%z').replace(
                hour=int(i['end-time'][0]),
                minute=int(i['end-time'][1]),
                second=int(i['end-time'][2]))).isoformat()
        try:
            event = {
                'summary':
                i['description'],
                'description':
                f"{i['subject_code']} ",  #@{i['room']}
                # 'location': i['location'],
                'start': {
                    'dateTime': start_time,
                    'timeZone': settings['GOOGLE-TimeZone'],
                },
                'end': {
                    'dateTime': end_time,
                    'timeZone': settings['GOOGLE-TimeZone'],
                },
                'reminders': {
                    'useDefault': False,
                    'overrides': [
                        {
                            'method': 'popup',
                            'minutes': 60
                        },
                    ],
                },
                'recurrence': [
                    'RRULE:FREQ=WEEKLY;UNTIL=%s' % (settings['SIS-SemEnd']),
                ]
            }
            event = service.events().insert(calendarId=created_calendar['id'],
                                            body=event).execute()
            print('Event created: %s' % (event.get('htmlLink')))
        except Exception as e:
            service.calendars().delete(
                calendarId=created_calendar['id']).execute()
            raise e
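create_calendar() above builds 'RRULE:FREQ=WEEKLY;UNTIL=%s' directly from settings['SIS-SemEnd']; RFC 5545 expects UNTIL as a basic-format date ('YYYYMMDD') or UTC date-time ('YYYYMMDDTHHMMSSZ'). A hedged sketch of normalising an ISO-style config value first, assuming SIS-SemEnd looks like '2020-05-30' (that format is an assumption about the config, not something stated above):

import datetime

# Assumption: SIS-SemEnd is an ISO date such as '2020-05-30'.
sem_end = datetime.datetime.strptime(settings['SIS-SemEnd'], '%Y-%m-%d')
recurrence = ['RRULE:FREQ=WEEKLY;UNTIL=%s' % sem_end.strftime('%Y%m%d')]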
    def __init__(self, mongo, logger):
        """ METHOD SETS ATTRIBUTES AND CONNECTS TO GMAIL API

        Args:
            mongo ([object]): MONGODB OBJECT
            logger ([object]): LOGGER OBJECT
        """

        self.SCOPES = ["https://mail.google.com/"]

        self.logger = logger

        self.creds = None

        self.service = None

        self.users = mongo.users

        self.emails = mongo.emails

        self.ids_to_delete = []

        self.token_file = f"{THIS_FOLDER}/creds/token.json"

        self.creds_file = f"{THIS_FOLDER}/creds/credentials.json"

        try:

            self.logger.INFO("CONNECTING TO GMAIL...")

            if os.path.exists(self.token_file):

                self.creds = Credentials.from_authorized_user_file(
                    self.token_file, self.SCOPES)

            # If there are no (valid) credentials available, let the user log in.
            if not self.creds or not self.creds.valid:

                if self.creds and self.creds.expired and self.creds.refresh_token:

                    self.creds.refresh(Request())

                else:

                    flow = InstalledAppFlow.from_client_secrets_file(
                        self.creds_file, self.SCOPES)

                    self.creds = flow.run_local_server(port=0)

                # Save the credentials for the next run
                with open(self.token_file, 'w') as token:

                    token.write(self.creds.to_json())

            self.service = build('gmail', 'v1', credentials=self.creds)

            self.logger.INFO("CONNECTED TO GMAIL!\n")

        except Exception as e:
            print(e)
            self.logger.CRITICAL("FAILED TO CONNECT TO GMAIL!\n")
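Once the constructor above has connected, a minimal, hedged sketch of pulling recent message IDs from the resulting client; gmail stands in for the service stored on self.service, and userId='me' / maxResults are standard Gmail API list parameters.

# Hedged sketch: `gmail` is the service the constructor stores on self.service.
messages = gmail.users().messages().list(userId='me', maxResults=10).execute()
for message in messages.get('messages', []):
    print(message['id'])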
def createMeeting(group_name, emails, date, startTime, endTime):
    #group_name will be a string
    #emails will be a list of strings
    #date will be a string in yyyy-mm-dd format
    #startTime will be in hh:mm:ss
    #timezone will always be eastern

    creds = None
    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('virtualstuddybuddy/token.pickle'):
        with open('virtualstuddybuddy/token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'virtualstuddybuddy/oldCredentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('virtualstuddybuddy/token.pickle', 'wb') as token:
            pickle.dump(creds, token)

    service = build('calendar', 'v3', credentials=creds)

    event = {
        'summary': group_name + " meeting",
        'attendees': [{
            'email': str(e)
        } for e in emails],
        'start': {
            'dateTime': str(date) + "T" + str(startTime),
            'timeZone': 'America/New_York'
        },
        'end': {
            'dateTime': str(date) + "T" + str(endTime),
            'timeZone': 'America/New_York'
        },
        'conferenceData': {
            'createRequest': {
                "conferenceSolutionKey": {
                    "type": "hangoutsMeet"
                },
                "requestId": group_name,
            }
        }
    }

    event = service.events().insert(calendarId='primary',
                                    sendUpdates="all",
                                    body=event,
                                    conferenceDataVersion=1).execute()


# g = "vsb test"
# emails = ["*****@*****.**"]#, "*****@*****.**","*****@*****.**",]
# date = "2020-11-24"
# startTime = "10:00:00"
# endTime = "14:00:00"
# createMeeting(g, emails, date, startTime, endTime)
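createMeeting() above reuses group_name as the Meet createRequest requestId; the Calendar API treats requestId as a client-generated unique ID, so repeating it makes a second call for the same group look like a retry of the first. A hedged sketch generating a fresh ID per call:

import uuid

conference_data = {
    'createRequest': {
        'conferenceSolutionKey': {'type': 'hangoutsMeet'},
        # A fresh, unique requestId for every creation attempt.
        'requestId': str(uuid.uuid4()),
    }
}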
from google.auth.transport.requests import Request
from oauth2client.service_account import ServiceAccountCredentials

# Google Sheets Auth
SCOPES = ['https://www.googleapis.com/auth/spreadsheets']
TOKEN_FILE = 'token.pickle'
CREDENTIAL_FILE = 'credentials.json'
credentials = None

if os.path.exists(TOKEN_FILE):
    with open(TOKEN_FILE, 'rb') as token:
        credentials = pickle.load(token)

if not credentials or not credentials.valid:
    if credentials and credentials.expired and credentials.refresh_token:
        credentials.refresh(Request())
    else:
        flow = InstalledAppFlow.from_client_secrets_file(
            CREDENTIAL_FILE, SCOPES)
        credentials = flow.run_local_server(port=10800)
    # Save the credentials for the next run
    with open(TOKEN_FILE, 'wb') as token:
        pickle.dump(credentials, token)

service = discovery.build('sheets', 'v4', credentials=credentials)

#########################################################################################################


def exportreport(AdvertiserName, AdvertiserID, spreadsheet_ID):
    print("Checking for open sales")
Example No. 23
0
def check_deploy_status(args):
    logging.info("check deployment status")
    # Figure out what environment we're running in and get some preliminary
    # information about the service account.
    credentials, _ = google.auth.default(scopes=[IAM_SCOPE])
    if isinstance(credentials, google.oauth2.credentials.Credentials):
        raise Exception('make_iap_request is only supported for service '
                        'accounts.')

    # For service accounts using the Compute Engine metadata service,
    # service_account_email isn't available until refresh is called.
    credentials.refresh(Request())

    signer_email = credentials.service_account_email
    if isinstance(credentials,
                  google.auth.compute_engine.credentials.Credentials):
        signer = google.auth.iam.Signer(Request(), credentials, signer_email)
    else:
        # A Signer object can sign a JWT using the service account's key.
        signer = credentials.signer

    # Construct OAuth 2.0 service account credentials using the signer
    # and email acquired from the bootstrap credentials.
    service_account_credentials = google.oauth2.service_account.Credentials(
        signer,
        signer_email,
        token_uri=OAUTH_TOKEN_URI,
        additional_claims={'target_audience': may_get_env_var("CLIENT_ID")})

    google_open_id_connect_token = get_google_open_id_connect_token(
        service_account_credentials)
    # Wait up to 30 minutes for IAP access test.
    retry_credit = 180
    status_code = 0
    while retry_credit > 0:
        retry_credit -= 1
        sleep(10)
        try:
            resp = requests.request(
                METHOD,
                "https://%s.endpoints.%s.cloud.goog" %
                (args.deployment, args.project),
                headers={
                    'Authorization':
                    'Bearer {}'.format(google_open_id_connect_token)
                })
            status_code = resp.status_code
            if resp.status_code == 200:
                break
        except Exception:
            logging.info("IAP not ready, exception caught, retry credit: %s" %
                         retry_credit)
            continue
        logging.info("IAP not ready, retry credit: %s" % retry_credit)

    if status_code != 200:
        raise RuntimeError(
            "IAP endpoint not ready after 30 minutes, time out...")
    else:
        # Optionally upload ssl cert
        if os.listdir(SSL_DIR) == []:
            for sec in ["envoy-ingress-tls", "letsencrypt-prod-secret"]:
                os.system("kubectl get secret %s -n kubeflow -o yaml > %s" %
                          (sec, os.path.join(SSL_DIR, sec + ".yaml")))
            os.system("gsutil cp %s/* gs://%s/%s/" %
                      (SSL_DIR, SSL_BUCKET, args.cert_group))
Example No. 24
0
def main():
    """Shows basic usage of the Admin SDK Directory API.
    Prints the emails and names of the first 10 users in the domain.
    """
    creds = None
    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)

    # delete unused users
    service = build('admin', 'directory_v1', credentials=creds)

    page_token = None
    params = {'customer': 'my_customer'}

    page = 0
    users_total = 0
    while True:
        try:
            if page_token:
                params['pageToken'] = page_token
            current_page = service.users().list(**params).execute()

            users = current_page.get('users', [])
            if not users:
                print('No users in the domain.')
                break
            else:
                users_total = users_total + len(users)
                print('Users page: ', page)
                for user in users:
                    last_login_time = datetime.strptime(user['lastLoginTime'], '%Y-%m-%dT%H:%M:%S.%fZ')
                    # here go the date
                    if last_login_time < datetime(2016, 1, 1):
                        print('delete mail')
                        print(user['primaryEmail'])
                        service.users().delete(userKey=user['id']).execute()

            page_token = current_page.get('nextPageToken')
            page = page + 1

            if not page_token:
                break

        except Exception as e:
            print('errors:', e)
            break

    print(users_total)
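A hedged variant of the last-login check above that also tolerates accounts with no lastLoginTime field at all (whether such accounts appear depends on the domain, so treating a missing value as stale is an assumption):

from datetime import datetime

def is_stale(user, cutoff=datetime(2016, 1, 1)):
    # Missing lastLoginTime is treated as "never logged in", hence stale.
    raw = user.get('lastLoginTime')
    if raw is None:
        return True
    return datetime.strptime(raw, '%Y-%m-%dT%H:%M:%S.%fZ') < cutoff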
Example No. 25
0
File: 1.py Project: yl980106/test
def serviceaccountfactory(credentials='credentials.json',
                          token='token.pickle',
                          path=None,
                          list_projects=False,
                          list_sas=None,
                          create_projects=None,
                          max_projects=12,
                          enable_services=None,
                          services=['iam', 'drive'],
                          create_sas=None,
                          delete_sas=None,
                          download_keys=None):
    selected_projects = []
    proj_id = loads(open(credentials, 'r').read())['installed']['project_id']
    creds = None
    if os.path.exists(token):
        with open(token, 'rb') as t:
            creds = pickle.load(t)

    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                credentials, SCOPES)
            creds = flow.run_local_server(port=0)
        with open(token, 'wb') as t:
            pickle.dump(creds, t)

    cloud = build('cloudresourcemanager', 'v1', credentials=creds)
    iam = build('iam', 'v1', credentials=creds)
    serviceusage = build('serviceusage', 'v1', credentials=creds)

    projs = None
    while projs is None:
        try:
            projs = _get_projects(cloud)
        except HttpError as e:
            if loads(e.content.decode(
                    'utf-8'))['error']['status'] == 'PERMISSION_DENIED':
                try:
                    serviceusage.services().enable(
                        name=
                        'projects/%s/services/cloudresourcemanager.googleapis.com'
                        % proj_id).execute()
                except HttpError as e:
                    print(e._get_reason())
                    input('Press Enter to retry.')
    if list_projects:
        return _get_projects(cloud)
    if list_sas:
        return _list_sas(iam, list_sas)
    if create_projects:
        if create_projects > 0:
            current_count = len(_get_projects(cloud))
            if current_count + create_projects < max_projects:
                print('Creating %d projects' % create_projects)
                nprjs = _create_projects(cloud, create_projects)
                selected_projects = nprjs
            else:
                print('%d projects already exist!' % current_count)
        else:
            print('Please specify a number larger than 0.')
    if enable_services:
        ste = []
        ste.append(enable_services)
        if enable_services == '~':
            ste = selected_projects
        elif enable_services == '*':
            ste = _get_projects(cloud)
        services = [i + '.googleapis.com' for i in services]
        print('Enabling services')
        _enable_services(serviceusage, ste, services)
    if create_sas:
        stc = []
        stc.append(create_sas)
        if create_sas == '~':
            stc = selected_projects
        elif create_sas == '*':
            stc = _get_projects(cloud)
        for i in stc:
            _create_remaining_accounts(iam, i)
    if download_keys:
        try:
            os.mkdir(path)
        except FileExistsError:
            pass
        std = []
        std.append(download_keys)
        if download_keys == '~':
            std = selected_projects
        elif download_keys == '*':
            std = _get_projects(cloud)
        _create_sa_keys(iam, std, path)
    if delete_sas:
        std = []
        std.append(delete_sas)
        if delete_sas == '~':
            std = selected_projects
        elif delete_sas == '*':
            std = _get_projects(cloud)
        for i in std:
            print('Deleting service accounts in %s' % i)
            _delete_sas(iam, i)
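A minimal, hedged usage sketch for serviceaccountfactory() above, exercising only the list_projects branch with the default credential and token file names (all other keyword arguments keep their defaults):

projects = serviceaccountfactory(credentials='credentials.json',
                                 token='token.pickle',
                                 list_projects=True)
print('Accessible projects:', projects)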
Example No. 26
0
def main():
    # A quick check to see if the token already exists.
    if (not (path.exists("token.pickle"))):
        tkinter.messagebox.showinfo( "Excel to Google Event", "You will be prompted to login & give permission to Google Cal")
    
    #This is taken directly from the Google API Quickstart guide
    """Shows basic usage of the Google Calendar API.
    """
    creds = None
    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)


    
    #Here the service is built with credentials & we can move on to creating the event
    service = build('calendar', 'v3', credentials=creds)

    #Build gspread
    client = gspread.authorize(creds)

    #Open gspread
    gsheet = client.open("Nexus Recording Schedule - Master")
    gworksheet = gsheet.worksheet("2-Schedule Recording-Instructional Day")

    #Init & print list of Cals
    calHolder = []
    page_token = None
    while True:
      calendar_list = service.calendarList().list(pageToken=page_token).execute()
      for calendar_list_entry in calendar_list['items']:
        print (calendar_list_entry['summary'])
        calHolder.append({"in": calendar_list_entry['summary'], "cal_id":calendar_list_entry['id']})
      page_token = calendar_list.get('nextPageToken')
      if not page_token:
        break

    #Append to single string in order to display in msgbox
    cal_msg = "Please match the numbers for the Calendars for each location, if no match exists enter \'-1\' " + '\n' +  "Calendars on your account: " + '\n'
    cal_msg2 = '\n'
    index = 0
    for dicts in calHolder:
        msg = '\n' + ' [ ' + str(index) + ' ]:   ' + dicts["in"] + '          '
        cal_msg2 += msg
        index += 1

    #Adding on sheets service
    sheets_service = build('sheets', 'v4', credentials=creds)



    
    #Prompt user for selection via messagebox
    cal_msg2 += '\n'
    print(cal_msg)
    print(cal_msg2)
    USER_INP = -1

    #Yes this is janky/bad code, but I mean cmon
    sheet3 = sheets_service.spreadsheets()
    result3 = sheet3.values().get(spreadsheetId=SPREADSHEET_ID, range=INSTRUCTORS_SHEET_RANGE).execute()
    values3 = result3.get('values', [])
    for row in values3:
        if (len(row) >= 5):
            print(row[4])
        if (len(row) >= 16):
            print("    MGR: " + row[15])
        if (len(row) >= 17):
            print("    IPS: " + row[16])
        if (len(row) >= 18):
            print("    MA: " + row[17])
        if (len(row) >= 19):
            print("    Other: " + row[18])
        if (len(row) >= 21):
            print("         " + row[19] + ": " + row[20])
        if (len(row) >= 22):
            print("    CCBK: " + row[21])
        if (len(row) >= 23):
            print("    o-CC: " + row[22])
        if (len(row) >= 25):
            print("    nc-mary: " + row[24])
        if (len(row) >= 26):
            print("    nc-dru: " + row[25])
        if (len(row) >= 27):
            print("    nc-paul: " + row[26])

    dict_of_locations = {}
    if path.exists("calconfig.pickle"):
        with open('calconfig.pickle', 'rb') as handle:
            dict_of_locations = pickle.load(handle)
    else:
        if not values3:
            print('No data found.')
        else:
            for row in values3:
                if (len(row) >= 5):
                    if row[4]:
                        dict_of_locations[row[4]] = -1
        with open('calconfig.pickle', 'wb') as handle:
            pickle.dump(dict_of_locations, handle, protocol=pickle.HIGHEST_PROTOCOL)
    
    list_of_variables = []

    master = Tk()
    def deleter():
        sys.exit(1)
    master.protocol("WM_DELETE_WINDOW", deleter)

    def callback():
        for i, location in enumerate(dict_of_locations):
            print(list_of_variables[i].get())
            dict_of_locations[location] = list_of_variables[i].get()
        master.destroy()
        master.quit()
        print(dict_of_locations)
        with open('calconfig.pickle', 'wb') as handle:
            pickle.dump(dict_of_locations, handle, protocol=pickle.HIGHEST_PROTOCOL)

    
    for i in range(len(dict_of_locations)):
        list_of_variables.append(IntVar(master))
        
    tk.Label(master, text=cal_msg, padx = 10, pady = 5, anchor = 'center').grid(row=0)
    tk.Label(master, text=cal_msg2, padx = 10, pady = 5, justify = 'left').grid(row=1)
    endrow = 0
    for i, location in enumerate(dict_of_locations):
        tk.Label(master, text=location, padx = 10, pady = 5).grid(row=i+2)
        ee = tk.Entry(master, textvariable=list_of_variables[i])
        ee.delete(0, END)
        ee.insert(0, dict_of_locations[location])
        ee.grid(row = i+2, column = 1)
        endrow = i+2

    endrow += 2

    
    b = Button(master, text="Submit", width=10, command=callback)
    b.grid(row=endrow+2, column=0)
    master.mainloop()

    #Convert indexes to CAL ID
    for i, location in enumerate(dict_of_locations):
        for j, cal in enumerate(calHolder):
            if (dict_of_locations[location] == j):
                dict_of_locations[location] = cal["cal_id"]
                
    print(dict_of_locations)



    #Get what kind of method to select events
    search_method = 0
    window = Tk()
    def deleter():
        sys.exit(1)
    window.protocol("WM_DELETE_WINDOW", deleter)
    v = IntVar(window)
    v.set(0)

    def ShowChoice():
        print(v.get())
        if (v.get() == 0):
            sys.exit(1)
        search_method = v.get()
        window.destroy()
        window.quit()

    tk.Label(window, 
             text="""Choose method for selecting events:""",
             padx = 20, pady = 5).pack()
    tk.Radiobutton(window, 
                  text="By Date RANGE (MM/DD/YYYY)",
                  indicatoron = 0,
                  width = 20,
                  padx = 20, 
                  variable=v, 
                  command=ShowChoice,
                  value=1).pack()
    tk.Radiobutton(window, 
                  text="By Row In RANGE ex: (1-9999)",
                  indicatoron = 0,
                  width = 20,
                  padx = 20, 
                  variable=v, 
                  command=ShowChoice,
                  value=2).pack()
    tk.Radiobutton(window, 
                  text="By Row in LIST ex: (64, 65, 77, 81)",
                  indicatoron = 0,
                  width = 20,
                  padx = 20, 
                  variable=v, 
                  command=ShowChoice,
                  value=3).pack()
        
    window.mainloop()

    search_method = v.get()
    print(search_method)
    #sys.exit(1)

    # Call the Sheets API
    sheet = sheets_service.spreadsheets()
    result = sheet.values().get(spreadsheetId=SPREADSHEET_ID,
                                range=SAMPLE_RANGE_NAME).execute()
    values = result.get('values', [])
    print (len(values))

    #Prompt & convert date range
    search = []
    search_indexes = []
    if search_method == 1:
        START_DATE = simpledialog.askstring(title="Date From (inclusive)", prompt="Enter the start of the date range (MM/DD/YYYY)" )
        RANGE_START = datetime.datetime.strptime(START_DATE, '%m/%d/%Y')
        END_DATE = simpledialog.askstring(title="Date Until (inclusive)", prompt="Enter the end of the date range (MM/DD/YYYY)" )
        RANGE_END = datetime.datetime.strptime(END_DATE, '%m/%d/%Y')
    if search_method == 2:
        START_ROW = simpledialog.askinteger(title="First Row (Inclusive):", prompt="Enter the first row:" )
        END_ROW = simpledialog.askinteger(title="Last Row (Inclusive):", prompt="Enter the Last row:" )
        if (START_ROW > END_ROW):
            print("startstop error 1")
            sys.exit(1)
    if search_method == 3:
        USER_LIST = simpledialog.askstring(title="Enter List of Rows:", prompt="Enter list of rows separated by Commas. Ex: (16, 22, 2, 1999)" )
        ROW_LIST = USER_LIST.split(",")

    #Search for valid entries within range
    s_index = 0
    if not values:
        print('No data found.')
    else:
        for row in values:
            print(row)
            if not (len(row) > 26):
                continue
            if search_method == 1:
                TEST_DATE = datetime.datetime.strptime(row[0], '%m/%d/%Y')
                if (RANGE_START <= TEST_DATE <= RANGE_END):
                    search.append(row)
                    search_indexes.append(s_index)
                    print('0 ' + row[0] + ' 1 ' + row[1] + ' 4 ' + row[4] + ' 5 ' + row[5] + ' 6 ' + row[6] + ' 7 ' + row[7] + ' 8 ' + row[8] + ' 9 ' + row[9] + ' 10 ' + row[10] + ' 11 ' + row[11] + ' 12 ' + row[12] + ' 13 ' + row[13] + ' 18 ' + row[18] + ' 19 ' + row[19] + ' 25 ' + row[25] + ' 26 ' + row[26])
            if search_method == 2:
                if (START_ROW <= int(row[26]) <= END_ROW):
                    search.append(row)
                    search_indexes.append(s_index)
                    print('0 ' + row[0] + ' 1 ' + row[1] + ' 4 ' + row[4] + ' 5 ' + row[5] + ' 6 ' + row[6] + ' 7 ' + row[7] + ' 8 ' + row[8] + ' 9 ' + row[9] + ' 10 ' + row[10] + ' 11 ' + row[11] + ' 12 ' + row[12] + ' 13 ' + row[13] + ' 18 ' + row[18]  + ' 19 ' + row[19] + ' 25 ' + row[25] + ' 26 ' + row[26])
            if search_method == 3:
                for rowval in ROW_LIST:
                    if (int(rowval) == int(row[26])):
                        search.append(row)
                        search_indexes.append(s_index)
                        print('0 ' + row[0] + ' 1 ' + row[1] + ' 4 ' + row[4] + ' 5 ' + row[5] + ' 6 ' + row[6] + ' 7 ' + row[7] + ' 8 ' + row[8] + ' 9 ' + row[9] + ' 10 ' + row[10] + ' 11 ' + row[11] + ' 12 ' + row[12] + ' 13 ' + row[13] + ' 18 ' + row[18]  + ' 19 ' + row[19] + ' 25 ' + row[25] + ' 26 ' + row[26])
            s_index += 1

    #Read in instructor emails
    inst_to_email = {}
    for row in values3:
        if (len(row) >= 2):
            if row[1]:
                print('0: ' + row[0] + " 20: " + row[1])
                inst_to_email[row[0]] = row[1]
            else:
                print('0: ' + row[0] + " 20: [email protected]")
                inst_to_email[row[0]] = "*****@*****.**"
        else:
            print('0: ' + row[0] + " 20: [email protected]")
            inst_to_email[row[0]] = "*****@*****.**"

    #Read in staff emails
    staff_to_email = {}
    for row in values3:
        if (len(row) >= 21):
            if row[20]:
                print('0: ' + row[19] + " 1: " + row[20])
                staff_to_email[row[19]] = row[20]
            else:
                print('0: ' + row[19] + " 1: [email protected]")
                staff_to_email[row[19]] = "*****@*****.**"
        else:
            if (len(row) >= 20):
                if row[19]:
                    print('0: ' + row[19] + " 1: [email protected]")
                    staff_to_email[row[19]] = "*****@*****.**"

    #Setup list of events for printing
    event_printlist = []
    event_skiplist = []

    #Begin creating & sending events
    s_index = 0
    for row in search:
        #skip if the event was already made
        if row[25] != 'N':
            event_skiplist.append("Row " + row[26] + ":  " + row[10] + " " + row[0] + " was already marked as: " + row[25])
            print("skipped " + row[10] + " " + row[0])
            continue
        #Convert location to CAL ID
        if dict_of_locations[row[1]] == -1:
            event_skiplist.append("Row " + row[26] + ":  "  + row[10] + " " + row[0] + " Calendar for location " + row[1] + " was not found.")
            print("skipped " + row[10] + " " + row[0] + " No Cal")
            continue
        
        gworksheet.update_cell(int(row[26]), 26, "Y")
        s_index += 1
        
        #Get Title/Summary
        summary_in = (row[10] + " - " + row[9])
        if row[11]:
            summary_in += (" - " + row[11] + " MGR ")
        if row[12]:
            summary_in += (" - " + row[12] + " IPS ")
        if row[13]:
            summary_in += (" - " + row[13] + " MA ")
        if row[14]:
            summary_in += (" - " + row[14] + " Backup ")

        #Get Location
        loc_in = (row[1])
        if (row[1] == "Chrysler Studio"):
            loc_in = "Chrysler Studio 109 B"
        #Get Desc
        desc_in = (row[10])

        #Get Start Time and Date
        start_dts = row[0] + ' ' + row[5]

        #Get End Time and Date
        end_dts = row[0] + ' ' + row[6]

        #Date & timestamp stuff is janky because the JSON object "event" wants RFC 3339 formatted time,
        #whereas the Excel file could have any kind of time input, so using strptime with concatenated strings is probably the most
        #flexible approach for now
        dto_start = datetime.datetime.strptime(start_dts, '%m/%d/%Y %I:%M %p')
        dto_end = datetime.datetime.strptime(end_dts, '%m/%d/%Y %I:%M %p')
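        # Hedged illustration of the comment above: strptime parses the sheet's
        # "MM/DD/YYYY H:MM AM/PM" strings and isoformat() emits the RFC 3339
        # shape the Calendar API expects, e.g.
        #   datetime.datetime.strptime('01/20/2020 9:00 AM', '%m/%d/%Y %I:%M %p').isoformat("T")
        #   -> '2020-01-20T09:00:00'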

        #Get Attendees 
        #List of attendees is a "list of dicts" which is the input the JSON object "event" wants
        instructor = inst_to_email[row[9]]
        print(instructor)

        #Staff
        list_of_emailed_names = []
        list_of_emailed_names.append(row[9])
        list_of_attendees = [
            {'email': instructor}
            ]
        if row[11]:
            if not (row[11] in list_of_emailed_names):
                their_email = staff_to_email[row[11]]
                list_of_attendees.append({'email': their_email})
                list_of_emailed_names.append(row[11])
                print(their_email)
        if row[12]:
            if not (row[12] in list_of_emailed_names):
                their_email = staff_to_email[row[12]]
                list_of_attendees.append({'email': their_email})
                list_of_emailed_names.append(row[12])
                print(their_email)
        if row[13]:
            if not (row[13] in list_of_emailed_names):
                their_email = staff_to_email[row[13]]
                list_of_attendees.append({'email': their_email})
                list_of_emailed_names.append(row[13])
                print(their_email)
        if row[14]:
            if not (row[14] in list_of_emailed_names):
                their_email = staff_to_email[row[14]]
                list_of_attendees.append({'email': their_email})
                list_of_emailed_names.append(row[14])
                print(their_email)

        #Credit/Noncredit list(?) WIP
        print(row[18])
        if (row[18] == "Credit"):
            print("It's a Credit Course. ")
            for roww in values3:
                if (len(roww) >= 23):
                    if (roww[22]):                        
                        if not (roww[22] in list_of_emailed_names):
                            list_of_emailed_names.append(roww[22])
                            their_email = staff_to_email[roww[22]]
                            list_of_attendees.append({'email': their_email, 'optional': 1})

        else:
            print("It's a Non-Credit Course. ")
            if (row[11] == "Mary Lynn"):
                for roww in values3:
                    if (len(roww) >= 25):
                        if not (roww[24] in list_of_emailed_names):
                            list_of_emailed_names.append(roww[24])
                            their_email = staff_to_email[roww[24]]
                            list_of_attendees.append({'email': their_email, 'optional': 1})
            if (row[11] == "Dru"):
                for roww in values3:
                    if (len(roww) >= 26):
                        if not (roww[25] in list_of_emailed_names):
                            list_of_emailed_names.append(roww[25])
                            their_email = staff_to_email[roww[25]]
                            list_of_attendees.append({'email': their_email, 'optional': 1})
            if (row[11] == "Paul"):
                    if (len(roww) >= 27):
                        if not (roww[26] in list_of_emailed_names):
                            list_of_emailed_names.append(roww[26])
                            their_email = staff_to_email[roww[26]]
                            list_of_attendees.append({'email': their_email, 'optional': 1})



        #The actual JSON style event object; the time zone is static because varying it isn't really necessary
        event = {
          'summary': summary_in,
          'location': loc_in,
          'description': desc_in,
          'start': {
            'dateTime': dto_start.isoformat("T"),
            'timeZone': 'US/Eastern',
          },
          'end': {
            'dateTime': dto_end.isoformat("T"),
            'timeZone': 'US/Eastern',
          },
         # 'recurrence': [
         #   'RRULE:FREQ=DAILY;COUNT=2'
         # ],
          'attendees': list_of_attendees,
          'reminders': {
            'useDefault': False,
            'overrides': [
              {'method': 'email', 'minutes': 24 * 60},
              {'method': 'popup', 'minutes': 10},
            ],
          },
        }

        print(dict_of_locations[row[1]])
        #Uses the service to insert the event
        #event = service.events().insert(calendarId=dict_of_locations[row[1]], body=event, sendUpdates='all').execute()
        #print ('Event created: %s' % (event.get('htmlLink')))
        #event_link = event.get('htmlLink')
        event_link = "google.com"
        event_printlist.append({'summary':summary_in, 'date':row[0], 'link':event_link})
        print(event)


    f = open("CreatedEvents.html", 'w')
    f.write("<h1>Created the Following Events:</h1>" + '\n' + "<blockquote>")
    for event in event_printlist:
        f.write('\n' + "<p>" + event['summary'] + ' ' + event['date'] + ':' + "</p>")
        f.write('\n' + "<p><a href=\"" + event['link'] + "\">" + event['link'] + "</a></p>")
    f.write('\n' + "</blockquote>")
    f.write('\n' + "<h3>Skipped the Following Events:</h3>" + '\n')
    for event in event_skiplist:
        f.write('\n' + "<p>" + event + "</p>")
    f.close()
    os.startfile("CreatedEvents.html")
    sys.exit(0)
def make_iap_request(url, client_id, method='GET', **kwargs):
    """Makes a request to an application protected by Identity-Aware Proxy.

    Args:
      url: The Identity-Aware Proxy-protected URL to fetch.
      client_id: The client ID used by Identity-Aware Proxy.
      method: The request method to use
              ('GET', 'OPTIONS', 'HEAD', 'POST', 'PUT', 'PATCH', 'DELETE')
      **kwargs: Any of the parameters defined for the request function:
                https://github.com/requests/requests/blob/master/requests/api.py

    Returns:
      The page body, or raises an exception if the page couldn't be retrieved.
    """
    # Figure out what environment we're running in and get some preliminary
    # information about the service account.
    bootstrap_credentials, _ = google.auth.default(
        scopes=[IAM_SCOPE])
    if isinstance(bootstrap_credentials,
                  google.oauth2.credentials.Credentials):
        raise Exception('make_iap_request is only supported for service '
                        'accounts.')
    elif isinstance(bootstrap_credentials,
                    google.auth.app_engine.Credentials):
        requests_toolbelt.adapters.appengine.monkeypatch()

    # For service accounts using the Compute Engine metadata service,
    # service_account_email isn't available until refresh is called.
    bootstrap_credentials.refresh(Request())

    signer_email = bootstrap_credentials.service_account_email
    if isinstance(bootstrap_credentials,
                  google.auth.compute_engine.credentials.Credentials):
        # Since the Compute Engine metadata service doesn't expose the service
        # account key, we use the IAM signBlob API to sign instead.
        # In order for this to work:
        #
        # 1. Your VM needs the https://www.googleapis.com/auth/iam scope.
        #    You can specify this specific scope when creating a VM
        #    through the API or gcloud. When using Cloud Console,
        #    you'll need to specify the "full access to all Cloud APIs"
        #    scope. A VM's scopes can only be specified at creation time.
        #
        # 2. The VM's default service account needs the "Service Account Actor"
        #    role. This can be found under the "Project" category in Cloud
        #    Console, or roles/iam.serviceAccountActor in gcloud.
        signer = google.auth.iam.Signer(
            Request(), bootstrap_credentials, signer_email)
    else:
        # A Signer object can sign a JWT using the service account's key.
        signer = bootstrap_credentials.signer

    # Construct OAuth 2.0 service account credentials using the signer
    # and email acquired from the bootstrap credentials.
    service_account_credentials = google.oauth2.service_account.Credentials(
        signer, signer_email, token_uri=OAUTH_TOKEN_URI, additional_claims={
            'target_audience': client_id
        })

    # service_account_credentials gives us a JWT signed by the service
    # account. Next, we use that to obtain an OpenID Connect token,
    # which is a JWT signed by Google.
    google_open_id_connect_token = get_google_open_id_connect_token(
        service_account_credentials)

    # Fetch the Identity-Aware Proxy-protected URL, including an
    # Authorization header containing "Bearer " followed by a
    # Google-issued OpenID Connect token for the service account.
    resp = requests.request(
        method, url,
        headers={'Authorization': 'Bearer {}'.format(
            google_open_id_connect_token)}, **kwargs)
    if resp.status_code == 403:
        raise Exception('Service account {} does not have permission to '
                        'access the IAP-protected application.'.format(
                            signer_email))
    elif resp.status_code != 200:
        raise Exception(
            'Bad response from application: {!r} / {!r} / {!r}'.format(
                resp.status_code, resp.headers, resp.text))
    else:
        return resp.text
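A hedged usage sketch for make_iap_request() above; the URL and client ID are placeholders for whatever IAP-protected endpoint is being probed (check_deploy_status earlier in this listing builds both from its own arguments):

body = make_iap_request(
    'https://mydeployment.endpoints.myproject.cloud.goog',
    client_id='1234567890-example.apps.googleusercontent.com',
    method='GET')
print(body)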
Example No. 28
0
def getCalendarEvents():
    """Shows basic usage of the Google Calendar API.
    Prints the start and name of the next 10 events on the user's calendar.
    """
    creds = None
    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('.credentials/token.pickle'):
        with open('.credentials/token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                '.credentials/credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('.credentials/token.pickle', 'wb') as token:
            pickle.dump(creds, token)

    service = build('calendar', 'v3', credentials=creds)

    # Call the Calendar API
    now = datetime.datetime.utcnow().isoformat(
    ) + 'Z'  # 'Z' indicates UTC time
    # print('Getting the upcoming 10 events')
    events_result = service.events().list(calendarId='primary',
                                          timeMin=now,
                                          maxResults=10,
                                          singleEvents=True,
                                          orderBy='startTime').execute()
    events = events_result.get('items', [])

    if not events:
        print('No upcoming events found.')
    for event in events:
        startTimeStr = (event['start'].get('dateTime',
                                           event['start'].get('date')))[:-6]
        # Skip events without a description; there are no Zoom details to parse.
        if 'description' not in event:
            continue
        descLength = len(event['description'])
        # get the meeting ID
        meetingIDIndexStart = int(event['description'].find('Meeting ID:'))
        meetingIDIndexEnd = meetingIDIndexStart + len('Meeting ID:')
        meetingIDStarted = False
        meetingIDExists = False
        meetingID = ''
        while meetingIDIndexEnd < descLength:
            # read only number occuring after meetingIDIndexEnd

            nextOccurringCharacter = event['description'][meetingIDIndexEnd]
            meetingIDIndexEnd += 1
            if nextOccurringCharacter.isnumeric():
                meetingIDStarted = True
                meetingID += nextOccurringCharacter

            elif nextOccurringCharacter == '<' and meetingIDStarted is True:
                break

        if meetingID != '':
            meetingIDExists = True
            meetingID = int(meetingID)

        # getting meeting password
        meetingPasswordIndexStart = int(event['description'].find('Passcode:'))
        meetingPasswordIndexEnd = meetingPasswordIndexStart + len('Passcode:')
        meetingPasswordStarted = False
        meetingPasswordExists = False
        meetingPassword = ''

        while meetingPasswordIndexEnd < descLength:
            # read only alphanumeric occuring after meetingPasswordIndexEnd

            nextOccurringCharacter = event['description'][
                meetingPasswordIndexEnd]
            meetingPasswordIndexEnd += 1
            if nextOccurringCharacter.isalnum():
                meetingPasswordStarted = True
                meetingPassword += nextOccurringCharacter

            elif nextOccurringCharacter == '\n' and meetingPasswordStarted is True:
                break

        # getting meeting password
        meetingLinkIndexStart = int(
            event['description'].find('Join Zoom Meeting'))
        meetingLinkIndexEnd = meetingLinkIndexStart + len(
            'Join Zoom Meeting') + 13
        meetingLinkStarted = False
        meetingLinkExists = False
        meetingLink = ''

        # print(event['description'])

        while meetingLinkIndexEnd < meetingIDIndexStart:
            # read only alphanumeric occuring after meetingLinkIndexEnd

            nextOccurringCharacter = event['description'][meetingLinkIndexEnd]
            meetingLinkIndexEnd += 1
            if nextOccurringCharacter == '<':
                while event['description'][meetingLinkIndexEnd] != '>':
                    meetingLinkIndexEnd += 1
                meetingLinkIndexEnd += 1
            elif nextOccurringCharacter == '"' and meetingLinkStarted is True:
                break
            else:
                meetingLinkStarted = True
                meetingLink += nextOccurringCharacter

        print(meetingLink)

        if meetingIDExists is True:
            meetings[event['summary']] = {
                'Time':
                datetime.datetime.strptime(startTimeStr, '%Y-%m-%dT%H:%M:%S'),
                'Meeting ID':
                meetingID,
                'Link':
                meetingLink,
                'Password':
                meetingPassword
            }
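A hedged, more compact alternative to the character-by-character scan above: regular expressions over the same description text. The patterns assume the usual 'Meeting ID:' / 'Passcode:' phrasing that the scan above also relies on, and event is one entry from the same events list.

import re

desc = event['description']
id_match = re.search(r'Meeting ID:\D*([\d ]+)', desc)
pass_match = re.search(r'Passcode:\s*(\w+)', desc)
meeting_id = int(id_match.group(1).replace(' ', '')) if id_match else None
meeting_password = pass_match.group(1) if pass_match else ''
print(meeting_id, meeting_password)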
Example No. 29
0
 def wrapper(*args):
     self: SheetHandler = args[0]
     if stripped_utcnow() > self.expiry:
         self.auth.refresh(Request())
     return func(*args)
    def authorization_drive(self, save_credential=True, verbose=False):
        """
		This function gives access to Google drive and currently Google doc
		The path to access the token and credential can be locally
		or in Googe Drive. By default, the token is stored as 'token' and the
		credential as 'credentials.json'. They need to have this name.

		The scope tells the app what action it can do. ie read only, write, etc

		path_json is where the token is stored. If Google cannot find the token
		is the defined path, then he will search for the credential. After that
		you will be prompt to give Google access to your "scope". The token is
		stored in the same folder as the credential. Feel free to move the
		token anywhere you want, and point the init to this path in the future.

		ADD ERROR MESSAGE
		"""
        creds = None
        updated = False

        path_pickle = os.path.join(self.path_credential_drive, 'token.pickle')
        if os.path.exists(path_pickle):
            with open(path_pickle, 'rb') as token:
                creds = pickle.load(token)
        # If there are no (valid) credentials available, let the user log in.

        if not creds or not creds.valid:
            updated = True
            if creds and creds.expired and creds.refresh_token:
                creds.refresh(Request())
            else:
                flow = InstalledAppFlow.from_client_secrets_file(
                    'credentials.json',
                    self.scope,
                    redirect_uri='urn:ietf:wg:oauth:2.0:oob')
                creds = flow.run_local_server()

        # Save the credentials for the next run
        if save_credential:

            with open('token.pickle', 'wb') as token:
                pickle.dump(creds, token)

            shutil.move(os.path.join(os.getcwd(), 'token.pickle'), path_pickle)
            ### Move the credential to the same path_credential_drive

        service = build('drive', 'v3', credentials=creds)
        service_doc = build('docs', 'v1', credentials=creds)
        service_excel = build('sheets', 'v4', credentials=creds)
        service = {
            "drive": service,
            "doc": service_doc,
            "sheet": service_excel
        }
        if self.verbose:
            try:
                if creds.valid and not updated:
                    print("The credential status from {} is valid".format(
                        path_pickle))
                else:
                    print("""
					The credential from {} is not valid.\n
				A new credential has been created/updated
					""".format(path_pickle))
            except Exception:
                pass
            print("""
			Service Google Drive and Docs, Sheet are now connected.\n
			Service Google Drive is stored as {} and accessible with "drive"\n
			Service Google Doc is stored as {} and accessible with "doc"\n
			Service Google Sheet is stored as {} and accessible with "sheet\"""".format(
                service["drive"], service["doc"], service["sheet"]))
        return service
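A hedged usage sketch for authorization_drive() above; handler stands in for an instance of the enclosing (unshown) class, which the method body expects to carry path_credential_drive, scope and verbose (those attributes are assumptions about that class):

services = handler.authorization_drive(save_credential=True)
drive = services["drive"]
# Drive v3 about.get needs an explicit fields selector.
about = drive.about().get(fields="user").execute()
print(about["user"]["emailAddress"])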