Example #1
0
def reminder(date, slot):
    """Create a hard-coded doctor-appointment event on the user's primary
    Google Calendar.

    :param date: appointment date — currently UNUSED; the event start/end
        are hard-coded below (TODO: wire `date`/`slot` into the event body).
    :param slot: appointment time slot — currently unused, see above.
    :return: "event created" on success, "something went wrong" on failure.
    """
    creds = None
    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)

    service = build('calendar', 'v3', credentials=creds)

    event = {
        'summary': 'appointment with doctor',
        'location': '',
        'description': 'visit doctor and get yourself checked',
        'start': {
            'dateTime': '2020-05-11T09:00:30-00:30',
            'timeZone': 'America/Los_Angeles',
        },
        'end': {
            'dateTime': '2020-05-11T09:00:30-00:30',
            'timeZone': 'America/Los_Angeles',
        },
        # NOTE(review): an empty recurrence rule looks invalid for the
        # Calendar API — confirm it is accepted or drop the key.
        'recurrence': [''],
        'attendees': [{
            'email': '*****@*****.**'
        }],
        'reminders': {
            'useDefault':
            False,
            'overrides': [
                {
                    'method': 'email',
                    'minutes': 24 * 60
                },
                {
                    'method': 'popup',
                    'minutes': 10
                },
            ],
        },
    }

    try:
        event = service.events().insert(calendarId='primary',
                                        body=event).execute()
        print('Event created: %s' % (event.get('htmlLink')))
        return "event created"
    # BUGFIX: was a bare `except:`, which also swallows SystemExit and
    # KeyboardInterrupt; narrowed to Exception.
    except Exception:
        return "something went wrong"
Example #2
0
def save(data):
    """Persist the collected lecture links.

    Interactively offers to upload ``data`` (CSV-formatted text) to Google
    Drive as a Spreadsheet — reusing a previously authorized account cached
    in token.json, or authorizing a new one — or to save it locally as a
    .csv file.

    :param data: CSV-formatted text to upload/save.
    """
    emails = []
    SCOPES = ['https://www.googleapis.com/auth/drive.file']

    print('\nPlease select a method to save your lecture links:')
    # token.json caches previously authorized accounts as
    # {"emails": [...], "tokens": [...]} with parallel indices.
    if os.path.exists('token.json'):
        with open('token.json', 'r') as preferences:
            users = json.load(preferences)
        emails = users["emails"]
        for user in range(len(emails)):
            print(
                f'{user + 1}. Upload to Google Drive as a Spreadsheet ({emails[user]})'
            )
    choice = int(
        input(
            f'{len(emails) + 1}. Upload to Google Drive as a Spreadsheet{" (Use another account)" if emails else ""}\n{len(emails) + 2}. Save locally as a CSV file\nChoose: '
        ))

    # Existing account chosen: rebuild credentials from the cached token,
    # refreshing (and re-persisting) it if expired.
    if choice <= len(emails):
        creds = Credentials.from_authorized_user_info(
            users['tokens'][choice - 1], SCOPES)
        if creds.expired and creds.refresh_token:
            creds.refresh(Request())
            users['tokens'][choice - 1] = json.loads(creds.to_json())
            with open('token.json', 'w') as token:
                token.write(json.dumps(users))
        service = build('drive', 'v3', credentials=creds)
    # New account: run the full OAuth installed-app flow.
    elif choice == 1 or choice == (len(emails) + 1):
        print('Opening authorization window...')
        # NOTE(review): OAuth client credentials are embedded in source;
        # that is conventional for installed-app flows but worth confirming
        # this client is meant to be public.
        client_credentials = {
            "installed": {
                "client_id":
                "292011036249-a6r9097a7q4216d0copqhdbkon3flsa5.apps.googleusercontent.com",
                "project_id": "lecture-links-collector",
                "auth_uri": "https://accounts.google.com/o/oauth2/auth",
                "token_uri": "https://oauth2.googleapis.com/token",
                "auth_provider_x509_cert_url":
                "https://www.googleapis.com/oauth2/v1/certs",
                "client_secret": "qRsAEyh_9ddbzoLGMIM989_m",
                "redirect_uris":
                ["urn:ietf:wg:oauth:2.0:oob", "http://localhost"]
            }
        }
        flow = InstalledAppFlow.from_client_config(client_credentials, SCOPES)
        creds = flow.run_local_server(
            port=0,
            authorization_prompt_message=
            'Please authorize Lecture Links Collector at this URL: {url}',
            success_message=
            'Thank you for the authorization. Please close this window and return to the program.'
        )
        print('Authorization Completed')
        service = build('drive', 'v3', credentials=creds)
        # Ask Drive which account just authorized, so the new token can be
        # labelled with its email address in token.json.
        email_address = service.about().get(
            fields='user(emailAddress)').execute()['user']['emailAddress']
        if emails:
            users['emails'].append(email_address)
            users['tokens'].append(json.loads(creds.to_json()))
        else:
            users = {
                'tokens': [json.loads(creds.to_json())],
                'emails': [email_address]
            }
        with open('token.json', 'w') as token:
            token.write(json.dumps(users))
    # Any other choice: save locally as CSV and return early.
    else:
        filename = input(f"Save As(Enter filename/path without extension): "
                         ).strip().replace('\\\\', '\\')
        with open(filename + '.csv', 'w') as f:
            f.write(data)
        # NOTE(review): the literal '(unknown)' below looks like a redacted
        # or broken f-string — presumably it should interpolate `filename`;
        # confirm against the original source.
        print(
            f'File \'(unknown)\' has been successfully locally saved as a CSV File.'
        )
        return
    print(
        'NOTE: You will now be asked to enter a desired name for your lecture links file. If you want to update an older file instead of creating a new file, just write the name of that old file.'
    )
    filename = input("Save As(Enter filename without extension): ").strip()
    media = MediaIoBaseUpload(io.BytesIO(data.encode('utf-8')),
                              mimetype='text/csv')
    # NOTE(review): '(unknown)' in the query below also looks like a redacted
    # f-string — presumably f"name = '{filename}'"; verify, otherwise the
    # duplicate-file search never matches anything.
    files_on_drive = service.files().list(q = f"name = '(unknown)'",
                                        fields = 'files(id, name, webViewLink, createdTime, viewedByMeTime, owners(emailAddress))') \
                                        .execute().get('files')
    if files_on_drive:
        # Let the user update one of the same-named files instead of
        # creating a new one.
        print(
            'Some existing files with the same name have been found in your drive. Enter the corresponding number to the file description',
            'to update that particular file or create a new file.')
        for f in range(len(files_on_drive)):
            print((
                f'{f + 1}. NAME: {files_on_drive[f]["name"]}  OWNER: {files_on_drive[f]["owners"][0]["emailAddress"]}\n'
                f'   CREATED ON: {convert_to_ist(files_on_drive[f]["createdTime"].rstrip("Z"))}  '
                f'LAST VIEWED BY YOU ON: {convert_to_ist(files_on_drive[f]["viewedByMeTime"].rstrip("Z"))}\n'
                f'   LINK: {files_on_drive[f]["webViewLink"]}'))
        choice = int(
            input(f'{len(files_on_drive) + 1}. Create new file\nChoose: '))
        if choice != (len(files_on_drive) + 1):
            print('Updating file...')
            result = service.files().update(
                fileId=files_on_drive[choice - 1]['id'],
                media_body=media,
                fields='id, name, createdTime, webViewLink').execute()
            print(
                f'UPDATE SUCCESSFUL\nFile \'{result["name"]}\' that was created on {convert_to_ist(result["createdTime"].rstrip("Z"))} has been successfully updated on your drive account. \
                    \nLink to the file: {result["webViewLink"]}')
            return
    print('Uploading file...')
    # The Spreadsheet MIME type makes Drive convert the uploaded CSV.
    metadata = {
        'name': filename,
        'mimeType': 'application/vnd.google-apps.spreadsheet'
    }
    result = service.files().create(
        body=metadata,
        media_body=media,
        fields='id, name, webViewLink, createdTime').execute()
    print(
        f'UPLOAD SUCCESSFUL\nFile \'{result["name"]}\' has been successfully created on your drive account on {convert_to_ist(result["createdTime"].rstrip("Z"))}. \
        \nLink to the file: {result["webViewLink"]}')
Example #3
0
def cal():
    """Shows basic usage of the Google Calendar API.

    Deletes every upcoming event on the calendar identified by the
    module-level ``cal_id``, then re-creates a single hard-coded event.

    NOTE(review): relies on module-level globals ``cal_name`` and
    ``cal_id`` — confirm both are defined at import time.
    """
    creds = None
    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)

    service = build('calendar', 'v3', credentials=creds)

    # Call the Calendar API
    now = datetime.datetime.utcnow().isoformat() + 'Z'  # 'Z' indicates UTC time

    # -------------  Calling the calendar --------------

    print('Printing events from', cal_name)

    # BUGFIX: was `calendarId=calid` — an undefined name (NameError); the
    # rest of the function consistently uses `cal_id`.
    events_result = service.events().list(calendarId=cal_id, timeMin=now,
                                          maxResults=100, singleEvents=True,
                                          orderBy='startTime').execute()
    events = events_result.get('items', [])

    # ---------------- Deleting ALL EVENTS ----------------------

    for event in events:
        service.events().delete(calendarId=cal_id,
                                eventId=event['id']).execute()

    # ---------------- Creating EVENTS ----------------------

    event = {
        'summary': ' Papa',
        'description': 'TNM',
        'location': 'TNM',
        'start': {
            'dateTime': '2020-01-27T09:00:00',
            'timeZone': 'America/Toronto',
        },

        'end': {
            'dateTime': '2020-01-27T17:10:00',
            'timeZone': 'America/Toronto',
        }
    }

    event = service.events().insert(calendarId=cal_id, body=event).execute()
    # print('Event created: %s' % (event.get('htmlLink')))

    # ---------------- Printing EVENTS ----------------------

    if not events:
        print('No upcoming events found.')
    for event in events:
        # NOTE(review): `start` is computed but never printed/used — the
        # per-event print appears to have been removed; confirm intent.
        start = event['start'].get('dateTime', event['start'].get('date'))
Example #4
0
def callCalenderAPI():
    """Fetch the next 10 Google Calendar events and return the ones that
    fall on today's date.

    Returns:
        A list of dicts — one per event occurring today — with keys
        'datetime' ({'date', 'time'}), 'event' (the summary) and 'colour'
        (the event colorId, defaulting to '7' when absent); or None when
        no event falls on today's date.
    """
    creds = None
    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)

    service = build('calendar', 'v3', credentials=creds)

    # Call the Calendar API
    now = datetime.utcnow().isoformat() + 'Z'  # 'Z' indicates UTC time
    print('Getting the upcoming 10 events')
    events_result = service.events().list(calendarId='primary',
                                          timeMin=now,
                                          maxResults=10,
                                          singleEvents=True,
                                          orderBy='startTime').execute()
    events = events_result.get('items', [])

    today_events = []

    if not events:
        print('No upcoming events found.')

    # BUGFIX: the old comparison matched str(dt.month) (e.g. '5') against
    # the zero-padded ISO field (e.g. '05'), so events in single-digit
    # months/days never matched. Compare full zero-padded ISO date strings.
    today = datetime.today().strftime('%Y-%m-%d')
    for event in events:
        start = event['start'].get('dateTime', event['start'].get('date'))
        if start.split("T")[0] == today:
            print("added todays event")
            # BUGFIX: replaced a bare try/except around event["colorId"]
            # with .get() and the same default.
            colour = event.get("colorId", '7')

            today_events.append({
                "datetime": {
                    "date": start.split("T")[0],
                    # NOTE(review): all-day events have only a 'date' (no
                    # 'T' part), which would raise IndexError here —
                    # confirm only timed events occur on this calendar.
                    "time": start.split("T")[1]
                },
                "event": event['summary'],
                "colour": colour
            })

    # BUGFIX: this reset previously ran INSIDE the for loop, which could
    # set today_events to None mid-iteration and crash on the next append.
    if not today_events:
        today_events = None
    return today_events
Example #5
0
    def __init__(self,
                 credentials_file=os.path.join(os.path.expanduser('~'),
                                               '.config',
                                               'google-oauthlib-tool',
                                               'credentials.json'),
                 device_config=os.path.join(os.path.expanduser('~'), '.config',
                                            'googlesamples-assistant',
                                            'device_config.json'),
                 language='en-US',
                 play_response=True,
                 tts_plugin=None,
                 tts_args=None,
                 **kwargs):
        """
        :param credentials_file: Path to the Google OAuth credentials file
            (default: ~/.config/google-oauthlib-tool/credentials.json).
            See https://developers.google.com/assistant/sdk/guides/library/python/embed/install-sample#generate_credentials
            for instructions to get your own credentials file.
        :type credentials_file: str

        :param device_config: Path to device_config.json. Register your device
            (see https://developers.google.com/assistant/sdk/guides/library/python/embed/register-device)
            and create a project, then run the pushtotalk.py script from
            googlesamples to create your device_config.json
        :type device_config: str

        :param language: Assistant language (default: en-US)
        :type language: str

        :param play_response: If True (default) then the plugin will play the assistant response upon processed
            response. Otherwise nothing will be played - but you may want to handle the ``ResponseEvent`` manually.
        :type play_response: bool

        :param tts_plugin: Optional text-to-speech plugin to be used to process response text.
        :type tts_plugin: str

        :param tts_args: Optional arguments for the TTS plugin ``say`` method.
        :type tts_args: dict

        :raises: Propagates any exception from loading/refreshing the OAuth
            credentials (see below), after logging it.
        """

        # Imported lazily so the plugin can be declared without the
        # googlesamples package installed.
        import googlesamples.assistant.grpc.audio_helpers as audio_helpers
        super().__init__(**kwargs)

        # Audio stream parameters, taken from the Assistant SDK sample
        # defaults.
        self.audio_sample_rate = audio_helpers.DEFAULT_AUDIO_SAMPLE_RATE
        self.audio_sample_width = audio_helpers.DEFAULT_AUDIO_SAMPLE_WIDTH
        self.audio_iter_size = audio_helpers.DEFAULT_AUDIO_ITER_SIZE
        self.audio_block_size = audio_helpers.DEFAULT_AUDIO_DEVICE_BLOCK_SIZE
        self.audio_flush_size = audio_helpers.DEFAULT_AUDIO_DEVICE_FLUSH_SIZE

        self.language = language
        self.credentials_file = credentials_file
        self.device_config = device_config
        self.play_response = play_response
        self.tts_plugin = tts_plugin
        self.tts_args = tts_args or {}
        self.assistant = None
        self.interactions = []

        # Device identity previously registered with the Assistant API
        # (created by the googlesamples pushtotalk script).
        with open(self.device_config) as f:
            device = json.load(f)
            self.device_id = device['id']
            self.device_model_id = device['model_id']

        # Load OAuth 2.0 credentials.
        try:
            from google.oauth2.credentials import Credentials
            from google.auth.transport.requests import Request

            # token=None forces an immediate refresh below so we start with
            # a valid access token.
            with open(self.credentials_file, 'r') as f:
                self.credentials = Credentials(token=None, **json.load(f))
                self.http_request = Request()
                self.credentials.refresh(self.http_request)
        except Exception as ex:
            self.logger.error('Error loading credentials: %s', str(ex))
            self.logger.error('Run google-oauthlib-tool to initialize '
                              'new OAuth 2.0 credentials.')
            raise

        # Initialized lazily when a conversation starts.
        self.grpc_channel = None
        self.conversation_stream = None
def make_iap_request(url, client_id, method='GET', **kwargs):
    """Fetch a URL protected by Identity-Aware Proxy.

    Args:
      url: The IAP-protected URL to request.
      client_id: The OAuth client ID used by Identity-Aware Proxy.
      method: HTTP method to use
              ('GET', 'OPTIONS', 'HEAD', 'POST', 'PUT', 'PATCH', 'DELETE')
      **kwargs: Extra keyword arguments forwarded to ``requests.request``:
                https://github.com/requests/requests/blob/master/requests/api.py

    Returns:
      The response body text; raises an exception for any non-200 response.
    """
    # Pick up the ambient (application-default) credentials for whatever
    # environment we are running in.
    base_creds, _ = google.auth.default(
        scopes=[IAM_SCOPE])

    # End-user credentials cannot mint a service-account JWT.
    if isinstance(base_creds, google.oauth2.credentials.Credentials):
        raise Exception('make_iap_request is only supported for service '
                        'accounts.')

    if isinstance(base_creds, google.auth.app_engine.Credentials):
        requests_toolbelt.adapters.appengine.monkeypatch()

    # For credentials backed by the Compute Engine metadata service, the
    # service_account_email attribute is populated only after a refresh.
    base_creds.refresh(Request())
    sa_email = base_creds.service_account_email

    if isinstance(base_creds,
                  google.auth.compute_engine.credentials.Credentials):
        # The metadata service never exposes the private key, so signing is
        # delegated to the IAM signBlob API. For that to work:
        #   1. the VM must carry the https://www.googleapis.com/auth/iam
        #      scope (set at creation time, e.g. "full access to all Cloud
        #      APIs" in Cloud Console);
        #   2. the VM's default service account must hold the "Service
        #      Account Actor" role (roles/iam.serviceAccountActor).
        signer = google.auth.iam.Signer(
            Request(), base_creds, sa_email)
    else:
        # Local key material is available — sign the JWT directly.
        signer = base_creds.signer

    # Build service-account credentials whose signed JWT targets the IAP
    # client ID.
    sa_creds = google.oauth2.service_account.Credentials(
        signer, sa_email, token_uri=OAUTH_TOKEN_URI, additional_claims={
            'target_audience': client_id
        })

    # Trade the service-account JWT for a Google-issued OpenID Connect
    # token.
    oidc_token = get_google_open_id_connect_token(sa_creds)

    # Hit the IAP-protected URL with "Bearer <OIDC token>" authorization.
    auth_headers = {'Authorization': 'Bearer {}'.format(oidc_token)}
    resp = requests.request(method, url, headers=auth_headers, **kwargs)

    if resp.status_code == 200:
        return resp.text
    if resp.status_code == 403:
        raise Exception('Service account {} does not have permission to '
                        'access the IAP-protected application.'.format(
                            sa_email))
    raise Exception(
        'Bad response from application: {!r} / {!r} / {!r}'.format(
            resp.status_code, resp.headers, resp.text))
Example #7
0
 def wrapper(*args):
     """Refresh the handler's Google credentials when the cached token has
     expired, then delegate to the wrapped function unchanged."""
     # args[0] is the bound instance — this decorator wraps instance methods.
     self: SheetHandler = args[0]
     if stripped_utcnow() > self.expiry:
         # NOTE(review): presumably self.expiry is updated elsewhere after a
         # refresh — confirm, otherwise this refreshes on every call.
         self.auth.refresh(Request())
     return func(*args)
Example #8
0
def getCalendarEvents():
    """Fetch the next 10 Google Calendar events and scrape Zoom meeting
    details (meeting ID, passcode, join link) out of each event description.

    Populates the module-level ``meetings`` dict, keyed by event summary,
    with 'Time', 'Meeting ID', 'Link' and 'Password' entries for every
    event whose description contains a numeric Zoom meeting ID.

    NOTE(review): assumes every event carries a 'description' field with
    Zoom boilerplate — an event without one raises KeyError; confirm this
    calendar only holds Zoom events.
    """
    creds = None
    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('.credentials/token.pickle'):
        with open('.credentials/token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                '.credentials/credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('.credentials/token.pickle', 'wb') as token:
            pickle.dump(creds, token)

    service = build('calendar', 'v3', credentials=creds)

    # Call the Calendar API
    now = datetime.datetime.utcnow().isoformat(
    ) + 'Z'  # 'Z' indicates UTC time
    events_result = service.events().list(calendarId='primary',
                                          timeMin=now,
                                          maxResults=10,
                                          singleEvents=True,
                                          orderBy='startTime').execute()
    events = events_result.get('items', [])

    if not events:
        print('No upcoming events found.')
    for event in events:
        # Strip the trailing UTC offset (e.g. '+05:30') from the timestamp.
        # NOTE(review): all-day events have only a 'date' (no offset), so
        # [:-6] would truncate the date itself — confirm only timed events.
        startTimeStr = (event['start'].get('dateTime',
                                           event['start'].get('date')))[:-6]
        descLength = len(event['description'])

        # --- meeting ID: digits after 'Meeting ID:' up to an HTML tag ---
        meetingIDIndexStart = int(event['description'].find('Meeting ID:'))
        meetingIDIndexEnd = meetingIDIndexStart + len('Meeting ID:')
        meetingIDStarted = False
        meetingIDExists = False
        meetingID = ''
        while meetingIDIndexEnd < descLength:
            # read only number occuring after meetingIDIndexEnd
            nextOccurringCharacter = event['description'][meetingIDIndexEnd]
            meetingIDIndexEnd += 1
            if nextOccurringCharacter.isnumeric():
                meetingIDStarted = True
                meetingID += nextOccurringCharacter
            # BUGFIX: was `is '<'` — identity comparison against a string
            # literal is implementation-defined (SyntaxWarning on 3.8+);
            # use equality.
            elif nextOccurringCharacter == '<' and meetingIDStarted:
                break

        # BUGFIX: was `is not ''` — same identity-vs-equality issue.
        if meetingID != '':
            meetingIDExists = True
            meetingID = int(meetingID)

        # --- meeting passcode: alphanumerics after 'Passcode:' ---
        meetingPasswordIndexStart = int(event['description'].find('Passcode:'))
        meetingPasswordIndexEnd = meetingPasswordIndexStart + len('Passcode:')
        meetingPasswordStarted = False
        meetingPassword = ''

        while meetingPasswordIndexEnd < descLength:
            # read only alphanumeric occuring after meetingPasswordIndexEnd
            nextOccurringCharacter = event['description'][
                meetingPasswordIndexEnd]
            meetingPasswordIndexEnd += 1
            if nextOccurringCharacter.isalnum():
                meetingPasswordStarted = True
                meetingPassword += nextOccurringCharacter
            # BUGFIX: was `is '\n'` — identity comparison with a literal.
            elif nextOccurringCharacter == '\n' and meetingPasswordStarted:
                break

        # --- join link: text after 'Join Zoom Meeting', skipping HTML tags,
        # stopping at the closing quote of the anchor ---
        meetingLinkIndexStart = int(
            event['description'].find('Join Zoom Meeting'))
        meetingLinkIndexEnd = meetingLinkIndexStart + len(
            'Join Zoom Meeting') + 13
        meetingLinkStarted = False
        meetingLink = ''

        while meetingLinkIndexEnd < meetingIDIndexStart:
            nextOccurringCharacter = event['description'][meetingLinkIndexEnd]
            meetingLinkIndexEnd += 1
            if nextOccurringCharacter == '<':
                # Skip over an entire HTML tag.
                while event['description'][meetingLinkIndexEnd] != '>':
                    meetingLinkIndexEnd += 1
                meetingLinkIndexEnd += 1
            elif nextOccurringCharacter == '"' and meetingLinkStarted:
                break
            else:
                meetingLinkStarted = True
                meetingLink += nextOccurringCharacter

        print(meetingLink)

        if meetingIDExists:
            meetings[event['summary']] = {
                'Time':
                datetime.datetime.strptime(startTimeStr, '%Y-%m-%dT%H:%M:%S'),
                'Meeting ID':
                meetingID,
                'Link':
                meetingLink,
                'Password':
                meetingPassword
            }
Example #9
0
def main():
    """Shows basic usage of the Drive v3 API.

    Pages through every file inside a hard-coded Drive folder and returns
    the de-duplicated (name, id) pairs.

    Returns:
        A list of unique (file name, file id) tuples.
    """
    creds = None
    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server()
        # Save the credentials for the next run
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)

    service = build('drive', 'v3', credentials=creds)

    page_token = None
    # A set de-duplicates incrementally instead of rebuilding
    # list(set(...)) on every page as the old code did.
    unique_files = set()
    while True:
        response = service.files().list(
            pageSize=1000,
            q="'184b0EFkuuI1nbeXQZACxQEcJlwzzPyvh' in parents",
            pageToken=page_token,
            fields="nextPageToken, files(id, name)").execute()
        items = response.get('files', [])
        if not items:
            print('No files found')
        else:
            for item in items:
                unique_files.add((item['name'], item['id']))
            print(len(unique_files))
        # BUGFIX: the token was previously only advanced when the page had
        # items, so an empty non-final page re-fetched itself forever.
        page_token = response.get('nextPageToken', None)
        print('next page token', page_token)
        if page_token is None:
            break
    return list(unique_files)
Example #10
0
def main():
    """Shows basic usage of the Admin SDK Directory API.

    Pages through every user in the domain, deletes accounts whose last
    login predates 2016-01-01, and prints the total number of users seen.
    """
    creds = None
    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)

    # delete unused users
    service = build('admin', 'directory_v1', credentials=creds)

    page_token = None
    params = {'customer': 'my_customer'}

    page = 0
    users_total = 0
    while True:
        try:
            if page_token:
                params['pageToken'] = page_token
            current_page = service.users().list(**params).execute()

            users = current_page.get('users', [])
            if not users:
                print('No users in the domain.')
                break
            users_total = users_total + len(users)
            print('Users page: ', page)
            for user in users:
                # BUGFIX: the format previously ended in a literal
                # lowercase 'z', but lastLoginTime ends in 'Z'; strptime
                # literal matching is case-sensitive, so parsing always
                # raised (and the bare except silently aborted the run).
                last_login_time = datetime.strptime(
                    user['lastLoginTime'], '%Y-%m-%dT%H:%M:%S.%fZ')
                # Cut-off date for stale accounts.
                if last_login_time < datetime(2016, 1, 1):
                    print('delete mail')
                    print(user['primaryEmail'])
                    service.users().delete(userKey=user['id']).execute()

            page_token = current_page.get('nextPageToken')
            page = page + 1

            if not page_token:
                break

        # BUGFIX: was a bare `except:` that hid every error (including the
        # parse failure above); narrowed and the reason is now printed.
        except Exception as exc:
            print('errors', exc)
            break

    print(users_total)
Example #11
0
def main():
    """Create Google Calendar events from a recording-schedule Google Sheet.

    Flow: authorize against Google (cached ``token.pickle`` or interactive
    installed-app flow), list the account's calendars, let the operator map
    sheet locations to calendars via Tk dialogs, select rows by date range /
    row range / row list, build one event per selected row (attaching
    instructor and staff attendees), then write an HTML report of created
    and skipped events and exit.

    Relies on module-level names: SCOPES, SPREADSHEET_ID,
    INSTRUCTORS_SHEET_RANGE, SAMPLE_RANGE_NAME, gspread, build, tkinter.
    """
    # A quick check to see if the token already exists.
    if (not (path.exists("token.pickle"))):
        tkinter.messagebox.showinfo( "Excel to Google Event", "You will be prompted to login & give permission to Google Cal")

    #This is taken directly from the Google API Quickstart guide
    creds = None
    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)

    #Here the service is built with credentials & we can move on to creating the event
    service = build('calendar', 'v3', credentials=creds)

    #Build gspread
    client = gspread.authorize(creds)

    #Open gspread
    gsheet = client.open("Nexus Recording Schedule - Master")
    gworksheet = gsheet.worksheet("2-Schedule Recording-Instructional Day")

    #Init & print list of Cals (paginated via nextPageToken)
    calHolder = []
    page_token = None
    while True:
      calendar_list = service.calendarList().list(pageToken=page_token).execute()
      for calendar_list_entry in calendar_list['items']:
        print (calendar_list_entry['summary'])
        calHolder.append({"in": calendar_list_entry['summary'], "cal_id":calendar_list_entry['id']})
      page_token = calendar_list.get('nextPageToken')
      if not page_token:
        break

    #Append to single string in order to display in msgbox
    cal_msg = "Please match the numbers for the Calendars for each location, if no match exists enter \'-1\' " + '\n' +  "Calanders on your account: " + '\n'
    cal_msg2 = '\n'
    index = 0
    for dicts in calHolder:
        msg = '\n' + ' [ ' + str(index) + ' ]:   ' + dicts["in"] + '          '
        cal_msg2 += msg
        index += 1

    #Adding on sheets service
    sheets_service = build('sheets', 'v4', credentials=creds)

    #Prompt user for selection via messagebox
    cal_msg2 += '\n'
    print(cal_msg)
    print(cal_msg2)

    #Yes this is janky/bad code, but I mean cmon
    # Dump the instructor/staff sheet to the console so the operator can see
    # what is about to be mapped.
    sheet3 = sheets_service.spreadsheets()
    result3 = sheet3.values().get(spreadsheetId=SPREADSHEET_ID, range=INSTRUCTORS_SHEET_RANGE).execute()
    values3 = result3.get('values', [])
    for row in values3:
        if (len(row) >= 5):
            print(row[4])
        if (len(row) >= 16):
            print("    MGR: " + row[15])
        if (len(row) >= 17):
            print("    IPS: " + row[16])
        if (len(row) >= 18):
            print("    MA: " + row[17])
        if (len(row) >= 19):
            print("    Other: " + row[18])
        if (len(row) >= 21):
            print("         " + row[19] + ": " + row[20])
        if (len(row) >= 22):
            print("    CCBK: " + row[21])
        if (len(row) >= 23):
            print("    o-CC: " + row[22])
        if (len(row) >= 25):
            print("    nc-mary: " + row[24])
        if (len(row) >= 26):
            print("    nc-dru: " + row[25])
        if (len(row) >= 27):
            print("    nc-paul: " + row[26])

    # location -> calendar index (later converted to calendar id); cached
    # between runs in calconfig.pickle.
    dict_of_locations = {}
    if path.exists("calconfig.pickle"):
        with open('calconfig.pickle', 'rb') as handle:
            dict_of_locations = pickle.load(handle)
    else:
        if not values3:
            print('No data found.')
        else:
            for row in values3:
                if (len(row) >= 5):
                    if row[4]:
                        dict_of_locations[row[4]] = -1
        with open('calconfig.pickle', 'wb') as handle:
            pickle.dump(dict_of_locations, handle, protocol=pickle.HIGHEST_PROTOCOL)

    list_of_variables = []

    master = Tk()
    def deleter():
        # Closing the window aborts the whole run.
        sys.exit(1)
    master.protocol("WM_DELETE_WINDOW", deleter)

    def callback():
        # Persist the operator's location -> calendar-index choices.
        for i, location in enumerate(dict_of_locations):
            print(list_of_variables[i].get())
            dict_of_locations[location] = list_of_variables[i].get()
        master.destroy()
        master.quit()
        print(dict_of_locations)
        with open('calconfig.pickle', 'wb') as handle:
            pickle.dump(dict_of_locations, handle, protocol=pickle.HIGHEST_PROTOCOL)

    for i in range(len(dict_of_locations)):
        list_of_variables.append(IntVar(master))

    tk.Label(master, text=cal_msg, padx = 10, pady = 5, anchor = 'center').grid(row=0)
    tk.Label(master, text=cal_msg2, padx = 10, pady = 5, justify = 'left').grid(row=1)
    endrow = 0
    for i, location in enumerate(dict_of_locations):
        tk.Label(master, text=location, padx = 10, pady = 5).grid(row=i+2)
        ee = tk.Entry(master, textvariable=list_of_variables[i])
        ee.delete(0, END)
        ee.insert(0, dict_of_locations[location])
        ee.grid(row = i+2, column = 1)
        endrow = i+2

    endrow += 2

    b = Button(master, text="Submit", width=10, command=callback)
    b.grid(row=endrow+2, column=0)
    master.mainloop()

    #Convert indexes to CAL ID
    for i, location in enumerate(dict_of_locations):
        for j, cal in enumerate(calHolder):
            if (dict_of_locations[location] == j):
                dict_of_locations[location] = cal["cal_id"]

    print(dict_of_locations)

    #Get what kind of method to select events
    search_method = 0
    window = Tk()
    def deleter():
        sys.exit(1)
    window.protocol("WM_DELETE_WINDOW", deleter)
    v = IntVar(window)
    v.set(0)

    def ShowChoice():
        print(v.get())
        if (v.get() == 0):
            sys.exit(1)
        window.destroy()
        window.quit()

    tk.Label(window, 
             text="""Choose method for selecting events:""",
             padx = 20, pady = 5).pack()
    tk.Radiobutton(window, 
                  text="By Date RANGE (MM/DD/YYYY)",
                  indicatoron = 0,
                  width = 20,
                  padx = 20, 
                  variable=v, 
                  command=ShowChoice,
                  value=1).pack()
    tk.Radiobutton(window, 
                  text="By Row In RANGE ex: (1-9999)",
                  indicatoron = 0,
                  width = 20,
                  padx = 20, 
                  variable=v, 
                  command=ShowChoice,
                  value=2).pack()
    tk.Radiobutton(window, 
                  text="By Row in LIST ex: (64, 65, 77, 81)",
                  indicatoron = 0,
                  width = 20,
                  padx = 20, 
                  variable=v, 
                  command=ShowChoice,
                  value=3).pack()

    window.mainloop()

    # Read the selection after the window closes (v survives the dialog).
    search_method = v.get()
    print(search_method)
    #sys.exit(1)

    # Call the Sheets API
    sheet = sheets_service.spreadsheets()
    result = sheet.values().get(spreadsheetId=SPREADSHEET_ID,
                                range=SAMPLE_RANGE_NAME).execute()
    values = result.get('values', [])
    print (len(values))

    #Prompt & convert date range
    search = []
    search_indexes = []
    if search_method == 1:
        START_DATE = simpledialog.askstring(title="Date From (inclusive)", prompt="Enter the start of the date range (MM/DD/YYYY)" )
        RANGE_START = datetime.datetime.strptime(START_DATE, '%m/%d/%Y')
        END_DATE = simpledialog.askstring(title="Date Until (inclusive)", prompt="Enter the end of the date range (MM/DD/YYYY)" )
        RANGE_END = datetime.datetime.strptime(END_DATE, '%m/%d/%Y')
    if search_method == 2:
        START_ROW = simpledialog.askinteger(title="First Row (Inclusive):", prompt="Enter the first row:" )
        END_ROW = simpledialog.askinteger(title="Last Row (Inclusive):", prompt="Enter the Last row:" )
        if (START_ROW > END_ROW):
            print("startstop error 1")
            sys.exit(1)
    if search_method == 3:
        USER_LIST = simpledialog.askstring(title="Enter List of Rows:", prompt="Enter list of rows seperated by Commas. Ex: (16, 22, 2, 1999)" )
        ROW_LIST = USER_LIST.split(",")

    #Search for valid entries within range
    s_index = 0
    if not values:
        print('No data found.')
    else:
        for row in values:
            print(row)
            # Rows shorter than 27 columns lack the bookkeeping columns; skip.
            if not (len(row) > 26):
                continue
            if search_method == 1:
                TEST_DATE = datetime.datetime.strptime(row[0], '%m/%d/%Y')
                if (RANGE_START <= TEST_DATE <= RANGE_END):
                    search.append(row)
                    search_indexes.append(s_index)
                    print('0 ' + row[0] + ' 1 ' + row[1] + ' 4 ' + row[4] + ' 5 ' + row[5] + ' 6 ' + row[6] + ' 7 ' + row[7] + ' 8 ' + row[8] + ' 9 ' + row[9] + ' 10 ' + row[10] + ' 11 ' + row[11] + ' 12 ' + row[12] + ' 13 ' + row[13] + ' 18 ' + row[18] + ' 19 ' + row[19] + ' 25 ' + row[25] + ' 26 ' + row[26])
            if search_method == 2:
                if (START_ROW <= int(row[26]) <= END_ROW):
                    search.append(row)
                    search_indexes.append(s_index)
                    print('0 ' + row[0] + ' 1 ' + row[1] + ' 4 ' + row[4] + ' 5 ' + row[5] + ' 6 ' + row[6] + ' 7 ' + row[7] + ' 8 ' + row[8] + ' 9 ' + row[9] + ' 10 ' + row[10] + ' 11 ' + row[11] + ' 12 ' + row[12] + ' 13 ' + row[13] + ' 18 ' + row[18]  + ' 19 ' + row[19] + ' 25 ' + row[25] + ' 26 ' + row[26])
            if search_method == 3:
                for rowval in ROW_LIST:
                    if (int(rowval) == int(row[26])):
                        search.append(row)
                        search_indexes.append(s_index)
                        print('0 ' + row[0] + ' 1 ' + row[1] + ' 4 ' + row[4] + ' 5 ' + row[5] + ' 6 ' + row[6] + ' 7 ' + row[7] + ' 8 ' + row[8] + ' 9 ' + row[9] + ' 10 ' + row[10] + ' 11 ' + row[11] + ' 12 ' + row[12] + ' 13 ' + row[13] + ' 18 ' + row[18]  + ' 19 ' + row[19] + ' 25 ' + row[25] + ' 26 ' + row[26])
            s_index += 1

    #Read in instructor emails
    inst_to_email = {}
    for row in values3:
        if (len(row) >= 2):
            if row[1]:
                print('0: ' + row[0] + " 20: " + row[1])
                inst_to_email[row[0]] = row[1]
            else:
                print('0: ' + row[0] + " 20: [email protected]")
                inst_to_email[row[0]] = "*****@*****.**"
        else:
            print('0: ' + row[0] + " 20: [email protected]")
            inst_to_email[row[0]] = "*****@*****.**"

    #Read in staff emails
    staff_to_email = {}
    for row in values3:
        if (len(row) >= 21):
            if row[20]:
                print('0: ' + row[19] + " 1: " + row[20])
                staff_to_email[row[19]] = row[20]
            else:
                print('0: ' + row[19] + " 1: [email protected]")
                staff_to_email[row[19]] = "*****@*****.**"
        else:
            if (len(row) >= 20):
                if row[19]:
                    print('0: ' + row[19] + " 1: [email protected]")
                    staff_to_email[row[19]] = "*****@*****.**"

    #Setup list of events for printing
    event_printlist = []
    event_skiplist = []

    #Begin creating & sending events
    s_index = 0
    for row in search:
        #skip if the event was already made
        if row[25] != 'N':
            event_skiplist.append("Row " + row[26] + ":  " + row[10] + " " + row[0] + " was already marked as: " + row[25])
            print("skipped " + row[10] + " " + row[0])
            continue
        #Convert location to CAL ID
        if dict_of_locations[row[1]] == -1:
            event_skiplist.append("Row " + row[26] + ":  "  + row[10] + " " + row[0] + " Calendar for location " + row[1] + " was not found.")
            print("skipped " + row[10] + " " + row[0] + " No Cal")
            continue

        # Mark the sheet row as processed (column 26 = the Y/N flag read above).
        gworksheet.update_cell(int(row[26]), 26, "Y")
        s_index += 1

        #Get Title/Summary
        summary_in = (row[10] + " - " + row[9])
        if row[11]:
            summary_in += (" - " + row[11] + " MGR ")
        if row[12]:
            summary_in += (" - " + row[12] + " IPS ")
        if row[13]:
            summary_in += (" - " + row[13] + " MA ")
        if row[14]:
            summary_in += (" - " + row[14] + " Backup ")

        #Get Location
        loc_in = (row[1])
        if (row[1] == "Chrysler Studio"):
            loc_in = "Chrysler Studio 109 B"
        #Get Desc
        desc_in = (row[10])

        #Get Start Time and Date
        start_dts = row[0] + ' ' + row[5]

        #Get End Time and Date
        end_dts = row[0] + ' ' + row[6]

        #Date & timestamp stuff is janky because the JSON object "event" wants RCF formatted time,
        #whereas the Excel file could have any kind of time input, so using strptime with concacted strings is probably the most
        #flexible approach for now
        dto_start = datetime.datetime.strptime(start_dts, '%m/%d/%Y %I:%M %p')
        dto_end = datetime.datetime.strptime(end_dts, '%m/%d/%Y %I:%M %p')

        #Get Attendees 
        #List of attendees is a "list of dicts" which is the input the JSON object "event" wants
        instructor = inst_to_email[row[9]]
        print(instructor)

        #Staff — dedupe by name so nobody is invited twice.
        list_of_emailed_names = []
        list_of_emailed_names.append(row[9])
        list_of_attendees = [
            {'email': instructor}
            ]
        if row[11]:
            if not (row[11] in list_of_emailed_names):
                their_email = staff_to_email[row[11]]
                list_of_attendees.append({'email': their_email})
                list_of_emailed_names.append(row[11])
                print(their_email)
        if row[12]:
            # FIX: previously checked row[11] here, so the IPS attendee was
            # dropped whenever the MGR had already been added.
            if not (row[12] in list_of_emailed_names):
                their_email = staff_to_email[row[12]]
                list_of_attendees.append({'email': their_email})
                list_of_emailed_names.append(row[12])
                print(their_email)
        if row[13]:
            # FIX: previously checked row[11] instead of row[13].
            if not (row[13] in list_of_emailed_names):
                their_email = staff_to_email[row[13]]
                list_of_attendees.append({'email': their_email})
                list_of_emailed_names.append(row[13])
                print(their_email)
        if row[14]:
            # FIX: previously checked row[11] instead of row[14].
            if not (row[14] in list_of_emailed_names):
                their_email = staff_to_email[row[14]]
                list_of_attendees.append({'email': their_email})
                list_of_emailed_names.append(row[14])
                print(their_email)

        #Credit/Noncredit list(?) WIP
        print(row[18])
        if (row[18] == "Credit"):
            print("It's a Credit Course. ")
            for roww in values3:
                if (len(roww) >= 23):
                    if (roww[22]):                        
                        if not (roww[22] in list_of_emailed_names):
                            list_of_emailed_names.append(roww[22])
                            their_email = staff_to_email[roww[22]]
                            list_of_attendees.append({'email': their_email, 'optional': 1})

        else:
            print("It's a Non-Credit Course. ")
            if (row[11] == "Mary Lynn"):
                for roww in values3:
                    if (len(roww) >= 25):
                        if not (roww[24] in list_of_emailed_names):
                            list_of_emailed_names.append(roww[24])
                            their_email = staff_to_email[roww[24]]
                            list_of_attendees.append({'email': their_email, 'optional': 1})
            if (row[11] == "Dru"):
                for roww in values3:
                    if (len(roww) >= 26):
                        if not (roww[25] in list_of_emailed_names):
                            list_of_emailed_names.append(roww[25])
                            their_email = staff_to_email[roww[25]]
                            list_of_attendees.append({'email': their_email, 'optional': 1})
            if (row[11] == "Paul"):
                # FIX: this branch was missing its `for roww in values3:` loop
                # and reused a stale `roww` left over from an earlier loop,
                # which could raise NameError or attach the wrong attendee.
                for roww in values3:
                    if (len(roww) >= 27):
                        if not (roww[26] in list_of_emailed_names):
                            list_of_emailed_names.append(roww[26])
                            their_email = staff_to_email[roww[26]]
                            list_of_attendees.append({'email': their_email, 'optional': 1})

        #The actual JSON style event object, time zone is static just because not really necessary 
        event = {
          'summary': summary_in,
          'location': loc_in,
          'description': desc_in,
          'start': {
            'dateTime': dto_start.isoformat("T"),
            'timeZone': 'US/Eastern',
          },
          'end': {
            'dateTime': dto_end.isoformat("T"),
            'timeZone': 'US/Eastern',
          },
         # 'recurrence': [
         #   'RRULE:FREQ=DAILY;COUNT=2'
         # ],
          'attendees': list_of_attendees,
          'reminders': {
            'useDefault': False,
            'overrides': [
              {'method': 'email', 'minutes': 24 * 60},
              {'method': 'popup', 'minutes': 10},
            ],
          },
        }

        print(dict_of_locations[row[1]])
        #Uses the service to insert the event
        #event = service.events().insert(calendarId=dict_of_locations[row[1]], body=event, sendUpdates='all').execute()
        #print ('Event created: %s' % (event.get('htmlLink')))
        #event_link = event.get('htmlLink')
        event_link = "google.com"
        event_printlist.append({'summary':summary_in, 'date':row[0], 'link':event_link})
        print(event)

    # Write an HTML summary of what was created and what was skipped,
    # then open it in the default browser/app.
    f = open("CreatedEvents.html", 'w')
    f.write("<h1>Created the Following Events:</h1>" + '\n' + "<blockquote>")
    for event in event_printlist:
        f.write('\n' + "<p>" + event['summary'] + ' ' + event['date'] + ':' + "</p>")
        f.write('\n' + "<p><a href=\"" + event['link'] + "\">" + event['link'] + "</a></p>")
    f.write('\n' + "</blockquote>")
    f.write('\n' + "<h3>Skipped the Following Events:</h3>" + '\n')
    for event in event_skiplist:
        f.write('\n' + "<p>" + event + "</p>")
    f.close()
    os.startfile("CreatedEvents.html")
    sys.exit(1)
from google.auth.transport.requests import Request
from oauth2client.service_account import ServiceAccountCredentials

# Google Sheets Auth
# ------------------
# Module-level OAuth bootstrap: load cached user credentials from disk,
# refresh or re-run the installed-app flow as needed, persist them, and
# build the Sheets API client used by the code below.
SCOPES = ['https://www.googleapis.com/auth/spreadsheets']
TOKEN_FILE = 'token.pickle'
CREDENTIAL_FILE = 'credentials.json'
credentials = None

# Reuse credentials cached by a previous run, if present.
# NOTE(review): unpickling trusts the local token file implicitly.
if os.path.exists(TOKEN_FILE):
    with open(TOKEN_FILE, 'rb') as token:
        credentials = pickle.load(token)

if not credentials or not credentials.valid:
    if credentials and credentials.expired and credentials.refresh_token:
        # Expired but refreshable: refresh silently without user interaction.
        credentials.refresh(Request())
    else:
        # No usable credentials: run the interactive installed-app flow
        # on a fixed local port (10800).
        flow = InstalledAppFlow.from_client_secrets_file(
            CREDENTIAL_FILE, SCOPES)
        credentials = flow.run_local_server(port=10800)
    # Save the credentials for the next run
    with open(TOKEN_FILE, 'wb') as token:
        pickle.dump(credentials, token)

service = discovery.build('sheets', 'v4', credentials=credentials)

#########################################################################################################


def exportreport(AdvertiserName, AdvertiserID, spreadsheet_ID):
    # NOTE(review): this function appears truncated by the scrape that
    # produced this file — only the first statement survives, and the
    # nested __init__ below looks like a method lifted from a Gmail-client
    # class, not code that belongs inside exportreport.
    print("Checking for open sales")
    def __init__(self, mongo, logger):
        """ METHOD SETS ATTRIBUTES AND CONNECTS TO GMAIL API

        Args:
            mongo ([object]): MONGODB OBJECT
            logger ([object]): LOGGER OBJECT
        """

        # Full-access Gmail OAuth scope.
        self.SCOPES = ["https://mail.google.com/"]

        self.logger = logger

        # Populated below once the OAuth flow / token load succeeds.
        self.creds = None

        self.service = None

        # Mongo collections used elsewhere by this class.
        self.users = mongo.users

        self.emails = mongo.emails

        self.ids_to_delete = []

        # Token is stored as JSON (not pickle) under the app folder.
        self.token_file = f"{THIS_FOLDER}/creds/token.json"

        self.creds_file = f"{THIS_FOLDER}/creds/credentials.json"

        try:

            # NOTE(review): logger.INFO / logger.CRITICAL are methods of a
            # custom logger object, not the stdlib logging module.
            self.logger.INFO("CONNECTING TO GMAIL...")

            if os.path.exists(self.token_file):

                with open(self.token_file, 'r') as token:

                    self.creds = Credentials.from_authorized_user_file(
                        self.token_file, self.SCOPES)

            # If there are no (valid) credentials available, let the user log in.
            if not self.creds or not self.creds.valid:

                if self.creds and self.creds.expired and self.creds.refresh_token:

                    self.creds.refresh(Request())

                else:

                    flow = InstalledAppFlow.from_client_secrets_file(
                        self.creds_file, self.SCOPES)

                    self.creds = flow.run_local_server(port=0)

                # Save the credentials for the next run
                with open(self.token_file, 'w') as token:

                    token.write(self.creds.to_json())

            self.service = build('gmail', 'v1', credentials=self.creds)

            self.logger.INFO("CONNECTED TO GMAIL!\n")

        except Exception as e:
            print(e)
            self.logger.CRITICAL("FAILED TO CONNECT TO GMAIL!\n")
Example #14
0
 def refresh_token(self):
     """Refresh the stored OAuth credentials, but only once they expire."""
     if not self.credentials.expired:
         return
     self.credentials.refresh(Request())
     _l.debug('token refreshed')
Example #15
0
File: 1.py Project: yl980106/test
def serviceaccountfactory(credentials='credentials.json',
                          token='token.pickle',
                          path=None,
                          list_projects=False,
                          list_sas=None,
                          create_projects=None,
                          max_projects=12,
                          enable_services=None,
                          services=('iam', 'drive'),
                          create_sas=None,
                          delete_sas=None,
                          download_keys=None):
    """Multi-purpose helper for managing GCP projects and service accounts.

    Authorizes against Google (cached pickle token or installed-app flow),
    then, depending on which keyword arguments are set, lists projects,
    lists/creates/deletes service accounts, creates projects, enables
    services, or downloads service-account keys.  For the '~' / '*'
    selectors, '~' means "the projects created in this call" and '*'
    means "all projects".

    Note: `services` default was changed from a mutable list to a tuple
    (same values) to avoid the shared-mutable-default pitfall; callers
    may still pass a list.
    """
    selected_projects = []
    # Read the project id from the OAuth client secrets file.
    # (Previously `open(...).read()` leaked the file handle.)
    with open(credentials, 'r') as f:
        proj_id = loads(f.read())['installed']['project_id']
    creds = None
    if os.path.exists(token):
        with open(token, 'rb') as t:
            creds = pickle.load(t)

    # Refresh expired credentials or run the interactive flow, then cache.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                credentials, SCOPES)
            creds = flow.run_local_server(port=0)
        with open(token, 'wb') as t:
            pickle.dump(creds, t)

    cloud = build('cloudresourcemanager', 'v1', credentials=creds)
    iam = build('iam', 'v1', credentials=creds)
    serviceusage = build('serviceusage', 'v1', credentials=creds)

    # Retry listing projects until it works; on PERMISSION_DENIED try to
    # enable the Cloud Resource Manager API first (propagation can lag,
    # hence the interactive retry prompt).
    projs = None
    while projs is None:
        try:
            projs = _get_projects(cloud)
        except HttpError as e:
            if loads(e.content.decode(
                    'utf-8'))['error']['status'] == 'PERMISSION_DENIED':
                try:
                    serviceusage.services().enable(
                        name=
                        'projects/%s/services/cloudresourcemanager.googleapis.com'
                        % proj_id).execute()
                except HttpError as e:
                    print(e._get_reason())
                    input('Press Enter to retry.')
    if list_projects:
        return _get_projects(cloud)
    if list_sas:
        return _list_sas(iam, list_sas)
    if create_projects:
        if create_projects > 0:
            # Respect the per-account project quota.
            current_count = len(_get_projects(cloud))
            if current_count + create_projects < max_projects:
                print('Creating %d projects' % create_projects)
                nprjs = _create_projects(cloud, create_projects)
                selected_projects = nprjs
            else:
                print('%d projects already exist!' % current_count)
        else:
            print('Please specify a number larger than 0.')
    if enable_services:
        ste = []
        ste.append(enable_services)
        if enable_services == '~':
            ste = selected_projects
        elif enable_services == '*':
            ste = _get_projects(cloud)
        services = [i + '.googleapis.com' for i in services]
        print('Enabling services')
        _enable_services(serviceusage, ste, services)
    if create_sas:
        stc = []
        stc.append(create_sas)
        if create_sas == '~':
            stc = selected_projects
        elif create_sas == '*':
            stc = _get_projects(cloud)
        for i in stc:
            _create_remaining_accounts(iam, i)
    if download_keys:
        # Key files land under `path`; tolerate a pre-existing directory.
        try:
            os.mkdir(path)
        except FileExistsError:
            pass
        std = []
        std.append(download_keys)
        if download_keys == '~':
            std = selected_projects
        elif download_keys == '*':
            std = _get_projects(cloud)
        _create_sa_keys(iam, std, path)
    if delete_sas:
        std = []
        std.append(delete_sas)
        if delete_sas == '~':
            std = selected_projects
        elif delete_sas == '*':
            std = _get_projects(cloud)
        for i in std:
            print('Deleting service accounts in %s' % i)
            _delete_sas(iam, i)
Example #16
0
def default_google_access_token():
    """Return an OAuth2 access token from application default credentials.

    Assumes application default credentials resolve in the current
    environment (gcloud user login, service account, metadata server, ...).
    """
    adc_credentials, _project = google.auth.default(
        scopes=['openid', 'email', 'profile'])
    # The token attribute is only populated after an explicit refresh.
    adc_credentials.refresh(Request())
    return adc_credentials.token
Example #17
0
def main():
    """Shows basic usage of the Google Calendar API.
    Prints the start and name of the next 10 events on the user's calendar.
    """
    creds = None
    # token.pickle caches the user's access/refresh tokens from a previous
    # run of the OAuth installed-app flow.
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # No usable cached credentials: refresh them or log the user in again.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Cache the credentials for subsequent runs.
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)

    service = build('calendar', 'v3', credentials=creds)

    # Fetch the next ten upcoming events, ordered by start time.
    now = datetime.utcnow().isoformat() + 'Z'  # 'Z' indicates UTC time
    print('Getting the upcoming 10 events')
    upcoming = service.events().list(calendarId='primary',
                                     timeMin=now,
                                     maxResults=10,
                                     singleEvents=True,
                                     orderBy='startTime').execute()
    events = upcoming.get('items', [])

    # Build a one-hour slot starting at 10:00 tomorrow (naive local time).
    today = datetime.now().date()
    tomorrow = datetime(today.year, today.month, today.day, 10) + timedelta(days=1)
    start = tomorrow.isoformat()
    end = (tomorrow + timedelta(hours=1)).isoformat()

    body = {
        "summary": 'Automating calendar',
        "description":
        'This is a tutorial example of automating google calendar with python',
        "start": {
            "dateTime": start,
            "timeZone": 'Asia/Kolkata'
        },
        "end": {
            "dateTime": end,
            "timeZone": 'Asia/Kolkata'
        },
    }
    event_result = service.events().insert(calendarId='primary',
                                           body=body).execute()

    print("created event")
    print("id: ", event_result['id'])
    print("summary: ", event_result['summary'])
    print("starts at: ", event_result['start']['dateTime'])
    print("ends at: ", event_result['end']['dateTime'])

    # Finally report the upcoming events fetched earlier.
    if not events:
        print('No upcoming events found.')
    for event in events:
        start = event['start'].get('dateTime', event['start'].get('date'))
        print(start, event['summary'])
Example #18
0
def make_iap_request(url, client_id, method='GET', **kwargs):
    """Makes a request to an application protected by Identity-Aware Proxy.

    Args:
        url: the IAP-protected endpoint to call.
        client_id: OAuth client ID used as the OIDC token's target audience.
        method: HTTP method name passed to requests.request.
        **kwargs: forwarded to requests.request; a 90-second timeout is
            injected when the caller does not supply one.

    Returns:
        The response body text when the application answers 200.

    Raises:
        Exception: if run with end-user (non-service-account) credentials,
            on a 403 (missing IAP permission), or on any other non-200.

    Info: https://github.com/requests/requests/blob/master/requests/api.py"""

    # Set the default timeout, if missing
    if 'timeout' not in kwargs:
        kwargs['timeout'] = 90

    # Figure out what environment we're running in and get some preliminary
    # information about the service account.
    bootstrap_credentials, _ = google.auth.default(scopes=[IAM_SCOPE])
    if isinstance(bootstrap_credentials,
                  google.oauth2.credentials.Credentials):
        # End-user credentials cannot mint an OIDC token for IAP.
        raise Exception('make_iap_request is only supported for service '
                        'accounts.')
    elif isinstance(bootstrap_credentials, google.auth.app_engine.Credentials):
        # App Engine standard: patch requests to use its urlfetch transport.
        requests_toolbelt.adapters.appengine.monkeypatch()

    # For service account's using the Compute Engine metadata service,
    # service_account_email isn't available until refresh is called.
    bootstrap_credentials.refresh(Request())

    signer_email = bootstrap_credentials.service_account_email
    if isinstance(bootstrap_credentials,
                  google.auth.compute_engine.credentials.Credentials):
        # GCE credentials expose no private key; sign via the IAM API instead.
        signer = google.auth.iam.Signer(Request(), bootstrap_credentials,
                                        signer_email)
    else:
        # A Signer object can sign a JWT using the service account's key.
        signer = bootstrap_credentials.signer

    # Construct OAuth 2.0 service account credentials using the signer
    # and email acquired from the bootstrap credentials.
    credentials = google.oauth2.service_account.Credentials(
        signer,
        signer_email,
        token_uri=OAUTH_TOKEN_URI,
        additional_claims={'target_audience': client_id})

    # Obtain an OpenID Connect token, which is a JWT signed by Google.
    google_open_id_connect_token = get_google_open_id_connect_token(
        credentials)

    # Fetch the Identity-Aware Proxy-protected URL, including a Bearer token.
    resp = requests.request(
        method,
        url,
        headers={
            'Authorization': 'Bearer {}'.format(google_open_id_connect_token)
        },
        **kwargs)
    if resp.status_code == 403:
        raise Exception(
            'Service account {} does not have permission to '
            'access the IAP-protected application.'.format(signer_email))
    elif resp.status_code != 200:
        raise Exception(
            'Bad response from application: {!r} / {!r} / {!r}'.format(
                resp.status_code, resp.headers, resp.text))
    else:
        return resp.text
def main():
    """Mirror local flow files into a Google Drive folder tree.

    Reads flow definitions from ``./JSON_files`` (JSON -> Google Docs) and
    ``./csv_files`` (CSV -> Google Sheets), recreates their
    ``" - "``-separated file names as a Drive folder hierarchy under a new
    top-level folder, creates and formats every document, moves each into
    its folder, and writes the collected IDs/URLs to ``folders_IDs.json``,
    ``files_IDs.json`` and ``files_urls.json``.
    """
    creds = None
    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)

    drive_service = build('drive', 'v3', credentials=creds)
    doc_service = build('docs', 'v1', credentials=creds)
    spreadsheet_service = build('sheets', 'v4', credentials=creds)

    # Create the top-level folder that will hold the whole tree.
    file_metadata = {
        'name': 'ParenText Moa',
        'mimeType': 'application/vnd.google-apps.folder'
    }
    folder_file = drive_service.files().create(body=file_metadata,
                                               fields='id').execute()
    parent_id = folder_file.get('id')
    parentext_folder_id = parent_id

    # Names of the JSON files that become Google Docs: drop any 'PLH - '
    # prefix and the trailing '.json'.
    doc_file_names = [
        f for f in listdir('./JSON_files') if isfile(join('./JSON_files', f))
    ]
    doc_flows_names = [
        line.strip('\n').replace('PLH - ', '') for line in doc_file_names
    ]
    doc_flows_names = [line[:-5]
                       for line in doc_flows_names]  # remove .json from string

    # Names of the CSV files that become Google Sheets (drop '.csv').
    sheet_file_names = [
        f for f in listdir('./csv_files') if isfile(join('./csv_files', f))
    ]
    sheet_flows_names = [line.strip('\n') for line in sheet_file_names]
    sheet_flows_names = [line[:-4] for line in sheet_flows_names
                         ]  # remove .csv from string

    # Every flow name looks like 'Folder - Sub - ... - Title'; drop the title
    # (and cap the depth) to obtain the set of folder paths to create.
    flows_names = doc_flows_names + sheet_flows_names
    folders_names_in_strings = []
    max_depth = 3

    for name in flows_names:
        name_list = name.split(" - ")
        name_list.pop()  # drop the document title

        if len(name_list) > max_depth:
            name_list.pop()  # cap the hierarchy depth

        folders_names_in_strings.append(' - '.join(name_list))

    folders_names_in_strings = list(set(folders_names_in_strings))
    folders_in_lists = list(
        fol.split(" - ") for fol in folders_names_in_strings)

    # IDs/URLs of everything created, keyed by ' - '-joined path.
    folders_IDs = {}
    files_IDs = {}
    files_urls = {}

    parent_folders_path = ""

    def create_layer_of_folders(folders_in_lists, parent_folders_path,
                                parent_id):
        """Recursively create one layer of folders under `parent_id`.

        NOTE: consumes the path lists in place (pop(0)) while descending.
        """
        curr_layer_folders_to_create = list(
            set([folder[0] for folder in folders_in_lists]))

        for folder in curr_layer_folders_to_create:

            file_metadata = {
                'name': folder,
                'mimeType': 'application/vnd.google-apps.folder',
                'parents': [parent_id]
            }
            folder_file = drive_service.files().create(body=file_metadata,
                                                       fields='id').execute()
            folders_IDs[parent_folders_path + folder +
                        ' - '] = folder_file.get('id')

            new_folders_in_lists = list(
                filter(lambda fol: (len(fol) > 0 and fol[0] == folder),
                       folders_in_lists))

            for fol in new_folders_in_lists:
                fol.pop(0)

            new_folders_in_lists = list(
                filter(lambda fol: len(fol) > 0, new_folders_in_lists))

            if len(new_folders_in_lists) != 0:
                new_parents_folder_path = parent_folders_path + folder + ' - '
                create_layer_of_folders(new_folders_in_lists,
                                        new_parents_folder_path,
                                        folder_file.get('id'))

    # Create the folder structure.
    create_layer_of_folders(folders_in_lists, parent_folders_path, parent_id)

    # Save the IDs of the created folders.
    folders_IDs['ParenText'] = parentext_folder_id

    with open('./folders_IDs.json', 'w') as outfile:
        json.dump(folders_IDs, outfile, indent=4)

    ##################################################################################

    # functions to create spreadsheets

    def export_csv_file(file_path: str, name: str):
        """Upload the CSV at `file_path` as a Google Sheet called `name`.

        Returns the created Drive file resource, or None on failure.
        """
        if not os.path.exists(file_path):
            print("file path does not exists")
            return None
        try:
            file_metadata = {
                'name': name,
                'mimeType': 'application/vnd.google-apps.spreadsheet',
                # BUG FIX: the original referenced the enclosing loop variable
                # `title` here instead of the `name` parameter.
                'properties': {
                    'title': name
                }
            }

            media = MediaFileUpload(filename=file_path, mimetype='text/csv')
            response = drive_service.files().create(
                media_body=media, body=file_metadata).execute()

            return response
        except Exception as e:
            print(e)
            return None

    # create spreadsheets #############################################

    for fl in range(len(sheet_flows_names)):
        time.sleep(6)  # throttle to stay under the Drive/Sheets rate limits
        # Split 'Path - To - Title' into the title and its folder path.
        curr_flow = sheet_flows_names[fl]
        curr_flow_split = curr_flow.split(" - ")
        title = curr_flow_split[-1]
        curr_flow_split.pop()
        curr_flow_path = ' - '.join(curr_flow_split)

        csv_file_path = './csv_files/' + sheet_file_names[fl]

        spreadsheet_file = export_csv_file(csv_file_path, title)
        if spreadsheet_file is None:
            # BUG FIX: export_csv_file returns None on failure; the original
            # crashed here with AttributeError instead of skipping the file.
            continue
        print('Created spreadsheet with title: ' + title)

        DOCUMENT_ID = spreadsheet_file.get('id')

        files_IDs[curr_flow] = DOCUMENT_ID
        files_urls[
            curr_flow] = "https://docs.google.com/spreadsheets/d/" + DOCUMENT_ID + "/edit#gid=0"

        # Formatting of the file (column width, background color, text wrapping).
        # True if grid data should be returned.
        # This parameter is ignored if a field mask was set in the request.
        include_grid_data = False

        request = spreadsheet_service.spreadsheets().get(
            spreadsheetId=DOCUMENT_ID, includeGridData=include_grid_data)
        response = request.execute()

        sheetId = response.get("sheets")[0].get("properties").get("sheetId")

        n_rows = response.get("sheets")[0].get("properties").get(
            "gridProperties").get("rowCount")

        formatting_requests = []

        # Green background on column E (data rows only).
        formatting_requests.append({
            "repeatCell": {
                "range": {
                    "sheetId": sheetId,
                    "startRowIndex": 1,
                    "endRowIndex": n_rows - 1,
                    "startColumnIndex": 4,
                    "endColumnIndex": 5
                },
                "cell": {
                    'userEnteredFormat': {
                        "backgroundColor": {
                            "red": 0.39,
                            "green": 0.65,
                            "blue": 0.39,
                            "alpha": 1
                        }
                    }
                },
                "fields": 'userEnteredFormat.backgroundColor'
            }
        })

        # Wrap text everywhere.
        formatting_requests.append({
            "repeatCell": {
                "range": {
                    "sheetId": sheetId,
                    "startRowIndex": 0,
                    "startColumnIndex": 0
                },
                "cell": {
                    'userEnteredFormat': {
                        "wrapStrategy": "WRAP"
                    }
                },
                "fields": 'userEnteredFormat.wrapStrategy'
            }
        })

        # Column widths: E=300px, A-B=150px, F-P=200px.
        formatting_requests.append({
            "updateDimensionProperties": {
                "range": {
                    "sheetId": sheetId,
                    "dimension": "COLUMNS",
                    "startIndex": 4,
                    "endIndex": 5
                },
                "properties": {
                    "pixelSize": 300
                },
                "fields": "pixelSize"
            }
        })

        formatting_requests.append({
            "updateDimensionProperties": {
                "range": {
                    "sheetId": sheetId,
                    "dimension": "COLUMNS",
                    "startIndex": 0,
                    "endIndex": 2
                },
                "properties": {
                    "pixelSize": 150
                },
                "fields": "pixelSize"
            }
        })

        formatting_requests.append({
            "updateDimensionProperties": {
                "range": {
                    "sheetId": sheetId,
                    "dimension": "COLUMNS",
                    "startIndex": 5,
                    "endIndex": 16
                },
                "properties": {
                    "pixelSize": 200
                },
                "fields": "pixelSize"
            }
        })

        spreadsheet_service.spreadsheets().batchUpdate(
            spreadsheetId=DOCUMENT_ID, body={
                'requests': formatting_requests
            }).execute()
        print('Sent requests to document: {0}'.format(
            len(formatting_requests)))

        # Move the document to the correct folder.
        folder_id = folders_IDs[curr_flow_path + ' - ']
        # Retrieve the existing parents to remove
        file = drive_service.files().get(fileId=DOCUMENT_ID,
                                         fields='parents').execute()
        previous_parents = ",".join(file.get('parents'))
        # Move the file to the new folder
        file = drive_service.files().update(fileId=DOCUMENT_ID,
                                            addParents=folder_id,
                                            removeParents=previous_parents,
                                            fields='id, parents').execute()

    ##################################################################################

    # functions to create google docs

    def insert_text(text, style, first=False):
        """Return the Docs requests that insert `text` at index 1.

        Returns a group: [insertText] or [insertText, updateParagraphStyle]
        when a named paragraph `style` (e.g. 'HEADING_1') is given.  The two
        requests must stay adjacent and in this order.
        """
        requests = [{
            'insertText': {
                'location': {
                    'index': 1,
                },
                'text': text if first else "\n" + text
            }
        }]
        if style:
            requests.append({
                'updateParagraphStyle': {
                    'range': {
                        'startIndex': 1 if first else 2,
                        'endIndex': len(text)
                    },
                    'paragraphStyle': {
                        'namedStyleType': style,
                    },
                    'fields': 'namedStyleType'
                }
            })
        return requests

    def make_requests(key, value, level, requests):
        """Recursively turn a JSON tree into grouped Docs requests.

        Appends one group (the list returned by insert_text) per paragraph,
        so the caller can reverse the group order without breaking the
        insertText/updateParagraphStyle pairing.  Mutates `requests`.
        """
        requests.append(insert_text(text=key, style='HEADING_' + str(level)))
        if isinstance(value, str):
            requests.append(insert_text(text=value, style=''))
        elif isinstance(value, dict):
            for i in value:
                make_requests(i, value[i], level=level + 1, requests=requests)
        elif isinstance(value, list):
            for item in value:
                if isinstance(item, dict):
                    for i in item:
                        make_requests(i,
                                      item[i],
                                      level=level + 1,
                                      requests=requests)
                elif isinstance(item, str):
                    requests.append(insert_text(text=item, style=''))

    # create google docs #############################################

    for fl in range(len(doc_flows_names)):
        time.sleep(6)  # throttle to stay under the Docs/Drive rate limits
        # Split 'Path - To - Title' into the title and its folder path.
        curr_flow = doc_flows_names[fl]
        curr_flow_split = curr_flow.split(" - ")
        title = curr_flow_split[-1]
        curr_flow_split.pop()
        curr_flow_path = ' - '.join(curr_flow_split)

        body = {
            "title": title,
        }

        doc = doc_service.documents().create(body=body).execute()
        print('Created document with title: {0}'.format(doc.get('title')))
        DOCUMENT_ID = doc.get('documentId')

        files_IDs[curr_flow] = DOCUMENT_ID
        files_urls[
            curr_flow] = "https://docs.google.com/document/d/" + DOCUMENT_ID + "/edit"

        # load json file
        with open('./JSON_files/' + doc_file_names[fl],
                  encoding="utf8") as json_file:
            data = json.load(json_file)

        requests = []

        for i in data:
            make_requests(i, data[i], level=1, requests=requests)

        # Every group inserts at index 1, so reversing the group order yields
        # a top-to-bottom document.  Then flatten into the flat request list
        # the API expects (BUG FIX: the original sent nested lists, which the
        # Docs API rejects).
        requests.reverse()
        requests = [req for group in requests for req in group]

        doc_service.documents().batchUpdate(documentId=DOCUMENT_ID,
                                            body={
                                                'requests': requests
                                            }).execute()
        print('Sent requests to document: {0}'.format(len(requests)))

        # Move the document to the correct folder.
        folder_id = folders_IDs[curr_flow_path + ' - ']
        # Retrieve the existing parents to remove
        file = drive_service.files().get(fileId=DOCUMENT_ID,
                                         fields='parents').execute()
        previous_parents = ",".join(file.get('parents'))
        # Move the file to the new folder
        file = drive_service.files().update(fileId=DOCUMENT_ID,
                                            addParents=folder_id,
                                            removeParents=previous_parents,
                                            fields='id, parents').execute()

    # Persist the collected file IDs and URLs.
    with open('./files_IDs.json', 'w') as outfile:
        json.dump(files_IDs, outfile)

    with open('./files_urls.json', 'w') as outfile:
        json.dump(files_urls, outfile)
# Example #20
# 0
def create_calendar(data):
    """Create a fresh Google Calendar and fill it with weekly recurring events.

    Each entry in `data` is a dict carrying 'day', 'start-time', 'end-time',
    'description' and 'subject_code'.  If inserting any event fails, the
    newly created calendar is deleted and the error re-raised.
    """
    # AUTHENTICATION
    SCOPES = ['https://www.googleapis.com/auth/calendar']

    creds = None
    # token.pickle caches the user's access and refresh tokens; it is written
    # automatically the first time the authorization flow completes.

    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)

    # Without valid cached credentials, refresh or run the interactive login.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server()
        # Cache the credentials for the next run.
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)

    service = build('calendar', 'v3', credentials=creds)

    # CREATE CALENDAR
    created_calendar = service.calendars().insert(body={
        'summary': settings['GOOGLE-CalendarName'],
        'timeZone': 'Asia/Manila'
    }).execute()
    print("CREATED CALENDAR:", created_calendar['id'])

    def slot_iso(entry, key):
        # Anchor on the reference date for this entry's weekday, then swap in
        # the hh/mm/ss triple stored under `key`.
        anchor = datetime.datetime.strptime(DAYS_OF_THE_WEEK[entry['day']][1],
                                            '%Y-%m-%dT%H:%M:%S%z')
        clock = entry[key]
        return anchor.replace(hour=int(clock[0]),
                              minute=int(clock[1]),
                              second=int(clock[2])).isoformat()

    for i in data:
        print(i)
        start_time = slot_iso(i, 'start-time')
        end_time = slot_iso(i, 'end-time')
        try:
            event = {
                'summary':
                i['description'],
                'description':
                f"{i['subject_code']} ",  #@{i['room']}
                # 'location': i['location'],
                'start': {
                    'dateTime': start_time,
                    'timeZone': settings['GOOGLE-TimeZone'],
                },
                'end': {
                    'dateTime': end_time,
                    'timeZone': settings['GOOGLE-TimeZone'],
                },
                'reminders': {
                    'useDefault': False,
                    'overrides': [
                        {
                            'method': 'popup',
                            'minutes': 60
                        },
                    ],
                },
                'recurrence': [
                    'RRULE:FREQ=WEEKLY;UNTIL=%s' % (settings['SIS-SemEnd']),
                ]
            }
            event = service.events().insert(calendarId=created_calendar['id'],
                                            body=event).execute()
            print('Event created: %s' % (event.get('htmlLink')))
        except Exception as e:
            # Roll back: do not leave a half-populated calendar behind.
            service.calendars().delete(
                calendarId=created_calendar['id']).execute()
            raise e
# Example #21
# 0
def get_google(calendarId, token_name, start, end, event_count=0):
    """Collect busy time-slots (and optionally event titles) from a Google Calendar.

    Args:
        calendarId: ID of the calendar to query.
        token_name: name of the per-profile pickle token under ./tokens/.
        start, end: datetimes bounding the query window (naive; 'Z' is
            appended below, so they are presumably UTC — TODO confirm).
        event_count: maximum number of event summaries to collect into the
            second return value; 0 disables collection.

    Returns:
        (data, data_events): `data` maps weekday index -> list of
        (start_hour, end_hour) fractional-hour pairs; `data_events` maps
        start datetime -> list of event summaries.  Returns (data, None)
        when the window contains no events.
    """
    creds = None
    token_path = f'./tokens/{token_name}.pickle'  # If this path doesn't work, try the absolute path
    creds_path = './assets/credentials.json'  # If this path doesn't work, try the absolute path

    # Load cached credentials for this profile, if any.
    if os.path.exists(token_path):
        with open(token_path, 'rb') as token:
            creds = pickle.load(token)

    # Refresh or run the interactive OAuth flow when the cache is missing/stale.
    if not creds or not creds.valid:
        print(f'Verify Google Calendar for profile "{token_name}"')
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                creds_path, SCOPES)
            creds = flow.run_local_server(port=0)

        with open(token_path, 'wb') as token:
            pickle.dump(creds, token)

    service = build('calendar', 'v3', credentials=creds)

    # 'Z' marks the timestamps as UTC for the Calendar API.
    timeMin = start.isoformat() + 'Z'
    timeMax = end.isoformat() + 'Z'

    data = {}
    data_events = {}

    events_result = service.events().list(calendarId=calendarId,
                                          timeMin=timeMin,
                                          timeMax=timeMax,
                                          singleEvents=True,
                                          orderBy='startTime').execute()
    events = events_result.get('items', [])

    if not events:
        return data, None

    for event in events:
        # Events tagged '#no#' in their description are skipped entirely.
        if 'description' in event and '#no#' in event['description']:
            continue

        # Timed events carry 'dateTime'; all-day events carry only 'date'.
        st = event['start'].get('dateTime', event['start'].get('date'))
        en = event['end'].get('dateTime', event['end'].get('date'))

        # TODO: Beautify next if
        if ':' in st:
            # Timed event: drop the colon inside the UTC offset
            # (e.g. '+02:00' -> '+0200') so it matches `google_datetime`.
            st_time = datetime.datetime.strptime(st[:-3] + st[-2:],
                                                 google_datetime)
            en_time = datetime.datetime.strptime(en[:-3] + en[-2:],
                                                 google_datetime)
        else:
            # All-day event: parse the bare dates as UTC and pull the
            # (exclusive) end back one minute so it stays inside the last day.
            st_time = datetime.datetime.strptime(
                st, google_date).replace(tzinfo=utc)
            en_time = datetime.datetime.strptime(en, google_date)
            en_time = (en_time +
                       datetime.timedelta(minutes=-1)).replace(tzinfo=utc)

        # Record up to `event_count` event titles, keyed by start time.
        if event_count > 0:
            if 'summary' in event:
                if st_time not in data_events:
                    data_events[st_time] = []
                data_events[st_time] += [event['summary']]
                event_count -= 1

        if st_time.day == en_time.day:
            # Same-day event: store (start, end) as fractional hours under
            # the event's weekday.
            st_ret = st_time.hour + st_time.minute / 60.0
            en_ret = en_time.hour + en_time.minute / 60.0

            if st_time.weekday() in data:
                data[st_time.weekday()] += [(st_ret, en_ret)]
            else:
                data[st_time.weekday()] = [(st_ret, en_ret)]
        else:
            # TODO: Beautify this part too
            # Multi-day event: block out whole days (capped at 5) up to the
            # event's final day, then add the final partial day.
            day = start

            if st_time >= utc.localize(day):
                day = st_time

            i = 0

            while day.day != en_time.day:
                if i == 5:
                    break

                if day.weekday() in data:
                    data[day.weekday()] += [(0, 23.999)]
                else:
                    data[day.weekday()] = [(0, 23.999)]

                day += datetime.timedelta(days=1)
                i += 1

            if i != 5:
                en_ret = en_time.hour + en_time.minute / 60.0
                if en_time.weekday() in data:
                    data[en_time.weekday()] += [(0, en_ret)]
                else:
                    data[en_time.weekday()] = [(0, en_ret)]

    return data, data_events
# Example #22
# 0
def main():
    """Print the upcoming Google Calendar events on an Adafruit thermal printer."""
    printer = Adafruit_Thermal()
    printer.setDefault()
    printer.setSize('L')
    printer.println('Calendar')

    creds = None
    # Reuse cached OAuth tokens when available.
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    if not creds or not creds.valid:
        # Refresh when possible, otherwise run the interactive login flow.
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)

        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)

    service = build('calendar', 'v3', credentials=creds)

    # Query window: now .. now + config.calendar_days ('Z' marks UTC).
    window_start = datetime.utcnow().isoformat() + 'Z'
    window_end = (datetime.utcnow() +
                  timedelta(days=config.calendar_days)).isoformat() + 'Z'
    listing = service.events().list(calendarId='primary',
                                    timeMin=window_start,
                                    timeMax=window_end,
                                    singleEvents=True,
                                    orderBy='startTime').execute()
    events = listing.get('items', [])

    printer.setSize('M')
    if not events:
        printer.println("Nothing to do... chill out!")

    for event in events:
        # All-day events carry 'date'; timed events carry 'dateTime'.
        raw_date = event['start'].get('date')
        raw_datetime = event['start'].get('dateTime')

        time_text = None
        if raw_datetime is not None:
            parsed = dateutil.parser.parse(raw_datetime)
            date_text = parsed.strftime(config.date_format)
            time_text = parsed.strftime(config.time_format)
        else:
            date_text = dateutil.parser.parse(raw_date).strftime(
                config.date_format)

        # Header line: bold + underlined "[time] date", left-justified.
        printer.boldOn()
        printer.underlineOn()
        printer.justify('L')
        if time_text is not None:
            printer.print(time_text)
            printer.print(' ')

        printer.println(date_text)
        printer.boldOff()
        printer.underlineOff()

        # Event title, right-justified.
        printer.justify('R')
        printer.println(event['summary'])

    printer.setDefault()
    printer.sleep()
# Example #23
# 0
def main():
    """Poll the Gmail INBOX, dispatch report jobs for fresh mails, then archive.

    For each inbox message: read the Subject/From headers, send a
    confirmation via IDsend() when the subject names a known project,
    launch Reports.py for messages younger than 15 minutes, and finally
    archive the message (remove INBOX, add STARRED).
    """
    creds = None
    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'client_secret_179357125629-hi519qg1ili8qqpttoqdasaor75ou49t.apps.googleusercontent.com.json',
                SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)

    service = build('gmail', 'v1', credentials=creds)

    # Call the Gmail API to fetch INBOX
    results = service.users().messages().list(userId='me',
                                              labelIds=['INBOX']).execute()
    messages = results.get('messages', [])

    # Read each message's headers and IDs.
    for message in messages:
        messageheader = service.users().messages().get(
            userId="me", id=message["id"]).execute()

        # internalDate is milliseconds since the epoch.
        gmailtime = int(messageheader["internalDate"]) / 1000
        date = datetime.fromtimestamp(gmailtime)

        headers = messageheader["payload"]["headers"]
        ID = messageheader["id"]
        subject = str([i['value'] for i in headers if i["name"] == "Subject"])
        sender = str([i['value'] for i in headers if i["name"] == "From"])
        print("Otsikko:", subject)
        print("Saapumisaika:", date)
        print("Lähettäjä:", sender)

        # Pull the bare address out of 'Name <address>' and strip the
        # list-repr brackets introduced by str([...]) above.
        lähettäjä = sender.split("<")
        lähettäjä = lähettäjä[1].split(">")
        lähettäjä = lähettäjä[0].replace('[', '').replace(']', '')
        subject = subject.replace('[', '').replace(']', '')

        # Remove spaces so the value passes through as a single argument.
        otsikko = subject.replace(' ', '')

        # Send a confirmation when the subject names a known project
        # (mirrors the matching in Reports.py).
        project = ""
        if "ncl" in otsikko:
            project = "NCL"
        if "icon" in otsikko:
            project = "ICON"
        if "seabourn" in otsikko:
            project = "Seabourn"
        if project != "":
            # NOTE(review): the original referenced an undefined name
            # `teksti` here, raising NameError whenever a project matched.
            # Default to an empty body; TODO confirm the intended text.
            teksti = ""
            IDsend(otsikko, lähettäjä, project, teksti)

        sekunnit = 900
        # Only process mails newer than `sekunnit` seconds (15 minutes).
        if time.time() - gmailtime < sekunnit:
            subprocess.call('start python Reports.py %s %s' %
                            (otsikko, lähettäjä),
                            shell=True)
        else:
            print("Liian vanha prosessoitavaks:", time.time() - gmailtime)
        # Archive the mail (remove INBOX label, add STARRED).
        try:
            service.users().messages().modify(userId="me",
                                              id=message["id"],
                                              body={
                                                  'removeLabelIds': ['INBOX'],
                                                  'addLabelIds': ['STARRED'],
                                                  'ids': ID
                                              }).execute()
            print('Email archived.')
        except Exception:
            # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
            # are not swallowed.
            print('An error occurred while archiving email.')
            exit()
def createMeeting(group_name, emails, date, startTime, endTime):
    """Create a Google Calendar meeting with a Meet link for the given group.

    Args:
        group_name: group name (string); also used as the Meet request id.
        emails: list of attendee address strings.
        date: 'yyyy-mm-dd' string.
        startTime, endTime: 'hh:mm:ss' strings; the timezone is always
            US Eastern.
    """
    creds = None
    # token.pickle caches the user's access and refresh tokens; it appears
    # automatically once the authorization flow has completed.
    if os.path.exists('virtualstuddybuddy/token.pickle'):
        with open('virtualstuddybuddy/token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # Refresh or run the interactive login when no valid cache exists.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'virtualstuddybuddy/oldCredentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Cache the credentials for next time.
        with open('virtualstuddybuddy/token.pickle', 'wb') as token:
            pickle.dump(creds, token)

    service = build('calendar', 'v3', credentials=creds)

    day = str(date)
    attendees = [{'email': str(e)} for e in emails]
    event = {
        'summary': group_name + " meeting",
        'attendees': attendees,
        'start': {
            'dateTime': day + "T" + str(startTime),
            'timeZone': 'America/New_York'
        },
        'end': {
            'dateTime': day + "T" + str(endTime),
            'timeZone': 'America/New_York'
        },
        # Attach a Google Meet conference to the event.
        'conferenceData': {
            'createRequest': {
                "conferenceSolutionKey": {
                    "type": "hangoutsMeet"
                },
                "requestId": group_name,
            }
        }
    }

    event = service.events().insert(calendarId='primary',
                                    sendUpdates="all",
                                    body=event,
                                    conferenceDataVersion=1).execute()


# g = "vsb test"
# emails = ["*****@*****.**"]#, "*****@*****.**","*****@*****.**",]
# date = "2020-11-24"
# startTime = "10:00:00"
# endTime = "14:00:00"
# createMeeting(g, emails, date, startTime, endTime)
if __name__ == '__main__':
	"""Shows basic usage of the Sheets API.
	Prints values from a sample spreadsheet.
	"""
	creds = None
	# The file token.pickle stores the user's access and refresh tokens, and is
	# created automatically when the authorization flow completes for the first
	# time.
	if os.path.exists('token.pickle'):
		with open('token.pickle', 'rb') as token:
			creds = pickle.load(token)
	# If there are no (valid) credentials available, let the user log in.
	if not creds or not creds.valid:
		if creds and creds.expired and creds.refresh_token:
			creds.refresh(Request())
		else:
			flow = InstalledAppFlow.from_client_secrets_file('credentials_web.json', SCOPES)
			creds = flow.run_local_server()
			# Save the credentials for the next run
		with open('token.pickle', 'wb') as token:
			pickle.dump(creds, token)


	service = build('sheets', 'v4', credentials=creds)
	# Only needed when all images must be uploaded onto Qualtrics:
	# with 3 argv entries, argv[2] is the upload flag and the batch upload
	# runs; otherwise argv[1] is taken as the flag.
	if len(sys.argv) == 3:
		flag = sys.argv[2]
		upload_all_images(DATACENTER, APITOKEN, service,flag)
	else:
		flag = sys.argv[1]
# Example #26
# 0
def _to_24h(clock):
    """Convert a schedule time such as '2:30P' or '9:15A' to 'HH:MM:00'.

    PM hours (other than 12) get +12 on the hour field; otherwise the
    trailing meridian letter is simply dropped.
    NOTE(review): '12:xxA' stays '12:xx:00' rather than '00:xx:00' —
    preserved from the original logic; confirm midnight times never occur
    in the schedule data.
    """
    if clock[-1] == 'P' and clock[:2] != "12":
        # `.index(':')` replaces a manual scan that compared characters
        # with `is not ":"` (identity comparison on strings — fragile).
        colon = clock.index(':')
        return str(int(clock[:colon]) + 12) + clock[colon:-1] + ":00"
    return clock[:-1] + ":00"


def main():
    """Shows basic usage of the Google Calendar API.

    Authorizes against Google, pulls class-schedule entries from the
    text-processing pipeline, and inserts one weekly recurring Calendar
    event per (course, weekday) pair that has a concrete meeting time.
    """
    creds = None
    # Token reuse was disabled upstream (the token.pickle load was
    # commented out), so every run re-runs the OAuth flow below.
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials (1).json', SCOPES)
            # Positional argument is the host the local redirect server binds to.
            creds = flow.run_local_server("127.0.0.1")
        # Save the credentials for the next run
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)

    service = build('calendar', 'v3', credentials=creds)

    # Extract data from textprocessing.  Each entry d is assumed to look
    # like [summary, weekday_codes, start_time, end_time] — TODO confirm
    # against main_func's actual output.
    data = main_func(
        inpramesh
    )  # Later, instead of impful we will connect it to web to read data from there
    print(data)

    # Map each upcoming weekday code to its concrete calendar date.
    current_date, time = ret_date_time(str(datetime.today()))
    dates_till_six_days = find_date_from_a_week(current_date)
    weekdays = generate_weekday_lst(datetime.today().weekday())
    dictionary_of_weekdays = {'M': 0, 'Tu': 1, 'W': 2, 'Th': 3, 'F': 4}
    dictionary_of_dates = dict(zip(weekdays, dates_till_six_days))

    # Adding an Event to Calendar
    for d in data:
        if 'TBA' in d or 'TBA' in d[1]:
            continue  # entry has no fixed meeting time — skip it
        for days in d[1]:
            start_time = _to_24h(d[2])
            end_time = _to_24h(d[3])
            event_date = dictionary_of_dates[dictionary_of_weekdays[days]]
            event = {
                'summary': d[0],
                'location': None,
                'description': None,
                'start': {
                    'dateTime': event_date + "T" + start_time,
                    'timeZone': 'America/Los_Angeles',
                },
                'end': {
                    'dateTime': event_date + "T" + end_time,
                    'timeZone': 'America/Los_Angeles',
                },
                'recurrence': [
                    'RRULE:FREQ=WEEKLY;UNTIL=20191220T235959Z'  # COUNT=1'
                ],
                'attendees': [
                    {'email': '*****@*****.**'},
                    {'email': '*****@*****.**'},
                ],
                'reminders': {
                    'useDefault': False,
                    'overrides': [
                        {'method': 'email', 'minutes': 24 * 60},
                        {'method': 'popup', 'minutes': 10},
                    ],
                },
            }

            event = service.events().insert(calendarId='primary',
                                            body=event).execute()
            print('Event created: %s' % event.get('htmlLink'))
Example #27
0
 def api_from_token_pickle(cls, token_path: str):
     """Build a Gmail v1 API client from a pickled credentials file.

     Loads credentials from *token_path*, refreshes them when they are
     expired and refreshable, and returns the authorized service object.
     """
     with open(token_path, 'rb') as handle:
         credentials = pickle.load(handle)
     # Refresh only when a token exists, is stale, and can be refreshed.
     if credentials and credentials.expired and credentials.refresh_token:
         credentials.refresh(Request())
     return build('gmail', 'v1', credentials=credentials)
Example #28
0
def check_deploy_status(args):
    """Poll the deployment's IAP-protected endpoint until it returns 200.

    Mints an OpenID Connect token for the bootstrap service account, then
    probes https://<deployment>.endpoints.<project>.cloud.goog every 10 s
    for up to 30 minutes.  On success, optionally snapshots the SSL
    secrets into SSL_DIR and copies them to the SSL bucket.

    Args:
        args: parsed CLI namespace; reads `deployment`, `project` and
            `cert_group`.

    Raises:
        Exception: when running with end-user (non-service-account)
            credentials.
        RuntimeError: if the endpoint is not ready within 30 minutes.
    """
    logging.info("check deployment status")
    # Figure out what environment we're running in and get some preliminary
    # information about the service account.
    credentials, _ = google.auth.default(scopes=[IAM_SCOPE])
    if isinstance(credentials, google.oauth2.credentials.Credentials):
        raise Exception('make_iap_request is only supported for service '
                        'accounts.')

    # For service account's using the Compute Engine metadata service,
    # service_account_email isn't available until refresh is called.
    credentials.refresh(Request())

    signer_email = credentials.service_account_email
    if isinstance(credentials,
                  google.auth.compute_engine.credentials.Credentials):
        signer = google.auth.iam.Signer(Request(), credentials, signer_email)
    else:
        # A Signer object can sign a JWT using the service account's key.
        signer = credentials.signer

    # Construct OAuth 2.0 service account credentials using the signer
    # and email acquired from the bootstrap credentials.
    service_account_credentials = google.oauth2.service_account.Credentials(
        signer,
        signer_email,
        token_uri=OAUTH_TOKEN_URI,
        additional_claims={'target_audience': may_get_env_var("CLIENT_ID")})

    google_open_id_connect_token = get_google_open_id_connect_token(
        service_account_credentials)
    # Wait up to 30 minutes (180 tries x 10 s sleep) for the IAP access test.
    retry_credit = 180
    status_code = 0
    while retry_credit > 0:
        retry_credit -= 1
        sleep(10)
        try:
            resp = requests.request(
                METHOD,
                "https://%s.endpoints.%s.cloud.goog" %
                (args.deployment, args.project),
                headers={
                    'Authorization':
                    'Bearer {}'.format(google_open_id_connect_token)
                })
            status_code = resp.status_code
            if status_code == 200:
                break
        except Exception:
            # Endpoint not reachable yet; lazy %-args avoid building the
            # message unless this log level is enabled.
            logging.info("IAP not ready, exception caught, retry credit: %s",
                         retry_credit)
            continue
        logging.info("IAP not ready, retry credit: %s", retry_credit)

    if status_code != 200:
        raise RuntimeError(
            "IAP endpoint not ready after 30 minutes, time out...")
    else:
        # Optionally upload ssl cert, only when none have been saved yet.
        if not os.listdir(SSL_DIR):
            for sec in ["envoy-ingress-tls", "letsencrypt-prod-secret"]:
                os.system("kubectl get secret %s -n kubeflow -o yaml > %s" %
                          (sec, os.path.join(SSL_DIR, sec + ".yaml")))
            os.system("gsutil cp %s/* gs://%s/%s/" %
                      (SSL_DIR, SSL_BUCKET, args.cert_group))
def Snagajobimport():  # data, context
    """Import the daily Snagajob spend report from Gmail into BigQuery.

    Loads pickled Gmail credentials from GCS, finds today's report e-mail
    under a fixed label, decodes its Excel attachment, normalizes the
    column names, then appends the rows to `snagajob.Snagajob_spend` and
    writes a dated CSV to the `hc_snagajob` bucket.
    """
    creds = None
    storage_client = storage.Client()
    # Load the pickled Gmail token from GCS when present.
    if storage_client.get_bucket('hc_tokens_scripts').blob('Tokens/Reporting-token.pickle').exists():
        with gcsfs.GCSFileSystem(project="hireclix").open('hc_tokens_scripts/Tokens/Reporting-token.pickle',
                                                          'rb') as token:
            creds = pickle.load(token)
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        # NOTE(review): there is no interactive OAuth fallback here; an
        # absent or unrefreshable token is pickled back unchanged —
        # confirm this is intended for the Cloud Function environment.
        with gcsfs.GCSFileSystem(project="hireclix").open('hc_tokens_scripts/Tokens/Reporting-token.pickle',
                                                          'wb') as token:
            pickle.dump(creds, token)

    service = build('gmail', 'v1', credentials=creds)
    userId = 'me'
    labelid = 'Label_4319400225407627751'
    query = '*****@*****.**'
    messages = service.users().messages().list(userId=userId, q=query, labelIds=labelid).execute()

    def multiple_replace(mapping, text):
        # One-pass multi-substitution.  (Parameter renamed from `dict`,
        # which shadowed the builtin.)
        regex = re.compile("(%s)" % "|".join(map(re.escape, mapping.keys())))
        return regex.sub(lambda mo: mapping[mo.string[mo.start():mo.end()]], text)

    # Column clean-up rules; the trailing spaces in the keys are significant.
    regexes = {
        'Snag Campaign Leads ': '',
        'Salesforce ': '',
        'Timestamp ': '',
        ' ': '_'
    }

    # `.get` tolerates days when the query matched no messages (the old
    # `messages['messages']` raised KeyError in that case).
    for m_id in messages.get('messages', []):
        messagemeta = service.users().messages().get(userId=userId, id=m_id['id']).execute()

        # Header [1] is assumed to carry the message date — TODO confirm
        # against the Gmail payload for this sender.
        dates = parser.parse(re.sub("^.*,|-.*$", "", messagemeta['payload']['headers'][1]['value']).strip()).date()

        today = datetime.today().date()  # - timedelta(1)

        if dates != today:
            continue

        attachment = messagemeta['payload']['parts'][1]['body']['attachmentId']
        attachments = service.users().messages().attachments().get(userId=userId, messageId=messagemeta['id'],
                                                                   id=attachment).execute()
        # Decode the base64url attachment straight into a seekable buffer.
        toread = io.BytesIO(base64.urlsafe_b64decode(attachments['data']))

        dataframe = pd.read_excel(toread, header=0)

        pd.set_option('display.max_rows', 500)
        pd.set_option('display.max_columns', 500)
        pd.set_option('display.width', 1000)

        dataframe.columns = [multiple_replace(regexes, column)
                             for column in dataframe.columns]
        dataframe['Date'] = pd.to_datetime(dataframe['Date'],
                                           errors='coerce').dt.date

        dataframe.drop(['Account_Name'], inplace=True, axis=1)
        dataframe.dropna(how='any', inplace=True)

        pandas_gbq.to_gbq(dataframe, 'snagajob.Snagajob_spend', project_id='hireclix',
                          if_exists='append', table_schema=[
                                {'name': 'Date', 'type': 'DATE'}
                                 ])
        dataframe.to_csv('gs://hc_snagajob/snagajob_daily_'+str(today), index=False)
        print(dataframe)
def main():
    """Mirror Google Drive folders to the local `sync_path`.

    Authorizes against Drive, counts all remote non-folder files, then for
    every remote folder downloads each file into a matching local folder
    and deletes it from Drive.  Files already fully present locally are
    deleted remotely without re-downloading; zero-byte local leftovers are
    re-downloaded.
    """
    downloaded = 0
    totalsize = 0
    deletedfiles = 0
    creds = None
    # CREDENTIALS AND LOGIN
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # REQUIRE LOGIN IF CREDENTIAL EXPIRES
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)

    service = build('drive', 'v3', credentials=creds)

    # DOWNLOAD: stream `file_id` into `local_fd` in chunks with progress output.
    def download_file(service_download, file_id, local_fd):
        request = service_download.files().get_media(fileId=file_id)
        media_request = http.MediaIoBaseDownload(local_fd, request)
        while True:
            try:
                download_progress, done = media_request.next_chunk()
            except errors.HttpError as error:
                print('An error occurred: %s' % error)
                return
            if download_progress:
                print('Download Progress: %d%%' % int(download_progress.progress() * 100))
            if done:
                print('Download Complete')
                return

    # DELETE: remove a remote file, tolerating API errors.
    def delete_file(service_del, file_id):
        try:
            # Bug fix: use the service passed in instead of silently
            # closing over the outer `service` and ignoring the parameter.
            service_del.files().delete(fileId=file_id).execute()
        except errors.HttpError as error:
            print('An error occurred: %s' % error)

    # COUNT ALL FILES (non-folders) and their combined size.
    results_count = service.files().list(q=f"mimeType!='application/vnd.google-apps.folder' ",
                                   pageSize=ammount, fields="nextPageToken, files(id, name, size)").execute()
    counts = results_count.get('files', [])
    alltotalsize = 0
    for count in counts:
        alltotalsize += int(count['size'])
    total = len(counts)

    print(f'{total} files found, {round(float(alltotalsize) / 1048576, 2)}MB')

    # LIST ALL FOLDERS
    folder_results = service.files().list(q="mimeType='application/vnd.google-apps.folder'",
        pageSize=ammount, fields="nextPageToken, files(id, name)").execute()
    folders = folder_results.get('files', [])

    if not folders:
        print('No folder found.')
    else:
        print('Folders:')
        for folder in folders:
            print(f"{folder['name']}")

            # LIST ALL FILES IN FOLDER.
            # Bug fix: the query previously read "andparents", which is not
            # a valid Drive search term; "and parents in ..." is intended.
            results = service.files().list(q=f"mimeType!='application/vnd.google-apps.folder' and parents in '{folder['id']}' ", pageSize=ammount, fields="nextPageToken, files(id, name, size)").execute()
            items = results.get('files', [])
            if not items:
                print('------ No file found')
                continue

            path = f"{sync_path}\\{folder['name']}"
            if os.path.exists(path):
                print('')
            else:
                os.mkdir(path)

            for item in items:
                print(f"------ ID: {item['id']} | Filename: {item['name']}")
                pathfile = f"{path}\\{item['name']}"

                # DOWNLOAD ALL FILES
                if os.path.exists(pathfile):
                    # Leak fix: stat the file instead of opening a handle
                    # (which was never closed) just to seek to the end.
                    localsize = int(os.path.getsize(pathfile))
                    remotesize = int(item['size'])

                    # IGNORE EXISTING FILES
                    print(f'Local file size: {localsize} bytes / Remote file size: {remotesize} bytes')
                    if localsize == remotesize:
                        print(
                            f"File {item['name']} already exists with same size, ignoring and deleting remote file...\n")
                        delete_file(service, item['id'])
                        deletedfiles += 1
                    elif localsize == 0:
                        # DOWNLOAD INCOMPLETE FILES.  NOTE(review): only
                        # zero-byte leftovers are retried; partially
                        # downloaded files are left alone — confirm intended.
                        print(f"File {item['name']} already exists with different size, downloading...\n")
                        try:
                            with open(pathfile, 'wb') as filedownload:
                                download_file(service, item['id'], filedownload)
                            downloaded += 1
                            print(f"Deleting {item['name']}...\n")
                            delete_file(service, item['id'])
                        except Exception:
                            print('Erro ao baixar')
                else:
                    try:
                        with open(pathfile, 'wb') as filedownload:
                            download_file(service, item['id'], filedownload)
                        print(f"Deleting {item['name']}...")
                        delete_file(service, item['id'])
                    except Exception:
                        print('Error')

                    remotesize = int(item['size'])
                    downloaded += 1
                    totalsize += remotesize
                    print(f'{downloaded}/{total}')
                    percent = totalsize / alltotalsize * 100
                    print(f'Total: {round(float(totalsize) / 1048576, 2)}MB of {round(float(alltotalsize) / 1048576, 2)}MB downloaded ({round(float(percent), 2)}%)\n\n')

    totalsizeinmb = round(float(totalsize) / 1048576, 2)
    print(f'\nTotal files downloaded: {downloaded} ({totalsizeinmb}MB)')